[ 562.291117] env[63279]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'linux_bridge' {{(pid=63279) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 562.291457] env[63279]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'noop' {{(pid=63279) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 562.291567] env[63279]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'ovs' {{(pid=63279) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 562.291912] env[63279]: INFO os_vif [-] Loaded VIF plugins: linux_bridge, noop, ovs
[ 562.388329] env[63279]: DEBUG oslo_concurrency.processutils [-] Running cmd (subprocess): grep -F node.session.scan /sbin/iscsiadm {{(pid=63279) execute /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/processutils.py:383}}
[ 562.398646] env[63279]: DEBUG oslo_concurrency.processutils [-] CMD "grep -F node.session.scan /sbin/iscsiadm" returned: 0 in 0.010s {{(pid=63279) execute /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/processutils.py:421}}
[ 562.999806] env[63279]: INFO nova.virt.driver [None req-389d8177-73e5-4145-90d1-8892efe75c48 None None] Loading compute driver 'vmwareapi.VMwareVCDriver'
[ 563.070015] env[63279]: DEBUG oslo_concurrency.lockutils [-] Acquiring lock "oslo_vmware_api_lock" by "oslo_vmware.api.VMwareAPISession._create_session" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 563.070207] env[63279]: DEBUG oslo_concurrency.lockutils [-] Lock "oslo_vmware_api_lock" acquired by "oslo_vmware.api.VMwareAPISession._create_session" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 563.070277] env[63279]: DEBUG oslo_vmware.service [-] Creating suds client with soap_url='https://vc1.osci.c.eu-de-1.cloud.sap:443/sdk' and wsdl_url='https://vc1.osci.c.eu-de-1.cloud.sap:443/sdk/vimService.wsdl' {{(pid=63279) __init__ /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:242}}
[ 566.186920] env[63279]: DEBUG oslo_vmware.service [-] Invoking ServiceInstance.RetrieveServiceContent with opID=oslo.vmware-9120eec0-25ef-407b-86be-8f5de64d59e0 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 566.202833] env[63279]: DEBUG oslo_vmware.api [-] Logging into host: vc1.osci.c.eu-de-1.cloud.sap. {{(pid=63279) _create_session /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:242}}
[ 566.202977] env[63279]: DEBUG oslo_vmware.service [-] Invoking SessionManager.Login with opID=oslo.vmware-8e37da66-3488-4e15-ac1c-d4a47d499451 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 566.235911] env[63279]: INFO oslo_vmware.api [-] Successfully established new session; session ID is 07f1f.
[ 566.236049] env[63279]: DEBUG oslo_concurrency.lockutils [-] Lock "oslo_vmware_api_lock" "released" by "oslo_vmware.api.VMwareAPISession._create_session" :: held 3.166s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 566.236622] env[63279]: INFO nova.virt.vmwareapi.driver [None req-389d8177-73e5-4145-90d1-8892efe75c48 None None] VMware vCenter version: 7.0.3
[ 566.240014] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-400ce706-25ac-45c3-9e2d-1676c18a66f3 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 566.260209] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2f91937-2a20-42dc-a4db-2faa77e1e4a2 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 566.265971] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c73e6088-46fd-4935-ac01-881345583031 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 566.272612] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3cf396c-44af-4d45-b96b-cf7fbb7b1230 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 566.286226] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33c269b0-3ac0-4360-ae73-bf328ee62ffc {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 566.292042] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9888264-c96b-4104-9a5b-bcc6a07a855b {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 566.321404] env[63279]: DEBUG oslo_vmware.service [-] Invoking ExtensionManager.FindExtension with opID=oslo.vmware-7fb862cf-7165-4d25-af05-31cb7d7a39f3 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 566.326805] env[63279]: DEBUG nova.virt.vmwareapi.driver [None req-389d8177-73e5-4145-90d1-8892efe75c48 None None] Extension org.openstack.compute already exists. {{(pid=63279) _register_openstack_extension /opt/stack/nova/nova/virt/vmwareapi/driver.py:228}}
[ 566.329560] env[63279]: INFO nova.compute.provider_config [None req-389d8177-73e5-4145-90d1-8892efe75c48 None None] No provider configs found in /etc/nova/provider_config/. If files are present, ensure the Nova process has access.
[ 566.832700] env[63279]: DEBUG nova.context [None req-389d8177-73e5-4145-90d1-8892efe75c48 None None] Found 2 cells: 00000000-0000-0000-0000-000000000000(cell0),1c4f3b80-350d-4718-a7ef-86b844869563(cell1) {{(pid=63279) load_cells /opt/stack/nova/nova/context.py:464}}
[ 566.835007] env[63279]: DEBUG oslo_concurrency.lockutils [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] Acquiring lock "00000000-0000-0000-0000-000000000000" by "nova.context.set_target_cell.<locals>.get_or_set_cached_cell_and_set_connections" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 566.835259] env[63279]: DEBUG oslo_concurrency.lockutils [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] Lock "00000000-0000-0000-0000-000000000000" acquired by "nova.context.set_target_cell.<locals>.get_or_set_cached_cell_and_set_connections" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 566.835967] env[63279]: DEBUG oslo_concurrency.lockutils [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] Lock "00000000-0000-0000-0000-000000000000" "released" by "nova.context.set_target_cell.<locals>.get_or_set_cached_cell_and_set_connections" :: held 0.001s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 566.836571] env[63279]: DEBUG oslo_concurrency.lockutils [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] Acquiring lock "1c4f3b80-350d-4718-a7ef-86b844869563" by "nova.context.set_target_cell.<locals>.get_or_set_cached_cell_and_set_connections" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 566.836784] env[63279]: DEBUG oslo_concurrency.lockutils [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] Lock "1c4f3b80-350d-4718-a7ef-86b844869563" acquired by "nova.context.set_target_cell.<locals>.get_or_set_cached_cell_and_set_connections" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 566.837826] env[63279]: DEBUG oslo_concurrency.lockutils [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] Lock "1c4f3b80-350d-4718-a7ef-86b844869563" "released" by "nova.context.set_target_cell.<locals>.get_or_set_cached_cell_and_set_connections" :: held 0.001s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 566.859735] env[63279]: INFO dbcounter [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] Registered counter for database nova_cell0
[ 566.868371] env[63279]: INFO dbcounter [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] Registered counter for database nova_cell1
[ 566.871670] env[63279]: DEBUG oslo_db.sqlalchemy.engines [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] MySQL server mode set to STRICT_TRANS_TABLES,STRICT_ALL_TABLES,NO_ZERO_IN_DATE,NO_ZERO_DATE,ERROR_FOR_DIVISION_BY_ZERO,TRADITIONAL,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION {{(pid=63279) _check_effective_sql_mode /opt/stack/data/venv/lib/python3.10/site-packages/oslo_db/sqlalchemy/engines.py:395}}
[ 566.872326] env[63279]: DEBUG oslo_db.sqlalchemy.engines [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] MySQL server mode set to STRICT_TRANS_TABLES,STRICT_ALL_TABLES,NO_ZERO_IN_DATE,NO_ZERO_DATE,ERROR_FOR_DIVISION_BY_ZERO,TRADITIONAL,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION {{(pid=63279) _check_effective_sql_mode /opt/stack/data/venv/lib/python3.10/site-packages/oslo_db/sqlalchemy/engines.py:395}}
[ 566.877064] env[63279]: ERROR nova.db.main.api [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] No DB access allowed in nova-compute: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main
[ 566.877064] env[63279]: result = function(*args, **kwargs)
[ 566.877064] env[63279]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper
[ 566.877064] env[63279]: return func(*args, **kwargs)
[ 566.877064] env[63279]: File "/opt/stack/nova/nova/context.py", line 422, in gather_result
[ 566.877064] env[63279]: result = fn(*args, **kwargs)
[ 566.877064] env[63279]: File "/opt/stack/nova/nova/db/main/api.py", line 179, in wrapper
[ 566.877064] env[63279]: return f(*args, **kwargs)
[ 566.877064] env[63279]: File "/opt/stack/nova/nova/objects/service.py", line 556, in _db_service_get_minimum_version
[ 566.877064] env[63279]: return db.service_get_minimum_version(context, binaries)
[ 566.877064] env[63279]: File "/opt/stack/nova/nova/db/main/api.py", line 238, in wrapper
[ 566.877064] env[63279]: _check_db_access()
[ 566.877064] env[63279]: File "/opt/stack/nova/nova/db/main/api.py", line 188, in _check_db_access
[ 566.877064] env[63279]: stacktrace = ''.join(traceback.format_stack())
[ 566.877064] env[63279]:
[ 566.878059] env[63279]: ERROR nova.db.main.api [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] No DB access allowed in nova-compute: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main
[ 566.878059] env[63279]: result = function(*args, **kwargs)
[ 566.878059] env[63279]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper
[ 566.878059] env[63279]: return func(*args, **kwargs)
[ 566.878059] env[63279]: File "/opt/stack/nova/nova/context.py", line 422, in gather_result
[ 566.878059] env[63279]: result = fn(*args, **kwargs)
[ 566.878059] env[63279]: File "/opt/stack/nova/nova/db/main/api.py", line 179, in wrapper
[ 566.878059] env[63279]: return f(*args, **kwargs)
[ 566.878059] env[63279]: File "/opt/stack/nova/nova/objects/service.py", line 556, in _db_service_get_minimum_version
[ 566.878059] env[63279]: return db.service_get_minimum_version(context, binaries)
[ 566.878059] env[63279]: File "/opt/stack/nova/nova/db/main/api.py", line 238, in wrapper
[ 566.878059] env[63279]: _check_db_access()
[ 566.878059] env[63279]: File "/opt/stack/nova/nova/db/main/api.py", line 188, in _check_db_access
[ 566.878059] env[63279]: stacktrace = ''.join(traceback.format_stack())
[ 566.878059] env[63279]:
[ 566.878481] env[63279]: WARNING nova.objects.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] Failed to get minimum service version for cell 00000000-0000-0000-0000-000000000000
[ 566.878636] env[63279]: WARNING nova.objects.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] Failed to get minimum service version for cell 1c4f3b80-350d-4718-a7ef-86b844869563
[ 566.879207] env[63279]: DEBUG oslo_concurrency.lockutils [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] Acquiring lock "singleton_lock" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 566.879337] env[63279]: DEBUG oslo_concurrency.lockutils [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] Acquired lock "singleton_lock" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [
566.879475] env[63279]: DEBUG oslo_concurrency.lockutils [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] Releasing lock "singleton_lock" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 566.879792] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] Full set of CONF: {{(pid=63279) _wait_for_exit_or_signal /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/service.py:363}} [ 566.879938] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] ******************************************************************************** {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2804}} [ 566.880080] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] Configuration options gathered from: {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2805}} [ 566.880219] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] command line args: ['--config-file', '/etc/nova/nova.conf', '--config-file', '/etc/nova/nova-cpu-common.conf', '--config-file', '/etc/nova/nova-cpu-1.conf'] {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2806}} [ 566.880413] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] config files: ['/etc/nova/nova.conf', '/etc/nova/nova-cpu-common.conf', '/etc/nova/nova-cpu-1.conf'] {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2807}} [ 566.880549] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] ================================================================================ {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2809}} [ 566.880748] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] allow_resize_to_same_host = True {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 566.880917] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] arq_binding_timeout = 300 {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 566.881061] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] backdoor_port = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 566.881191] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] backdoor_socket = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 566.881356] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] block_device_allocate_retries = 60 {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 566.881522] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] block_device_allocate_retries_interval = 3 {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 566.881686] env[63279]: DEBUG 
oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] cert = self.pem {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 566.881852] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] compute_driver = vmwareapi.VMwareVCDriver {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 566.882028] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] compute_monitors = [] {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 566.882201] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] config_dir = [] {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 566.882373] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] config_drive_format = iso9660 {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 566.882506] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] config_file = ['/etc/nova/nova.conf', '/etc/nova/nova-cpu-common.conf', '/etc/nova/nova-cpu-1.conf'] {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 566.882680] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] config_source = [] {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 566.882826] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] console_host = devstack {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 566.882988] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] control_exchange = nova {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 566.883161] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] cpu_allocation_ratio = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 566.883320] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] daemon = False {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 566.883489] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] debug = True {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 566.883644] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] default_access_ip_network_name = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 566.883808] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] default_availability_zone = nova {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 566.883966] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] default_ephemeral_format = 
None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 566.884139] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] default_green_pool_size = 1000 {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 566.884374] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] default_log_levels = ['amqp=WARN', 'amqplib=WARN', 'boto=WARN', 'qpid=WARN', 'sqlalchemy=WARN', 'suds=INFO', 'oslo.messaging=INFO', 'oslo_messaging=INFO', 'iso8601=WARN', 'requests.packages.urllib3.connectionpool=WARN', 'urllib3.connectionpool=WARN', 'websocket=WARN', 'requests.packages.urllib3.util.retry=WARN', 'urllib3.util.retry=WARN', 'keystonemiddleware=WARN', 'routes.middleware=WARN', 'stevedore=WARN', 'taskflow=WARN', 'keystoneauth=WARN', 'oslo.cache=INFO', 'oslo_policy=INFO', 'dogpile.core.dogpile=INFO', 'glanceclient=WARN', 'oslo.privsep.daemon=INFO'] {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 566.884542] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] default_schedule_zone = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 566.884698] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] disk_allocation_ratio = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 566.884857] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] enable_new_services = True {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 566.885046] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] enabled_apis = ['osapi_compute'] {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 566.885219] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] enabled_ssl_apis = [] {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 566.885382] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] flat_injected = False {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 566.885540] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] force_config_drive = False {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 566.885837] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] force_raw_images = True {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 566.886051] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] graceful_shutdown_timeout = 5 {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 566.886227] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] heal_instance_info_cache_interval = 60 {{(pid=63279) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 566.886448] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] host = cpu-1 {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 566.886627] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] initial_cpu_allocation_ratio = 4.0 {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 566.886791] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] initial_disk_allocation_ratio = 1.0 {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 566.886951] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] initial_ram_allocation_ratio = 1.0 {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 566.887184] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] injected_network_template = /opt/stack/nova/nova/virt/interfaces.template {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 566.887351] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] instance_build_timeout = 0 {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 566.887512] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] instance_delete_interval = 300 {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 566.887677] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] instance_format = [instance: %(uuid)s] {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 566.887840] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] instance_name_template = instance-%08x {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 566.888011] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] instance_usage_audit = False {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 566.888186] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] instance_usage_audit_period = month {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 566.888406] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] instance_uuid_format = [instance: %(uuid)s] {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 566.888575] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] instances_path = /opt/stack/data/nova/instances {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 566.888739] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] internal_service_availability_zone = internal {{(pid=63279) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 566.888898] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] key = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 566.889071] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] live_migration_retry_count = 30 {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 566.889243] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] log_color = False {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 566.889412] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] log_config_append = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 566.889577] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] log_date_format = %Y-%m-%d %H:%M:%S {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 566.889737] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] log_dir = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 566.889895] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] log_file = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 566.890032] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] log_options = True {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 566.890199] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] log_rotate_interval = 1 {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 566.890368] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] log_rotate_interval_type = days {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 566.890536] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] log_rotation_type = none {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 566.890665] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] logging_context_format_string = %(color)s%(levelname)s %(name)s [%(global_request_id)s %(request_id)s %(project_name)s %(user_name)s%(color)s] %(instance)s%(color)s%(message)s {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 566.890789] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] logging_debug_format_suffix = {{(pid=%(process)d) %(funcName)s %(pathname)s:%(lineno)d}} {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 566.890957] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] 
logging_default_format_string = %(color)s%(levelname)s %(name)s [-%(color)s] %(instance)s%(color)s%(message)s {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 566.891136] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] logging_exception_prefix = ERROR %(name)s %(instance)s {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 566.891266] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] logging_user_identity_format = %(user)s %(project)s %(domain)s %(system_scope)s %(user_domain)s %(project_domain)s {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 566.891430] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] long_rpc_timeout = 1800 {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 566.891589] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] max_concurrent_builds = 10 {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 566.891748] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] max_concurrent_live_migrations = 1 {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 566.891906] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] max_concurrent_snapshots = 5 {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 566.892075] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] max_local_block_devices = 3 {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 566.892236] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] max_logfile_count = 30 {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 566.892393] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] max_logfile_size_mb = 200 {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 566.892551] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] maximum_instance_delete_attempts = 5 {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 566.892717] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] metadata_listen = 0.0.0.0 {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 566.892881] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] metadata_listen_port = 8775 {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 566.893058] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] metadata_workers = 2 {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 566.893223] env[63279]: DEBUG oslo_service.service 
[None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] migrate_max_retries = -1 {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 566.893393] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] mkisofs_cmd = genisoimage {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 566.893598] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] my_block_storage_ip = 10.180.1.21 {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 566.893730] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] my_ip = 10.180.1.21 {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 566.893931] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] my_shared_fs_storage_ip = 10.180.1.21 {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 566.894106] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] network_allocate_retries = 0 {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 566.894285] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] non_inheritable_image_properties = ['cache_in_nova', 'bittorrent'] {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 566.894456] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] osapi_compute_listen = 0.0.0.0 {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 566.894618] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] osapi_compute_listen_port = 8774 {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 566.894786] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] osapi_compute_unique_server_name_scope = {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 566.894954] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] osapi_compute_workers = 2 {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 566.895130] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] password_length = 12 {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 566.895290] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] periodic_enable = True {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 566.895452] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] periodic_fuzzy_delay = 60 {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 566.895621] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] pointer_model = usbtablet 
{{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 566.895789] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] preallocate_images = none {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 566.895951] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] publish_errors = False {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 566.896096] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] pybasedir = /opt/stack/nova {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 566.896325] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] ram_allocation_ratio = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 566.896503] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] rate_limit_burst = 0 {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 566.896675] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] rate_limit_except_level = CRITICAL {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 566.896839] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] rate_limit_interval = 0 {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 566.896998] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] reboot_timeout = 0 {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 566.897176] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] reclaim_instance_interval = 0 {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 566.897334] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] record = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 566.897511] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] reimage_timeout_per_gb = 60 {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 566.897678] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] report_interval = 120 {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 566.897840] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] rescue_timeout = 0 {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 566.898006] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] reserved_host_cpus = 0 {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 566.898174] env[63279]: DEBUG oslo_service.service [None 
req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] reserved_host_disk_mb = 0 {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 566.898389] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] reserved_host_memory_mb = 512 {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 566.898553] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] reserved_huge_pages = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 566.898716] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] resize_confirm_window = 0 {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 566.898878] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] resize_fs_using_block_device = False {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 566.899053] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] resume_guests_state_on_host_boot = False {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 566.899229] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] rootwrap_config = /etc/nova/rootwrap.conf {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 566.899396] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] rpc_response_timeout = 60 {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 566.899557] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] run_external_periodic_tasks = True {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 566.899726] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] running_deleted_instance_action = reap {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 566.899885] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] running_deleted_instance_poll_interval = 1800 {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 566.900117] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] running_deleted_instance_timeout = 0 {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 566.900282] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] scheduler_instance_sync_interval = 120 {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 566.900502] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] service_down_time = 720 {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 566.900676] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] 
servicegroup_driver = db {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 566.900835] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] shell_completion = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 566.900997] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] shelved_offload_time = 0 {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 566.901174] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] shelved_poll_interval = 3600 {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 566.901344] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] shutdown_timeout = 0 {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 566.901507] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] source_is_ipv6 = False {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 566.901668] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] ssl_only = False {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 566.901917] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] state_path = /opt/stack/data/n-cpu-1 {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 566.902103] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] sync_power_state_interval = 600 {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 566.902270] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] sync_power_state_pool_size = 1000 {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 566.902443] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] syslog_log_facility = LOG_USER {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 566.902601] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] tempdir = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 566.902786] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] timeout_nbd = 10 {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 566.902926] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] transport_url = **** {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 566.903098] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] update_resources_interval = 0 {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 566.903262] env[63279]: DEBUG 
oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] use_cow_images = True {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 566.903423] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] use_journal = False {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 566.903582] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] use_json = False {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 566.903740] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] use_rootwrap_daemon = False {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 566.903899] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] use_stderr = False {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 566.904069] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] use_syslog = False {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 566.904228] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] vcpu_pin_set = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 566.904398] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] vif_plugging_is_fatal = True {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 566.904581] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] vif_plugging_timeout = 300 {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 566.904726] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] virt_mkfs = [] {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 566.904922] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] volume_usage_poll_interval = 0 {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 566.905103] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] watch_log_file = False {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 566.905278] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] web = /usr/share/spice-html5 {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 566.905463] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] os_brick.lock_path = /opt/stack/data/n-cpu-1 {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.905632] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] os_brick.wait_mpath_device_attempts = 4 {{(pid=63279) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.905795] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] os_brick.wait_mpath_device_interval = 1 {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.905967] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] oslo_concurrency.disable_process_locking = False {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.906655] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] oslo_concurrency.lock_path = /opt/stack/data/n-cpu-1 {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.906853] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] oslo_messaging_metrics.metrics_buffer_size = 1000 {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.907039] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] oslo_messaging_metrics.metrics_enabled = False {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.907225] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] oslo_messaging_metrics.metrics_process_name = {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.907405] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] oslo_messaging_metrics.metrics_socket_file = /var/tmp/metrics_collector.sock {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.907578] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] oslo_messaging_metrics.metrics_thread_stop_timeout = 10 {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.907766] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] api.auth_strategy = keystone {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.907936] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] api.compute_link_prefix = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.908130] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] api.config_drive_skip_versions = 1.0 2007-01-19 2007-03-01 2007-08-29 2007-10-10 2007-12-15 2008-02-01 2008-09-01 {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.908366] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] api.dhcp_domain = novalocal {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.908538] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] api.enable_instance_password = True {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.908703] 
env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] api.glance_link_prefix = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.908871] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] api.instance_list_cells_batch_fixed_size = 100 {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.909057] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] api.instance_list_cells_batch_strategy = distributed {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.909228] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] api.instance_list_per_project_cells = False {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.909393] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] api.list_records_by_skipping_down_cells = True {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.909556] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] api.local_metadata_per_cell = False {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.909725] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] api.max_limit = 1000 {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.909891] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] api.metadata_cache_expiration = 15 {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.910076] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] api.neutron_default_tenant_id = default {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.910252] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] api.response_validation = warn {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.910421] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] api.use_neutron_default_nets = False {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.910596] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] api.vendordata_dynamic_connect_timeout = 5 {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.910751] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] api.vendordata_dynamic_failure_fatal = False {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.910914] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] api.vendordata_dynamic_read_timeout = 5 {{(pid=63279) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.911107] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] api.vendordata_dynamic_ssl_certfile = {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.911276] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] api.vendordata_dynamic_targets = [] {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.911440] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] api.vendordata_jsonfile_path = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.911644] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] api.vendordata_providers = ['StaticJSON'] {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.911855] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] cache.backend = dogpile.cache.memcached {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.912024] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] cache.backend_argument = **** {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.912196] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] cache.backend_expiration_time = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.912368] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] cache.config_prefix = cache.oslo {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.912539] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] cache.dead_timeout = 60.0 {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.912703] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] cache.debug_cache_backend = False {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.912886] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] cache.enable_retry_client = False {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.913035] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] cache.enable_socket_keepalive = False {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.913210] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] cache.enabled = True {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.913376] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] cache.enforce_fips_mode = False {{(pid=63279) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.913540] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] cache.expiration_time = 600 {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.913702] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] cache.hashclient_retry_attempts = 2 {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.913869] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] cache.hashclient_retry_delay = 1.0 {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.914052] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] cache.memcache_dead_retry = 300 {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.914220] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] cache.memcache_password = **** {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.914388] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] cache.memcache_pool_connection_get_timeout = 10 {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.914549] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] cache.memcache_pool_flush_on_reconnect = False {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.914710] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] cache.memcache_pool_maxsize = 10 {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.914872] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] cache.memcache_pool_unused_timeout = 60 {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.915048] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] cache.memcache_sasl_enabled = False {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.915230] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] cache.memcache_servers = ['localhost:11211'] {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.915399] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] cache.memcache_socket_timeout = 1.0 {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.915558] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] cache.memcache_username = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.915722] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] cache.proxies = [] {{(pid=63279) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.915884] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] cache.redis_db = 0 {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.916058] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] cache.redis_password = **** {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.916237] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] cache.redis_sentinel_service_name = mymaster {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.916413] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] cache.redis_sentinels = ['localhost:26379'] {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.916581] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] cache.redis_server = localhost:6379 {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.916744] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] cache.redis_socket_timeout = 1.0 {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.916902] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] cache.redis_username = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.917077] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] cache.retry_attempts = 2 {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.917245] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] cache.retry_delay = 0.0 {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.917407] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] cache.socket_keepalive_count = 1 {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.917568] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] cache.socket_keepalive_idle = 1 {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.917727] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] cache.socket_keepalive_interval = 1 {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.917884] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] cache.tls_allowed_ciphers = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.918054] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] cache.tls_cafile = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.918261] 
env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] cache.tls_certfile = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.918391] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] cache.tls_enabled = False {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.918531] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] cache.tls_keyfile = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.918701] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] cinder.auth_section = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.918874] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] cinder.auth_type = password {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.919046] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] cinder.cafile = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.919232] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] cinder.catalog_info = volumev3::publicURL {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.919396] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] cinder.certfile = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.919560] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] cinder.collect_timing = False {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.919722] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] cinder.cross_az_attach = True {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.919883] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] cinder.debug = False {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.920054] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] cinder.endpoint_template = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.920219] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] cinder.http_retries = 3 {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.920381] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] cinder.insecure = False {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.920535] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] cinder.keyfile = None {{(pid=63279) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.920704] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] cinder.os_region_name = RegionOne {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.920866] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] cinder.split_loggers = False {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.921042] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] cinder.timeout = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.921222] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] compute.consecutive_build_service_disable_threshold = 10 {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.921388] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] compute.cpu_dedicated_set = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.921545] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] compute.cpu_shared_set = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.921711] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] compute.image_type_exclude_list = [] {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.921876] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] compute.live_migration_wait_for_vif_plug = True {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.922052] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] compute.max_concurrent_disk_ops = 0 {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.922221] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] compute.max_disk_devices_to_attach = -1 {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.922425] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] compute.packing_host_numa_cells_allocation_strategy = False {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.922558] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] compute.provider_config_location = /etc/nova/provider_config/ {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.922723] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] compute.resource_provider_association_refresh = 300 {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.922887] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] 
compute.sharing_providers_max_uuids_per_request = 200 {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.923064] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] compute.shutdown_retry_interval = 10 {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.923247] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] compute.vmdk_allowed_types = ['streamOptimized', 'monolithicSparse'] {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.923428] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] conductor.workers = 2 {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.923607] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] console.allowed_origins = [] {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.923770] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] console.ssl_ciphers = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.923946] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] console.ssl_minimum_version = default {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.924133] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] consoleauth.enforce_session_timeout = False {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.924309] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] consoleauth.token_ttl = 600 {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.924487] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] cyborg.cafile = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.924647] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] cyborg.certfile = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.924812] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] cyborg.collect_timing = False {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.924972] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] cyborg.connect_retries = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.925145] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] cyborg.connect_retry_delay = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.925305] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] cyborg.endpoint_override = None 
{{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.925470] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] cyborg.insecure = False {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.925626] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] cyborg.keyfile = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.925784] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] cyborg.max_version = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.925940] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] cyborg.min_version = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.926110] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] cyborg.region_name = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.926272] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] cyborg.retriable_status_codes = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.926430] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] cyborg.service_name = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.926602] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] cyborg.service_type = accelerator {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.926763] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] cyborg.split_loggers = False {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.926923] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] cyborg.status_code_retries = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.927093] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] cyborg.status_code_retry_delay = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.927255] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] cyborg.timeout = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.927438] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] cyborg.valid_interfaces = ['internal', 'public'] {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.927600] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] cyborg.version = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 
566.927772] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] database.asyncio_connection = **** {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.927933] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] database.asyncio_slave_connection = **** {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.928119] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] database.backend = sqlalchemy {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.928313] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] database.connection = **** {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.928492] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] database.connection_debug = 0 {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.928667] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] database.connection_parameters = {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.928834] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] database.connection_recycle_time = 3600 {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.929007] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] database.connection_trace = False {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.929183] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] database.db_inc_retry_interval = True {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.929376] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] database.db_max_retries = 20 {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.929570] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] database.db_max_retry_interval = 10 {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.929738] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] database.db_retry_interval = 1 {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.929904] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] database.max_overflow = 50 {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.930079] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] database.max_pool_size = 5 {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.930247] env[63279]: DEBUG oslo_service.service [None 
req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] database.max_retries = 10 {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.930423] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] database.mysql_sql_mode = TRADITIONAL {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.930586] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] database.mysql_wsrep_sync_wait = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.930747] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] database.pool_timeout = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.930911] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] database.retry_interval = 10 {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.931080] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] database.slave_connection = **** {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.931247] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] database.sqlite_synchronous = True {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.931412] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] database.use_db_reconnect = False {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.931580] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] api_database.asyncio_connection = **** {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.931741] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] api_database.asyncio_slave_connection = **** {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.931915] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] api_database.backend = sqlalchemy {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.932101] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] api_database.connection = **** {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.932270] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] api_database.connection_debug = 0 {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.932445] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] api_database.connection_parameters = {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.932612] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None 
None] api_database.connection_recycle_time = 3600 {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.932775] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] api_database.connection_trace = False {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.932936] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] api_database.db_inc_retry_interval = True {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.933112] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] api_database.db_max_retries = 20 {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.933278] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] api_database.db_max_retry_interval = 10 {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.933443] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] api_database.db_retry_interval = 1 {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.933604] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] api_database.max_overflow = 50 {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.933765] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] api_database.max_pool_size = 5 {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.933924] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] api_database.max_retries = 10 {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.934106] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] api_database.mysql_sql_mode = TRADITIONAL {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.934269] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] api_database.mysql_wsrep_sync_wait = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.934429] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] api_database.pool_timeout = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.934591] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] api_database.retry_interval = 10 {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.934749] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] api_database.slave_connection = **** {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.934910] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] 
api_database.sqlite_synchronous = True {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.935095] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] devices.enabled_mdev_types = [] {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.935273] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] ephemeral_storage_encryption.cipher = aes-xts-plain64 {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.935446] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] ephemeral_storage_encryption.default_format = luks {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.935608] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] ephemeral_storage_encryption.enabled = False {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.935770] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] ephemeral_storage_encryption.key_size = 512 {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.935937] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] glance.api_servers = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.936123] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] glance.cafile = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.936290] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] glance.certfile = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.936452] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] glance.collect_timing = False {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.936611] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] glance.connect_retries = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.936771] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] glance.connect_retry_delay = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.936932] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] glance.debug = False {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.937106] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] glance.default_trusted_certificate_ids = [] {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.937272] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] glance.enable_certificate_validation 
= False {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.937436] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] glance.enable_rbd_download = False {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.937602] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] glance.endpoint_override = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.937763] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] glance.insecure = False {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.937923] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] glance.keyfile = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.938096] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] glance.max_version = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.938283] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] glance.min_version = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.938461] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] glance.num_retries = 3 {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.938632] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] glance.rbd_ceph_conf = {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.938798] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] glance.rbd_connect_timeout = 5 {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.938968] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] glance.rbd_pool = {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.939152] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] glance.rbd_user = {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.939348] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] glance.region_name = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.939517] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] glance.retriable_status_codes = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.939680] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] glance.service_name = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.939850] env[63279]: DEBUG 
oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] glance.service_type = image {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.940025] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] glance.split_loggers = False {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.940193] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] glance.status_code_retries = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.940357] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] glance.status_code_retry_delay = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.940520] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] glance.timeout = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.940700] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] glance.valid_interfaces = ['internal', 'public'] {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.940866] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] glance.verify_glance_signatures = False {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.941038] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] glance.version = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.941214] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] guestfs.debug = False {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.941385] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] manila.auth_section = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.941549] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] manila.auth_type = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.941710] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] manila.cafile = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.941869] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] manila.certfile = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.942045] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] manila.collect_timing = False {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.942210] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] manila.connect_retries = None 
{{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.942373] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] manila.connect_retry_delay = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.942532] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] manila.endpoint_override = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.942696] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] manila.insecure = False {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.942852] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] manila.keyfile = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.943019] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] manila.max_version = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.943178] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] manila.min_version = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.943337] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] manila.region_name = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.943495] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] manila.retriable_status_codes = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.943652] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] manila.service_name = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.943821] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] manila.service_type = shared-file-system {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.943986] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] manila.share_apply_policy_timeout = 10 {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.944162] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] manila.split_loggers = False {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.944322] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] manila.status_code_retries = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.944480] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] manila.status_code_retry_delay = None {{(pid=63279) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.944640] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] manila.timeout = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.944820] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] manila.valid_interfaces = ['internal', 'public'] {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.944982] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] manila.version = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.945166] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] mks.enabled = False {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.945515] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] mks.mksproxy_base_url = http://127.0.0.1:6090/ {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.945710] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] image_cache.manager_interval = 2400 {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.945881] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] image_cache.precache_concurrency = 1 {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.946072] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] image_cache.remove_unused_base_images = True {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.946271] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] image_cache.remove_unused_original_minimum_age_seconds = 86400 {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.946457] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] image_cache.remove_unused_resized_minimum_age_seconds = 3600 {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.946640] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] image_cache.subdirectory_name = _base {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.946820] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] ironic.api_max_retries = 60 {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.946987] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] ironic.api_retry_interval = 2 {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.947165] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] ironic.auth_section = None {{(pid=63279) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.947333] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] ironic.auth_type = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.947534] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] ironic.cafile = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.947742] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] ironic.certfile = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.947920] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] ironic.collect_timing = False {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.948106] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] ironic.conductor_group = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.948321] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] ironic.connect_retries = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.948503] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] ironic.connect_retry_delay = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.948669] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] ironic.endpoint_override = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.948836] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] ironic.insecure = False {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.948996] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] ironic.keyfile = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.949175] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] ironic.max_version = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.949360] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] ironic.min_version = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.949564] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] ironic.peer_list = [] {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.949737] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] ironic.region_name = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.949900] env[63279]: DEBUG oslo_service.service [None 
req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] ironic.retriable_status_codes = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.950079] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] ironic.serial_console_state_timeout = 10 {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.950243] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] ironic.service_name = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.950419] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] ironic.service_type = baremetal {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.950580] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] ironic.shard = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.950745] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] ironic.split_loggers = False {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.950908] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] ironic.status_code_retries = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.951081] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] ironic.status_code_retry_delay = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.951247] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] ironic.timeout = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.951435] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] ironic.valid_interfaces = ['internal', 'public'] {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.951597] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] ironic.version = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.951779] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] key_manager.backend = nova.keymgr.conf_key_mgr.ConfKeyManager {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.951954] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] key_manager.fixed_key = **** {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.952153] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] barbican.auth_endpoint = http://localhost/identity/v3 {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.952319] env[63279]: DEBUG oslo_service.service [None 
req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] barbican.barbican_api_version = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.952483] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] barbican.barbican_endpoint = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.952655] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] barbican.barbican_endpoint_type = public {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.952816] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] barbican.barbican_region_name = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.952974] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] barbican.cafile = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.953153] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] barbican.certfile = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.953319] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] barbican.collect_timing = False {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.953506] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] barbican.insecure = False {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.953681] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] barbican.keyfile = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.953852] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] barbican.number_of_retries = 60 {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.954026] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] barbican.retry_delay = 1 {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.954198] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] barbican.send_service_user_token = False {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.954364] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] barbican.split_loggers = False {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.954527] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] barbican.timeout = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.954692] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] barbican.verify_ssl = True {{(pid=63279) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.954853] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] barbican.verify_ssl_path = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.955033] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] barbican_service_user.auth_section = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.955200] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] barbican_service_user.auth_type = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.955363] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] barbican_service_user.cafile = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.955523] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] barbican_service_user.certfile = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.955686] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] barbican_service_user.collect_timing = False {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.955847] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] barbican_service_user.insecure = False {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.956020] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] barbican_service_user.keyfile = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.956186] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] barbican_service_user.split_loggers = False {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.956359] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] barbican_service_user.timeout = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.956531] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] vault.approle_role_id = **** {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.956691] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] vault.approle_secret_id = **** {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.956863] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] vault.kv_mountpoint = secret {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.957045] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] vault.kv_path = None {{(pid=63279) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.957217] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] vault.kv_version = 2 {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.957381] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] vault.namespace = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.957542] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] vault.root_token_id = **** {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.957701] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] vault.ssl_ca_crt_file = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.957869] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] vault.timeout = 60.0 {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.958044] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] vault.use_ssl = False {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.958243] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] vault.vault_url = http://127.0.0.1:8200 {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.958431] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] keystone.cafile = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.958597] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] keystone.certfile = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.958762] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] keystone.collect_timing = False {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.958939] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] keystone.connect_retries = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.959137] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] keystone.connect_retry_delay = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.959327] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] keystone.endpoint_override = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.959499] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] keystone.insecure = False {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.959661] env[63279]: DEBUG oslo_service.service [None 
req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] keystone.keyfile = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.959819] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] keystone.max_version = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.959977] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] keystone.min_version = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.960153] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] keystone.region_name = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.960317] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] keystone.retriable_status_codes = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.960482] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] keystone.service_name = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.960653] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] keystone.service_type = identity {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.960818] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] keystone.split_loggers = False {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.960979] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] keystone.status_code_retries = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.961155] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] keystone.status_code_retry_delay = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.961314] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] keystone.timeout = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.961513] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] keystone.valid_interfaces = ['internal', 'public'] {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.961697] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] keystone.version = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.961894] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] libvirt.ceph_mount_options = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.962208] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] libvirt.ceph_mount_point_base 
= /opt/stack/data/n-cpu-1/mnt {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.962397] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] libvirt.connection_uri = {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.962562] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] libvirt.cpu_mode = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.962733] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] libvirt.cpu_model_extra_flags = [] {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.962907] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] libvirt.cpu_models = [] {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.963094] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] libvirt.cpu_power_governor_high = performance {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.963269] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] libvirt.cpu_power_governor_low = powersave {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.963438] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] libvirt.cpu_power_management = False {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.963612] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] libvirt.cpu_power_management_strategy = cpu_state {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.963785] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] libvirt.device_detach_attempts = 8 {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.963953] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] libvirt.device_detach_timeout = 20 {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.964139] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] libvirt.disk_cachemodes = [] {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.964305] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] libvirt.disk_prefix = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.964474] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] libvirt.enabled_perf_events = [] {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.964639] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] libvirt.file_backed_memory = 0 {{(pid=63279) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.964804] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] libvirt.gid_maps = [] {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.964964] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] libvirt.hw_disk_discard = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.965136] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] libvirt.hw_machine_type = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.965312] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] libvirt.images_rbd_ceph_conf = {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.965496] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] libvirt.images_rbd_glance_copy_poll_interval = 15 {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.965676] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] libvirt.images_rbd_glance_copy_timeout = 600 {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.965850] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] libvirt.images_rbd_glance_store_name = {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.966031] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] libvirt.images_rbd_pool = rbd {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.966210] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] libvirt.images_type = default {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.966374] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] libvirt.images_volume_group = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.966537] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] libvirt.inject_key = False {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.966701] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] libvirt.inject_partition = -2 {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.966865] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] libvirt.inject_password = False {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.967040] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] libvirt.iscsi_iface = None {{(pid=63279) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.967211] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] libvirt.iser_use_multipath = False {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.967377] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] libvirt.live_migration_bandwidth = 0 {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.967540] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] libvirt.live_migration_completion_timeout = 800 {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.967706] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] libvirt.live_migration_downtime = 500 {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.967870] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] libvirt.live_migration_downtime_delay = 75 {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.968045] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] libvirt.live_migration_downtime_steps = 10 {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.968235] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] libvirt.live_migration_inbound_addr = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.968407] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] libvirt.live_migration_permit_auto_converge = False {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.968572] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] libvirt.live_migration_permit_post_copy = False {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.968731] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] libvirt.live_migration_scheme = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.968907] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] libvirt.live_migration_timeout_action = abort {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.969085] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] libvirt.live_migration_tunnelled = False {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.969268] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] libvirt.live_migration_uri = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.969469] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] 
libvirt.live_migration_with_native_tls = False {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.969642] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] libvirt.max_queues = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.969809] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] libvirt.mem_stats_period_seconds = 10 {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.970046] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] libvirt.migration_inbound_addr = 10.180.1.21 {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.970219] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] libvirt.nfs_mount_options = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.970515] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] libvirt.nfs_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.970695] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] libvirt.num_aoe_discover_tries = 3 {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.970866] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] libvirt.num_iser_scan_tries = 5 {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.971042] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] libvirt.num_memory_encrypted_guests = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.971210] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] libvirt.num_nvme_discover_tries = 5 {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.971377] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] libvirt.num_pcie_ports = 0 {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.971545] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] libvirt.num_volume_scan_tries = 5 {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.971713] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] libvirt.pmem_namespaces = [] {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.971874] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] libvirt.quobyte_client_cfg = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.972188] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] 
libvirt.quobyte_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.972366] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] libvirt.rbd_connect_timeout = 5 {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.972537] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] libvirt.rbd_destroy_volume_retries = 12 {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.972705] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] libvirt.rbd_destroy_volume_retry_interval = 5 {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.972865] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] libvirt.rbd_secret_uuid = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.973035] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] libvirt.rbd_user = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.973206] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] libvirt.realtime_scheduler_priority = 1 {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.973382] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] libvirt.remote_filesystem_transport = ssh {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.973562] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] libvirt.rescue_image_id = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.973740] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] libvirt.rescue_kernel_id = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.973904] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] libvirt.rescue_ramdisk_id = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.974089] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] libvirt.rng_dev_path = /dev/urandom {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.974264] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] libvirt.rx_queue_size = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.974446] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] libvirt.smbfs_mount_options = {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.974740] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] 
libvirt.smbfs_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.974916] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] libvirt.snapshot_compression = False {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.975107] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] libvirt.snapshot_image_format = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.975346] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] libvirt.snapshots_directory = /opt/stack/data/nova/instances/snapshots {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.975528] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] libvirt.sparse_logical_volumes = False {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.975696] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] libvirt.swtpm_enabled = False {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.975868] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] libvirt.swtpm_group = tss {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.976048] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] libvirt.swtpm_user = tss {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.976226] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] libvirt.sysinfo_serial = unique {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.976389] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] libvirt.tb_cache_size = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.976550] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] libvirt.tx_queue_size = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.976716] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] libvirt.uid_maps = [] {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.976882] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] libvirt.use_virtio_for_bridges = True {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.977066] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] libvirt.virt_type = kvm {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.977241] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] libvirt.volume_clear = zero 
{{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.977408] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] libvirt.volume_clear_size = 0 {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.977597] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] libvirt.volume_use_multipath = False {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.977768] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] libvirt.vzstorage_cache_path = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.977941] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] libvirt.vzstorage_log_path = /var/log/vstorage/%(cluster_name)s/nova.log.gz {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.978126] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] libvirt.vzstorage_mount_group = qemu {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.978327] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] libvirt.vzstorage_mount_opts = [] {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.978505] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] libvirt.vzstorage_mount_perms = 0770 {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.978810] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] libvirt.vzstorage_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.978995] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] libvirt.vzstorage_mount_user = stack {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.979183] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] libvirt.wait_soft_reboot_seconds = 120 {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.979384] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] neutron.auth_section = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.979564] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] neutron.auth_type = password {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.979726] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] neutron.cafile = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.979886] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] neutron.certfile = None 
{{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.980061] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] neutron.collect_timing = False {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.980227] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] neutron.connect_retries = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.980390] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] neutron.connect_retry_delay = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.980562] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] neutron.default_floating_pool = public {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.980724] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] neutron.endpoint_override = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.980890] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] neutron.extension_sync_interval = 600 {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.981069] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] neutron.http_retries = 3 {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.981240] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] neutron.insecure = False {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.981404] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] neutron.keyfile = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.981563] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] neutron.max_version = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.981739] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] neutron.metadata_proxy_shared_secret = **** {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.981899] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] neutron.min_version = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.982079] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] neutron.ovs_bridge = br-int {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.982249] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] neutron.physnets = [] {{(pid=63279) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.982424] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] neutron.region_name = RegionOne {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.982586] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] neutron.retriable_status_codes = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.982756] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] neutron.service_metadata_proxy = True {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.982916] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] neutron.service_name = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.983098] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] neutron.service_type = network {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.983264] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] neutron.split_loggers = False {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.983429] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] neutron.status_code_retries = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.983588] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] neutron.status_code_retry_delay = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.983747] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] neutron.timeout = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.983928] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] neutron.valid_interfaces = ['internal', 'public'] {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.984103] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] neutron.version = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.984281] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] notifications.bdms_in_notifications = False {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.984463] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] notifications.default_level = INFO {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.984629] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] notifications.include_share_mapping = False {{(pid=63279) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.984813] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] notifications.notification_format = unversioned {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.984982] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] notifications.notify_on_state_change = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.985174] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] notifications.versioned_notifications_topics = ['versioned_notifications'] {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.985354] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] pci.alias = [] {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.985529] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] pci.device_spec = [] {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.985729] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] pci.report_in_placement = False {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.985909] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] placement.auth_section = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.986098] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] placement.auth_type = password {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.986270] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] placement.auth_url = http://10.180.1.21/identity {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.986434] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] placement.cafile = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.986592] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] placement.certfile = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.986755] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] placement.collect_timing = False {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.986914] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] placement.connect_retries = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.987085] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] placement.connect_retry_delay = None {{(pid=63279) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.987248] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] placement.default_domain_id = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.987406] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] placement.default_domain_name = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.987562] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] placement.domain_id = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.987718] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] placement.domain_name = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.987874] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] placement.endpoint_override = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.988046] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] placement.insecure = False {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.988234] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] placement.keyfile = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.988393] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] placement.max_version = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.988552] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] placement.min_version = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.988719] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] placement.password = **** {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.988877] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] placement.project_domain_id = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.989054] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] placement.project_domain_name = Default {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.989233] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] placement.project_id = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.989453] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] placement.project_name = service {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.989636] 
env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] placement.region_name = RegionOne {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.989800] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] placement.retriable_status_codes = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.989960] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] placement.service_name = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.990146] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] placement.service_type = placement {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.990311] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] placement.split_loggers = False {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.990477] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] placement.status_code_retries = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.990638] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] placement.status_code_retry_delay = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.990797] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] placement.system_scope = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.990957] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] placement.timeout = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.991131] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] placement.trust_id = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.991293] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] placement.user_domain_id = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.991462] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] placement.user_domain_name = Default {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.991622] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] placement.user_id = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.991793] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] placement.username = nova {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.991973] env[63279]: DEBUG oslo_service.service [None 
req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] placement.valid_interfaces = ['internal', 'public'] {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.992150] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] placement.version = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.992331] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] quota.cores = 20 {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.992556] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] quota.count_usage_from_placement = False {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.992746] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] quota.driver = nova.quota.DbQuotaDriver {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.992920] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] quota.injected_file_content_bytes = 10240 {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.993104] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] quota.injected_file_path_length = 255 {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.993278] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] quota.injected_files = 5 {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.993451] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] quota.instances = 10 {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.993636] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] quota.key_pairs = 100 {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.993785] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] quota.metadata_items = 128 {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.993954] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] quota.ram = 51200 {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.994134] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] quota.recheck_quota = True {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.994307] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] quota.server_group_members = 10 {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.994478] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] quota.server_groups = 10 {{(pid=63279) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.994686] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] quota.unified_limits_resource_list = ['servers'] {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.994862] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] quota.unified_limits_resource_strategy = require {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.995054] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] scheduler.discover_hosts_in_cells_interval = -1 {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.995226] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] scheduler.enable_isolated_aggregate_filtering = False {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.995394] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] scheduler.image_metadata_prefilter = False {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.995560] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] scheduler.limit_tenants_to_placement_aggregate = False {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.995727] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] scheduler.max_attempts = 3 {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.995891] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] scheduler.max_placement_results = 1000 {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.996068] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] scheduler.placement_aggregate_required_for_tenants = False {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.996235] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] scheduler.query_placement_for_image_type_support = False {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.996402] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] scheduler.query_placement_for_routed_network_aggregates = False {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.996578] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] scheduler.workers = 2 {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.996752] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] filter_scheduler.aggregate_image_properties_isolation_namespace = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.996927] 
env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] filter_scheduler.aggregate_image_properties_isolation_separator = . {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.997120] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] filter_scheduler.available_filters = ['nova.scheduler.filters.all_filters'] {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.997294] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] filter_scheduler.build_failure_weight_multiplier = 1000000.0 {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.997461] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] filter_scheduler.cpu_weight_multiplier = 1.0 {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.997654] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] filter_scheduler.cross_cell_move_weight_multiplier = 1000000.0 {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.997840] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] filter_scheduler.disk_weight_multiplier = 1.0 {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.998042] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] filter_scheduler.enabled_filters = ['ComputeFilter', 'ComputeCapabilitiesFilter', 'ImagePropertiesFilter', 'ServerGroupAntiAffinityFilter', 'ServerGroupAffinityFilter', 'SameHostFilter', 'DifferentHostFilter'] {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.998238] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] filter_scheduler.host_subset_size = 1 {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.998417] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] filter_scheduler.hypervisor_version_weight_multiplier = 1.0 {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.998581] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] filter_scheduler.image_properties_default_architecture = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.998745] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] filter_scheduler.io_ops_weight_multiplier = -1.0 {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.998912] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] filter_scheduler.isolated_hosts = [] {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.999091] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] filter_scheduler.isolated_images = [] 
{{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.999292] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] filter_scheduler.max_instances_per_host = 50 {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.999468] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] filter_scheduler.max_io_ops_per_host = 8 {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.999633] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] filter_scheduler.num_instances_weight_multiplier = 0.0 {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.999797] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] filter_scheduler.pci_in_placement = False {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 566.999960] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] filter_scheduler.pci_weight_multiplier = 1.0 {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 567.000137] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] filter_scheduler.ram_weight_multiplier = 1.0 {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 567.000304] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] filter_scheduler.restrict_isolated_hosts_to_isolated_images = True {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 567.000468] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] filter_scheduler.shuffle_best_same_weighed_hosts = False {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 567.000634] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] filter_scheduler.soft_affinity_weight_multiplier = 1.0 {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 567.000796] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] filter_scheduler.soft_anti_affinity_weight_multiplier = 1.0 {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 567.000959] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] filter_scheduler.track_instance_changes = True {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 567.001153] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] filter_scheduler.weight_classes = ['nova.scheduler.weights.all_weighers'] {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 567.001331] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] metrics.required = True {{(pid=63279) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 567.001529] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] metrics.weight_multiplier = 1.0 {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 567.001700] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] metrics.weight_of_unavailable = -10000.0 {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 567.001868] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] metrics.weight_setting = [] {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 567.002203] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] serial_console.base_url = ws://127.0.0.1:6083/ {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 567.002387] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] serial_console.enabled = False {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 567.002568] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] serial_console.port_range = 10000:20000 {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 567.002742] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] serial_console.proxyclient_address = 127.0.0.1 {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 567.002915] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] serial_console.serialproxy_host = 0.0.0.0 {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 567.003097] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] serial_console.serialproxy_port = 6083 {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 567.003271] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] service_user.auth_section = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 567.003447] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] service_user.auth_type = password {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 567.003608] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] service_user.cafile = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 567.003768] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] service_user.certfile = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 567.003930] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] service_user.collect_timing = False {{(pid=63279) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 567.004105] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] service_user.insecure = False {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 567.004265] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] service_user.keyfile = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 567.004436] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] service_user.send_service_user_token = True {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 567.004597] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] service_user.split_loggers = False {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 567.004755] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] service_user.timeout = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 567.004926] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] spice.agent_enabled = True {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 567.005101] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] spice.enabled = False {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 567.005408] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] spice.html5proxy_base_url = http://127.0.0.1:6082/spice_auto.html {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 567.005613] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] spice.html5proxy_host = 0.0.0.0 {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 567.005788] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] spice.html5proxy_port = 6082 {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 567.005951] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] spice.image_compression = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 567.006127] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] spice.jpeg_compression = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 567.006291] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] spice.playback_compression = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 567.006459] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] spice.require_secure = False {{(pid=63279) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 567.006630] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] spice.server_listen = 127.0.0.1 {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 567.006800] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] spice.server_proxyclient_address = 127.0.0.1 {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 567.006963] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] spice.streaming_mode = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 567.007138] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] spice.zlib_compression = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 567.007308] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] upgrade_levels.baseapi = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 567.007483] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] upgrade_levels.compute = auto {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 567.007647] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] upgrade_levels.conductor = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 567.007807] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] upgrade_levels.scheduler = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 567.007976] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] vendordata_dynamic_auth.auth_section = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 567.008156] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] vendordata_dynamic_auth.auth_type = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 567.008356] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] vendordata_dynamic_auth.cafile = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 567.008522] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] vendordata_dynamic_auth.certfile = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 567.008688] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] vendordata_dynamic_auth.collect_timing = False {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 567.008849] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] vendordata_dynamic_auth.insecure = False {{(pid=63279) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 567.009013] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] vendordata_dynamic_auth.keyfile = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 567.009185] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] vendordata_dynamic_auth.split_loggers = False {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 567.009417] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] vendordata_dynamic_auth.timeout = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 567.009619] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] vmware.api_retry_count = 10 {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 567.009787] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] vmware.ca_file = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 567.009963] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] vmware.cache_prefix = devstack-image-cache {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 567.010152] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] vmware.cluster_name = testcl1 {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 567.010321] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] vmware.connection_pool_size = 10 {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 567.010484] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] vmware.console_delay_seconds = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 567.010657] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] vmware.datastore_regex = ^datastore.* {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 567.010871] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] vmware.host_ip = vc1.osci.c.eu-de-1.cloud.sap {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 567.011062] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] vmware.host_password = **** {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 567.011240] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] vmware.host_port = 443 {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 567.011422] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] vmware.host_username = administrator@vsphere.local {{(pid=63279) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 567.011589] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] vmware.insecure = True {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 567.011754] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] vmware.integration_bridge = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 567.011920] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] vmware.maximum_objects = 100 {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 567.012102] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] vmware.pbm_default_policy = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 567.012273] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] vmware.pbm_enabled = False {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 567.012438] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] vmware.pbm_wsdl_location = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 567.012609] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] vmware.serial_log_dir = /opt/vmware/vspc {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 567.012774] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] vmware.serial_port_proxy_uri = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 567.012938] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] vmware.serial_port_service_uri = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 567.013121] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] vmware.task_poll_interval = 0.5 {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 567.013298] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] vmware.use_linked_clone = False {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 567.013495] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] vmware.vnc_keymap = en-us {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 567.013677] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] vmware.vnc_port = 5900 {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 567.013847] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] vmware.vnc_port_total = 10000 {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 567.014048] 
env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] vnc.auth_schemes = ['none'] {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 567.014239] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] vnc.enabled = False {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 567.014528] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] vnc.novncproxy_base_url = http://127.0.0.1:6080/vnc_auto.html {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 567.014717] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] vnc.novncproxy_host = 0.0.0.0 {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 567.014893] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] vnc.novncproxy_port = 6080 {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 567.015094] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] vnc.server_listen = 127.0.0.1 {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 567.015283] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] vnc.server_proxyclient_address = 127.0.0.1 {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 567.015488] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] vnc.vencrypt_ca_certs = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 567.015676] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] vnc.vencrypt_client_cert = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 567.015844] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] vnc.vencrypt_client_key = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 567.016033] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] workarounds.disable_compute_service_check_for_ffu = False {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 567.016209] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] workarounds.disable_deep_image_inspection = False {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 567.016377] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] workarounds.disable_fallback_pcpu_query = False {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 567.016543] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] workarounds.disable_group_policy_check_upcall = False {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 
567.016708] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] workarounds.disable_libvirt_livesnapshot = False {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 567.016873] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] workarounds.disable_rootwrap = False {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 567.017058] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] workarounds.enable_numa_live_migration = False {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 567.017231] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] workarounds.enable_qemu_monitor_announce_self = False {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 567.017396] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] workarounds.ensure_libvirt_rbd_instance_dir_cleanup = False {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 567.017560] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] workarounds.handle_virt_lifecycle_events = True {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 567.017722] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] workarounds.libvirt_disable_apic = False {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 567.017882] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] workarounds.never_download_image_if_on_rbd = False {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 567.018058] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] workarounds.qemu_monitor_announce_self_count = 3 {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 567.018243] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] workarounds.qemu_monitor_announce_self_interval = 1 {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 567.018417] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] workarounds.reserve_disk_resource_for_image_cache = False {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 567.018587] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] workarounds.skip_cpu_compare_at_startup = False {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 567.018753] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] workarounds.skip_cpu_compare_on_dest = False {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 567.018915] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None 
None] workarounds.skip_hypervisor_version_check_on_lm = False {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 567.019089] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] workarounds.skip_reserve_in_use_ironic_nodes = False {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 567.019285] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] workarounds.unified_limits_count_pcpu_as_vcpu = False {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 567.019477] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] workarounds.wait_for_vif_plugged_event_during_hard_reboot = [] {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 567.019669] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] wsgi.api_paste_config = /etc/nova/api-paste.ini {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 567.019843] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] wsgi.client_socket_timeout = 900 {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 567.020022] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] wsgi.default_pool_size = 1000 {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 567.020199] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] wsgi.keep_alive = True {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 567.020371] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] wsgi.max_header_line = 16384 {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 567.020535] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] wsgi.secure_proxy_ssl_header = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 567.020698] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] wsgi.ssl_ca_file = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 567.020857] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] wsgi.ssl_cert_file = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 567.021027] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] wsgi.ssl_key_file = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 567.021202] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] wsgi.tcp_keepidle = 600 {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 567.021406] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] 
wsgi.wsgi_log_format = %(client_ip)s "%(request_line)s" status: %(status_code)s len: %(body_length)s time: %(wall_seconds).7f {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 567.021595] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] zvm.ca_file = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 567.021760] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] zvm.cloud_connector_url = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 567.022079] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] zvm.image_tmp_path = /opt/stack/data/n-cpu-1/images {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 567.022265] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] zvm.reachable_timeout = 300 {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 567.022444] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] oslo_versionedobjects.fatal_exception_format_errors = False {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 567.022624] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] oslo_middleware.http_basic_auth_user_file = /etc/htpasswd {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 567.022804] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] profiler.connection_string = messaging:// {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 567.022976] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] profiler.enabled = False {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 567.023165] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] profiler.es_doc_type = notification {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 567.023333] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] profiler.es_scroll_size = 10000 {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 567.023506] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] profiler.es_scroll_time = 2m {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 567.023670] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] profiler.filter_error_trace = False {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 567.023842] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] profiler.hmac_keys = **** {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 567.024023] env[63279]: DEBUG 
oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] profiler.sentinel_service_name = mymaster {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 567.024196] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] profiler.socket_timeout = 0.1 {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 567.024359] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] profiler.trace_requests = False {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 567.024523] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] profiler.trace_sqlalchemy = False {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 567.024705] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] profiler_jaeger.process_tags = {} {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 567.024865] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] profiler_jaeger.service_name_prefix = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 567.025041] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] profiler_otlp.service_name_prefix = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 567.025210] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] remote_debug.host = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 567.025372] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] remote_debug.port = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 567.025573] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] oslo_messaging_rabbit.amqp_auto_delete = False {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 567.025741] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] oslo_messaging_rabbit.amqp_durable_queues = False {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 567.025905] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] oslo_messaging_rabbit.conn_pool_min_size = 2 {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 567.026078] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] oslo_messaging_rabbit.conn_pool_ttl = 1200 {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 567.026244] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] oslo_messaging_rabbit.direct_mandatory_flag = True {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 567.026409] 
env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] oslo_messaging_rabbit.enable_cancel_on_failover = False {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 567.026571] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] oslo_messaging_rabbit.heartbeat_in_pthread = False {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 567.026733] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] oslo_messaging_rabbit.heartbeat_rate = 3 {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 567.026897] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] oslo_messaging_rabbit.heartbeat_timeout_threshold = 60 {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 567.027081] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] oslo_messaging_rabbit.hostname = devstack {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 567.027246] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] oslo_messaging_rabbit.kombu_compression = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 567.027417] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] oslo_messaging_rabbit.kombu_failover_strategy = round-robin {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 567.027583] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] oslo_messaging_rabbit.kombu_missing_consumer_retry_timeout = 60 {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 567.027750] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] oslo_messaging_rabbit.kombu_reconnect_delay = 1.0 {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 567.027921] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] oslo_messaging_rabbit.processname = nova-compute {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 567.028098] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] oslo_messaging_rabbit.rabbit_ha_queues = False {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 567.028292] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] oslo_messaging_rabbit.rabbit_interval_max = 30 {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 567.028482] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] oslo_messaging_rabbit.rabbit_login_method = AMQPLAIN {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 567.028651] env[63279]: DEBUG oslo_service.service [None 
req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] oslo_messaging_rabbit.rabbit_qos_prefetch_count = 0 {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 567.028817] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] oslo_messaging_rabbit.rabbit_quorum_delivery_limit = 0 {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 567.028986] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] oslo_messaging_rabbit.rabbit_quorum_max_memory_bytes = 0 {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 567.029169] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] oslo_messaging_rabbit.rabbit_quorum_max_memory_length = 0 {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 567.029376] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] oslo_messaging_rabbit.rabbit_quorum_queue = False {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 567.029561] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] oslo_messaging_rabbit.rabbit_retry_backoff = 2 {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 567.029730] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] oslo_messaging_rabbit.rabbit_retry_interval = 1 {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 567.029896] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] oslo_messaging_rabbit.rabbit_stream_fanout = False {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 567.030074] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] oslo_messaging_rabbit.rabbit_transient_queues_ttl = 1800 {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 567.030243] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] oslo_messaging_rabbit.rabbit_transient_quorum_queue = False {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 567.030413] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] oslo_messaging_rabbit.rpc_conn_pool_size = 30 {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 567.030575] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] oslo_messaging_rabbit.ssl = False {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 567.030746] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] oslo_messaging_rabbit.ssl_ca_file = {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 567.030923] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] oslo_messaging_rabbit.ssl_cert_file = 
{{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 567.031104] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] oslo_messaging_rabbit.ssl_enforce_fips_mode = False {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 567.031277] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] oslo_messaging_rabbit.ssl_key_file = {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 567.031447] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] oslo_messaging_rabbit.ssl_version = {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 567.031609] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] oslo_messaging_rabbit.use_queue_manager = False {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 567.031793] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] oslo_messaging_notifications.driver = ['messagingv2'] {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 567.031966] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] oslo_messaging_notifications.retry = -1 {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 567.032160] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] oslo_messaging_notifications.topics = ['notifications'] {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 567.032333] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] oslo_messaging_notifications.transport_url = **** {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 567.032507] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] oslo_limit.auth_section = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 567.032672] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] oslo_limit.auth_type = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 567.032832] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] oslo_limit.cafile = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 567.032991] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] oslo_limit.certfile = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 567.033168] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] oslo_limit.collect_timing = False {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 567.033328] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] 
oslo_limit.connect_retries = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 567.033515] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] oslo_limit.connect_retry_delay = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 567.033684] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] oslo_limit.endpoint_id = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 567.033857] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] oslo_limit.endpoint_interface = publicURL {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 567.034028] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] oslo_limit.endpoint_override = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 567.034192] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] oslo_limit.endpoint_region_name = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 567.034353] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] oslo_limit.endpoint_service_name = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 567.034517] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] oslo_limit.endpoint_service_type = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 567.034682] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] oslo_limit.insecure = False {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 567.034839] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] oslo_limit.keyfile = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 567.034995] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] oslo_limit.max_version = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 567.035167] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] oslo_limit.min_version = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 567.035324] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] oslo_limit.region_name = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 567.035484] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] oslo_limit.retriable_status_codes = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 567.035642] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] oslo_limit.service_name = None {{(pid=63279) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 567.035799] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] oslo_limit.service_type = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 567.035961] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] oslo_limit.split_loggers = False {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 567.036134] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] oslo_limit.status_code_retries = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 567.036296] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] oslo_limit.status_code_retry_delay = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 567.036455] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] oslo_limit.timeout = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 567.036613] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] oslo_limit.valid_interfaces = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 567.036769] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] oslo_limit.version = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 567.036932] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] oslo_reports.file_event_handler = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 567.037115] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] oslo_reports.file_event_handler_interval = 1 {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 567.037277] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] oslo_reports.log_dir = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 567.037449] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] vif_plug_linux_bridge_privileged.capabilities = [12] {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 567.037607] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] vif_plug_linux_bridge_privileged.group = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 567.037767] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] vif_plug_linux_bridge_privileged.helper_command = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 567.037930] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] vif_plug_linux_bridge_privileged.logger_name = 
oslo_privsep.daemon {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 567.038110] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] vif_plug_linux_bridge_privileged.thread_pool_size = 8 {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 567.038297] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] vif_plug_linux_bridge_privileged.user = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 567.038483] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] vif_plug_ovs_privileged.capabilities = [12, 1] {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 567.038646] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] vif_plug_ovs_privileged.group = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 567.038807] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] vif_plug_ovs_privileged.helper_command = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 567.038975] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] vif_plug_ovs_privileged.logger_name = oslo_privsep.daemon {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 567.039176] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] vif_plug_ovs_privileged.thread_pool_size = 8 {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 567.039374] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] vif_plug_ovs_privileged.user = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 567.039560] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] os_vif_linux_bridge.flat_interface = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 567.039744] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] os_vif_linux_bridge.forward_bridge_interface = ['all'] {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 567.039919] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] os_vif_linux_bridge.iptables_bottom_regex = {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 567.040109] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] os_vif_linux_bridge.iptables_drop_action = DROP {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 567.040284] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] os_vif_linux_bridge.iptables_top_regex = {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 567.040455] 
env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] os_vif_linux_bridge.network_device_mtu = 1500 {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 567.040623] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] os_vif_linux_bridge.use_ipv6 = False {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 567.040787] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] os_vif_linux_bridge.vlan_interface = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 567.040965] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] os_vif_ovs.default_qos_type = linux-noop {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 567.041152] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] os_vif_ovs.isolate_vif = False {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 567.041324] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] os_vif_ovs.network_device_mtu = 1500 {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 567.041492] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] os_vif_ovs.ovs_vsctl_timeout = 120 {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 567.041661] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] os_vif_ovs.ovsdb_connection = tcp:127.0.0.1:6640 {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 567.041831] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] os_vif_ovs.ovsdb_interface = native {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 567.041994] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] os_vif_ovs.per_port_bridge = False {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 567.042182] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] privsep_osbrick.capabilities = [21] {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 567.042346] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] privsep_osbrick.group = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 567.042507] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] privsep_osbrick.helper_command = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 567.042673] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] privsep_osbrick.logger_name = os_brick.privileged {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 
567.042839] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] privsep_osbrick.thread_pool_size = 8 {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 567.043007] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] privsep_osbrick.user = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 567.043190] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] nova_sys_admin.capabilities = [0, 1, 2, 3, 12, 21] {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 567.043351] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] nova_sys_admin.group = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 567.043514] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] nova_sys_admin.helper_command = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 567.043679] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] nova_sys_admin.logger_name = oslo_privsep.daemon {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 567.043845] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] nova_sys_admin.thread_pool_size = 8 {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 567.044020] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] nova_sys_admin.user = None {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 567.044149] env[63279]: DEBUG oslo_service.service [None req-901f9cfb-02d0-496e-a107-4666c5c9b2d1 None None] ******************************************************************************** {{(pid=63279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2828}} [ 567.044610] env[63279]: INFO nova.service [-] Starting compute node (version 29.0.2) [ 567.548920] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-7ad6c3db-936d-441b-a265-9b4c0c0b39d4 None None] Getting list of instances from cluster (obj){ [ 567.548920] env[63279]: value = "domain-c8" [ 567.548920] env[63279]: _type = "ClusterComputeResource" [ 567.548920] env[63279]: } {{(pid=63279) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 567.550119] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16d7da79-0c82-4625-a248-682a8693b9bf {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 567.559055] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-7ad6c3db-936d-441b-a265-9b4c0c0b39d4 None None] Got total of 0 instances {{(pid=63279) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 567.559664] env[63279]: WARNING nova.virt.vmwareapi.driver [None req-7ad6c3db-936d-441b-a265-9b4c0c0b39d4 None None] The vmwareapi driver is not tested by the OpenStack project nor does it have clear maintainer(s) and thus its quality can not be ensured. 
It should be considered experimental and may be removed in a future release. If you are using the driver in production please let us know via the openstack-discuss mailing list. [ 567.560155] env[63279]: INFO nova.virt.node [None req-7ad6c3db-936d-441b-a265-9b4c0c0b39d4 None None] Generated node identity 0ba7c625-a0fc-4d3c-b804-196d00f00137 [ 567.560395] env[63279]: INFO nova.virt.node [None req-7ad6c3db-936d-441b-a265-9b4c0c0b39d4 None None] Wrote node identity 0ba7c625-a0fc-4d3c-b804-196d00f00137 to /opt/stack/data/n-cpu-1/compute_id [ 568.063112] env[63279]: WARNING nova.compute.manager [None req-7ad6c3db-936d-441b-a265-9b4c0c0b39d4 None None] Compute nodes ['0ba7c625-a0fc-4d3c-b804-196d00f00137'] for host cpu-1 were not found in the database. If this is the first time this service is starting on this host, then you can ignore this warning. [ 569.067711] env[63279]: INFO nova.compute.manager [None req-7ad6c3db-936d-441b-a265-9b4c0c0b39d4 None None] Looking for unclaimed instances stuck in BUILDING status for nodes managed by this host [ 570.072366] env[63279]: WARNING nova.compute.manager [None req-7ad6c3db-936d-441b-a265-9b4c0c0b39d4 None None] No compute node record found for host cpu-1. If this is the first time this service is starting on this host, then you can ignore this warning.: nova.exception_Remote.ComputeHostNotFound_Remote: Compute host cpu-1 could not be found. [ 570.072706] env[63279]: DEBUG oslo_concurrency.lockutils [None req-7ad6c3db-936d-441b-a265-9b4c0c0b39d4 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 570.073198] env[63279]: DEBUG oslo_concurrency.lockutils [None req-7ad6c3db-936d-441b-a265-9b4c0c0b39d4 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.001s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 570.073358] env[63279]: DEBUG oslo_concurrency.lockutils [None req-7ad6c3db-936d-441b-a265-9b4c0c0b39d4 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 570.073516] env[63279]: DEBUG nova.compute.resource_tracker [None req-7ad6c3db-936d-441b-a265-9b4c0c0b39d4 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=63279) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 570.074430] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e02122d5-eb2f-45a3-b020-2551fe0f69cd {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 570.082528] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-234dd8c0-81a2-41c3-897b-ea2f9019f67a {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 570.095791] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2cadda90-3e06-4d7e-8b87-f214bdf9276a {{(pid=63279) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 570.101994] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c05ed5c3-62a8-48ca-bb18-7099803e7a21 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 570.129316] env[63279]: DEBUG nova.compute.resource_tracker [None req-7ad6c3db-936d-441b-a265-9b4c0c0b39d4 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181287MB free_disk=171GB free_vcpus=48 pci_devices=None {{(pid=63279) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 570.129458] env[63279]: DEBUG oslo_concurrency.lockutils [None req-7ad6c3db-936d-441b-a265-9b4c0c0b39d4 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 570.129650] env[63279]: DEBUG oslo_concurrency.lockutils [None req-7ad6c3db-936d-441b-a265-9b4c0c0b39d4 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 570.632490] env[63279]: WARNING nova.compute.resource_tracker [None req-7ad6c3db-936d-441b-a265-9b4c0c0b39d4 None None] No compute node record for cpu-1:0ba7c625-a0fc-4d3c-b804-196d00f00137: nova.exception_Remote.ComputeHostNotFound_Remote: Compute host 0ba7c625-a0fc-4d3c-b804-196d00f00137 could not be found. [ 571.136878] env[63279]: INFO nova.compute.resource_tracker [None req-7ad6c3db-936d-441b-a265-9b4c0c0b39d4 None None] Compute node record created for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 with uuid: 0ba7c625-a0fc-4d3c-b804-196d00f00137 [ 572.645072] env[63279]: DEBUG nova.compute.resource_tracker [None req-7ad6c3db-936d-441b-a265-9b4c0c0b39d4 None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=63279) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 572.645425] env[63279]: DEBUG nova.compute.resource_tracker [None req-7ad6c3db-936d-441b-a265-9b4c0c0b39d4 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=63279) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 572.797253] env[63279]: INFO nova.scheduler.client.report [None req-7ad6c3db-936d-441b-a265-9b4c0c0b39d4 None None] [req-ee43a315-23d2-4589-9b91-f39072418c55] Created resource provider record via placement API for resource provider with UUID 0ba7c625-a0fc-4d3c-b804-196d00f00137 and name domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28. 
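The provider inventory pushed to Placement in the entries that follow is derived from the hypervisor view reported above using the standard capacity rule capacity = (total - reserved) * allocation_ratio. A minimal, standalone Python sketch (illustrative only, not Nova code; values copied from this log, helper name hypothetical) that recomputes the effective schedulable capacity:

    # Inventory as reported for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137
    # (see the update_inventory / set_inventory_for_provider entries below).
    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
    }

    def effective_capacity(inv):
        # Placement schedules against (total - reserved) * allocation_ratio per
        # resource class, e.g. VCPU: (48 - 0) * 4.0 = 192 schedulable vCPUs here.
        return {rc: (v['total'] - v['reserved']) * v['allocation_ratio']
                for rc, v in inv.items()}

    print(effective_capacity(inventory))
    # {'VCPU': 192.0, 'MEMORY_MB': 196078.0, 'DISK_GB': 400.0}
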
[ 572.814412] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58814370-c09f-414e-b952-7994ae947675 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 572.822071] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20877c4f-9110-46d8-8808-a2c6de9fca09 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 572.851366] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02a75d5a-2e08-42db-a5c9-e3a9bf5e4cdc {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 572.858423] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-752f150a-81c7-4e2e-bdeb-6f6a306e7b2b {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 572.871413] env[63279]: DEBUG nova.compute.provider_tree [None req-7ad6c3db-936d-441b-a265-9b4c0c0b39d4 None None] Updating inventory in ProviderTree for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 573.409622] env[63279]: DEBUG nova.scheduler.client.report [None req-7ad6c3db-936d-441b-a265-9b4c0c0b39d4 None None] Updated inventory for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 with generation 0 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 573.409861] env[63279]: DEBUG nova.compute.provider_tree [None req-7ad6c3db-936d-441b-a265-9b4c0c0b39d4 None None] Updating resource provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 generation from 0 to 1 during operation: update_inventory {{(pid=63279) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 573.410010] env[63279]: DEBUG nova.compute.provider_tree [None req-7ad6c3db-936d-441b-a265-9b4c0c0b39d4 None None] Updating inventory in ProviderTree for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 573.457720] env[63279]: DEBUG nova.compute.provider_tree [None req-7ad6c3db-936d-441b-a265-9b4c0c0b39d4 None None] Updating 
resource provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 generation from 1 to 2 during operation: update_traits {{(pid=63279) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 573.961996] env[63279]: DEBUG nova.compute.resource_tracker [None req-7ad6c3db-936d-441b-a265-9b4c0c0b39d4 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=63279) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 573.962347] env[63279]: DEBUG oslo_concurrency.lockutils [None req-7ad6c3db-936d-441b-a265-9b4c0c0b39d4 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 3.833s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 573.962410] env[63279]: DEBUG nova.service [None req-7ad6c3db-936d-441b-a265-9b4c0c0b39d4 None None] Creating RPC server for service compute {{(pid=63279) start /opt/stack/nova/nova/service.py:186}} [ 573.974952] env[63279]: DEBUG nova.service [None req-7ad6c3db-936d-441b-a265-9b4c0c0b39d4 None None] Join ServiceGroup membership for this service compute {{(pid=63279) start /opt/stack/nova/nova/service.py:203}} [ 573.975137] env[63279]: DEBUG nova.servicegroup.drivers.db [None req-7ad6c3db-936d-441b-a265-9b4c0c0b39d4 None None] DB_Driver: join new ServiceGroup member cpu-1 to the compute group, service = {{(pid=63279) join /opt/stack/nova/nova/servicegroup/drivers/db.py:44}} [ 608.976706] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._sync_power_states {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 609.480706] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Getting list of instances from cluster (obj){ [ 609.480706] env[63279]: value = "domain-c8" [ 609.480706] env[63279]: _type = "ClusterComputeResource" [ 609.480706] env[63279]: } {{(pid=63279) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 609.481934] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3726cdb7-4f21-4c6f-aec5-89f45fa09dcf {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.490656] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Got total of 0 instances {{(pid=63279) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 609.490880] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._cleanup_running_deleted_instances {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 609.491189] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Getting list of instances from cluster (obj){ [ 609.491189] env[63279]: value = "domain-c8" [ 609.491189] env[63279]: _type = "ClusterComputeResource" [ 609.491189] env[63279]: } {{(pid=63279) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 609.492016] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-350e5751-6ceb-49d1-abe3-eec430322be3 
{{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.499186] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Got total of 0 instances {{(pid=63279) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 622.449472] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 622.449861] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 622.449998] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Starting heal instance info cache {{(pid=63279) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10447}} [ 622.450132] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Rebuilding the list of instances to heal {{(pid=63279) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10451}} [ 622.954063] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Didn't find any instances for network info cache update. {{(pid=63279) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10533}} [ 622.954063] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 622.954341] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 622.954341] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 622.954559] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 622.954767] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 622.954964] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 622.955146] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] 
CONF.reclaim_instance_interval <= 0, skipping... {{(pid=63279) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11066}} [ 622.955300] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager.update_available_resource {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 623.458106] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 623.458464] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 623.458565] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 623.458712] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=63279) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 623.459600] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5fcdf853-1092-43e0-b18b-13b515f12c0c {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.467685] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4ba4cc6-5889-4664-8ff6-4290d4ef9b7e {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.482865] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-591c5450-d61e-40a0-a414-f04593602a0d {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.488726] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5999d644-40f8-40ef-9195-47f38e366ce1 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.516314] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181285MB free_disk=171GB free_vcpus=48 pci_devices=None {{(pid=63279) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 623.516442] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=63279) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 623.516652] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 624.535251] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=63279) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 624.535542] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=63279) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 624.549730] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c75aacb-a1a5-40cd-b647-66ee23c4a63f {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.556776] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-059444c0-08a1-4a51-9896-91e27ae7566a {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.585694] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66a88a76-1fd0-4b7a-b491-060c52e9aee4 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.592082] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38f76c2b-d25f-46e2-891d-91c55b72659c {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.604882] env[63279]: DEBUG nova.compute.provider_tree [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Inventory has not changed in ProviderTree for provider: 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 625.107711] env[63279]: DEBUG nova.scheduler.client.report [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Inventory has not changed for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 625.613531] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=63279) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 625.613876] env[63279]: DEBUG oslo_concurrency.lockutils [None 
req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.097s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 685.600985] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 685.601391] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 686.107202] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 686.107397] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Starting heal instance info cache {{(pid=63279) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10447}} [ 686.107457] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Rebuilding the list of instances to heal {{(pid=63279) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10451}} [ 686.611582] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Didn't find any instances for network info cache update. 
{{(pid=63279) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10533}} [ 686.611919] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 686.612061] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 686.612160] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 686.612312] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 686.612456] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 686.612631] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 686.612764] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=63279) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11066}} [ 686.612909] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager.update_available_resource {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 687.116468] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 687.116778] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 687.116888] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 687.117054] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=63279) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 687.117908] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8655b11e-a9e8-4f52-846f-f5c962480189 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.126244] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a144765-57ef-45ba-ae0d-89e6d6de1cb5 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.140504] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a925f8ac-ca1a-4423-9889-0e55895c96ca {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.146718] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9127e50a-72e9-45ed-9bcb-37329a99f812 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.174127] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181291MB free_disk=171GB free_vcpus=48 pci_devices=None {{(pid=63279) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 687.174277] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 687.174452] 
env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 688.192129] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=63279) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 688.192386] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=63279) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 688.204742] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7b1ef6a-63f4-4b64-b94b-ab8094f40ccd {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 688.212755] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88cb4a85-ea6a-4a26-bc05-22eefec8a2d2 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 688.242447] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-472a6040-62b0-430b-9012-c751c473696c {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 688.249453] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48132987-8722-4547-a295-1143b15ff262 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 688.262226] env[63279]: DEBUG nova.compute.provider_tree [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Inventory has not changed in ProviderTree for provider: 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 688.765246] env[63279]: DEBUG nova.scheduler.client.report [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Inventory has not changed for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 688.766557] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=63279) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 688.766741] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Lock "compute_resources" "released" by 
"nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.592s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 748.768342] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 748.768776] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 748.768776] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Starting heal instance info cache {{(pid=63279) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10447}} [ 748.768885] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Rebuilding the list of instances to heal {{(pid=63279) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10451}} [ 749.272100] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Didn't find any instances for network info cache update. {{(pid=63279) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10533}} [ 749.272349] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 749.272479] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 749.272630] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 749.272778] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 749.272916] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 749.273071] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 749.273202] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=63279) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11066}} [ 749.273343] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager.update_available_resource {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 749.777077] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 749.777466] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 749.777545] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 749.777651] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=63279) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 749.778611] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0cfdaa03-7959-4f2d-b064-d23eff916ec7 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.786864] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5cbca195-ecf4-4b9b-8574-1d5e87fa542c {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.800775] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a2b5ae2-d589-42ac-840c-de9be4bbe584 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.806510] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84ff57ca-9428-4c8d-aaf3-f9bd745a6d69 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.835087] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181260MB free_disk=171GB free_vcpus=48 pci_devices=None {{(pid=63279) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 749.835225] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 749.835404] 
env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 750.853715] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=63279) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 750.854015] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=63279) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 750.866405] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78e19326-6980-4c65-acb4-3f856c6502da {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.873970] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe1951ad-2b6c-44d7-8503-6574bda21056 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.902261] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c39be9cc-fab4-428c-8227-9ae511fb3937 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.908809] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98be2ec4-b009-40fe-afca-b9cbf82bb3e4 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.922134] env[63279]: DEBUG nova.compute.provider_tree [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Inventory has not changed in ProviderTree for provider: 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 751.425088] env[63279]: DEBUG nova.scheduler.client.report [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Inventory has not changed for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 751.426416] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=63279) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 751.426601] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Lock "compute_resources" "released" by 
"nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.591s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 810.094242] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 810.094635] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 810.598909] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 810.599090] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Starting heal instance info cache {{(pid=63279) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10447}} [ 810.599282] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Rebuilding the list of instances to heal {{(pid=63279) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10451}} [ 811.101815] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Didn't find any instances for network info cache update. {{(pid=63279) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10533}} [ 811.102244] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 811.102244] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 811.102339] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 811.102517] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 811.102679] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 811.102823] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=63279) run_periodic_tasks 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 811.102950] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=63279) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11066}} [ 811.103101] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager.update_available_resource {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 811.607016] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 811.607281] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 811.607450] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 811.607606] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=63279) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 811.608482] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d57a9ec-c059-42e8-a200-bfb768cf82c8 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 811.616705] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42875750-7a2d-468f-8f05-1e46297b9d7b {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 811.630178] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58bef0c9-9bf9-4689-a3b4-05f1871421b5 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 811.636199] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3f1e982-c727-4316-b35c-07d3404c5938 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 811.664660] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181249MB free_disk=171GB free_vcpus=48 pci_devices=None {{(pid=63279) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 811.664847] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 
None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 811.665012] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 812.686962] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=63279) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 812.687235] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=63279) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 812.702911] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e7974f4-cb33-4835-bff1-d20e1d1d62a7 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.712600] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df7c511a-f37b-4fef-bba3-44d4b73b8d98 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.741136] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10e28b82-6c78-4e80-8686-09e8b0a913ee {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.747962] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8960293-0aa0-4a82-9731-6c2e224ee060 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.760483] env[63279]: DEBUG nova.compute.provider_tree [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Inventory has not changed in ProviderTree for provider: 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 813.263888] env[63279]: DEBUG nova.scheduler.client.report [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Inventory has not changed for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 813.265171] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=63279) _update_available_resource 
/opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 813.265362] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.600s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 862.441789] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 862.442200] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Cleaning up deleted instances {{(pid=63279) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11747}} [ 862.946017] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] There are 0 instances to clean {{(pid=63279) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11756}} [ 862.946284] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 862.946432] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Cleaning up deleted instances with incomplete migration {{(pid=63279) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11785}} [ 863.448643] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 864.950483] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 864.950843] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 864.950843] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=63279) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11066}} [ 864.950963] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager.update_available_resource {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 865.454187] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 865.454430] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 865.454612] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 865.454761] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=63279) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 865.455767] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8b1f554-8be3-444c-82d2-2358b20d25b6 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.464015] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b109691-78b4-4a28-b80f-58258c86af9c {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.477548] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33b70144-820d-4f7f-8693-f7e7430002d2 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.483634] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b41dd3a-8ef7-4e82-ae99-fef219c184d7 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.512359] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181241MB free_disk=171GB free_vcpus=48 pci_devices=None {{(pid=63279) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 865.512536] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 865.512712] 
env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 866.532025] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=63279) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 866.532310] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=63279) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 866.544743] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9701193d-a4f8-44c8-8734-d0456d152c82 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.552351] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b02ed0eb-8e03-4032-96b8-d0f8dcc09579 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.580882] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aadd3203-a5d6-47f9-bbaa-13b2a193069e {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.587734] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63c55a5d-5519-47ec-b274-ccb1d4028ec8 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.600227] env[63279]: DEBUG nova.compute.provider_tree [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Inventory has not changed in ProviderTree for provider: 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 867.103472] env[63279]: DEBUG nova.scheduler.client.report [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Inventory has not changed for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 867.104880] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=63279) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 867.105083] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Lock "compute_resources" "released" by 
"nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.592s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 867.595810] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 867.596148] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Starting heal instance info cache {{(pid=63279) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10447}} [ 867.596148] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Rebuilding the list of instances to heal {{(pid=63279) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10451}} [ 868.100433] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Didn't find any instances for network info cache update. {{(pid=63279) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10533}} [ 868.100687] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 868.100854] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 868.101016] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 868.101168] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 868.101312] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 923.441075] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 923.441572] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=63279) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11066}} [ 925.437515] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 925.442571] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 925.442571] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Starting heal instance info cache {{(pid=63279) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10447}} [ 925.442571] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Rebuilding the list of instances to heal {{(pid=63279) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10451}} [ 925.944069] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Didn't find any instances for network info cache update. {{(pid=63279) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10533}} [ 925.944264] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 925.944358] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager.update_available_resource {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 926.447720] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 926.448134] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 926.448194] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 926.448300] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=63279) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 926.449523] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0afd0bf6-ebd8-4eac-a56e-15767962593d {{(pid=63279) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.457569] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-045ba0d6-c8e8-4fea-a16c-6fa52cbde1e0 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.470949] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cbea6140-8902-4e92-9331-fa3da238eceb {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.477122] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6aea010-a9a8-4126-830a-7b4bbcfd6737 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.505813] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181286MB free_disk=171GB free_vcpus=48 pci_devices=None {{(pid=63279) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 926.505995] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 926.506154] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 927.541408] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=63279) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 927.541408] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=63279) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 927.556701] env[63279]: DEBUG nova.scheduler.client.report [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Refreshing inventories for resource provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 927.572528] env[63279]: DEBUG nova.scheduler.client.report [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Updating ProviderTree inventory for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) 
_refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 927.572528] env[63279]: DEBUG nova.compute.provider_tree [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Updating inventory in ProviderTree for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 927.584107] env[63279]: DEBUG nova.scheduler.client.report [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Refreshing aggregate associations for resource provider 0ba7c625-a0fc-4d3c-b804-196d00f00137, aggregates: None {{(pid=63279) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 927.598985] env[63279]: DEBUG nova.scheduler.client.report [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Refreshing trait associations for resource provider 0ba7c625-a0fc-4d3c-b804-196d00f00137, traits: COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK {{(pid=63279) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 927.610168] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f5edc03-a991-4ca6-878d-49ed29712968 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.617273] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8929a563-d7ae-4d80-909e-e92149d582d6 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.646090] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7649fcd6-8201-40cd-ad56-b90fb15fbc40 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.652979] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55d7ddd1-a673-40ec-95bd-b9e69969dbcf {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.665510] env[63279]: DEBUG nova.compute.provider_tree [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Inventory has not changed in ProviderTree for provider: 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 928.169292] env[63279]: DEBUG nova.scheduler.client.report [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Inventory has not changed for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 928.170775] 
env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=63279) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 928.170960] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.665s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 929.668646] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 929.668646] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 929.668646] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 929.668646] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 931.439712] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 984.441416] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 984.441693] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=63279) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11066}} [ 985.441732] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 985.442189] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Starting heal instance info cache {{(pid=63279) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10447}} [ 985.442189] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Rebuilding the list of instances to heal {{(pid=63279) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10451}} [ 985.945234] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Didn't find any instances for network info cache update. {{(pid=63279) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10533}} [ 985.945479] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 985.945651] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager.update_available_resource {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 986.448414] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 986.448677] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 986.448829] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 986.448980] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=63279) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 986.449892] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ad1b029-5f14-4cbb-a6fb-0b002d8cc7aa {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.457969] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee625ad9-274a-495e-a294-6732a4749a7f {{(pid=63279) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.471451] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f8c234a-820f-41bf-b6ca-042e69740f2a {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.477397] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6549b317-c89f-4636-8e09-81e2b5c17e67 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.505070] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181297MB free_disk=171GB free_vcpus=48 pci_devices=None {{(pid=63279) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 986.505215] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 986.505403] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 987.523734] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=63279) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 987.523993] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=63279) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 987.536431] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e13e8066-02b8-43d7-9d01-e0549a9a76df {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.544007] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b4fa701-fb1a-454c-b8a7-dc7e101e279d {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.572667] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03049cac-f2c6-45c9-9e17-94dce23f3efc {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.579568] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ed2b776-0408-43b8-9b9b-07ad82214d59 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.592128] env[63279]: DEBUG nova.compute.provider_tree [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] 
Inventory has not changed in ProviderTree for provider: 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 988.094938] env[63279]: DEBUG nova.scheduler.client.report [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Inventory has not changed for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 988.096297] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=63279) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 988.096481] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.591s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 990.091403] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 990.091812] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 990.091812] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 990.091958] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 990.092054] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1044.441618] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1044.442141] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=63279) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11066}} [ 1045.442552] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1046.441653] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager.update_available_resource {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1046.945197] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1046.945549] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1046.945640] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1046.945827] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=63279) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1046.946774] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f65b8a9c-5ba6-400a-8ba2-f24c1ca8d521 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.955130] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e043b25-7762-407f-ba57-147d38ae737c {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.968960] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-357d28e9-0102-4469-bffa-6ab83e59b627 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.975071] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-acad4548-0dfb-418a-b2e4-24c3230a0dc7 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.002757] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181291MB free_disk=171GB free_vcpus=48 pci_devices=None {{(pid=63279) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1047.002931] env[63279]: DEBUG 
oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1047.003111] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1048.021410] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=63279) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1048.021666] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=63279) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1048.034505] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4a7b76b-6ea0-4897-a347-103f0723a252 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.042094] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b10d34d-cc49-41e2-bdf8-ab7ae4d693a7 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.071714] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fffab462-ed9a-4080-b4bf-ef8d2c7e6ae7 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.078452] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-550457df-027b-4231-bd34-cf1db5420e16 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.090755] env[63279]: DEBUG nova.compute.provider_tree [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Inventory has not changed in ProviderTree for provider: 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1048.594228] env[63279]: DEBUG nova.scheduler.client.report [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Inventory has not changed for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1048.595521] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Compute_service record updated for 
cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=63279) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1048.595747] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.593s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1049.591211] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1049.591446] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1049.591589] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Starting heal instance info cache {{(pid=63279) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10447}} [ 1049.591706] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Rebuilding the list of instances to heal {{(pid=63279) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10451}} [ 1050.095076] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Didn't find any instances for network info cache update. {{(pid=63279) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10533}} [ 1050.095331] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1050.095472] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1050.095625] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1051.442019] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1054.436837] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1105.442368] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=63279) run_periodic_tasks 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1106.442222] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1106.442403] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=63279) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11066}} [ 1106.442623] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager.update_available_resource {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1106.945669] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1106.945999] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1106.946219] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1106.946381] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=63279) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1106.947307] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dfdb14a5-a579-4c82-bf71-82888604b192 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.956755] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9aea6c8-07ba-4a6b-9fe3-3976ccb051f2 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.970970] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20348359-fa7d-4147-b581-45418d376126 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.977407] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9caae816-1752-460f-a21e-6e68f6d7ad6f {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1107.005068] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Hypervisor/Node resource view: 
name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181301MB free_disk=171GB free_vcpus=48 pci_devices=None {{(pid=63279) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1107.005214] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1107.005403] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1108.023966] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=63279) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1108.024254] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=63279) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1108.038393] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-077838b6-f06d-49a5-a3ff-fd59e5552ab3 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1108.045996] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ecfe329-0012-4263-b6b6-f5de2100d35a {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1108.075127] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3039c4b9-4408-42f1-80e8-14119acf604a {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1108.081983] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3172006-2255-471d-ae66-40a45df90209 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1108.094754] env[63279]: DEBUG nova.compute.provider_tree [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Inventory has not changed in ProviderTree for provider: 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1108.598284] env[63279]: DEBUG nova.scheduler.client.report [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Inventory has not changed for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) 
set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1108.599564] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=63279) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1108.599746] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.594s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1109.594120] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1109.594497] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1109.594497] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Starting heal instance info cache {{(pid=63279) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10447}} [ 1109.594619] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Rebuilding the list of instances to heal {{(pid=63279) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10451}} [ 1110.097522] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Didn't find any instances for network info cache update. 
{{(pid=63279) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10533}} [ 1110.097743] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1110.097904] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1110.098110] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1112.441902] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1166.443556] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1166.444033] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1166.444033] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=63279) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11066}} [ 1167.443081] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager.update_available_resource {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1167.945017] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1167.945365] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1167.945495] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1167.945650] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=63279) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1167.946639] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6fc01124-48e5-42f5-8b04-d6cbc59fbe50 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1167.955074] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27b4d3aa-9905-4c76-8096-6bf7db184c26 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1167.969251] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1f07dbf-02da-4589-bb0b-be1fcdb82d9c {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1167.975639] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3eb6907c-ace4-4292-82e6-f4bd71928d52 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1168.005370] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181291MB free_disk=171GB free_vcpus=48 pci_devices=None {{(pid=63279) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1168.005545] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 
1168.005704] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1169.023618] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=63279) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1169.023876] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=63279) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1169.036800] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5305a05d-a7c3-4c6b-b8f3-42aba1fb3057 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1169.044528] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-397bea0f-cfff-4e38-b078-cdcdcd4e91b5 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1169.073666] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1cc45124-13b2-4965-b8bf-3651a41da54a {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1169.080428] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3feda916-198b-4c49-82fe-bbc00df45944 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1169.093946] env[63279]: DEBUG nova.compute.provider_tree [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Inventory has not changed in ProviderTree for provider: 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1169.597271] env[63279]: DEBUG nova.scheduler.client.report [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Inventory has not changed for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1169.598574] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=63279) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1169.598755] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Lock "compute_resources" "released" by 
"nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.593s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1169.598976] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1169.599122] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Cleaning up deleted instances with incomplete migration {{(pid=63279) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11785}} [ 1170.441054] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1170.441492] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1170.441492] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Starting heal instance info cache {{(pid=63279) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10447}} [ 1170.441559] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Rebuilding the list of instances to heal {{(pid=63279) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10451}} [ 1170.944771] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Didn't find any instances for network info cache update. 
{{(pid=63279) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10533}} [ 1170.944998] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1170.945187] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1170.945366] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1171.943306] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1173.441362] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1173.441708] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1173.441769] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Cleaning up deleted instances {{(pid=63279) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11747}} [ 1173.944474] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] There are 0 instances to clean {{(pid=63279) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11756}} [ 1178.941091] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1208.979967] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._sync_power_states {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1209.483600] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Getting list of instances from cluster (obj){ [ 1209.483600] env[63279]: value = "domain-c8" [ 1209.483600] env[63279]: _type = "ClusterComputeResource" [ 1209.483600] env[63279]: } {{(pid=63279) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 1209.484730] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-225259a3-1d96-4f81-8400-d6e75681eca6 {{(pid=63279) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1209.493342] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Got total of 0 instances {{(pid=63279) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 1226.443383] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1226.443806] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1226.443806] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=63279) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11066}} [ 1228.441236] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager.update_available_resource {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1228.944112] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1228.944366] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1228.944569] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1228.944733] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=63279) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1228.945627] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90d4f522-15c1-449b-ab1e-9608f215197f {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1228.954396] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19dd7e32-ef5e-4fb3-915d-65048471a094 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1228.968971] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c28679d-7b92-453b-9a03-bb484c708069 {{(pid=63279) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1228.975361] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7141860b-79d5-4b4f-87f0-7b6d4b422bc9 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1229.004794] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181296MB free_disk=171GB free_vcpus=48 pci_devices=None {{(pid=63279) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1229.005040] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1229.005250] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1230.120686] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=63279) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1230.120942] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=63279) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1230.136571] env[63279]: DEBUG nova.scheduler.client.report [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Refreshing inventories for resource provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1230.147607] env[63279]: DEBUG nova.scheduler.client.report [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Updating ProviderTree inventory for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1230.147792] env[63279]: DEBUG nova.compute.provider_tree [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Updating inventory in ProviderTree for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 
0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1230.157098] env[63279]: DEBUG nova.scheduler.client.report [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Refreshing aggregate associations for resource provider 0ba7c625-a0fc-4d3c-b804-196d00f00137, aggregates: None {{(pid=63279) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1230.172977] env[63279]: DEBUG nova.scheduler.client.report [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Refreshing trait associations for resource provider 0ba7c625-a0fc-4d3c-b804-196d00f00137, traits: COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK {{(pid=63279) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1230.185937] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f766ae3-fd39-41eb-b8c7-98e684e17768 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1230.193782] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-586eefbb-7444-4d2b-ba91-f61f03ae70a0 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1230.223976] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9cc1a23-5835-46dc-b0f7-7c1d503fda8a {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1230.231056] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b23117f7-06b3-4076-97bc-050886ab6e17 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1230.243407] env[63279]: DEBUG nova.compute.provider_tree [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Inventory has not changed in ProviderTree for provider: 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1230.747050] env[63279]: DEBUG nova.scheduler.client.report [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Inventory has not changed for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1230.748331] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=63279) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1230.748516] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.743s {{(pid=63279) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1232.750225] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1232.750225] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1232.750225] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Starting heal instance info cache {{(pid=63279) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10447}} [ 1232.750225] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Rebuilding the list of instances to heal {{(pid=63279) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10451}} [ 1233.257379] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Didn't find any instances for network info cache update. {{(pid=63279) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10533}} [ 1233.257379] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1233.257379] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1233.257379] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1235.441283] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1286.441509] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1286.441943] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=63279) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11066}} [ 1288.441963] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1290.440933] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1290.441446] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager.update_available_resource {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1290.944623] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1290.944849] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1290.945067] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1290.945239] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=63279) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1290.946116] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c95e92b4-579c-44b6-97e0-a3b234b78ef5 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1290.954679] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76d92768-d5f0-4234-8ae9-edcc582836bd {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1290.969135] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44d9dba9-5d00-49c5-9214-4f8fe20c153f {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1290.975529] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21bfec79-770c-442f-8428-8c85a5aedcd4 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1291.004449] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None 
None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181297MB free_disk=171GB free_vcpus=48 pci_devices=None {{(pid=63279) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1291.004600] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1291.004798] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1292.022438] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=63279) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1292.022723] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=63279) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1292.035913] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb495fdb-f28c-4c10-9720-2bd11ef16a92 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1292.043579] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e081a08d-6b49-4f3e-86a0-ac732ccbce92 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1292.072117] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc3d3cda-ad95-4185-86bf-9c5cc3990310 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1292.079214] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e93c529-baf3-47e8-846e-233e94fb88a5 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1292.092951] env[63279]: DEBUG nova.compute.provider_tree [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Inventory has not changed in ProviderTree for provider: 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1292.596021] env[63279]: DEBUG nova.scheduler.client.report [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Inventory has not changed for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 
'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1292.596970] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=63279) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1292.597187] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.592s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1293.593637] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1293.593982] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1293.594033] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Starting heal instance info cache {{(pid=63279) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10447}} [ 1293.594144] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Rebuilding the list of instances to heal {{(pid=63279) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10451}} [ 1294.097462] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Didn't find any instances for network info cache update. 
{{(pid=63279) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10533}} [ 1294.097715] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1294.097876] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1297.442451] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1298.436558] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1346.444080] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1346.444080] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=63279) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11066}} [ 1350.442228] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1351.441103] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1352.436960] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1352.440623] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1352.440800] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager.update_available_resource {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1352.944557] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" 
{{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1352.944834] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1352.945087] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1352.945281] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=63279) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1352.946471] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82112349-8bd9-4935-b5a9-4fdd0d207906 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1352.955674] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27ec953b-6fb3-475d-8fd7-097f6068bb2d {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1352.969777] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73f45b7c-8517-47f8-955b-43dfae331895 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1352.976246] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f00b3629-10fc-47c7-901c-f5060e31e608 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1353.004836] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181276MB free_disk=171GB free_vcpus=48 pci_devices=None {{(pid=63279) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1353.004993] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1353.005166] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1354.024699] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=63279) _report_final_resource_view 
/opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1354.024978] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=63279) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1354.038956] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3d0aa6a-a15c-4c6f-a7eb-42b7b27c55f1 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1354.046555] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05f0277b-2de7-45c1-89f2-f2e565cdf3a0 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1354.075043] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92bad055-331e-42bc-9d08-f7448173fcaf {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1354.081668] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0236ca60-cbc5-405f-91f2-c8902e6d714d {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1354.095077] env[63279]: DEBUG nova.compute.provider_tree [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Inventory has not changed in ProviderTree for provider: 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1354.598516] env[63279]: DEBUG nova.scheduler.client.report [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Inventory has not changed for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1354.599760] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=63279) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1354.599933] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.595s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1355.600632] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1355.600964] env[63279]: DEBUG nova.compute.manager [None 
req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Starting heal instance info cache {{(pid=63279) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10447}} [ 1355.600964] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Rebuilding the list of instances to heal {{(pid=63279) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10451}} [ 1356.105125] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Didn't find any instances for network info cache update. {{(pid=63279) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10533}} [ 1356.105412] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1358.441297] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1406.443705] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1406.444139] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=63279) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11066}} [ 1410.442662] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1413.442059] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1413.442059] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Starting heal instance info cache {{(pid=63279) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10447}} [ 1413.442059] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Rebuilding the list of instances to heal {{(pid=63279) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10451}} [ 1413.944538] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Didn't find any instances for network info cache update. 
{{(pid=63279) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10533}} [ 1413.944790] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1413.944943] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1413.945132] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager.update_available_resource {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1414.448468] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1414.448837] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1414.448967] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1414.449107] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=63279) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1414.450409] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1d9b951-6890-4694-9736-eedfc4e5b548 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1414.458729] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95dde4cd-c735-4c84-9acb-d8e66e75f661 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1414.472453] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7378febf-c681-45d8-9eb3-70cf82d80ef7 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1414.478446] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3c30e2b-4cea-4422-b64e-be1f533126ee {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1414.508081] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 
None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181303MB free_disk=171GB free_vcpus=48 pci_devices=None {{(pid=63279) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1414.508275] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1414.508444] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1415.526359] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=63279) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1415.526652] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=63279) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1415.539269] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d6025ea-05c1-4731-984d-a9110a77f4fa {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1415.546739] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ce96b71-8d9f-41c3-949d-5ef04ddbeeb1 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1415.575135] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dbb9a9ea-9bc9-4d83-859d-b8d7f3c7efe9 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1415.581838] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-974f7c3b-4f49-4f93-81ef-d9d210fa444a {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1415.594116] env[63279]: DEBUG nova.compute.provider_tree [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Inventory has not changed in ProviderTree for provider: 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1416.097245] env[63279]: DEBUG nova.scheduler.client.report [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Inventory has not changed for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 
'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1416.098522] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=63279) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1416.098715] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.590s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1416.595416] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1416.595878] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1418.436477] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1418.943133] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1466.443399] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1466.443399] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=63279) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11066}} [ 1471.444189] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1474.436786] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1474.440421] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1475.441285] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1475.441704] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Starting heal instance info cache {{(pid=63279) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10447}} [ 1475.441704] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Rebuilding the list of instances to heal {{(pid=63279) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10451}} [ 1475.944406] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Didn't find any instances for network info cache update. 
{{(pid=63279) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10533}} [ 1475.944635] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1475.944784] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1475.944946] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager.update_available_resource {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1476.448500] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1476.448903] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1476.448944] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1476.449104] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=63279) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1476.450143] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac73230b-d713-4b2c-9ad7-64d13ae2a846 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1476.458158] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9bb2d721-70e7-48a8-a6cc-bf05e2e69f52 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1476.471946] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45b4d7eb-2a14-4398-84bc-b9bdafbadb8b {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1476.478038] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7afa0585-6306-4199-b467-d0eafa4a0f15 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1476.506212] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 
None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181296MB free_disk=171GB free_vcpus=48 pci_devices=None {{(pid=63279) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1476.506341] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1476.506524] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1477.526581] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=63279) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1477.526914] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=63279) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1477.541833] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8c0334e-0e54-43b7-b5b3-de43e302d2b1 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1477.552624] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e8ad848-8c9a-4175-a350-59f9b20af2a1 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1477.599167] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-592b01fb-538e-45aa-943b-44dee4c96401 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1477.606317] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80690dfe-0471-4a94-a44c-ebb73668c898 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1477.619090] env[63279]: DEBUG nova.compute.provider_tree [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Inventory has not changed in ProviderTree for provider: 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1478.123041] env[63279]: DEBUG nova.scheduler.client.report [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Inventory has not changed for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 
'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1478.123599] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=63279) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1478.123792] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.617s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1478.124018] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1478.124159] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Cleaning up deleted instances {{(pid=63279) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11747}} [ 1478.626614] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] There are 0 instances to clean {{(pid=63279) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11756}} [ 1478.626990] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1478.626990] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Cleaning up deleted instances with incomplete migration {{(pid=63279) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11785}} [ 1481.626254] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1483.441650] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1526.944193] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1526.944531] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=63279) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11066}} [ 1533.442162] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1534.436866] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1535.441738] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1535.442125] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1536.441891] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1536.442317] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Starting heal instance info cache {{(pid=63279) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10447}} [ 1536.442317] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Rebuilding the list of instances to heal {{(pid=63279) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10451}} [ 1536.945419] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Didn't find any instances for network info cache update. 
{{(pid=63279) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10533}} [ 1536.945683] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager.update_available_resource {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1537.448903] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1537.449290] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.001s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1537.449414] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1537.449609] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=63279) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1537.450559] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f4577a9-0a7f-4962-a682-d88fb9ade26a {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1537.459095] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3553ca61-1e3e-4b48-aacb-a60c626bd2c8 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1537.472655] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82f50ec6-2306-475e-9dc9-8774f43e227b {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1537.478728] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77c9d3e7-769c-4db1-bea7-c166abbba930 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1537.507472] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181299MB free_disk=171GB free_vcpus=48 pci_devices=None {{(pid=63279) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1537.507606] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 
1537.507790] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1538.539321] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=63279) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1538.539559] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=63279) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1538.554435] env[63279]: DEBUG nova.scheduler.client.report [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Refreshing inventories for resource provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1538.565306] env[63279]: DEBUG nova.scheduler.client.report [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Updating ProviderTree inventory for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1538.565478] env[63279]: DEBUG nova.compute.provider_tree [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Updating inventory in ProviderTree for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1538.574802] env[63279]: DEBUG nova.scheduler.client.report [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Refreshing aggregate associations for resource provider 0ba7c625-a0fc-4d3c-b804-196d00f00137, aggregates: None {{(pid=63279) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1538.589891] env[63279]: DEBUG nova.scheduler.client.report [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Refreshing trait associations for resource provider 0ba7c625-a0fc-4d3c-b804-196d00f00137, traits: COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK {{(pid=63279) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1538.600859] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-721a93b6-b525-4063-ae71-d41756cd8b30 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1538.608557] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8cc6fa64-4332-4123-810e-ff54ecdec7d1 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1538.636910] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cbba82d8-4ffb-406f-82f0-7d46ac20972c {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1538.643575] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b7f56b3-db36-41d5-836e-de8a6b20e2aa {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1538.655942] env[63279]: DEBUG nova.compute.provider_tree [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Inventory has not changed in ProviderTree for provider: 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1539.159360] env[63279]: DEBUG nova.scheduler.client.report [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Inventory has not changed for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1539.160629] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=63279) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1539.160811] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.653s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1539.656410] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1539.656764] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1540.436619] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1587.444465] env[63279]: DEBUG 
oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1587.444880] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=63279) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11066}} [ 1594.442028] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1596.438608] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1596.440189] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1596.440342] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Starting heal instance info cache {{(pid=63279) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10447}} [ 1596.440463] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Rebuilding the list of instances to heal {{(pid=63279) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10451}} [ 1596.943989] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Didn't find any instances for network info cache update. 
{{(pid=63279) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10533}} [ 1596.944243] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1597.441348] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1598.441062] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager.update_available_resource {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1598.944405] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1598.944709] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1598.944851] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1598.945015] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=63279) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1598.945947] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e21525c-86f2-44e4-8b78-771c54f3a598 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1598.954300] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa786e98-2339-48fd-a148-6211c91e77a3 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1598.968067] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66d2b449-72a7-4feb-b820-1cc7bf6d9aef {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1598.974167] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88728844-298e-44eb-af35-6774346f813a {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1599.003071] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] 
Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181304MB free_disk=171GB free_vcpus=48 pci_devices=None {{(pid=63279) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1599.003212] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1599.003405] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1600.020940] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=63279) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1600.021240] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=63279) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1600.036044] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-216d76de-52ac-4041-8ecb-0f3b85cb4606 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1600.044068] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66fbbc31-9c3d-4c8c-9901-fecd9dd49d27 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1600.081807] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a8094f5-ecdb-4ae9-806d-1f5d542e6a0a {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1600.090545] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c44dec8-6045-481f-af1d-d31fbf092ea9 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1600.103986] env[63279]: DEBUG nova.compute.provider_tree [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Inventory has not changed in ProviderTree for provider: 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1600.607289] env[63279]: DEBUG nova.scheduler.client.report [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Inventory has not changed for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 
'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1600.608575] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=63279) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1600.608765] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.605s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1601.609447] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1601.609769] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1649.441582] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1649.441964] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=63279) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11066}} [ 1656.443832] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1656.444241] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1657.442828] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1657.443033] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Starting heal instance info cache {{(pid=63279) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10447}} [ 1657.443125] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Rebuilding the list of instances to heal {{(pid=63279) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10451}} [ 1657.945960] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Didn't find any instances for network info cache update. 
{{(pid=63279) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10533}} [ 1658.940025] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1659.441471] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1659.441925] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager.update_available_resource {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1659.945019] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1659.945274] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1659.945453] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1659.945651] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=63279) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1659.946571] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7534592-a87b-46bc-9be7-67ff05be411a {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1659.955210] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c338c8d4-5cb2-449e-ad53-7beab004475a {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1659.969455] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a762850f-d611-4113-b311-0a4f85e214c6 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1659.975653] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e7b1ba3-501d-4cff-b5f2-1770c2f3960d {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1660.003492] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 
None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181283MB free_disk=171GB free_vcpus=48 pci_devices=None {{(pid=63279) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1660.003634] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1660.003829] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1661.022439] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=63279) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1661.022751] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=63279) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1661.035888] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3320866c-c61c-42c8-b190-12ae030b14dc {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1661.044643] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60b79d0b-60aa-4b25-a0b2-c87fffa4d645 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1661.073811] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff49fc6e-7db9-4ad6-a63b-3c7bb6ad358e {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1661.081186] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d933a1b1-6ca9-4974-84f6-1dedd3b3ddc9 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1661.094092] env[63279]: DEBUG nova.compute.provider_tree [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Inventory has not changed in ProviderTree for provider: 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1661.597588] env[63279]: DEBUG nova.scheduler.client.report [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Inventory has not changed for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 
'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1661.598879] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=63279) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1661.599075] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.595s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1662.598629] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1662.598976] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1665.436380] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1709.441544] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1709.441544] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=63279) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11066}} [ 1716.443859] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1717.441621] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1717.441804] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Starting heal instance info cache {{(pid=63279) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10447}} [ 1717.441919] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Rebuilding the list of instances to heal {{(pid=63279) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10451}} [ 1717.945022] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Didn't find any instances for network info cache update. 
{{(pid=63279) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10533}} [ 1717.945394] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1719.441613] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1720.437024] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1721.441860] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1721.442227] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager.update_available_resource {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1721.945376] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1721.945640] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1721.945850] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1721.946021] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=63279) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1721.946915] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d907ae1a-9982-4bce-b558-25e95d95687d {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1721.955389] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40ce496a-8253-4f5c-bcfb-0f03b5e76b53 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1721.969289] env[63279]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0ed684c-856d-4f78-8075-fe1c6bda6fc4 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1721.975482] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-719bcfc5-79fa-4529-b779-b5a3fde65654 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1722.003081] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181294MB free_disk=171GB free_vcpus=48 pci_devices=None {{(pid=63279) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1722.003225] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1722.003470] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1723.021439] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=63279) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1723.021687] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=63279) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1723.034812] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c2fcd9c-8943-4b85-85ac-78ab6be4fa1e {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1723.042123] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed5fd16d-c449-41b3-8349-4eefa2fbb9a3 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1723.071468] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4b6d571-e3da-4bb4-ad44-9c55abb32af8 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1723.078218] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2b05dc8-9c58-436f-98fc-e1b8c73edcfa {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1723.090503] env[63279]: DEBUG nova.compute.provider_tree [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Inventory has not changed in ProviderTree for provider: 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) update_inventory 
/opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1723.593855] env[63279]: DEBUG nova.scheduler.client.report [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Inventory has not changed for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1723.595203] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=63279) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1723.595384] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.592s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1724.595099] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1771.443758] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1771.444308] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=63279) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11066}} [ 1777.444613] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1778.441572] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1779.440772] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1779.441122] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Starting heal instance info cache {{(pid=63279) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10447}} [ 1779.441122] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Rebuilding the list of instances to heal {{(pid=63279) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10451}} [ 1779.944540] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Didn't find any instances for network info cache update. {{(pid=63279) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10533}} [ 1779.944925] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1780.940605] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1782.442228] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager.update_available_resource {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1782.944913] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1782.945157] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1782.945425] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=63279) 
inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1782.945527] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=63279) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1782.946717] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-224b1a11-5e9f-49ce-b2de-6c9849d5697b {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1782.954949] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f33f458-3698-4410-af79-d8cce6ab620b {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1782.976721] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4a0f53e-29d9-40e3-9117-f99953d398ca {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1782.985903] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b24d477e-82cf-4803-9a06-1bbeca96ae92 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1783.019749] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181293MB free_disk=171GB free_vcpus=48 pci_devices=None {{(pid=63279) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1783.019749] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1783.019749] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1784.049378] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=63279) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1784.050246] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=63279) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1784.066119] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c41cbfb-67a7-49fc-9f00-42ecaa4c85cf {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1784.072646] env[63279]: DEBUG 
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-934ec120-a717-4ee8-8cbd-2244e519144d {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1784.105803] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-142612ff-b9bc-441b-911d-8b7453d4b77f {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1784.115703] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f30f03b6-bb62-4bce-af75-a769ad9aee11 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1784.131709] env[63279]: DEBUG nova.compute.provider_tree [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Inventory has not changed in ProviderTree for provider: 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1784.636542] env[63279]: DEBUG nova.scheduler.client.report [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Inventory has not changed for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1784.638557] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=63279) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1784.639596] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.619s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1784.639883] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1784.640104] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Cleaning up deleted instances {{(pid=63279) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11747}} [ 1785.146740] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] There are 0 instances to clean {{(pid=63279) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11756}} [ 1786.146586] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1786.146846] env[63279]: DEBUG oslo_service.periodic_task [None 
req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1789.436586] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1790.444714] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1790.444999] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Cleaning up deleted instances with incomplete migration {{(pid=63279) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11785}} [ 1790.946743] env[63279]: DEBUG oslo_concurrency.lockutils [None req-3f0594b7-45ab-4426-9a9f-6d257821bf50 tempest-ServerDiagnosticsNegativeTest-1669012955 tempest-ServerDiagnosticsNegativeTest-1669012955-project-member] Acquiring lock "4063d5e0-1144-40fa-8ed8-efda16730617" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1790.946743] env[63279]: DEBUG oslo_concurrency.lockutils [None req-3f0594b7-45ab-4426-9a9f-6d257821bf50 tempest-ServerDiagnosticsNegativeTest-1669012955 tempest-ServerDiagnosticsNegativeTest-1669012955-project-member] Lock "4063d5e0-1144-40fa-8ed8-efda16730617" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1790.973762] env[63279]: DEBUG oslo_concurrency.lockutils [None req-9e79427e-efa2-427f-9c45-14942fa607b4 tempest-ServersAdminNegativeTestJSON-968304097 tempest-ServersAdminNegativeTestJSON-968304097-project-member] Acquiring lock "0224e4ea-c13c-4abd-9626-6038c0bbe4e9" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1790.973993] env[63279]: DEBUG oslo_concurrency.lockutils [None req-9e79427e-efa2-427f-9c45-14942fa607b4 tempest-ServersAdminNegativeTestJSON-968304097 tempest-ServersAdminNegativeTestJSON-968304097-project-member] Lock "0224e4ea-c13c-4abd-9626-6038c0bbe4e9" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1791.415466] env[63279]: DEBUG oslo_concurrency.lockutils [None req-e4b5d649-5bea-45f1-99e3-0ca20d156863 tempest-ServerDiagnosticsTest-1267197074 tempest-ServerDiagnosticsTest-1267197074-project-member] Acquiring lock "ac1d0e8f-446a-4a6d-a916-08f52426396d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1791.415466] env[63279]: DEBUG 
oslo_concurrency.lockutils [None req-e4b5d649-5bea-45f1-99e3-0ca20d156863 tempest-ServerDiagnosticsTest-1267197074 tempest-ServerDiagnosticsTest-1267197074-project-member] Lock "ac1d0e8f-446a-4a6d-a916-08f52426396d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1791.452698] env[63279]: DEBUG nova.compute.manager [None req-3f0594b7-45ab-4426-9a9f-6d257821bf50 tempest-ServerDiagnosticsNegativeTest-1669012955 tempest-ServerDiagnosticsNegativeTest-1669012955-project-member] [instance: 4063d5e0-1144-40fa-8ed8-efda16730617] Starting instance... {{(pid=63279) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1791.478995] env[63279]: DEBUG nova.compute.manager [None req-9e79427e-efa2-427f-9c45-14942fa607b4 tempest-ServersAdminNegativeTestJSON-968304097 tempest-ServersAdminNegativeTestJSON-968304097-project-member] [instance: 0224e4ea-c13c-4abd-9626-6038c0bbe4e9] Starting instance... {{(pid=63279) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1791.921752] env[63279]: DEBUG nova.compute.manager [None req-e4b5d649-5bea-45f1-99e3-0ca20d156863 tempest-ServerDiagnosticsTest-1267197074 tempest-ServerDiagnosticsTest-1267197074-project-member] [instance: ac1d0e8f-446a-4a6d-a916-08f52426396d] Starting instance... {{(pid=63279) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1792.004079] env[63279]: DEBUG oslo_concurrency.lockutils [None req-3f0594b7-45ab-4426-9a9f-6d257821bf50 tempest-ServerDiagnosticsNegativeTest-1669012955 tempest-ServerDiagnosticsNegativeTest-1669012955-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1792.004449] env[63279]: DEBUG oslo_concurrency.lockutils [None req-3f0594b7-45ab-4426-9a9f-6d257821bf50 tempest-ServerDiagnosticsNegativeTest-1669012955 tempest-ServerDiagnosticsNegativeTest-1669012955-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1792.006995] env[63279]: INFO nova.compute.claims [None req-3f0594b7-45ab-4426-9a9f-6d257821bf50 tempest-ServerDiagnosticsNegativeTest-1669012955 tempest-ServerDiagnosticsNegativeTest-1669012955-project-member] [instance: 4063d5e0-1144-40fa-8ed8-efda16730617] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1792.013037] env[63279]: DEBUG oslo_concurrency.lockutils [None req-9e79427e-efa2-427f-9c45-14942fa607b4 tempest-ServersAdminNegativeTestJSON-968304097 tempest-ServersAdminNegativeTestJSON-968304097-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1792.442432] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1792.453438] env[63279]: DEBUG oslo_concurrency.lockutils [None 
req-e4b5d649-5bea-45f1-99e3-0ca20d156863 tempest-ServerDiagnosticsTest-1267197074 tempest-ServerDiagnosticsTest-1267197074-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1792.537061] env[63279]: DEBUG oslo_concurrency.lockutils [None req-a59d5ce1-88f2-4b01-b5f9-87a8413baff4 tempest-TenantUsagesTestJSON-1157774739 tempest-TenantUsagesTestJSON-1157774739-project-member] Acquiring lock "8c712d0d-55c2-4a14-b759-9441594211e1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1792.537795] env[63279]: DEBUG oslo_concurrency.lockutils [None req-a59d5ce1-88f2-4b01-b5f9-87a8413baff4 tempest-TenantUsagesTestJSON-1157774739 tempest-TenantUsagesTestJSON-1157774739-project-member] Lock "8c712d0d-55c2-4a14-b759-9441594211e1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1792.623023] env[63279]: DEBUG oslo_concurrency.lockutils [None req-6931e23b-1e81-4eab-aebf-1b452f1ef607 tempest-DeleteServersAdminTestJSON-778262859 tempest-DeleteServersAdminTestJSON-778262859-project-member] Acquiring lock "a301d225-684d-4f88-bc9b-7e02b8115b9d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1792.623261] env[63279]: DEBUG oslo_concurrency.lockutils [None req-6931e23b-1e81-4eab-aebf-1b452f1ef607 tempest-DeleteServersAdminTestJSON-778262859 tempest-DeleteServersAdminTestJSON-778262859-project-member] Lock "a301d225-684d-4f88-bc9b-7e02b8115b9d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1793.046196] env[63279]: DEBUG nova.compute.manager [None req-a59d5ce1-88f2-4b01-b5f9-87a8413baff4 tempest-TenantUsagesTestJSON-1157774739 tempest-TenantUsagesTestJSON-1157774739-project-member] [instance: 8c712d0d-55c2-4a14-b759-9441594211e1] Starting instance... {{(pid=63279) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1793.126040] env[63279]: DEBUG nova.compute.manager [None req-6931e23b-1e81-4eab-aebf-1b452f1ef607 tempest-DeleteServersAdminTestJSON-778262859 tempest-DeleteServersAdminTestJSON-778262859-project-member] [instance: a301d225-684d-4f88-bc9b-7e02b8115b9d] Starting instance... 
{{(pid=63279) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1793.237145] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a65ec6d-59a4-4468-a270-c8bdaf008d41 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1793.250117] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e937dd3-7081-4551-9cfe-84e4df00a3eb {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1793.290807] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d77abacb-2de1-4f6b-9d60-5c3cee2a59be {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1793.303703] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e208c553-4576-4db0-b546-3ce93ef4cc78 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1793.321450] env[63279]: DEBUG nova.compute.provider_tree [None req-3f0594b7-45ab-4426-9a9f-6d257821bf50 tempest-ServerDiagnosticsNegativeTest-1669012955 tempest-ServerDiagnosticsNegativeTest-1669012955-project-member] Inventory has not changed in ProviderTree for provider: 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1793.522749] env[63279]: DEBUG oslo_concurrency.lockutils [None req-8c276986-9767-495b-b2d0-91b2bfe4ca4e tempest-ServerPasswordTestJSON-1558733645 tempest-ServerPasswordTestJSON-1558733645-project-member] Acquiring lock "21c2bba1-5482-496a-9e2a-f123a94ed48a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1793.522990] env[63279]: DEBUG oslo_concurrency.lockutils [None req-8c276986-9767-495b-b2d0-91b2bfe4ca4e tempest-ServerPasswordTestJSON-1558733645 tempest-ServerPasswordTestJSON-1558733645-project-member] Lock "21c2bba1-5482-496a-9e2a-f123a94ed48a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1793.582700] env[63279]: DEBUG oslo_concurrency.lockutils [None req-a59d5ce1-88f2-4b01-b5f9-87a8413baff4 tempest-TenantUsagesTestJSON-1157774739 tempest-TenantUsagesTestJSON-1157774739-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1793.648333] env[63279]: DEBUG oslo_concurrency.lockutils [None req-6931e23b-1e81-4eab-aebf-1b452f1ef607 tempest-DeleteServersAdminTestJSON-778262859 tempest-DeleteServersAdminTestJSON-778262859-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1793.662064] env[63279]: DEBUG oslo_concurrency.lockutils [None req-96a3c836-4e66-45c5-95bd-3126c6132246 tempest-ServerExternalEventsTest-2060906853 
tempest-ServerExternalEventsTest-2060906853-project-member] Acquiring lock "24bce28c-fc43-4f17-9800-4d980f6729bc" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1793.662318] env[63279]: DEBUG oslo_concurrency.lockutils [None req-96a3c836-4e66-45c5-95bd-3126c6132246 tempest-ServerExternalEventsTest-2060906853 tempest-ServerExternalEventsTest-2060906853-project-member] Lock "24bce28c-fc43-4f17-9800-4d980f6729bc" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1793.826175] env[63279]: DEBUG nova.scheduler.client.report [None req-3f0594b7-45ab-4426-9a9f-6d257821bf50 tempest-ServerDiagnosticsNegativeTest-1669012955 tempest-ServerDiagnosticsNegativeTest-1669012955-project-member] Inventory has not changed for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1794.027566] env[63279]: DEBUG nova.compute.manager [None req-8c276986-9767-495b-b2d0-91b2bfe4ca4e tempest-ServerPasswordTestJSON-1558733645 tempest-ServerPasswordTestJSON-1558733645-project-member] [instance: 21c2bba1-5482-496a-9e2a-f123a94ed48a] Starting instance... {{(pid=63279) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1794.164159] env[63279]: DEBUG nova.compute.manager [None req-96a3c836-4e66-45c5-95bd-3126c6132246 tempest-ServerExternalEventsTest-2060906853 tempest-ServerExternalEventsTest-2060906853-project-member] [instance: 24bce28c-fc43-4f17-9800-4d980f6729bc] Starting instance... {{(pid=63279) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1794.338217] env[63279]: DEBUG oslo_concurrency.lockutils [None req-3f0594b7-45ab-4426-9a9f-6d257821bf50 tempest-ServerDiagnosticsNegativeTest-1669012955 tempest-ServerDiagnosticsNegativeTest-1669012955-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.334s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1794.338864] env[63279]: DEBUG nova.compute.manager [None req-3f0594b7-45ab-4426-9a9f-6d257821bf50 tempest-ServerDiagnosticsNegativeTest-1669012955 tempest-ServerDiagnosticsNegativeTest-1669012955-project-member] [instance: 4063d5e0-1144-40fa-8ed8-efda16730617] Start building networks asynchronously for instance. 
{{(pid=63279) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1794.342331] env[63279]: DEBUG oslo_concurrency.lockutils [None req-9e79427e-efa2-427f-9c45-14942fa607b4 tempest-ServersAdminNegativeTestJSON-968304097 tempest-ServersAdminNegativeTestJSON-968304097-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 2.329s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1794.344057] env[63279]: INFO nova.compute.claims [None req-9e79427e-efa2-427f-9c45-14942fa607b4 tempest-ServersAdminNegativeTestJSON-968304097 tempest-ServersAdminNegativeTestJSON-968304097-project-member] [instance: 0224e4ea-c13c-4abd-9626-6038c0bbe4e9] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1794.567540] env[63279]: DEBUG oslo_concurrency.lockutils [None req-8c276986-9767-495b-b2d0-91b2bfe4ca4e tempest-ServerPasswordTestJSON-1558733645 tempest-ServerPasswordTestJSON-1558733645-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1794.696050] env[63279]: DEBUG oslo_concurrency.lockutils [None req-96a3c836-4e66-45c5-95bd-3126c6132246 tempest-ServerExternalEventsTest-2060906853 tempest-ServerExternalEventsTest-2060906853-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1794.789895] env[63279]: DEBUG oslo_concurrency.lockutils [None req-c56e5cd3-c5d3-497a-94cf-fa4a1928fd50 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Acquiring lock "6388f912-ae70-4e8f-b8e4-ceb02e0f8a51" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1794.790222] env[63279]: DEBUG oslo_concurrency.lockutils [None req-c56e5cd3-c5d3-497a-94cf-fa4a1928fd50 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Lock "6388f912-ae70-4e8f-b8e4-ceb02e0f8a51" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1794.850546] env[63279]: DEBUG nova.compute.utils [None req-3f0594b7-45ab-4426-9a9f-6d257821bf50 tempest-ServerDiagnosticsNegativeTest-1669012955 tempest-ServerDiagnosticsNegativeTest-1669012955-project-member] Using /dev/sd instead of None {{(pid=63279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1794.852118] env[63279]: DEBUG nova.compute.manager [None req-3f0594b7-45ab-4426-9a9f-6d257821bf50 tempest-ServerDiagnosticsNegativeTest-1669012955 tempest-ServerDiagnosticsNegativeTest-1669012955-project-member] [instance: 4063d5e0-1144-40fa-8ed8-efda16730617] Allocating IP information in the background. 
{{(pid=63279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1794.852849] env[63279]: DEBUG nova.network.neutron [None req-3f0594b7-45ab-4426-9a9f-6d257821bf50 tempest-ServerDiagnosticsNegativeTest-1669012955 tempest-ServerDiagnosticsNegativeTest-1669012955-project-member] [instance: 4063d5e0-1144-40fa-8ed8-efda16730617] allocate_for_instance() {{(pid=63279) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1794.957275] env[63279]: DEBUG oslo_concurrency.lockutils [None req-aeaa6e99-ad33-4acc-9a18-53a9725a9078 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Acquiring lock "5656c853-ac83-47be-83c4-979a9e87ab91" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1794.957500] env[63279]: DEBUG oslo_concurrency.lockutils [None req-aeaa6e99-ad33-4acc-9a18-53a9725a9078 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Lock "5656c853-ac83-47be-83c4-979a9e87ab91" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1795.136198] env[63279]: DEBUG nova.policy [None req-3f0594b7-45ab-4426-9a9f-6d257821bf50 tempest-ServerDiagnosticsNegativeTest-1669012955 tempest-ServerDiagnosticsNegativeTest-1669012955-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '50f39c336a3c4bba87d48a6443fa88aa', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'cf8513bb88ed4de7b1f1e502e33bb85e', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63279) authorize /opt/stack/nova/nova/policy.py:192}} [ 1795.298340] env[63279]: DEBUG nova.compute.manager [None req-c56e5cd3-c5d3-497a-94cf-fa4a1928fd50 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] [instance: 6388f912-ae70-4e8f-b8e4-ceb02e0f8a51] Starting instance... {{(pid=63279) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1795.358600] env[63279]: DEBUG nova.compute.manager [None req-3f0594b7-45ab-4426-9a9f-6d257821bf50 tempest-ServerDiagnosticsNegativeTest-1669012955 tempest-ServerDiagnosticsNegativeTest-1669012955-project-member] [instance: 4063d5e0-1144-40fa-8ed8-efda16730617] Start building block device mappings for instance. {{(pid=63279) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1795.462766] env[63279]: DEBUG nova.compute.manager [None req-aeaa6e99-ad33-4acc-9a18-53a9725a9078 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] [instance: 5656c853-ac83-47be-83c4-979a9e87ab91] Starting instance... 
{{(pid=63279) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1795.542335] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95d703e6-29e0-4957-bc73-69faa3e4b196 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1795.551391] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07413971-c84b-4c11-87db-045f7c158f52 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1795.585520] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0adf6ada-37f1-4b23-95e7-aa936773a349 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1795.594841] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ee62ee0-cf5c-4190-bd4b-6f12dd830476 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1795.611254] env[63279]: DEBUG nova.compute.provider_tree [None req-9e79427e-efa2-427f-9c45-14942fa607b4 tempest-ServersAdminNegativeTestJSON-968304097 tempest-ServersAdminNegativeTestJSON-968304097-project-member] Inventory has not changed in ProviderTree for provider: 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1795.827437] env[63279]: DEBUG oslo_concurrency.lockutils [None req-c56e5cd3-c5d3-497a-94cf-fa4a1928fd50 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1795.987903] env[63279]: DEBUG oslo_concurrency.lockutils [None req-aeaa6e99-ad33-4acc-9a18-53a9725a9078 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1796.118385] env[63279]: DEBUG nova.scheduler.client.report [None req-9e79427e-efa2-427f-9c45-14942fa607b4 tempest-ServersAdminNegativeTestJSON-968304097 tempest-ServersAdminNegativeTestJSON-968304097-project-member] Inventory has not changed for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1796.182759] env[63279]: DEBUG nova.network.neutron [None req-3f0594b7-45ab-4426-9a9f-6d257821bf50 tempest-ServerDiagnosticsNegativeTest-1669012955 tempest-ServerDiagnosticsNegativeTest-1669012955-project-member] [instance: 4063d5e0-1144-40fa-8ed8-efda16730617] Successfully created port: 82cbc025-4eb9-496c-902d-6b8b3f82ff45 {{(pid=63279) _create_port_minimal 
/opt/stack/nova/nova/network/neutron.py:548}} [ 1796.369753] env[63279]: DEBUG nova.compute.manager [None req-3f0594b7-45ab-4426-9a9f-6d257821bf50 tempest-ServerDiagnosticsNegativeTest-1669012955 tempest-ServerDiagnosticsNegativeTest-1669012955-project-member] [instance: 4063d5e0-1144-40fa-8ed8-efda16730617] Start spawning the instance on the hypervisor. {{(pid=63279) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1796.400120] env[63279]: DEBUG nova.virt.hardware [None req-3f0594b7-45ab-4426-9a9f-6d257821bf50 tempest-ServerDiagnosticsNegativeTest-1669012955 tempest-ServerDiagnosticsNegativeTest-1669012955-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-13T17:30:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-13T17:30:01Z,direct_url=,disk_format='vmdk',id=30887889-e45b-4f67-8b3c-16216e594a90,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a935e86eee0a4c38adfe0367d2097a61',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-13T17:30:02Z,virtual_size=,visibility=), allow threads: False {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1796.400372] env[63279]: DEBUG nova.virt.hardware [None req-3f0594b7-45ab-4426-9a9f-6d257821bf50 tempest-ServerDiagnosticsNegativeTest-1669012955 tempest-ServerDiagnosticsNegativeTest-1669012955-project-member] Flavor limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1796.400527] env[63279]: DEBUG nova.virt.hardware [None req-3f0594b7-45ab-4426-9a9f-6d257821bf50 tempest-ServerDiagnosticsNegativeTest-1669012955 tempest-ServerDiagnosticsNegativeTest-1669012955-project-member] Image limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1796.400705] env[63279]: DEBUG nova.virt.hardware [None req-3f0594b7-45ab-4426-9a9f-6d257821bf50 tempest-ServerDiagnosticsNegativeTest-1669012955 tempest-ServerDiagnosticsNegativeTest-1669012955-project-member] Flavor pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1796.400847] env[63279]: DEBUG nova.virt.hardware [None req-3f0594b7-45ab-4426-9a9f-6d257821bf50 tempest-ServerDiagnosticsNegativeTest-1669012955 tempest-ServerDiagnosticsNegativeTest-1669012955-project-member] Image pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1796.400993] env[63279]: DEBUG nova.virt.hardware [None req-3f0594b7-45ab-4426-9a9f-6d257821bf50 tempest-ServerDiagnosticsNegativeTest-1669012955 tempest-ServerDiagnosticsNegativeTest-1669012955-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1796.401449] env[63279]: DEBUG nova.virt.hardware [None req-3f0594b7-45ab-4426-9a9f-6d257821bf50 tempest-ServerDiagnosticsNegativeTest-1669012955 tempest-ServerDiagnosticsNegativeTest-1669012955-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) 
{{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1796.401618] env[63279]: DEBUG nova.virt.hardware [None req-3f0594b7-45ab-4426-9a9f-6d257821bf50 tempest-ServerDiagnosticsNegativeTest-1669012955 tempest-ServerDiagnosticsNegativeTest-1669012955-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1796.401795] env[63279]: DEBUG nova.virt.hardware [None req-3f0594b7-45ab-4426-9a9f-6d257821bf50 tempest-ServerDiagnosticsNegativeTest-1669012955 tempest-ServerDiagnosticsNegativeTest-1669012955-project-member] Got 1 possible topologies {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1796.401970] env[63279]: DEBUG nova.virt.hardware [None req-3f0594b7-45ab-4426-9a9f-6d257821bf50 tempest-ServerDiagnosticsNegativeTest-1669012955 tempest-ServerDiagnosticsNegativeTest-1669012955-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1796.402159] env[63279]: DEBUG nova.virt.hardware [None req-3f0594b7-45ab-4426-9a9f-6d257821bf50 tempest-ServerDiagnosticsNegativeTest-1669012955 tempest-ServerDiagnosticsNegativeTest-1669012955-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1796.403356] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5f841c4-c568-4e46-b8a6-5469e4cb7a0e {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1796.413150] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22432e70-ba42-4a9d-a157-14dd7ba5bc03 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1796.432920] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f327246a-e15f-4db5-880d-93c4035a7d6b {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1796.625655] env[63279]: DEBUG oslo_concurrency.lockutils [None req-9e79427e-efa2-427f-9c45-14942fa607b4 tempest-ServersAdminNegativeTestJSON-968304097 tempest-ServersAdminNegativeTestJSON-968304097-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.283s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1796.627518] env[63279]: DEBUG nova.compute.manager [None req-9e79427e-efa2-427f-9c45-14942fa607b4 tempest-ServersAdminNegativeTestJSON-968304097 tempest-ServersAdminNegativeTestJSON-968304097-project-member] [instance: 0224e4ea-c13c-4abd-9626-6038c0bbe4e9] Start building networks asynchronously for instance. 
{{(pid=63279) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1796.631618] env[63279]: DEBUG oslo_concurrency.lockutils [None req-e4b5d649-5bea-45f1-99e3-0ca20d156863 tempest-ServerDiagnosticsTest-1267197074 tempest-ServerDiagnosticsTest-1267197074-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 4.177s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1796.633234] env[63279]: INFO nova.compute.claims [None req-e4b5d649-5bea-45f1-99e3-0ca20d156863 tempest-ServerDiagnosticsTest-1267197074 tempest-ServerDiagnosticsTest-1267197074-project-member] [instance: ac1d0e8f-446a-4a6d-a916-08f52426396d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1797.141253] env[63279]: DEBUG nova.compute.utils [None req-9e79427e-efa2-427f-9c45-14942fa607b4 tempest-ServersAdminNegativeTestJSON-968304097 tempest-ServersAdminNegativeTestJSON-968304097-project-member] Using /dev/sd instead of None {{(pid=63279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1797.142993] env[63279]: DEBUG nova.compute.manager [None req-9e79427e-efa2-427f-9c45-14942fa607b4 tempest-ServersAdminNegativeTestJSON-968304097 tempest-ServersAdminNegativeTestJSON-968304097-project-member] [instance: 0224e4ea-c13c-4abd-9626-6038c0bbe4e9] Allocating IP information in the background. {{(pid=63279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1797.142993] env[63279]: DEBUG nova.network.neutron [None req-9e79427e-efa2-427f-9c45-14942fa607b4 tempest-ServersAdminNegativeTestJSON-968304097 tempest-ServersAdminNegativeTestJSON-968304097-project-member] [instance: 0224e4ea-c13c-4abd-9626-6038c0bbe4e9] allocate_for_instance() {{(pid=63279) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1797.237154] env[63279]: DEBUG nova.policy [None req-9e79427e-efa2-427f-9c45-14942fa607b4 tempest-ServersAdminNegativeTestJSON-968304097 tempest-ServersAdminNegativeTestJSON-968304097-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '86b42263f9044626b1b9fdef5c745a5d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '12ff4ffe2e0748409eae11d95e843dd8', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63279) authorize /opt/stack/nova/nova/policy.py:192}} [ 1797.539743] env[63279]: DEBUG oslo_concurrency.lockutils [None req-e5b6a16c-1913-4848-8b42-b00fb552edc7 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Acquiring lock "4a9088e0-2992-4b18-8be9-6bc70633369b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1797.539743] env[63279]: DEBUG oslo_concurrency.lockutils [None req-e5b6a16c-1913-4848-8b42-b00fb552edc7 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Lock "4a9088e0-2992-4b18-8be9-6bc70633369b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=63279) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1797.652782] env[63279]: DEBUG nova.compute.manager [None req-9e79427e-efa2-427f-9c45-14942fa607b4 tempest-ServersAdminNegativeTestJSON-968304097 tempest-ServersAdminNegativeTestJSON-968304097-project-member] [instance: 0224e4ea-c13c-4abd-9626-6038c0bbe4e9] Start building block device mappings for instance. {{(pid=63279) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1797.901510] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ccfa4d3e-c424-412b-96e2-02633ec6d2da {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1797.910183] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f674a91-b0ca-4ed4-bce0-bd727c74814d {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1797.956229] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc2f24d3-1d00-4248-ac7a-4318bcbf21cc {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1797.964378] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60f49ef4-e66e-438b-9e4f-61fe0daa18a7 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1797.979337] env[63279]: DEBUG nova.compute.provider_tree [None req-e4b5d649-5bea-45f1-99e3-0ca20d156863 tempest-ServerDiagnosticsTest-1267197074 tempest-ServerDiagnosticsTest-1267197074-project-member] Inventory has not changed in ProviderTree for provider: 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1798.043098] env[63279]: DEBUG nova.compute.manager [None req-e5b6a16c-1913-4848-8b42-b00fb552edc7 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] [instance: 4a9088e0-2992-4b18-8be9-6bc70633369b] Starting instance... 
{{(pid=63279) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1798.298169] env[63279]: DEBUG nova.network.neutron [None req-9e79427e-efa2-427f-9c45-14942fa607b4 tempest-ServersAdminNegativeTestJSON-968304097 tempest-ServersAdminNegativeTestJSON-968304097-project-member] [instance: 0224e4ea-c13c-4abd-9626-6038c0bbe4e9] Successfully created port: ff3873e4-61b9-4b2d-80eb-2ace560fe858 {{(pid=63279) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1798.484233] env[63279]: DEBUG nova.scheduler.client.report [None req-e4b5d649-5bea-45f1-99e3-0ca20d156863 tempest-ServerDiagnosticsTest-1267197074 tempest-ServerDiagnosticsTest-1267197074-project-member] Inventory has not changed for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1798.578661] env[63279]: DEBUG oslo_concurrency.lockutils [None req-e5b6a16c-1913-4848-8b42-b00fb552edc7 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1798.671761] env[63279]: DEBUG nova.compute.manager [None req-9e79427e-efa2-427f-9c45-14942fa607b4 tempest-ServersAdminNegativeTestJSON-968304097 tempest-ServersAdminNegativeTestJSON-968304097-project-member] [instance: 0224e4ea-c13c-4abd-9626-6038c0bbe4e9] Start spawning the instance on the hypervisor. 
{{(pid=63279) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1798.703603] env[63279]: DEBUG nova.virt.hardware [None req-9e79427e-efa2-427f-9c45-14942fa607b4 tempest-ServersAdminNegativeTestJSON-968304097 tempest-ServersAdminNegativeTestJSON-968304097-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-13T17:30:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-13T17:30:01Z,direct_url=,disk_format='vmdk',id=30887889-e45b-4f67-8b3c-16216e594a90,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a935e86eee0a4c38adfe0367d2097a61',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-13T17:30:02Z,virtual_size=,visibility=), allow threads: False {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1798.703603] env[63279]: DEBUG nova.virt.hardware [None req-9e79427e-efa2-427f-9c45-14942fa607b4 tempest-ServersAdminNegativeTestJSON-968304097 tempest-ServersAdminNegativeTestJSON-968304097-project-member] Flavor limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1798.703603] env[63279]: DEBUG nova.virt.hardware [None req-9e79427e-efa2-427f-9c45-14942fa607b4 tempest-ServersAdminNegativeTestJSON-968304097 tempest-ServersAdminNegativeTestJSON-968304097-project-member] Image limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1798.704013] env[63279]: DEBUG nova.virt.hardware [None req-9e79427e-efa2-427f-9c45-14942fa607b4 tempest-ServersAdminNegativeTestJSON-968304097 tempest-ServersAdminNegativeTestJSON-968304097-project-member] Flavor pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1798.704393] env[63279]: DEBUG nova.virt.hardware [None req-9e79427e-efa2-427f-9c45-14942fa607b4 tempest-ServersAdminNegativeTestJSON-968304097 tempest-ServersAdminNegativeTestJSON-968304097-project-member] Image pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1798.704836] env[63279]: DEBUG nova.virt.hardware [None req-9e79427e-efa2-427f-9c45-14942fa607b4 tempest-ServersAdminNegativeTestJSON-968304097 tempest-ServersAdminNegativeTestJSON-968304097-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1798.705725] env[63279]: DEBUG nova.virt.hardware [None req-9e79427e-efa2-427f-9c45-14942fa607b4 tempest-ServersAdminNegativeTestJSON-968304097 tempest-ServersAdminNegativeTestJSON-968304097-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1798.705725] env[63279]: DEBUG nova.virt.hardware [None req-9e79427e-efa2-427f-9c45-14942fa607b4 tempest-ServersAdminNegativeTestJSON-968304097 tempest-ServersAdminNegativeTestJSON-968304097-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63279) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:475}} [ 1798.706039] env[63279]: DEBUG nova.virt.hardware [None req-9e79427e-efa2-427f-9c45-14942fa607b4 tempest-ServersAdminNegativeTestJSON-968304097 tempest-ServersAdminNegativeTestJSON-968304097-project-member] Got 1 possible topologies {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1798.709022] env[63279]: DEBUG nova.virt.hardware [None req-9e79427e-efa2-427f-9c45-14942fa607b4 tempest-ServersAdminNegativeTestJSON-968304097 tempest-ServersAdminNegativeTestJSON-968304097-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1798.709022] env[63279]: DEBUG nova.virt.hardware [None req-9e79427e-efa2-427f-9c45-14942fa607b4 tempest-ServersAdminNegativeTestJSON-968304097 tempest-ServersAdminNegativeTestJSON-968304097-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1798.709022] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3d6a10c-a921-47be-be9a-7c1ed53cdeb7 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1798.718019] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d3da281-f23c-4dce-9409-03a2b3b7c66d {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1798.994271] env[63279]: DEBUG oslo_concurrency.lockutils [None req-e4b5d649-5bea-45f1-99e3-0ca20d156863 tempest-ServerDiagnosticsTest-1267197074 tempest-ServerDiagnosticsTest-1267197074-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.364s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1798.995217] env[63279]: DEBUG nova.compute.manager [None req-e4b5d649-5bea-45f1-99e3-0ca20d156863 tempest-ServerDiagnosticsTest-1267197074 tempest-ServerDiagnosticsTest-1267197074-project-member] [instance: ac1d0e8f-446a-4a6d-a916-08f52426396d] Start building networks asynchronously for instance. 
{{(pid=63279) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1798.999537] env[63279]: DEBUG oslo_concurrency.lockutils [None req-a59d5ce1-88f2-4b01-b5f9-87a8413baff4 tempest-TenantUsagesTestJSON-1157774739 tempest-TenantUsagesTestJSON-1157774739-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 5.418s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1799.001411] env[63279]: INFO nova.compute.claims [None req-a59d5ce1-88f2-4b01-b5f9-87a8413baff4 tempest-TenantUsagesTestJSON-1157774739 tempest-TenantUsagesTestJSON-1157774739-project-member] [instance: 8c712d0d-55c2-4a14-b759-9441594211e1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1799.369745] env[63279]: DEBUG nova.network.neutron [None req-3f0594b7-45ab-4426-9a9f-6d257821bf50 tempest-ServerDiagnosticsNegativeTest-1669012955 tempest-ServerDiagnosticsNegativeTest-1669012955-project-member] [instance: 4063d5e0-1144-40fa-8ed8-efda16730617] Successfully updated port: 82cbc025-4eb9-496c-902d-6b8b3f82ff45 {{(pid=63279) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1799.506127] env[63279]: DEBUG nova.compute.utils [None req-e4b5d649-5bea-45f1-99e3-0ca20d156863 tempest-ServerDiagnosticsTest-1267197074 tempest-ServerDiagnosticsTest-1267197074-project-member] Using /dev/sd instead of None {{(pid=63279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1799.509293] env[63279]: DEBUG nova.compute.manager [None req-e4b5d649-5bea-45f1-99e3-0ca20d156863 tempest-ServerDiagnosticsTest-1267197074 tempest-ServerDiagnosticsTest-1267197074-project-member] [instance: ac1d0e8f-446a-4a6d-a916-08f52426396d] Allocating IP information in the background. 
{{(pid=63279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1799.509293] env[63279]: DEBUG nova.network.neutron [None req-e4b5d649-5bea-45f1-99e3-0ca20d156863 tempest-ServerDiagnosticsTest-1267197074 tempest-ServerDiagnosticsTest-1267197074-project-member] [instance: ac1d0e8f-446a-4a6d-a916-08f52426396d] allocate_for_instance() {{(pid=63279) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1799.683976] env[63279]: DEBUG nova.policy [None req-e4b5d649-5bea-45f1-99e3-0ca20d156863 tempest-ServerDiagnosticsTest-1267197074 tempest-ServerDiagnosticsTest-1267197074-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f7a26dfd910c42a08096e96a3af95526', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '72509b0561574a20a75dedf64d53d55d', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63279) authorize /opt/stack/nova/nova/policy.py:192}} [ 1799.804628] env[63279]: DEBUG oslo_concurrency.lockutils [None req-9be70fed-855a-424c-af8e-36597da29526 tempest-ImagesOneServerTestJSON-859902932 tempest-ImagesOneServerTestJSON-859902932-project-member] Acquiring lock "7cd673bb-3795-4c6f-a1e0-a8ea0da0a35f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1799.804846] env[63279]: DEBUG oslo_concurrency.lockutils [None req-9be70fed-855a-424c-af8e-36597da29526 tempest-ImagesOneServerTestJSON-859902932 tempest-ImagesOneServerTestJSON-859902932-project-member] Lock "7cd673bb-3795-4c6f-a1e0-a8ea0da0a35f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1799.873583] env[63279]: DEBUG oslo_concurrency.lockutils [None req-3f0594b7-45ab-4426-9a9f-6d257821bf50 tempest-ServerDiagnosticsNegativeTest-1669012955 tempest-ServerDiagnosticsNegativeTest-1669012955-project-member] Acquiring lock "refresh_cache-4063d5e0-1144-40fa-8ed8-efda16730617" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1799.873956] env[63279]: DEBUG oslo_concurrency.lockutils [None req-3f0594b7-45ab-4426-9a9f-6d257821bf50 tempest-ServerDiagnosticsNegativeTest-1669012955 tempest-ServerDiagnosticsNegativeTest-1669012955-project-member] Acquired lock "refresh_cache-4063d5e0-1144-40fa-8ed8-efda16730617" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1799.873956] env[63279]: DEBUG nova.network.neutron [None req-3f0594b7-45ab-4426-9a9f-6d257821bf50 tempest-ServerDiagnosticsNegativeTest-1669012955 tempest-ServerDiagnosticsNegativeTest-1669012955-project-member] [instance: 4063d5e0-1144-40fa-8ed8-efda16730617] Building network info cache for instance {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1800.013309] env[63279]: DEBUG nova.compute.manager [None req-e4b5d649-5bea-45f1-99e3-0ca20d156863 tempest-ServerDiagnosticsTest-1267197074 tempest-ServerDiagnosticsTest-1267197074-project-member] [instance: ac1d0e8f-446a-4a6d-a916-08f52426396d] 
Start building block device mappings for instance. {{(pid=63279) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1800.137540] env[63279]: DEBUG nova.compute.manager [req-4e99a55b-93ff-4384-9afd-6d50613a3c4e req-50370a15-2c68-4ea1-b1c9-587fae8273df service nova] [instance: 4063d5e0-1144-40fa-8ed8-efda16730617] Received event network-vif-plugged-82cbc025-4eb9-496c-902d-6b8b3f82ff45 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 1800.137673] env[63279]: DEBUG oslo_concurrency.lockutils [req-4e99a55b-93ff-4384-9afd-6d50613a3c4e req-50370a15-2c68-4ea1-b1c9-587fae8273df service nova] Acquiring lock "4063d5e0-1144-40fa-8ed8-efda16730617-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1800.137892] env[63279]: DEBUG oslo_concurrency.lockutils [req-4e99a55b-93ff-4384-9afd-6d50613a3c4e req-50370a15-2c68-4ea1-b1c9-587fae8273df service nova] Lock "4063d5e0-1144-40fa-8ed8-efda16730617-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1800.138141] env[63279]: DEBUG oslo_concurrency.lockutils [req-4e99a55b-93ff-4384-9afd-6d50613a3c4e req-50370a15-2c68-4ea1-b1c9-587fae8273df service nova] Lock "4063d5e0-1144-40fa-8ed8-efda16730617-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1800.138256] env[63279]: DEBUG nova.compute.manager [req-4e99a55b-93ff-4384-9afd-6d50613a3c4e req-50370a15-2c68-4ea1-b1c9-587fae8273df service nova] [instance: 4063d5e0-1144-40fa-8ed8-efda16730617] No waiting events found dispatching network-vif-plugged-82cbc025-4eb9-496c-902d-6b8b3f82ff45 {{(pid=63279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1800.138425] env[63279]: WARNING nova.compute.manager [req-4e99a55b-93ff-4384-9afd-6d50613a3c4e req-50370a15-2c68-4ea1-b1c9-587fae8273df service nova] [instance: 4063d5e0-1144-40fa-8ed8-efda16730617] Received unexpected event network-vif-plugged-82cbc025-4eb9-496c-902d-6b8b3f82ff45 for instance with vm_state building and task_state spawning. 
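The network-vif-plugged handling above follows a waiter/dispatch pattern: the compute side registers interest in an event for the instance, Neutron later delivers it through the external-event path, and an event that arrives before any waiter has been registered is logged as unexpected, exactly as in the WARNING record above. The sketch below is a minimal illustration of that pattern using plain threading primitives; it is not Nova's InstanceEvents/pop_instance_event code, and the names EventWaiter, prepare, dispatch and wait are hypothetical.

    # Minimal sketch of the "wait for network-vif-plugged" pattern visible in the
    # log above. Illustration only: it does not reproduce Nova's implementation,
    # and EventWaiter / prepare / dispatch / wait are made-up names.
    import threading


    class EventWaiter:
        def __init__(self):
            self._lock = threading.Lock()   # plays the role of the "<uuid>-events" lock
            self._waiters = {}              # event name -> threading.Event

        def prepare(self, name):
            """Register interest in an event before starting the operation."""
            ev = threading.Event()
            with self._lock:
                self._waiters[name] = ev
            return ev

        def dispatch(self, name):
            """Deliver an incoming event; warn if nobody is waiting for it."""
            with self._lock:
                ev = self._waiters.pop(name, None)
            if ev is None:
                print("WARNING: received unexpected event %s" % name)
                return
            ev.set()

        def wait(self, ev, timeout=300):
            """Block until the event arrives or the timeout expires."""
            if not ev.wait(timeout):
                raise TimeoutError("timed out waiting for event")


    # Usage: the build path prepares a waiter, the event callback dispatches it.
    waiter = EventWaiter()
    plugged = waiter.prepare("network-vif-plugged-82cbc025")
    waiter.dispatch("network-vif-plugged-82cbc025")   # e.g. driven by a Neutron notification
    waiter.wait(plugged)

If dispatch() runs before prepare() — the race shown in the WARNING above, where the port is plugged while the instance is still in vm_state building / task_state spawning — the event is dropped and the eventual waiter has to rely on a later event or a timeout.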
[ 1800.298734] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d00efe34-957e-457f-b15e-0b6c6ff30dce {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1800.314536] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-896eb008-a719-4bcf-8230-42c148d45042 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1800.352560] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ebd2bc91-cc1c-4713-8752-96060534fb1e {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1800.360826] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62ca4627-9369-40c7-bb7c-5219c1107e96 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1800.382136] env[63279]: DEBUG nova.compute.provider_tree [None req-a59d5ce1-88f2-4b01-b5f9-87a8413baff4 tempest-TenantUsagesTestJSON-1157774739 tempest-TenantUsagesTestJSON-1157774739-project-member] Inventory has not changed in ProviderTree for provider: 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1800.483731] env[63279]: DEBUG nova.network.neutron [None req-3f0594b7-45ab-4426-9a9f-6d257821bf50 tempest-ServerDiagnosticsNegativeTest-1669012955 tempest-ServerDiagnosticsNegativeTest-1669012955-project-member] [instance: 4063d5e0-1144-40fa-8ed8-efda16730617] Instance cache missing network info. {{(pid=63279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1800.827566] env[63279]: DEBUG nova.network.neutron [None req-3f0594b7-45ab-4426-9a9f-6d257821bf50 tempest-ServerDiagnosticsNegativeTest-1669012955 tempest-ServerDiagnosticsNegativeTest-1669012955-project-member] [instance: 4063d5e0-1144-40fa-8ed8-efda16730617] Updating instance_info_cache with network_info: [{"id": "82cbc025-4eb9-496c-902d-6b8b3f82ff45", "address": "fa:16:3e:0a:97:05", "network": {"id": "1e875730-8e0e-4907-a6d2-776025ed7ab9", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.86", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "a935e86eee0a4c38adfe0367d2097a61", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "32028d02-abaa-4071-bc65-1460f5c772a8", "external-id": "nsx-vlan-transportzone-558", "segmentation_id": 558, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap82cbc025-4e", "ovs_interfaceid": "82cbc025-4eb9-496c-902d-6b8b3f82ff45", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1800.886087] env[63279]: DEBUG nova.scheduler.client.report [None req-a59d5ce1-88f2-4b01-b5f9-87a8413baff4 tempest-TenantUsagesTestJSON-1157774739 
tempest-TenantUsagesTestJSON-1157774739-project-member] Inventory has not changed for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1801.020279] env[63279]: DEBUG nova.network.neutron [None req-e4b5d649-5bea-45f1-99e3-0ca20d156863 tempest-ServerDiagnosticsTest-1267197074 tempest-ServerDiagnosticsTest-1267197074-project-member] [instance: ac1d0e8f-446a-4a6d-a916-08f52426396d] Successfully created port: c449bb4b-138e-412d-b39f-b2811a4ac7df {{(pid=63279) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1801.035190] env[63279]: DEBUG nova.compute.manager [None req-e4b5d649-5bea-45f1-99e3-0ca20d156863 tempest-ServerDiagnosticsTest-1267197074 tempest-ServerDiagnosticsTest-1267197074-project-member] [instance: ac1d0e8f-446a-4a6d-a916-08f52426396d] Start spawning the instance on the hypervisor. {{(pid=63279) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1801.078176] env[63279]: DEBUG nova.virt.hardware [None req-e4b5d649-5bea-45f1-99e3-0ca20d156863 tempest-ServerDiagnosticsTest-1267197074 tempest-ServerDiagnosticsTest-1267197074-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-13T17:30:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-13T17:30:01Z,direct_url=,disk_format='vmdk',id=30887889-e45b-4f67-8b3c-16216e594a90,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a935e86eee0a4c38adfe0367d2097a61',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-13T17:30:02Z,virtual_size=,visibility=), allow threads: False {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1801.078689] env[63279]: DEBUG nova.virt.hardware [None req-e4b5d649-5bea-45f1-99e3-0ca20d156863 tempest-ServerDiagnosticsTest-1267197074 tempest-ServerDiagnosticsTest-1267197074-project-member] Flavor limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1801.082018] env[63279]: DEBUG nova.virt.hardware [None req-e4b5d649-5bea-45f1-99e3-0ca20d156863 tempest-ServerDiagnosticsTest-1267197074 tempest-ServerDiagnosticsTest-1267197074-project-member] Image limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1801.082018] env[63279]: DEBUG nova.virt.hardware [None req-e4b5d649-5bea-45f1-99e3-0ca20d156863 tempest-ServerDiagnosticsTest-1267197074 tempest-ServerDiagnosticsTest-1267197074-project-member] Flavor pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1801.082018] env[63279]: DEBUG nova.virt.hardware [None req-e4b5d649-5bea-45f1-99e3-0ca20d156863 tempest-ServerDiagnosticsTest-1267197074 
tempest-ServerDiagnosticsTest-1267197074-project-member] Image pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1801.082018] env[63279]: DEBUG nova.virt.hardware [None req-e4b5d649-5bea-45f1-99e3-0ca20d156863 tempest-ServerDiagnosticsTest-1267197074 tempest-ServerDiagnosticsTest-1267197074-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1801.082018] env[63279]: DEBUG nova.virt.hardware [None req-e4b5d649-5bea-45f1-99e3-0ca20d156863 tempest-ServerDiagnosticsTest-1267197074 tempest-ServerDiagnosticsTest-1267197074-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1801.082394] env[63279]: DEBUG nova.virt.hardware [None req-e4b5d649-5bea-45f1-99e3-0ca20d156863 tempest-ServerDiagnosticsTest-1267197074 tempest-ServerDiagnosticsTest-1267197074-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1801.082394] env[63279]: DEBUG nova.virt.hardware [None req-e4b5d649-5bea-45f1-99e3-0ca20d156863 tempest-ServerDiagnosticsTest-1267197074 tempest-ServerDiagnosticsTest-1267197074-project-member] Got 1 possible topologies {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1801.082394] env[63279]: DEBUG nova.virt.hardware [None req-e4b5d649-5bea-45f1-99e3-0ca20d156863 tempest-ServerDiagnosticsTest-1267197074 tempest-ServerDiagnosticsTest-1267197074-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1801.082394] env[63279]: DEBUG nova.virt.hardware [None req-e4b5d649-5bea-45f1-99e3-0ca20d156863 tempest-ServerDiagnosticsTest-1267197074 tempest-ServerDiagnosticsTest-1267197074-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1801.082394] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa651ad8-16c0-4433-8ab9-bccf5d61406f {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1801.094151] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d352ae42-626d-4c97-b991-4f5c402744be {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1801.334890] env[63279]: DEBUG oslo_concurrency.lockutils [None req-3f0594b7-45ab-4426-9a9f-6d257821bf50 tempest-ServerDiagnosticsNegativeTest-1669012955 tempest-ServerDiagnosticsNegativeTest-1669012955-project-member] Releasing lock "refresh_cache-4063d5e0-1144-40fa-8ed8-efda16730617" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1801.335280] env[63279]: DEBUG nova.compute.manager [None req-3f0594b7-45ab-4426-9a9f-6d257821bf50 tempest-ServerDiagnosticsNegativeTest-1669012955 tempest-ServerDiagnosticsNegativeTest-1669012955-project-member] [instance: 4063d5e0-1144-40fa-8ed8-efda16730617] Instance network_info: |[{"id": 
"82cbc025-4eb9-496c-902d-6b8b3f82ff45", "address": "fa:16:3e:0a:97:05", "network": {"id": "1e875730-8e0e-4907-a6d2-776025ed7ab9", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.86", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "a935e86eee0a4c38adfe0367d2097a61", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "32028d02-abaa-4071-bc65-1460f5c772a8", "external-id": "nsx-vlan-transportzone-558", "segmentation_id": 558, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap82cbc025-4e", "ovs_interfaceid": "82cbc025-4eb9-496c-902d-6b8b3f82ff45", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1801.336503] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-3f0594b7-45ab-4426-9a9f-6d257821bf50 tempest-ServerDiagnosticsNegativeTest-1669012955 tempest-ServerDiagnosticsNegativeTest-1669012955-project-member] [instance: 4063d5e0-1144-40fa-8ed8-efda16730617] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:0a:97:05', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '32028d02-abaa-4071-bc65-1460f5c772a8', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '82cbc025-4eb9-496c-902d-6b8b3f82ff45', 'vif_model': 'vmxnet3'}] {{(pid=63279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1801.348588] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-3f0594b7-45ab-4426-9a9f-6d257821bf50 tempest-ServerDiagnosticsNegativeTest-1669012955 tempest-ServerDiagnosticsNegativeTest-1669012955-project-member] Creating folder: OpenStack. Parent ref: group-v4. {{(pid=63279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1801.348919] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-063a2d3d-d509-4829-bc07-beaf3c3bad94 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1801.363434] env[63279]: INFO nova.virt.vmwareapi.vm_util [None req-3f0594b7-45ab-4426-9a9f-6d257821bf50 tempest-ServerDiagnosticsNegativeTest-1669012955 tempest-ServerDiagnosticsNegativeTest-1669012955-project-member] Created folder: OpenStack in parent group-v4. [ 1801.363633] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-3f0594b7-45ab-4426-9a9f-6d257821bf50 tempest-ServerDiagnosticsNegativeTest-1669012955 tempest-ServerDiagnosticsNegativeTest-1669012955-project-member] Creating folder: Project (cf8513bb88ed4de7b1f1e502e33bb85e). Parent ref: group-v427491. 
{{(pid=63279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1801.363890] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-39bc1d04-8c9c-4c0f-8da9-e320553e7cff {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1801.379356] env[63279]: INFO nova.virt.vmwareapi.vm_util [None req-3f0594b7-45ab-4426-9a9f-6d257821bf50 tempest-ServerDiagnosticsNegativeTest-1669012955 tempest-ServerDiagnosticsNegativeTest-1669012955-project-member] Created folder: Project (cf8513bb88ed4de7b1f1e502e33bb85e) in parent group-v427491. [ 1801.379356] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-3f0594b7-45ab-4426-9a9f-6d257821bf50 tempest-ServerDiagnosticsNegativeTest-1669012955 tempest-ServerDiagnosticsNegativeTest-1669012955-project-member] Creating folder: Instances. Parent ref: group-v427492. {{(pid=63279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1801.379356] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7712be19-0f46-4080-a09e-3c69024ce87a {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1801.388721] env[63279]: INFO nova.virt.vmwareapi.vm_util [None req-3f0594b7-45ab-4426-9a9f-6d257821bf50 tempest-ServerDiagnosticsNegativeTest-1669012955 tempest-ServerDiagnosticsNegativeTest-1669012955-project-member] Created folder: Instances in parent group-v427492. [ 1801.388998] env[63279]: DEBUG oslo.service.loopingcall [None req-3f0594b7-45ab-4426-9a9f-6d257821bf50 tempest-ServerDiagnosticsNegativeTest-1669012955 tempest-ServerDiagnosticsNegativeTest-1669012955-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1801.388998] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4063d5e0-1144-40fa-8ed8-efda16730617] Creating VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1801.389245] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-fb1c1a1a-4c9a-466d-b846-0582f2fbb8b8 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1801.406802] env[63279]: DEBUG oslo_concurrency.lockutils [None req-a59d5ce1-88f2-4b01-b5f9-87a8413baff4 tempest-TenantUsagesTestJSON-1157774739 tempest-TenantUsagesTestJSON-1157774739-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.407s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1801.407459] env[63279]: DEBUG nova.compute.manager [None req-a59d5ce1-88f2-4b01-b5f9-87a8413baff4 tempest-TenantUsagesTestJSON-1157774739 tempest-TenantUsagesTestJSON-1157774739-project-member] [instance: 8c712d0d-55c2-4a14-b759-9441594211e1] Start building networks asynchronously for instance. 
{{(pid=63279) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1801.410340] env[63279]: DEBUG oslo_concurrency.lockutils [None req-6931e23b-1e81-4eab-aebf-1b452f1ef607 tempest-DeleteServersAdminTestJSON-778262859 tempest-DeleteServersAdminTestJSON-778262859-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 7.762s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1801.412294] env[63279]: INFO nova.compute.claims [None req-6931e23b-1e81-4eab-aebf-1b452f1ef607 tempest-DeleteServersAdminTestJSON-778262859 tempest-DeleteServersAdminTestJSON-778262859-project-member] [instance: a301d225-684d-4f88-bc9b-7e02b8115b9d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1801.420213] env[63279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1801.420213] env[63279]: value = "task-2086668" [ 1801.420213] env[63279]: _type = "Task" [ 1801.420213] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1801.431719] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2086668, 'name': CreateVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1801.546386] env[63279]: DEBUG oslo_concurrency.lockutils [None req-11810f0f-6d8f-490b-a1f5-ddff9fda61b5 tempest-ServerDiagnosticsV248Test-703192613 tempest-ServerDiagnosticsV248Test-703192613-project-member] Acquiring lock "abd63285-ee3c-4546-b86d-6d4388765d94" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1801.546386] env[63279]: DEBUG oslo_concurrency.lockutils [None req-11810f0f-6d8f-490b-a1f5-ddff9fda61b5 tempest-ServerDiagnosticsV248Test-703192613 tempest-ServerDiagnosticsV248Test-703192613-project-member] Lock "abd63285-ee3c-4546-b86d-6d4388765d94" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1801.646826] env[63279]: DEBUG nova.network.neutron [None req-9e79427e-efa2-427f-9c45-14942fa607b4 tempest-ServersAdminNegativeTestJSON-968304097 tempest-ServersAdminNegativeTestJSON-968304097-project-member] [instance: 0224e4ea-c13c-4abd-9626-6038c0bbe4e9] Successfully updated port: ff3873e4-61b9-4b2d-80eb-2ace560fe858 {{(pid=63279) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1801.918059] env[63279]: DEBUG nova.compute.utils [None req-a59d5ce1-88f2-4b01-b5f9-87a8413baff4 tempest-TenantUsagesTestJSON-1157774739 tempest-TenantUsagesTestJSON-1157774739-project-member] Using /dev/sd instead of None {{(pid=63279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1801.923821] env[63279]: DEBUG nova.compute.manager [None req-a59d5ce1-88f2-4b01-b5f9-87a8413baff4 tempest-TenantUsagesTestJSON-1157774739 tempest-TenantUsagesTestJSON-1157774739-project-member] [instance: 8c712d0d-55c2-4a14-b759-9441594211e1] Allocating IP information in the background. 
{{(pid=63279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1801.923877] env[63279]: DEBUG nova.network.neutron [None req-a59d5ce1-88f2-4b01-b5f9-87a8413baff4 tempest-TenantUsagesTestJSON-1157774739 tempest-TenantUsagesTestJSON-1157774739-project-member] [instance: 8c712d0d-55c2-4a14-b759-9441594211e1] allocate_for_instance() {{(pid=63279) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1801.940602] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2086668, 'name': CreateVM_Task, 'duration_secs': 0.427124} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1801.940773] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4063d5e0-1144-40fa-8ed8-efda16730617] Created VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1801.969765] env[63279]: DEBUG oslo_vmware.service [None req-3f0594b7-45ab-4426-9a9f-6d257821bf50 tempest-ServerDiagnosticsNegativeTest-1669012955 tempest-ServerDiagnosticsNegativeTest-1669012955-project-member] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-efe8c135-598d-4f4d-8db2-4d1486e8f1e2 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1801.980141] env[63279]: DEBUG oslo_concurrency.lockutils [None req-3f0594b7-45ab-4426-9a9f-6d257821bf50 tempest-ServerDiagnosticsNegativeTest-1669012955 tempest-ServerDiagnosticsNegativeTest-1669012955-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1801.980287] env[63279]: DEBUG oslo_concurrency.lockutils [None req-3f0594b7-45ab-4426-9a9f-6d257821bf50 tempest-ServerDiagnosticsNegativeTest-1669012955 tempest-ServerDiagnosticsNegativeTest-1669012955-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1801.981650] env[63279]: DEBUG oslo_concurrency.lockutils [None req-3f0594b7-45ab-4426-9a9f-6d257821bf50 tempest-ServerDiagnosticsNegativeTest-1669012955 tempest-ServerDiagnosticsNegativeTest-1669012955-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1801.986017] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5a151a4b-8c73-4f6a-9560-eeb20112743e {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1801.993019] env[63279]: DEBUG oslo_vmware.api [None req-3f0594b7-45ab-4426-9a9f-6d257821bf50 tempest-ServerDiagnosticsNegativeTest-1669012955 tempest-ServerDiagnosticsNegativeTest-1669012955-project-member] Waiting for the task: (returnval){ [ 1801.993019] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52bd01cc-0638-03b4-22a9-70b82a8c3bbc" [ 1801.993019] env[63279]: _type = "Task" [ 1801.993019] env[63279]: } to complete. 
{{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1802.005110] env[63279]: DEBUG oslo_vmware.api [None req-3f0594b7-45ab-4426-9a9f-6d257821bf50 tempest-ServerDiagnosticsNegativeTest-1669012955 tempest-ServerDiagnosticsNegativeTest-1669012955-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52bd01cc-0638-03b4-22a9-70b82a8c3bbc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1802.151359] env[63279]: DEBUG oslo_concurrency.lockutils [None req-9e79427e-efa2-427f-9c45-14942fa607b4 tempest-ServersAdminNegativeTestJSON-968304097 tempest-ServersAdminNegativeTestJSON-968304097-project-member] Acquiring lock "refresh_cache-0224e4ea-c13c-4abd-9626-6038c0bbe4e9" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1802.151510] env[63279]: DEBUG oslo_concurrency.lockutils [None req-9e79427e-efa2-427f-9c45-14942fa607b4 tempest-ServersAdminNegativeTestJSON-968304097 tempest-ServersAdminNegativeTestJSON-968304097-project-member] Acquired lock "refresh_cache-0224e4ea-c13c-4abd-9626-6038c0bbe4e9" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1802.151674] env[63279]: DEBUG nova.network.neutron [None req-9e79427e-efa2-427f-9c45-14942fa607b4 tempest-ServersAdminNegativeTestJSON-968304097 tempest-ServersAdminNegativeTestJSON-968304097-project-member] [instance: 0224e4ea-c13c-4abd-9626-6038c0bbe4e9] Building network info cache for instance {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1802.351514] env[63279]: DEBUG nova.policy [None req-a59d5ce1-88f2-4b01-b5f9-87a8413baff4 tempest-TenantUsagesTestJSON-1157774739 tempest-TenantUsagesTestJSON-1157774739-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '75799adac2704308b3a33c27a51d7c44', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b0d0ea7b68be4d078398db9a0896410b', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63279) authorize /opt/stack/nova/nova/policy.py:192}} [ 1802.425215] env[63279]: DEBUG nova.compute.manager [None req-a59d5ce1-88f2-4b01-b5f9-87a8413baff4 tempest-TenantUsagesTestJSON-1157774739 tempest-TenantUsagesTestJSON-1157774739-project-member] [instance: 8c712d0d-55c2-4a14-b759-9441594211e1] Start building block device mappings for instance. 
{{(pid=63279) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1802.504579] env[63279]: DEBUG oslo_concurrency.lockutils [None req-3f0594b7-45ab-4426-9a9f-6d257821bf50 tempest-ServerDiagnosticsNegativeTest-1669012955 tempest-ServerDiagnosticsNegativeTest-1669012955-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1802.504848] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-3f0594b7-45ab-4426-9a9f-6d257821bf50 tempest-ServerDiagnosticsNegativeTest-1669012955 tempest-ServerDiagnosticsNegativeTest-1669012955-project-member] [instance: 4063d5e0-1144-40fa-8ed8-efda16730617] Processing image 30887889-e45b-4f67-8b3c-16216e594a90 {{(pid=63279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1802.504963] env[63279]: DEBUG oslo_concurrency.lockutils [None req-3f0594b7-45ab-4426-9a9f-6d257821bf50 tempest-ServerDiagnosticsNegativeTest-1669012955 tempest-ServerDiagnosticsNegativeTest-1669012955-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1802.505122] env[63279]: DEBUG oslo_concurrency.lockutils [None req-3f0594b7-45ab-4426-9a9f-6d257821bf50 tempest-ServerDiagnosticsNegativeTest-1669012955 tempest-ServerDiagnosticsNegativeTest-1669012955-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1802.505517] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-3f0594b7-45ab-4426-9a9f-6d257821bf50 tempest-ServerDiagnosticsNegativeTest-1669012955 tempest-ServerDiagnosticsNegativeTest-1669012955-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1802.505842] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-935389da-33b8-4854-99b7-0ad7916fb87c {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1802.523761] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-3f0594b7-45ab-4426-9a9f-6d257821bf50 tempest-ServerDiagnosticsNegativeTest-1669012955 tempest-ServerDiagnosticsNegativeTest-1669012955-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1802.523852] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-3f0594b7-45ab-4426-9a9f-6d257821bf50 tempest-ServerDiagnosticsNegativeTest-1669012955 tempest-ServerDiagnosticsNegativeTest-1669012955-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1802.525039] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9de0c215-af6e-4750-a6dc-e65b4ce49494 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1802.537921] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2d0381a8-ea14-495b-9d31-f0826bc49b4d {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1802.542810] env[63279]: DEBUG oslo_vmware.api [None req-3f0594b7-45ab-4426-9a9f-6d257821bf50 tempest-ServerDiagnosticsNegativeTest-1669012955 tempest-ServerDiagnosticsNegativeTest-1669012955-project-member] Waiting for the task: (returnval){ [ 1802.542810] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]5241ec1b-917f-1ad3-db8b-16a3e8a07ec7" [ 1802.542810] env[63279]: _type = "Task" [ 1802.542810] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1802.553514] env[63279]: DEBUG oslo_vmware.api [None req-3f0594b7-45ab-4426-9a9f-6d257821bf50 tempest-ServerDiagnosticsNegativeTest-1669012955 tempest-ServerDiagnosticsNegativeTest-1669012955-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]5241ec1b-917f-1ad3-db8b-16a3e8a07ec7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1802.643948] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7793cfe4-4478-4aae-817c-9e0b297563e3 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1802.654671] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1cbaa99c-e571-42de-bcab-deb8cad982b7 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1802.696569] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9cf7b298-68d5-4164-a040-879239c60673 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1802.705398] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ec34c82-7887-4c49-ac45-7d1a98c527ab {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1802.724778] env[63279]: DEBUG nova.compute.provider_tree [None req-6931e23b-1e81-4eab-aebf-1b452f1ef607 tempest-DeleteServersAdminTestJSON-778262859 tempest-DeleteServersAdminTestJSON-778262859-project-member] Inventory has not changed in ProviderTree for provider: 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1802.804299] env[63279]: DEBUG nova.network.neutron [None req-9e79427e-efa2-427f-9c45-14942fa607b4 tempest-ServersAdminNegativeTestJSON-968304097 tempest-ServersAdminNegativeTestJSON-968304097-project-member] [instance: 0224e4ea-c13c-4abd-9626-6038c0bbe4e9] Instance cache missing network info. 
{{(pid=63279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1802.915859] env[63279]: DEBUG nova.compute.manager [req-e9231d5f-e930-4b8e-9479-fa6db334ab77 req-ed586fb8-5700-43d2-a8ca-0eafea241673 service nova] [instance: 4063d5e0-1144-40fa-8ed8-efda16730617] Received event network-changed-82cbc025-4eb9-496c-902d-6b8b3f82ff45 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 1802.918089] env[63279]: DEBUG nova.compute.manager [req-e9231d5f-e930-4b8e-9479-fa6db334ab77 req-ed586fb8-5700-43d2-a8ca-0eafea241673 service nova] [instance: 4063d5e0-1144-40fa-8ed8-efda16730617] Refreshing instance network info cache due to event network-changed-82cbc025-4eb9-496c-902d-6b8b3f82ff45. {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11655}} [ 1802.918314] env[63279]: DEBUG oslo_concurrency.lockutils [req-e9231d5f-e930-4b8e-9479-fa6db334ab77 req-ed586fb8-5700-43d2-a8ca-0eafea241673 service nova] Acquiring lock "refresh_cache-4063d5e0-1144-40fa-8ed8-efda16730617" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1802.918429] env[63279]: DEBUG oslo_concurrency.lockutils [req-e9231d5f-e930-4b8e-9479-fa6db334ab77 req-ed586fb8-5700-43d2-a8ca-0eafea241673 service nova] Acquired lock "refresh_cache-4063d5e0-1144-40fa-8ed8-efda16730617" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1802.918556] env[63279]: DEBUG nova.network.neutron [req-e9231d5f-e930-4b8e-9479-fa6db334ab77 req-ed586fb8-5700-43d2-a8ca-0eafea241673 service nova] [instance: 4063d5e0-1144-40fa-8ed8-efda16730617] Refreshing network info cache for port 82cbc025-4eb9-496c-902d-6b8b3f82ff45 {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1803.055779] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-3f0594b7-45ab-4426-9a9f-6d257821bf50 tempest-ServerDiagnosticsNegativeTest-1669012955 tempest-ServerDiagnosticsNegativeTest-1669012955-project-member] [instance: 4063d5e0-1144-40fa-8ed8-efda16730617] Preparing fetch location {{(pid=63279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1803.056150] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-3f0594b7-45ab-4426-9a9f-6d257821bf50 tempest-ServerDiagnosticsNegativeTest-1669012955 tempest-ServerDiagnosticsNegativeTest-1669012955-project-member] Creating directory with path [datastore1] vmware_temp/7486b4af-f4c5-494d-bb5e-37c7b5cfed2d/30887889-e45b-4f67-8b3c-16216e594a90 {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1803.056989] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-abd6541a-429b-48a9-aee7-8ad4b5979c4b {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1803.081055] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-3f0594b7-45ab-4426-9a9f-6d257821bf50 tempest-ServerDiagnosticsNegativeTest-1669012955 tempest-ServerDiagnosticsNegativeTest-1669012955-project-member] Created directory with path [datastore1] vmware_temp/7486b4af-f4c5-494d-bb5e-37c7b5cfed2d/30887889-e45b-4f67-8b3c-16216e594a90 {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1803.081055] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-3f0594b7-45ab-4426-9a9f-6d257821bf50 tempest-ServerDiagnosticsNegativeTest-1669012955 
tempest-ServerDiagnosticsNegativeTest-1669012955-project-member] [instance: 4063d5e0-1144-40fa-8ed8-efda16730617] Fetch image to [datastore1] vmware_temp/7486b4af-f4c5-494d-bb5e-37c7b5cfed2d/30887889-e45b-4f67-8b3c-16216e594a90/tmp-sparse.vmdk {{(pid=63279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1803.081055] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-3f0594b7-45ab-4426-9a9f-6d257821bf50 tempest-ServerDiagnosticsNegativeTest-1669012955 tempest-ServerDiagnosticsNegativeTest-1669012955-project-member] [instance: 4063d5e0-1144-40fa-8ed8-efda16730617] Downloading image file data 30887889-e45b-4f67-8b3c-16216e594a90 to [datastore1] vmware_temp/7486b4af-f4c5-494d-bb5e-37c7b5cfed2d/30887889-e45b-4f67-8b3c-16216e594a90/tmp-sparse.vmdk on the data store datastore1 {{(pid=63279) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1803.081055] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec5dbd80-b7ac-4714-9828-02ab0e362c9b {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1803.089313] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0230837-c410-4a9a-852e-ccb8189576e4 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1803.098484] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f91c90e6-2ddb-418d-8738-2601157e76ee {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1803.129361] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-027cee62-42b7-4836-b228-f44f6541dcaa {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1803.136050] env[63279]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-d2b6c61d-386b-445e-a9d5-dc77d22cedd4 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1803.168690] env[63279]: DEBUG nova.virt.vmwareapi.images [None req-3f0594b7-45ab-4426-9a9f-6d257821bf50 tempest-ServerDiagnosticsNegativeTest-1669012955 tempest-ServerDiagnosticsNegativeTest-1669012955-project-member] [instance: 4063d5e0-1144-40fa-8ed8-efda16730617] Downloading image file data 30887889-e45b-4f67-8b3c-16216e594a90 to the data store datastore1 {{(pid=63279) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1803.227867] env[63279]: DEBUG nova.scheduler.client.report [None req-6931e23b-1e81-4eab-aebf-1b452f1ef607 tempest-DeleteServersAdminTestJSON-778262859 tempest-DeleteServersAdminTestJSON-778262859-project-member] Inventory has not changed for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1803.250839] env[63279]: DEBUG oslo_vmware.rw_handles [None 
req-3f0594b7-45ab-4426-9a9f-6d257821bf50 tempest-ServerDiagnosticsNegativeTest-1669012955 tempest-ServerDiagnosticsNegativeTest-1669012955-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/7486b4af-f4c5-494d-bb5e-37c7b5cfed2d/30887889-e45b-4f67-8b3c-16216e594a90/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=63279) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1803.446028] env[63279]: DEBUG nova.compute.manager [None req-a59d5ce1-88f2-4b01-b5f9-87a8413baff4 tempest-TenantUsagesTestJSON-1157774739 tempest-TenantUsagesTestJSON-1157774739-project-member] [instance: 8c712d0d-55c2-4a14-b759-9441594211e1] Start spawning the instance on the hypervisor. {{(pid=63279) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1803.480950] env[63279]: DEBUG nova.virt.hardware [None req-a59d5ce1-88f2-4b01-b5f9-87a8413baff4 tempest-TenantUsagesTestJSON-1157774739 tempest-TenantUsagesTestJSON-1157774739-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-13T17:30:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-13T17:30:01Z,direct_url=,disk_format='vmdk',id=30887889-e45b-4f67-8b3c-16216e594a90,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a935e86eee0a4c38adfe0367d2097a61',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-13T17:30:02Z,virtual_size=,visibility=), allow threads: False {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1803.481176] env[63279]: DEBUG nova.virt.hardware [None req-a59d5ce1-88f2-4b01-b5f9-87a8413baff4 tempest-TenantUsagesTestJSON-1157774739 tempest-TenantUsagesTestJSON-1157774739-project-member] Flavor limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1803.481310] env[63279]: DEBUG nova.virt.hardware [None req-a59d5ce1-88f2-4b01-b5f9-87a8413baff4 tempest-TenantUsagesTestJSON-1157774739 tempest-TenantUsagesTestJSON-1157774739-project-member] Image limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1803.481649] env[63279]: DEBUG nova.virt.hardware [None req-a59d5ce1-88f2-4b01-b5f9-87a8413baff4 tempest-TenantUsagesTestJSON-1157774739 tempest-TenantUsagesTestJSON-1157774739-project-member] Flavor pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1803.481649] env[63279]: DEBUG nova.virt.hardware [None req-a59d5ce1-88f2-4b01-b5f9-87a8413baff4 tempest-TenantUsagesTestJSON-1157774739 tempest-TenantUsagesTestJSON-1157774739-project-member] Image pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1803.481736] env[63279]: DEBUG nova.virt.hardware [None req-a59d5ce1-88f2-4b01-b5f9-87a8413baff4 tempest-TenantUsagesTestJSON-1157774739 tempest-TenantUsagesTestJSON-1157774739-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63279) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:434}} [ 1803.481977] env[63279]: DEBUG nova.virt.hardware [None req-a59d5ce1-88f2-4b01-b5f9-87a8413baff4 tempest-TenantUsagesTestJSON-1157774739 tempest-TenantUsagesTestJSON-1157774739-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1803.482609] env[63279]: DEBUG nova.virt.hardware [None req-a59d5ce1-88f2-4b01-b5f9-87a8413baff4 tempest-TenantUsagesTestJSON-1157774739 tempest-TenantUsagesTestJSON-1157774739-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1803.482862] env[63279]: DEBUG nova.virt.hardware [None req-a59d5ce1-88f2-4b01-b5f9-87a8413baff4 tempest-TenantUsagesTestJSON-1157774739 tempest-TenantUsagesTestJSON-1157774739-project-member] Got 1 possible topologies {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1803.483073] env[63279]: DEBUG nova.virt.hardware [None req-a59d5ce1-88f2-4b01-b5f9-87a8413baff4 tempest-TenantUsagesTestJSON-1157774739 tempest-TenantUsagesTestJSON-1157774739-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1803.483236] env[63279]: DEBUG nova.virt.hardware [None req-a59d5ce1-88f2-4b01-b5f9-87a8413baff4 tempest-TenantUsagesTestJSON-1157774739 tempest-TenantUsagesTestJSON-1157774739-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1803.484539] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57ddc394-9119-4d3b-947a-fa07b1533b89 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1803.498471] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-758a70bf-d904-4f51-a54a-9830457b8bd3 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1803.734430] env[63279]: DEBUG oslo_concurrency.lockutils [None req-6931e23b-1e81-4eab-aebf-1b452f1ef607 tempest-DeleteServersAdminTestJSON-778262859 tempest-DeleteServersAdminTestJSON-778262859-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.323s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1803.734430] env[63279]: DEBUG nova.compute.manager [None req-6931e23b-1e81-4eab-aebf-1b452f1ef607 tempest-DeleteServersAdminTestJSON-778262859 tempest-DeleteServersAdminTestJSON-778262859-project-member] [instance: a301d225-684d-4f88-bc9b-7e02b8115b9d] Start building networks asynchronously for instance. 
{{(pid=63279) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1803.739394] env[63279]: DEBUG oslo_concurrency.lockutils [None req-8c276986-9767-495b-b2d0-91b2bfe4ca4e tempest-ServerPasswordTestJSON-1558733645 tempest-ServerPasswordTestJSON-1558733645-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 9.172s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1803.741176] env[63279]: INFO nova.compute.claims [None req-8c276986-9767-495b-b2d0-91b2bfe4ca4e tempest-ServerPasswordTestJSON-1558733645 tempest-ServerPasswordTestJSON-1558733645-project-member] [instance: 21c2bba1-5482-496a-9e2a-f123a94ed48a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1803.783491] env[63279]: DEBUG nova.network.neutron [None req-9e79427e-efa2-427f-9c45-14942fa607b4 tempest-ServersAdminNegativeTestJSON-968304097 tempest-ServersAdminNegativeTestJSON-968304097-project-member] [instance: 0224e4ea-c13c-4abd-9626-6038c0bbe4e9] Updating instance_info_cache with network_info: [{"id": "ff3873e4-61b9-4b2d-80eb-2ace560fe858", "address": "fa:16:3e:4f:c4:9e", "network": {"id": "1e875730-8e0e-4907-a6d2-776025ed7ab9", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.82", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "a935e86eee0a4c38adfe0367d2097a61", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "32028d02-abaa-4071-bc65-1460f5c772a8", "external-id": "nsx-vlan-transportzone-558", "segmentation_id": 558, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapff3873e4-61", "ovs_interfaceid": "ff3873e4-61b9-4b2d-80eb-2ace560fe858", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1803.879508] env[63279]: DEBUG oslo_vmware.rw_handles [None req-3f0594b7-45ab-4426-9a9f-6d257821bf50 tempest-ServerDiagnosticsNegativeTest-1669012955 tempest-ServerDiagnosticsNegativeTest-1669012955-project-member] Completed reading data from the image iterator. {{(pid=63279) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1803.879750] env[63279]: DEBUG oslo_vmware.rw_handles [None req-3f0594b7-45ab-4426-9a9f-6d257821bf50 tempest-ServerDiagnosticsNegativeTest-1669012955 tempest-ServerDiagnosticsNegativeTest-1669012955-project-member] Closing write handle for https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/7486b4af-f4c5-494d-bb5e-37c7b5cfed2d/30887889-e45b-4f67-8b3c-16216e594a90/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=63279) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1804.048566] env[63279]: DEBUG nova.virt.vmwareapi.images [None req-3f0594b7-45ab-4426-9a9f-6d257821bf50 tempest-ServerDiagnosticsNegativeTest-1669012955 tempest-ServerDiagnosticsNegativeTest-1669012955-project-member] [instance: 4063d5e0-1144-40fa-8ed8-efda16730617] Downloaded image file data 30887889-e45b-4f67-8b3c-16216e594a90 to vmware_temp/7486b4af-f4c5-494d-bb5e-37c7b5cfed2d/30887889-e45b-4f67-8b3c-16216e594a90/tmp-sparse.vmdk on the data store datastore1 {{(pid=63279) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1804.050491] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-3f0594b7-45ab-4426-9a9f-6d257821bf50 tempest-ServerDiagnosticsNegativeTest-1669012955 tempest-ServerDiagnosticsNegativeTest-1669012955-project-member] [instance: 4063d5e0-1144-40fa-8ed8-efda16730617] Caching image {{(pid=63279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1804.050753] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-3f0594b7-45ab-4426-9a9f-6d257821bf50 tempest-ServerDiagnosticsNegativeTest-1669012955 tempest-ServerDiagnosticsNegativeTest-1669012955-project-member] Copying Virtual Disk [datastore1] vmware_temp/7486b4af-f4c5-494d-bb5e-37c7b5cfed2d/30887889-e45b-4f67-8b3c-16216e594a90/tmp-sparse.vmdk to [datastore1] vmware_temp/7486b4af-f4c5-494d-bb5e-37c7b5cfed2d/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1804.056383] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-fb0ac661-2932-4d40-8367-bce443240069 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1804.065193] env[63279]: DEBUG oslo_vmware.api [None req-3f0594b7-45ab-4426-9a9f-6d257821bf50 tempest-ServerDiagnosticsNegativeTest-1669012955 tempest-ServerDiagnosticsNegativeTest-1669012955-project-member] Waiting for the task: (returnval){ [ 1804.065193] env[63279]: value = "task-2086669" [ 1804.065193] env[63279]: _type = "Task" [ 1804.065193] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1804.077295] env[63279]: DEBUG oslo_vmware.api [None req-3f0594b7-45ab-4426-9a9f-6d257821bf50 tempest-ServerDiagnosticsNegativeTest-1669012955 tempest-ServerDiagnosticsNegativeTest-1669012955-project-member] Task: {'id': task-2086669, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1804.247104] env[63279]: DEBUG nova.compute.utils [None req-6931e23b-1e81-4eab-aebf-1b452f1ef607 tempest-DeleteServersAdminTestJSON-778262859 tempest-DeleteServersAdminTestJSON-778262859-project-member] Using /dev/sd instead of None {{(pid=63279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1804.252382] env[63279]: DEBUG nova.compute.manager [None req-6931e23b-1e81-4eab-aebf-1b452f1ef607 tempest-DeleteServersAdminTestJSON-778262859 tempest-DeleteServersAdminTestJSON-778262859-project-member] [instance: a301d225-684d-4f88-bc9b-7e02b8115b9d] Allocating IP information in the background. 
{{(pid=63279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1804.252568] env[63279]: DEBUG nova.network.neutron [None req-6931e23b-1e81-4eab-aebf-1b452f1ef607 tempest-DeleteServersAdminTestJSON-778262859 tempest-DeleteServersAdminTestJSON-778262859-project-member] [instance: a301d225-684d-4f88-bc9b-7e02b8115b9d] allocate_for_instance() {{(pid=63279) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1804.296376] env[63279]: DEBUG oslo_concurrency.lockutils [None req-9e79427e-efa2-427f-9c45-14942fa607b4 tempest-ServersAdminNegativeTestJSON-968304097 tempest-ServersAdminNegativeTestJSON-968304097-project-member] Releasing lock "refresh_cache-0224e4ea-c13c-4abd-9626-6038c0bbe4e9" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1804.296703] env[63279]: DEBUG nova.compute.manager [None req-9e79427e-efa2-427f-9c45-14942fa607b4 tempest-ServersAdminNegativeTestJSON-968304097 tempest-ServersAdminNegativeTestJSON-968304097-project-member] [instance: 0224e4ea-c13c-4abd-9626-6038c0bbe4e9] Instance network_info: |[{"id": "ff3873e4-61b9-4b2d-80eb-2ace560fe858", "address": "fa:16:3e:4f:c4:9e", "network": {"id": "1e875730-8e0e-4907-a6d2-776025ed7ab9", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.82", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "a935e86eee0a4c38adfe0367d2097a61", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "32028d02-abaa-4071-bc65-1460f5c772a8", "external-id": "nsx-vlan-transportzone-558", "segmentation_id": 558, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapff3873e4-61", "ovs_interfaceid": "ff3873e4-61b9-4b2d-80eb-2ace560fe858", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1804.297159] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-9e79427e-efa2-427f-9c45-14942fa607b4 tempest-ServersAdminNegativeTestJSON-968304097 tempest-ServersAdminNegativeTestJSON-968304097-project-member] [instance: 0224e4ea-c13c-4abd-9626-6038c0bbe4e9] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:4f:c4:9e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '32028d02-abaa-4071-bc65-1460f5c772a8', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ff3873e4-61b9-4b2d-80eb-2ace560fe858', 'vif_model': 'vmxnet3'}] {{(pid=63279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1804.308707] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-9e79427e-efa2-427f-9c45-14942fa607b4 tempest-ServersAdminNegativeTestJSON-968304097 tempest-ServersAdminNegativeTestJSON-968304097-project-member] Creating folder: Project (12ff4ffe2e0748409eae11d95e843dd8). Parent ref: group-v427491. 
{{(pid=63279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1804.312901] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-0b20957e-db51-48a3-a25b-099231aa565e {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1804.327177] env[63279]: INFO nova.virt.vmwareapi.vm_util [None req-9e79427e-efa2-427f-9c45-14942fa607b4 tempest-ServersAdminNegativeTestJSON-968304097 tempest-ServersAdminNegativeTestJSON-968304097-project-member] Created folder: Project (12ff4ffe2e0748409eae11d95e843dd8) in parent group-v427491. [ 1804.327321] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-9e79427e-efa2-427f-9c45-14942fa607b4 tempest-ServersAdminNegativeTestJSON-968304097 tempest-ServersAdminNegativeTestJSON-968304097-project-member] Creating folder: Instances. Parent ref: group-v427495. {{(pid=63279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1804.327502] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-15038de2-d35e-46d8-a87e-f1f175a8e744 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1804.337891] env[63279]: INFO nova.virt.vmwareapi.vm_util [None req-9e79427e-efa2-427f-9c45-14942fa607b4 tempest-ServersAdminNegativeTestJSON-968304097 tempest-ServersAdminNegativeTestJSON-968304097-project-member] Created folder: Instances in parent group-v427495. [ 1804.341312] env[63279]: DEBUG oslo.service.loopingcall [None req-9e79427e-efa2-427f-9c45-14942fa607b4 tempest-ServersAdminNegativeTestJSON-968304097 tempest-ServersAdminNegativeTestJSON-968304097-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1804.341312] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0224e4ea-c13c-4abd-9626-6038c0bbe4e9] Creating VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1804.341312] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8c3792e7-b0da-4770-9cfc-5438f21ed17b {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1804.365350] env[63279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1804.365350] env[63279]: value = "task-2086672" [ 1804.365350] env[63279]: _type = "Task" [ 1804.365350] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1804.379269] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2086672, 'name': CreateVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1804.576720] env[63279]: DEBUG oslo_vmware.api [None req-3f0594b7-45ab-4426-9a9f-6d257821bf50 tempest-ServerDiagnosticsNegativeTest-1669012955 tempest-ServerDiagnosticsNegativeTest-1669012955-project-member] Task: {'id': task-2086669, 'name': CopyVirtualDisk_Task} progress is 100%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1804.607354] env[63279]: DEBUG nova.network.neutron [req-e9231d5f-e930-4b8e-9479-fa6db334ab77 req-ed586fb8-5700-43d2-a8ca-0eafea241673 service nova] [instance: 4063d5e0-1144-40fa-8ed8-efda16730617] Updated VIF entry in instance network info cache for port 82cbc025-4eb9-496c-902d-6b8b3f82ff45. {{(pid=63279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1804.607665] env[63279]: DEBUG nova.network.neutron [req-e9231d5f-e930-4b8e-9479-fa6db334ab77 req-ed586fb8-5700-43d2-a8ca-0eafea241673 service nova] [instance: 4063d5e0-1144-40fa-8ed8-efda16730617] Updating instance_info_cache with network_info: [{"id": "82cbc025-4eb9-496c-902d-6b8b3f82ff45", "address": "fa:16:3e:0a:97:05", "network": {"id": "1e875730-8e0e-4907-a6d2-776025ed7ab9", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.86", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "a935e86eee0a4c38adfe0367d2097a61", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "32028d02-abaa-4071-bc65-1460f5c772a8", "external-id": "nsx-vlan-transportzone-558", "segmentation_id": 558, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap82cbc025-4e", "ovs_interfaceid": "82cbc025-4eb9-496c-902d-6b8b3f82ff45", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1804.656872] env[63279]: DEBUG nova.policy [None req-6931e23b-1e81-4eab-aebf-1b452f1ef607 tempest-DeleteServersAdminTestJSON-778262859 tempest-DeleteServersAdminTestJSON-778262859-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '87bda619a8044645b38e1468a7e5b760', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '930a294f790e4ba58a1295ce3240f10a', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63279) authorize /opt/stack/nova/nova/policy.py:192}} [ 1804.666310] env[63279]: DEBUG nova.network.neutron [None req-a59d5ce1-88f2-4b01-b5f9-87a8413baff4 tempest-TenantUsagesTestJSON-1157774739 tempest-TenantUsagesTestJSON-1157774739-project-member] [instance: 8c712d0d-55c2-4a14-b759-9441594211e1] Successfully created port: a1328084-8103-45cf-a3f8-b825bebe06c0 {{(pid=63279) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1804.758859] env[63279]: DEBUG nova.compute.manager [None req-6931e23b-1e81-4eab-aebf-1b452f1ef607 tempest-DeleteServersAdminTestJSON-778262859 tempest-DeleteServersAdminTestJSON-778262859-project-member] [instance: a301d225-684d-4f88-bc9b-7e02b8115b9d] Start building block device mappings for instance. 
{{(pid=63279) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1804.888705] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2086672, 'name': CreateVM_Task, 'duration_secs': 0.42075} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1804.888705] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0224e4ea-c13c-4abd-9626-6038c0bbe4e9] Created VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1804.888705] env[63279]: DEBUG oslo_concurrency.lockutils [None req-9e79427e-efa2-427f-9c45-14942fa607b4 tempest-ServersAdminNegativeTestJSON-968304097 tempest-ServersAdminNegativeTestJSON-968304097-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1804.888705] env[63279]: DEBUG oslo_concurrency.lockutils [None req-9e79427e-efa2-427f-9c45-14942fa607b4 tempest-ServersAdminNegativeTestJSON-968304097 tempest-ServersAdminNegativeTestJSON-968304097-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1804.888705] env[63279]: DEBUG oslo_concurrency.lockutils [None req-9e79427e-efa2-427f-9c45-14942fa607b4 tempest-ServersAdminNegativeTestJSON-968304097 tempest-ServersAdminNegativeTestJSON-968304097-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1804.889148] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9bec2e86-e42a-4491-b191-72f29cd9df7d {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1804.894706] env[63279]: DEBUG oslo_vmware.api [None req-9e79427e-efa2-427f-9c45-14942fa607b4 tempest-ServersAdminNegativeTestJSON-968304097 tempest-ServersAdminNegativeTestJSON-968304097-project-member] Waiting for the task: (returnval){ [ 1804.894706] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52697910-c070-8369-2bf0-d21393e786b0" [ 1804.894706] env[63279]: _type = "Task" [ 1804.894706] env[63279]: } to complete. 
{{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1804.911670] env[63279]: DEBUG oslo_concurrency.lockutils [None req-9e79427e-efa2-427f-9c45-14942fa607b4 tempest-ServersAdminNegativeTestJSON-968304097 tempest-ServersAdminNegativeTestJSON-968304097-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1804.911829] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-9e79427e-efa2-427f-9c45-14942fa607b4 tempest-ServersAdminNegativeTestJSON-968304097 tempest-ServersAdminNegativeTestJSON-968304097-project-member] [instance: 0224e4ea-c13c-4abd-9626-6038c0bbe4e9] Processing image 30887889-e45b-4f67-8b3c-16216e594a90 {{(pid=63279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1804.911941] env[63279]: DEBUG oslo_concurrency.lockutils [None req-9e79427e-efa2-427f-9c45-14942fa607b4 tempest-ServersAdminNegativeTestJSON-968304097 tempest-ServersAdminNegativeTestJSON-968304097-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1805.027749] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ce14185-7e6d-4d57-8c9b-bd9e020f3cd5 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1805.038361] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5a80f9e-b5e1-4e9c-b004-5acaa4941940 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1805.083717] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4144a87e-24f4-4f15-8cdd-02988eedec02 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1805.098673] env[63279]: DEBUG oslo_vmware.api [None req-3f0594b7-45ab-4426-9a9f-6d257821bf50 tempest-ServerDiagnosticsNegativeTest-1669012955 tempest-ServerDiagnosticsNegativeTest-1669012955-project-member] Task: {'id': task-2086669, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.696934} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1805.100416] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-3f0594b7-45ab-4426-9a9f-6d257821bf50 tempest-ServerDiagnosticsNegativeTest-1669012955 tempest-ServerDiagnosticsNegativeTest-1669012955-project-member] Copied Virtual Disk [datastore1] vmware_temp/7486b4af-f4c5-494d-bb5e-37c7b5cfed2d/30887889-e45b-4f67-8b3c-16216e594a90/tmp-sparse.vmdk to [datastore1] vmware_temp/7486b4af-f4c5-494d-bb5e-37c7b5cfed2d/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1805.100606] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-3f0594b7-45ab-4426-9a9f-6d257821bf50 tempest-ServerDiagnosticsNegativeTest-1669012955 tempest-ServerDiagnosticsNegativeTest-1669012955-project-member] Deleting the datastore file [datastore1] vmware_temp/7486b4af-f4c5-494d-bb5e-37c7b5cfed2d/30887889-e45b-4f67-8b3c-16216e594a90/tmp-sparse.vmdk {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1805.100861] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c46b3ba9-32cc-4cb4-95b1-de4c9db714b2 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1805.109504] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77e7422c-3abd-4a21-a712-e2e7581ac62b {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1805.113850] env[63279]: DEBUG oslo_vmware.api [None req-3f0594b7-45ab-4426-9a9f-6d257821bf50 tempest-ServerDiagnosticsNegativeTest-1669012955 tempest-ServerDiagnosticsNegativeTest-1669012955-project-member] Waiting for the task: (returnval){ [ 1805.113850] env[63279]: value = "task-2086673" [ 1805.113850] env[63279]: _type = "Task" [ 1805.113850] env[63279]: } to complete. 
{{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1805.114350] env[63279]: DEBUG oslo_concurrency.lockutils [req-e9231d5f-e930-4b8e-9479-fa6db334ab77 req-ed586fb8-5700-43d2-a8ca-0eafea241673 service nova] Releasing lock "refresh_cache-4063d5e0-1144-40fa-8ed8-efda16730617" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1805.114579] env[63279]: DEBUG nova.compute.manager [req-e9231d5f-e930-4b8e-9479-fa6db334ab77 req-ed586fb8-5700-43d2-a8ca-0eafea241673 service nova] [instance: 0224e4ea-c13c-4abd-9626-6038c0bbe4e9] Received event network-vif-plugged-ff3873e4-61b9-4b2d-80eb-2ace560fe858 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 1805.114772] env[63279]: DEBUG oslo_concurrency.lockutils [req-e9231d5f-e930-4b8e-9479-fa6db334ab77 req-ed586fb8-5700-43d2-a8ca-0eafea241673 service nova] Acquiring lock "0224e4ea-c13c-4abd-9626-6038c0bbe4e9-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1805.115116] env[63279]: DEBUG oslo_concurrency.lockutils [req-e9231d5f-e930-4b8e-9479-fa6db334ab77 req-ed586fb8-5700-43d2-a8ca-0eafea241673 service nova] Lock "0224e4ea-c13c-4abd-9626-6038c0bbe4e9-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1805.115377] env[63279]: DEBUG oslo_concurrency.lockutils [req-e9231d5f-e930-4b8e-9479-fa6db334ab77 req-ed586fb8-5700-43d2-a8ca-0eafea241673 service nova] Lock "0224e4ea-c13c-4abd-9626-6038c0bbe4e9-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1805.115978] env[63279]: DEBUG nova.compute.manager [req-e9231d5f-e930-4b8e-9479-fa6db334ab77 req-ed586fb8-5700-43d2-a8ca-0eafea241673 service nova] [instance: 0224e4ea-c13c-4abd-9626-6038c0bbe4e9] No waiting events found dispatching network-vif-plugged-ff3873e4-61b9-4b2d-80eb-2ace560fe858 {{(pid=63279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1805.115978] env[63279]: WARNING nova.compute.manager [req-e9231d5f-e930-4b8e-9479-fa6db334ab77 req-ed586fb8-5700-43d2-a8ca-0eafea241673 service nova] [instance: 0224e4ea-c13c-4abd-9626-6038c0bbe4e9] Received unexpected event network-vif-plugged-ff3873e4-61b9-4b2d-80eb-2ace560fe858 for instance with vm_state building and task_state spawning. [ 1805.115978] env[63279]: DEBUG nova.compute.manager [req-e9231d5f-e930-4b8e-9479-fa6db334ab77 req-ed586fb8-5700-43d2-a8ca-0eafea241673 service nova] [instance: 0224e4ea-c13c-4abd-9626-6038c0bbe4e9] Received event network-changed-ff3873e4-61b9-4b2d-80eb-2ace560fe858 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 1805.116159] env[63279]: DEBUG nova.compute.manager [req-e9231d5f-e930-4b8e-9479-fa6db334ab77 req-ed586fb8-5700-43d2-a8ca-0eafea241673 service nova] [instance: 0224e4ea-c13c-4abd-9626-6038c0bbe4e9] Refreshing instance network info cache due to event network-changed-ff3873e4-61b9-4b2d-80eb-2ace560fe858. 
{{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11655}} [ 1805.116251] env[63279]: DEBUG oslo_concurrency.lockutils [req-e9231d5f-e930-4b8e-9479-fa6db334ab77 req-ed586fb8-5700-43d2-a8ca-0eafea241673 service nova] Acquiring lock "refresh_cache-0224e4ea-c13c-4abd-9626-6038c0bbe4e9" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1805.116832] env[63279]: DEBUG oslo_concurrency.lockutils [req-e9231d5f-e930-4b8e-9479-fa6db334ab77 req-ed586fb8-5700-43d2-a8ca-0eafea241673 service nova] Acquired lock "refresh_cache-0224e4ea-c13c-4abd-9626-6038c0bbe4e9" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1805.116832] env[63279]: DEBUG nova.network.neutron [req-e9231d5f-e930-4b8e-9479-fa6db334ab77 req-ed586fb8-5700-43d2-a8ca-0eafea241673 service nova] [instance: 0224e4ea-c13c-4abd-9626-6038c0bbe4e9] Refreshing network info cache for port ff3873e4-61b9-4b2d-80eb-2ace560fe858 {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1805.130624] env[63279]: DEBUG nova.compute.provider_tree [None req-8c276986-9767-495b-b2d0-91b2bfe4ca4e tempest-ServerPasswordTestJSON-1558733645 tempest-ServerPasswordTestJSON-1558733645-project-member] Inventory has not changed in ProviderTree for provider: 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1805.137667] env[63279]: DEBUG oslo_vmware.api [None req-3f0594b7-45ab-4426-9a9f-6d257821bf50 tempest-ServerDiagnosticsNegativeTest-1669012955 tempest-ServerDiagnosticsNegativeTest-1669012955-project-member] Task: {'id': task-2086673, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1805.633055] env[63279]: DEBUG oslo_vmware.api [None req-3f0594b7-45ab-4426-9a9f-6d257821bf50 tempest-ServerDiagnosticsNegativeTest-1669012955 tempest-ServerDiagnosticsNegativeTest-1669012955-project-member] Task: {'id': task-2086673, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.033451} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1805.634661] env[63279]: DEBUG nova.scheduler.client.report [None req-8c276986-9767-495b-b2d0-91b2bfe4ca4e tempest-ServerPasswordTestJSON-1558733645 tempest-ServerPasswordTestJSON-1558733645-project-member] Inventory has not changed for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1805.639241] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-3f0594b7-45ab-4426-9a9f-6d257821bf50 tempest-ServerDiagnosticsNegativeTest-1669012955 tempest-ServerDiagnosticsNegativeTest-1669012955-project-member] Deleted the datastore file {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1805.639486] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-3f0594b7-45ab-4426-9a9f-6d257821bf50 tempest-ServerDiagnosticsNegativeTest-1669012955 tempest-ServerDiagnosticsNegativeTest-1669012955-project-member] Moving file from [datastore1] vmware_temp/7486b4af-f4c5-494d-bb5e-37c7b5cfed2d/30887889-e45b-4f67-8b3c-16216e594a90 to [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90. {{(pid=63279) file_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:334}} [ 1805.640178] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MoveDatastoreFile_Task with opID=oslo.vmware-ccb80b3f-1d95-4d50-85cb-3992f578113e {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1805.648702] env[63279]: DEBUG oslo_vmware.api [None req-3f0594b7-45ab-4426-9a9f-6d257821bf50 tempest-ServerDiagnosticsNegativeTest-1669012955 tempest-ServerDiagnosticsNegativeTest-1669012955-project-member] Waiting for the task: (returnval){ [ 1805.648702] env[63279]: value = "task-2086674" [ 1805.648702] env[63279]: _type = "Task" [ 1805.648702] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1805.659536] env[63279]: DEBUG oslo_vmware.api [None req-3f0594b7-45ab-4426-9a9f-6d257821bf50 tempest-ServerDiagnosticsNegativeTest-1669012955 tempest-ServerDiagnosticsNegativeTest-1669012955-project-member] Task: {'id': task-2086674, 'name': MoveDatastoreFile_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1805.701870] env[63279]: DEBUG nova.network.neutron [None req-e4b5d649-5bea-45f1-99e3-0ca20d156863 tempest-ServerDiagnosticsTest-1267197074 tempest-ServerDiagnosticsTest-1267197074-project-member] [instance: ac1d0e8f-446a-4a6d-a916-08f52426396d] Successfully updated port: c449bb4b-138e-412d-b39f-b2811a4ac7df {{(pid=63279) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1805.773975] env[63279]: DEBUG nova.compute.manager [None req-6931e23b-1e81-4eab-aebf-1b452f1ef607 tempest-DeleteServersAdminTestJSON-778262859 tempest-DeleteServersAdminTestJSON-778262859-project-member] [instance: a301d225-684d-4f88-bc9b-7e02b8115b9d] Start spawning the instance on the hypervisor. 
{{(pid=63279) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1805.805694] env[63279]: DEBUG nova.virt.hardware [None req-6931e23b-1e81-4eab-aebf-1b452f1ef607 tempest-DeleteServersAdminTestJSON-778262859 tempest-DeleteServersAdminTestJSON-778262859-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-13T17:30:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-13T17:30:01Z,direct_url=,disk_format='vmdk',id=30887889-e45b-4f67-8b3c-16216e594a90,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a935e86eee0a4c38adfe0367d2097a61',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-13T17:30:02Z,virtual_size=,visibility=), allow threads: False {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1805.805952] env[63279]: DEBUG nova.virt.hardware [None req-6931e23b-1e81-4eab-aebf-1b452f1ef607 tempest-DeleteServersAdminTestJSON-778262859 tempest-DeleteServersAdminTestJSON-778262859-project-member] Flavor limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1805.807332] env[63279]: DEBUG nova.virt.hardware [None req-6931e23b-1e81-4eab-aebf-1b452f1ef607 tempest-DeleteServersAdminTestJSON-778262859 tempest-DeleteServersAdminTestJSON-778262859-project-member] Image limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1805.807420] env[63279]: DEBUG nova.virt.hardware [None req-6931e23b-1e81-4eab-aebf-1b452f1ef607 tempest-DeleteServersAdminTestJSON-778262859 tempest-DeleteServersAdminTestJSON-778262859-project-member] Flavor pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1805.807516] env[63279]: DEBUG nova.virt.hardware [None req-6931e23b-1e81-4eab-aebf-1b452f1ef607 tempest-DeleteServersAdminTestJSON-778262859 tempest-DeleteServersAdminTestJSON-778262859-project-member] Image pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1805.807678] env[63279]: DEBUG nova.virt.hardware [None req-6931e23b-1e81-4eab-aebf-1b452f1ef607 tempest-DeleteServersAdminTestJSON-778262859 tempest-DeleteServersAdminTestJSON-778262859-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1805.807903] env[63279]: DEBUG nova.virt.hardware [None req-6931e23b-1e81-4eab-aebf-1b452f1ef607 tempest-DeleteServersAdminTestJSON-778262859 tempest-DeleteServersAdminTestJSON-778262859-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1805.808081] env[63279]: DEBUG nova.virt.hardware [None req-6931e23b-1e81-4eab-aebf-1b452f1ef607 tempest-DeleteServersAdminTestJSON-778262859 tempest-DeleteServersAdminTestJSON-778262859-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} 
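[annotation] The nova.virt.hardware entries around this point (flavor m1.nano, 1 vCPU, flavor/image limits 0:0:0, maximum 65536:65536:65536) show the driver enumerating CPU topologies and ending up with the single candidate VirtCPUTopology(cores=1,sockets=1,threads=1). The following is a minimal, hypothetical Python sketch of that kind of enumeration, included only to illustrate why a 1-vCPU flavor with no explicit limits collapses to 1x1x1; it is not the code in nova/virt/hardware.py, and all names and defaults here are illustrative assumptions.

    # Hypothetical sketch of the topology selection seen in the surrounding
    # log entries. NOT nova/virt/hardware.py; names/defaults are illustrative.
    from itertools import product
    from typing import List, NamedTuple

    class CpuTopology(NamedTuple):
        sockets: int
        cores: int
        threads: int

    def possible_topologies(vcpus: int,
                            max_sockets: int = 65536,
                            max_cores: int = 65536,
                            max_threads: int = 65536) -> List[CpuTopology]:
        """Return every (sockets, cores, threads) split whose product equals
        vcpus and which stays within the per-dimension limits."""
        found = []
        for s, c, t in product(range(1, vcpus + 1), repeat=3):
            if (s * c * t == vcpus
                    and s <= max_sockets
                    and c <= max_cores
                    and t <= max_threads):
                found.append(CpuTopology(s, c, t))
        return found

    # For a 1-vCPU flavor with the default 65536 limits, the only candidate is
    # sockets=1, cores=1, threads=1 -- consistent with the
    # "Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)]" entry.
    print(possible_topologies(1))   # [CpuTopology(sockets=1, cores=1, threads=1)]

[end annotation]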
[ 1805.808256] env[63279]: DEBUG nova.virt.hardware [None req-6931e23b-1e81-4eab-aebf-1b452f1ef607 tempest-DeleteServersAdminTestJSON-778262859 tempest-DeleteServersAdminTestJSON-778262859-project-member] Got 1 possible topologies {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1805.808421] env[63279]: DEBUG nova.virt.hardware [None req-6931e23b-1e81-4eab-aebf-1b452f1ef607 tempest-DeleteServersAdminTestJSON-778262859 tempest-DeleteServersAdminTestJSON-778262859-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1805.808595] env[63279]: DEBUG nova.virt.hardware [None req-6931e23b-1e81-4eab-aebf-1b452f1ef607 tempest-DeleteServersAdminTestJSON-778262859 tempest-DeleteServersAdminTestJSON-778262859-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1805.809677] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83e5f9e0-ea8b-412d-bf82-23664abae67d {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1805.822895] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e3fcd34-de40-4579-871d-f5232c408e1d {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1806.140846] env[63279]: DEBUG oslo_concurrency.lockutils [None req-8c276986-9767-495b-b2d0-91b2bfe4ca4e tempest-ServerPasswordTestJSON-1558733645 tempest-ServerPasswordTestJSON-1558733645-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.401s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1806.141563] env[63279]: DEBUG nova.compute.manager [None req-8c276986-9767-495b-b2d0-91b2bfe4ca4e tempest-ServerPasswordTestJSON-1558733645 tempest-ServerPasswordTestJSON-1558733645-project-member] [instance: 21c2bba1-5482-496a-9e2a-f123a94ed48a] Start building networks asynchronously for instance. {{(pid=63279) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1806.145120] env[63279]: DEBUG oslo_concurrency.lockutils [None req-96a3c836-4e66-45c5-95bd-3126c6132246 tempest-ServerExternalEventsTest-2060906853 tempest-ServerExternalEventsTest-2060906853-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 11.450s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1806.152318] env[63279]: INFO nova.compute.claims [None req-96a3c836-4e66-45c5-95bd-3126c6132246 tempest-ServerExternalEventsTest-2060906853 tempest-ServerExternalEventsTest-2060906853-project-member] [instance: 24bce28c-fc43-4f17-9800-4d980f6729bc] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1806.172275] env[63279]: DEBUG oslo_vmware.api [None req-3f0594b7-45ab-4426-9a9f-6d257821bf50 tempest-ServerDiagnosticsNegativeTest-1669012955 tempest-ServerDiagnosticsNegativeTest-1669012955-project-member] Task: {'id': task-2086674, 'name': MoveDatastoreFile_Task, 'duration_secs': 0.06158} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1806.173058] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-3f0594b7-45ab-4426-9a9f-6d257821bf50 tempest-ServerDiagnosticsNegativeTest-1669012955 tempest-ServerDiagnosticsNegativeTest-1669012955-project-member] File moved {{(pid=63279) file_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:346}} [ 1806.173058] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-3f0594b7-45ab-4426-9a9f-6d257821bf50 tempest-ServerDiagnosticsNegativeTest-1669012955 tempest-ServerDiagnosticsNegativeTest-1669012955-project-member] [instance: 4063d5e0-1144-40fa-8ed8-efda16730617] Cleaning up location [datastore1] vmware_temp/7486b4af-f4c5-494d-bb5e-37c7b5cfed2d {{(pid=63279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 1806.173908] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-3f0594b7-45ab-4426-9a9f-6d257821bf50 tempest-ServerDiagnosticsNegativeTest-1669012955 tempest-ServerDiagnosticsNegativeTest-1669012955-project-member] Deleting the datastore file [datastore1] vmware_temp/7486b4af-f4c5-494d-bb5e-37c7b5cfed2d {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1806.173908] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-29ce3498-a8ff-43b2-8deb-8dbf318be3f5 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1806.183021] env[63279]: DEBUG oslo_vmware.api [None req-3f0594b7-45ab-4426-9a9f-6d257821bf50 tempest-ServerDiagnosticsNegativeTest-1669012955 tempest-ServerDiagnosticsNegativeTest-1669012955-project-member] Waiting for the task: (returnval){ [ 1806.183021] env[63279]: value = "task-2086675" [ 1806.183021] env[63279]: _type = "Task" [ 1806.183021] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1806.198475] env[63279]: DEBUG oslo_vmware.api [None req-3f0594b7-45ab-4426-9a9f-6d257821bf50 tempest-ServerDiagnosticsNegativeTest-1669012955 tempest-ServerDiagnosticsNegativeTest-1669012955-project-member] Task: {'id': task-2086675, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1806.209410] env[63279]: DEBUG oslo_concurrency.lockutils [None req-e4b5d649-5bea-45f1-99e3-0ca20d156863 tempest-ServerDiagnosticsTest-1267197074 tempest-ServerDiagnosticsTest-1267197074-project-member] Acquiring lock "refresh_cache-ac1d0e8f-446a-4a6d-a916-08f52426396d" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1806.209410] env[63279]: DEBUG oslo_concurrency.lockutils [None req-e4b5d649-5bea-45f1-99e3-0ca20d156863 tempest-ServerDiagnosticsTest-1267197074 tempest-ServerDiagnosticsTest-1267197074-project-member] Acquired lock "refresh_cache-ac1d0e8f-446a-4a6d-a916-08f52426396d" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1806.209410] env[63279]: DEBUG nova.network.neutron [None req-e4b5d649-5bea-45f1-99e3-0ca20d156863 tempest-ServerDiagnosticsTest-1267197074 tempest-ServerDiagnosticsTest-1267197074-project-member] [instance: ac1d0e8f-446a-4a6d-a916-08f52426396d] Building network info cache for instance {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1806.595233] env[63279]: DEBUG nova.network.neutron [None req-6931e23b-1e81-4eab-aebf-1b452f1ef607 tempest-DeleteServersAdminTestJSON-778262859 tempest-DeleteServersAdminTestJSON-778262859-project-member] [instance: a301d225-684d-4f88-bc9b-7e02b8115b9d] Successfully created port: 05cc981a-fb6c-4723-8b86-d58715a9c5e9 {{(pid=63279) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1806.661547] env[63279]: DEBUG nova.compute.utils [None req-8c276986-9767-495b-b2d0-91b2bfe4ca4e tempest-ServerPasswordTestJSON-1558733645 tempest-ServerPasswordTestJSON-1558733645-project-member] Using /dev/sd instead of None {{(pid=63279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1806.664741] env[63279]: DEBUG nova.compute.manager [None req-8c276986-9767-495b-b2d0-91b2bfe4ca4e tempest-ServerPasswordTestJSON-1558733645 tempest-ServerPasswordTestJSON-1558733645-project-member] [instance: 21c2bba1-5482-496a-9e2a-f123a94ed48a] Allocating IP information in the background. {{(pid=63279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1806.664741] env[63279]: DEBUG nova.network.neutron [None req-8c276986-9767-495b-b2d0-91b2bfe4ca4e tempest-ServerPasswordTestJSON-1558733645 tempest-ServerPasswordTestJSON-1558733645-project-member] [instance: 21c2bba1-5482-496a-9e2a-f123a94ed48a] allocate_for_instance() {{(pid=63279) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1806.696257] env[63279]: DEBUG oslo_vmware.api [None req-3f0594b7-45ab-4426-9a9f-6d257821bf50 tempest-ServerDiagnosticsNegativeTest-1669012955 tempest-ServerDiagnosticsNegativeTest-1669012955-project-member] Task: {'id': task-2086675, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.034479} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1806.696538] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-3f0594b7-45ab-4426-9a9f-6d257821bf50 tempest-ServerDiagnosticsNegativeTest-1669012955 tempest-ServerDiagnosticsNegativeTest-1669012955-project-member] Deleted the datastore file {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1806.697893] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-133dbcee-581a-472c-bec7-27ef496b24b5 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1806.703968] env[63279]: DEBUG oslo_vmware.api [None req-3f0594b7-45ab-4426-9a9f-6d257821bf50 tempest-ServerDiagnosticsNegativeTest-1669012955 tempest-ServerDiagnosticsNegativeTest-1669012955-project-member] Waiting for the task: (returnval){ [ 1806.703968] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52855295-5e5e-4b09-1ff1-fb69745ca14d" [ 1806.703968] env[63279]: _type = "Task" [ 1806.703968] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1806.722514] env[63279]: DEBUG oslo_vmware.api [None req-3f0594b7-45ab-4426-9a9f-6d257821bf50 tempest-ServerDiagnosticsNegativeTest-1669012955 tempest-ServerDiagnosticsNegativeTest-1669012955-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52855295-5e5e-4b09-1ff1-fb69745ca14d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1806.827130] env[63279]: DEBUG nova.compute.manager [req-a7443732-5977-44dc-8051-3280f4d84b47 req-6c43f779-594d-4c33-9238-fdbe88584145 service nova] [instance: ac1d0e8f-446a-4a6d-a916-08f52426396d] Received event network-vif-plugged-c449bb4b-138e-412d-b39f-b2811a4ac7df {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 1806.827528] env[63279]: DEBUG oslo_concurrency.lockutils [req-a7443732-5977-44dc-8051-3280f4d84b47 req-6c43f779-594d-4c33-9238-fdbe88584145 service nova] Acquiring lock "ac1d0e8f-446a-4a6d-a916-08f52426396d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1806.827528] env[63279]: DEBUG oslo_concurrency.lockutils [req-a7443732-5977-44dc-8051-3280f4d84b47 req-6c43f779-594d-4c33-9238-fdbe88584145 service nova] Lock "ac1d0e8f-446a-4a6d-a916-08f52426396d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1806.827664] env[63279]: DEBUG oslo_concurrency.lockutils [req-a7443732-5977-44dc-8051-3280f4d84b47 req-6c43f779-594d-4c33-9238-fdbe88584145 service nova] Lock "ac1d0e8f-446a-4a6d-a916-08f52426396d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1806.827905] env[63279]: DEBUG nova.compute.manager [req-a7443732-5977-44dc-8051-3280f4d84b47 req-6c43f779-594d-4c33-9238-fdbe88584145 service nova] [instance: ac1d0e8f-446a-4a6d-a916-08f52426396d] No waiting events found dispatching 
network-vif-plugged-c449bb4b-138e-412d-b39f-b2811a4ac7df {{(pid=63279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1806.827994] env[63279]: WARNING nova.compute.manager [req-a7443732-5977-44dc-8051-3280f4d84b47 req-6c43f779-594d-4c33-9238-fdbe88584145 service nova] [instance: ac1d0e8f-446a-4a6d-a916-08f52426396d] Received unexpected event network-vif-plugged-c449bb4b-138e-412d-b39f-b2811a4ac7df for instance with vm_state building and task_state spawning. [ 1806.858414] env[63279]: DEBUG nova.network.neutron [None req-e4b5d649-5bea-45f1-99e3-0ca20d156863 tempest-ServerDiagnosticsTest-1267197074 tempest-ServerDiagnosticsTest-1267197074-project-member] [instance: ac1d0e8f-446a-4a6d-a916-08f52426396d] Instance cache missing network info. {{(pid=63279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1806.870991] env[63279]: DEBUG nova.network.neutron [req-e9231d5f-e930-4b8e-9479-fa6db334ab77 req-ed586fb8-5700-43d2-a8ca-0eafea241673 service nova] [instance: 0224e4ea-c13c-4abd-9626-6038c0bbe4e9] Updated VIF entry in instance network info cache for port ff3873e4-61b9-4b2d-80eb-2ace560fe858. {{(pid=63279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1806.871219] env[63279]: DEBUG nova.network.neutron [req-e9231d5f-e930-4b8e-9479-fa6db334ab77 req-ed586fb8-5700-43d2-a8ca-0eafea241673 service nova] [instance: 0224e4ea-c13c-4abd-9626-6038c0bbe4e9] Updating instance_info_cache with network_info: [{"id": "ff3873e4-61b9-4b2d-80eb-2ace560fe858", "address": "fa:16:3e:4f:c4:9e", "network": {"id": "1e875730-8e0e-4907-a6d2-776025ed7ab9", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.82", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "a935e86eee0a4c38adfe0367d2097a61", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "32028d02-abaa-4071-bc65-1460f5c772a8", "external-id": "nsx-vlan-transportzone-558", "segmentation_id": 558, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapff3873e4-61", "ovs_interfaceid": "ff3873e4-61b9-4b2d-80eb-2ace560fe858", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1807.043885] env[63279]: DEBUG nova.policy [None req-8c276986-9767-495b-b2d0-91b2bfe4ca4e tempest-ServerPasswordTestJSON-1558733645 tempest-ServerPasswordTestJSON-1558733645-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '03eed819d499410f8c9ad3fe5ca5a601', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'abb95b37d6914c4f8624f2f924a82f3c', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63279) authorize /opt/stack/nova/nova/policy.py:192}} [ 1807.173487] env[63279]: DEBUG nova.compute.manager [None 
req-8c276986-9767-495b-b2d0-91b2bfe4ca4e tempest-ServerPasswordTestJSON-1558733645 tempest-ServerPasswordTestJSON-1558733645-project-member] [instance: 21c2bba1-5482-496a-9e2a-f123a94ed48a] Start building block device mappings for instance. {{(pid=63279) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1807.226617] env[63279]: DEBUG oslo_vmware.api [None req-3f0594b7-45ab-4426-9a9f-6d257821bf50 tempest-ServerDiagnosticsNegativeTest-1669012955 tempest-ServerDiagnosticsNegativeTest-1669012955-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52855295-5e5e-4b09-1ff1-fb69745ca14d, 'name': SearchDatastore_Task, 'duration_secs': 0.026037} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1807.226617] env[63279]: DEBUG oslo_concurrency.lockutils [None req-3f0594b7-45ab-4426-9a9f-6d257821bf50 tempest-ServerDiagnosticsNegativeTest-1669012955 tempest-ServerDiagnosticsNegativeTest-1669012955-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1807.226617] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-3f0594b7-45ab-4426-9a9f-6d257821bf50 tempest-ServerDiagnosticsNegativeTest-1669012955 tempest-ServerDiagnosticsNegativeTest-1669012955-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] 4063d5e0-1144-40fa-8ed8-efda16730617/4063d5e0-1144-40fa-8ed8-efda16730617.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1807.227190] env[63279]: DEBUG oslo_concurrency.lockutils [None req-9e79427e-efa2-427f-9c45-14942fa607b4 tempest-ServersAdminNegativeTestJSON-968304097 tempest-ServersAdminNegativeTestJSON-968304097-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1807.227190] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-9e79427e-efa2-427f-9c45-14942fa607b4 tempest-ServersAdminNegativeTestJSON-968304097 tempest-ServersAdminNegativeTestJSON-968304097-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1807.227394] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1fff3549-63f0-4c35-9612-695ab013378f {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1807.235449] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-549a7245-0a4d-45da-8536-35eec45a92c0 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1807.245075] env[63279]: DEBUG oslo_vmware.api [None req-3f0594b7-45ab-4426-9a9f-6d257821bf50 tempest-ServerDiagnosticsNegativeTest-1669012955 tempest-ServerDiagnosticsNegativeTest-1669012955-project-member] Waiting for the task: (returnval){ [ 1807.245075] env[63279]: value = "task-2086676" [ 1807.245075] env[63279]: _type = "Task" [ 1807.245075] env[63279]: } to complete. 
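Annotation: the recurring "Waiting for the task ... progress is N% ... completed successfully" entries come from oslo.vmware polling a vCenter task object until it reaches a terminal state. Below is a minimal conceptual sketch of such a poll loop in plain Python, not oslo.vmware's actual _poll_task; the fetch_task_info callable, the progress printing, and the interval/timeout values are illustrative assumptions.

import time

# Conceptual sketch of a task poll loop like the one behind the
# "Task: {...} progress is N%" entries above. fetch_task_info is an
# assumed callable returning a dict with 'state' and 'progress' keys;
# the real code reads a vCenter TaskInfo managed object instead.
def wait_for_task(fetch_task_info, poll_interval=0.5, timeout=300):
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        info = fetch_task_info()
        state = info.get('state')
        if state == 'success':
            return info.get('result')
        if state == 'error':
            raise RuntimeError(info.get('error', 'task failed'))
        # 'running' or 'queued': report progress and keep polling.
        print("progress is %s%%" % info.get('progress', 0))
        time.sleep(poll_interval)
    raise TimeoutError('task did not complete in %ss' % timeout)

# Example: a fake task that completes after two polls.
calls = iter([{'state': 'running', 'progress': 0},
              {'state': 'running', 'progress': 89},
              {'state': 'success', 'result': 'task-2086676'}])
print(wait_for_task(lambda: next(calls), poll_interval=0))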
{{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1807.246431] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-9e79427e-efa2-427f-9c45-14942fa607b4 tempest-ServersAdminNegativeTestJSON-968304097 tempest-ServersAdminNegativeTestJSON-968304097-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1807.246431] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-9e79427e-efa2-427f-9c45-14942fa607b4 tempest-ServersAdminNegativeTestJSON-968304097 tempest-ServersAdminNegativeTestJSON-968304097-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1807.251205] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2c7490e8-b25b-45aa-9d25-51260de4aaf4 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1807.264461] env[63279]: DEBUG oslo_vmware.api [None req-3f0594b7-45ab-4426-9a9f-6d257821bf50 tempest-ServerDiagnosticsNegativeTest-1669012955 tempest-ServerDiagnosticsNegativeTest-1669012955-project-member] Task: {'id': task-2086676, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1807.266660] env[63279]: DEBUG oslo_vmware.api [None req-9e79427e-efa2-427f-9c45-14942fa607b4 tempest-ServersAdminNegativeTestJSON-968304097 tempest-ServersAdminNegativeTestJSON-968304097-project-member] Waiting for the task: (returnval){ [ 1807.266660] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52ba9f59-ff9b-7ef2-6a57-5a0e7c41e9eb" [ 1807.266660] env[63279]: _type = "Task" [ 1807.266660] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1807.281538] env[63279]: DEBUG oslo_vmware.api [None req-9e79427e-efa2-427f-9c45-14942fa607b4 tempest-ServersAdminNegativeTestJSON-968304097 tempest-ServersAdminNegativeTestJSON-968304097-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52ba9f59-ff9b-7ef2-6a57-5a0e7c41e9eb, 'name': SearchDatastore_Task, 'duration_secs': 0.007946} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1807.281538] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d07a1846-1d06-4023-95d6-637ca50117be {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1807.286213] env[63279]: DEBUG oslo_vmware.api [None req-9e79427e-efa2-427f-9c45-14942fa607b4 tempest-ServersAdminNegativeTestJSON-968304097 tempest-ServersAdminNegativeTestJSON-968304097-project-member] Waiting for the task: (returnval){ [ 1807.286213] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]524f7904-068c-2773-6979-dd85833240dd" [ 1807.286213] env[63279]: _type = "Task" [ 1807.286213] env[63279]: } to complete. 
{{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1807.295277] env[63279]: DEBUG oslo_vmware.api [None req-9e79427e-efa2-427f-9c45-14942fa607b4 tempest-ServersAdminNegativeTestJSON-968304097 tempest-ServersAdminNegativeTestJSON-968304097-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]524f7904-068c-2773-6979-dd85833240dd, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1807.327644] env[63279]: DEBUG oslo_concurrency.lockutils [None req-a7076d91-54cd-478d-8de7-9cdce51faa84 tempest-AttachInterfacesUnderV243Test-18748603 tempest-AttachInterfacesUnderV243Test-18748603-project-member] Acquiring lock "fcc5a636-554f-424e-a604-a8e7bd7cf574" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1807.327904] env[63279]: DEBUG oslo_concurrency.lockutils [None req-a7076d91-54cd-478d-8de7-9cdce51faa84 tempest-AttachInterfacesUnderV243Test-18748603 tempest-AttachInterfacesUnderV243Test-18748603-project-member] Lock "fcc5a636-554f-424e-a604-a8e7bd7cf574" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1807.377065] env[63279]: DEBUG oslo_concurrency.lockutils [req-e9231d5f-e930-4b8e-9479-fa6db334ab77 req-ed586fb8-5700-43d2-a8ca-0eafea241673 service nova] Releasing lock "refresh_cache-0224e4ea-c13c-4abd-9626-6038c0bbe4e9" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1807.443879] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e64983db-d786-42e9-89ef-3b72208f1b32 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1807.457326] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b6ea7ec-7ed8-4dcc-96d7-4da06afa0681 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1807.495026] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c69a6b2-b293-41c0-9dc2-0ff66c66f17e {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1807.507892] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dfa5531d-4531-47d8-9ede-d2c2de6ddf0e {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1807.530104] env[63279]: DEBUG nova.compute.provider_tree [None req-96a3c836-4e66-45c5-95bd-3126c6132246 tempest-ServerExternalEventsTest-2060906853 tempest-ServerExternalEventsTest-2060906853-project-member] Inventory has not changed in ProviderTree for provider: 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1807.756164] env[63279]: DEBUG oslo_vmware.api [None req-3f0594b7-45ab-4426-9a9f-6d257821bf50 tempest-ServerDiagnosticsNegativeTest-1669012955 tempest-ServerDiagnosticsNegativeTest-1669012955-project-member] Task: {'id': 
task-2086676, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1807.798516] env[63279]: DEBUG oslo_vmware.api [None req-9e79427e-efa2-427f-9c45-14942fa607b4 tempest-ServersAdminNegativeTestJSON-968304097 tempest-ServersAdminNegativeTestJSON-968304097-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]524f7904-068c-2773-6979-dd85833240dd, 'name': SearchDatastore_Task, 'duration_secs': 0.008833} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1807.799201] env[63279]: DEBUG oslo_concurrency.lockutils [None req-9e79427e-efa2-427f-9c45-14942fa607b4 tempest-ServersAdminNegativeTestJSON-968304097 tempest-ServersAdminNegativeTestJSON-968304097-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1807.799480] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-9e79427e-efa2-427f-9c45-14942fa607b4 tempest-ServersAdminNegativeTestJSON-968304097 tempest-ServersAdminNegativeTestJSON-968304097-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] 0224e4ea-c13c-4abd-9626-6038c0bbe4e9/0224e4ea-c13c-4abd-9626-6038c0bbe4e9.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1807.799750] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f8d5bd81-8884-4dcf-aa60-5da26aa8b34f {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1807.811153] env[63279]: DEBUG oslo_vmware.api [None req-9e79427e-efa2-427f-9c45-14942fa607b4 tempest-ServersAdminNegativeTestJSON-968304097 tempest-ServersAdminNegativeTestJSON-968304097-project-member] Waiting for the task: (returnval){ [ 1807.811153] env[63279]: value = "task-2086677" [ 1807.811153] env[63279]: _type = "Task" [ 1807.811153] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1807.817757] env[63279]: DEBUG oslo_vmware.api [None req-9e79427e-efa2-427f-9c45-14942fa607b4 tempest-ServersAdminNegativeTestJSON-968304097 tempest-ServersAdminNegativeTestJSON-968304097-project-member] Task: {'id': task-2086677, 'name': CopyVirtualDisk_Task} progress is 0%. 
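Annotation: the "Acquiring lock ... / acquired ... waited / released ... held" triplets above are emitted by oslo.concurrency's lock helpers wrapping critical sections such as _locked_do_build_and_run_instance and the resource tracker's claims. A hedged usage sketch, assuming oslo.concurrency is installed; the lock names are taken from the log, but the decorated function and the body of the context manager are illustrative, not Nova's code.

from oslo_concurrency import lockutils

# Illustrative only: a per-instance lock, similar in spirit to the
# instance-UUID locks taken around _locked_do_build_and_run_instance above.
# The decorator serializes concurrent callers on the same lock name and
# produces the "acquired by ... waited" / "released ... held" DEBUG lines.
@lockutils.synchronized('fcc5a636-554f-424e-a604-a8e7bd7cf574')
def build_and_run(instance_uuid):
    print('building %s' % instance_uuid)

# The context-manager form covers ad-hoc critical sections such as the
# "compute_resources" lock held during instance_claim.
with lockutils.lock('compute_resources'):
    pass  # claim resources, update the tracker, etc.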
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1807.885808] env[63279]: DEBUG nova.network.neutron [None req-e4b5d649-5bea-45f1-99e3-0ca20d156863 tempest-ServerDiagnosticsTest-1267197074 tempest-ServerDiagnosticsTest-1267197074-project-member] [instance: ac1d0e8f-446a-4a6d-a916-08f52426396d] Updating instance_info_cache with network_info: [{"id": "c449bb4b-138e-412d-b39f-b2811a4ac7df", "address": "fa:16:3e:39:d1:65", "network": {"id": "1e875730-8e0e-4907-a6d2-776025ed7ab9", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.177", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "a935e86eee0a4c38adfe0367d2097a61", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "32028d02-abaa-4071-bc65-1460f5c772a8", "external-id": "nsx-vlan-transportzone-558", "segmentation_id": 558, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc449bb4b-13", "ovs_interfaceid": "c449bb4b-138e-412d-b39f-b2811a4ac7df", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1808.034026] env[63279]: DEBUG nova.scheduler.client.report [None req-96a3c836-4e66-45c5-95bd-3126c6132246 tempest-ServerExternalEventsTest-2060906853 tempest-ServerExternalEventsTest-2060906853-project-member] Inventory has not changed for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1808.186026] env[63279]: DEBUG nova.compute.manager [None req-8c276986-9767-495b-b2d0-91b2bfe4ca4e tempest-ServerPasswordTestJSON-1558733645 tempest-ServerPasswordTestJSON-1558733645-project-member] [instance: 21c2bba1-5482-496a-9e2a-f123a94ed48a] Start spawning the instance on the hypervisor. 
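Annotation: the instance_info_cache payload above is Nova's network_info model, a list of VIF dicts carrying the port id, MAC, device name and nested network/subnet/IP data. A small plain-Python sketch (the dict below is abbreviated from the entry above) of pulling the fixed IP and tap device out of one such VIF.

# Abbreviated copy of one VIF entry from the cache update above.
vif = {
    "id": "c449bb4b-138e-412d-b39f-b2811a4ac7df",
    "address": "fa:16:3e:39:d1:65",
    "devname": "tapc449bb4b-13",
    "type": "ovs",
    "network": {
        "id": "1e875730-8e0e-4907-a6d2-776025ed7ab9",
        "bridge": "br-int",
        "subnets": [{
            "cidr": "192.168.233.0/24",
            "ips": [{"address": "192.168.233.177", "type": "fixed"}],
        }],
    },
}

def summarize_vif(vif):
    """Return (port id, MAC, first fixed IP, tap device) for one VIF dict."""
    ips = [ip["address"]
           for subnet in vif["network"]["subnets"]
           for ip in subnet["ips"]]
    return vif["id"], vif["address"], ips[0] if ips else None, vif["devname"]

print(summarize_vif(vif))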
{{(pid=63279) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1808.228734] env[63279]: DEBUG nova.virt.hardware [None req-8c276986-9767-495b-b2d0-91b2bfe4ca4e tempest-ServerPasswordTestJSON-1558733645 tempest-ServerPasswordTestJSON-1558733645-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-13T17:30:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-13T17:30:01Z,direct_url=,disk_format='vmdk',id=30887889-e45b-4f67-8b3c-16216e594a90,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a935e86eee0a4c38adfe0367d2097a61',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-13T17:30:02Z,virtual_size=,visibility=), allow threads: False {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1808.229173] env[63279]: DEBUG nova.virt.hardware [None req-8c276986-9767-495b-b2d0-91b2bfe4ca4e tempest-ServerPasswordTestJSON-1558733645 tempest-ServerPasswordTestJSON-1558733645-project-member] Flavor limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1808.229173] env[63279]: DEBUG nova.virt.hardware [None req-8c276986-9767-495b-b2d0-91b2bfe4ca4e tempest-ServerPasswordTestJSON-1558733645 tempest-ServerPasswordTestJSON-1558733645-project-member] Image limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1808.229513] env[63279]: DEBUG nova.virt.hardware [None req-8c276986-9767-495b-b2d0-91b2bfe4ca4e tempest-ServerPasswordTestJSON-1558733645 tempest-ServerPasswordTestJSON-1558733645-project-member] Flavor pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1808.229513] env[63279]: DEBUG nova.virt.hardware [None req-8c276986-9767-495b-b2d0-91b2bfe4ca4e tempest-ServerPasswordTestJSON-1558733645 tempest-ServerPasswordTestJSON-1558733645-project-member] Image pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1808.229980] env[63279]: DEBUG nova.virt.hardware [None req-8c276986-9767-495b-b2d0-91b2bfe4ca4e tempest-ServerPasswordTestJSON-1558733645 tempest-ServerPasswordTestJSON-1558733645-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1808.229980] env[63279]: DEBUG nova.virt.hardware [None req-8c276986-9767-495b-b2d0-91b2bfe4ca4e tempest-ServerPasswordTestJSON-1558733645 tempest-ServerPasswordTestJSON-1558733645-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1808.230154] env[63279]: DEBUG nova.virt.hardware [None req-8c276986-9767-495b-b2d0-91b2bfe4ca4e tempest-ServerPasswordTestJSON-1558733645 tempest-ServerPasswordTestJSON-1558733645-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1808.232330] env[63279]: DEBUG 
nova.virt.hardware [None req-8c276986-9767-495b-b2d0-91b2bfe4ca4e tempest-ServerPasswordTestJSON-1558733645 tempest-ServerPasswordTestJSON-1558733645-project-member] Got 1 possible topologies {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1808.232568] env[63279]: DEBUG nova.virt.hardware [None req-8c276986-9767-495b-b2d0-91b2bfe4ca4e tempest-ServerPasswordTestJSON-1558733645 tempest-ServerPasswordTestJSON-1558733645-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1808.232712] env[63279]: DEBUG nova.virt.hardware [None req-8c276986-9767-495b-b2d0-91b2bfe4ca4e tempest-ServerPasswordTestJSON-1558733645 tempest-ServerPasswordTestJSON-1558733645-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1808.236296] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d42f79b1-ecb5-4bf1-9b34-d5ae5719905d {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1808.246862] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67ac8f41-f068-4cb8-af05-c9cebc96f3b1 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1808.281431] env[63279]: DEBUG oslo_vmware.api [None req-3f0594b7-45ab-4426-9a9f-6d257821bf50 tempest-ServerDiagnosticsNegativeTest-1669012955 tempest-ServerDiagnosticsNegativeTest-1669012955-project-member] Task: {'id': task-2086676, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.530939} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1808.281836] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-3f0594b7-45ab-4426-9a9f-6d257821bf50 tempest-ServerDiagnosticsNegativeTest-1669012955 tempest-ServerDiagnosticsNegativeTest-1669012955-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] 4063d5e0-1144-40fa-8ed8-efda16730617/4063d5e0-1144-40fa-8ed8-efda16730617.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1808.281836] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-3f0594b7-45ab-4426-9a9f-6d257821bf50 tempest-ServerDiagnosticsNegativeTest-1669012955 tempest-ServerDiagnosticsNegativeTest-1669012955-project-member] [instance: 4063d5e0-1144-40fa-8ed8-efda16730617] Extending root virtual disk to 1048576 {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1808.282313] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ed69f9c5-862e-4cba-8da7-2b6576bd2400 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1808.293631] env[63279]: DEBUG oslo_vmware.api [None req-3f0594b7-45ab-4426-9a9f-6d257821bf50 tempest-ServerDiagnosticsNegativeTest-1669012955 tempest-ServerDiagnosticsNegativeTest-1669012955-project-member] Waiting for the task: (returnval){ [ 1808.293631] env[63279]: value = "task-2086678" [ 1808.293631] env[63279]: _type = "Task" [ 1808.293631] env[63279]: } to complete. 
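Annotation: the nova.virt.hardware entries show the driver enumerating candidate (sockets, cores, threads) topologies for a 1-vCPU flavor under effectively unlimited maxima and settling on 1:1:1. A conceptual sketch of that enumeration, with the limit handling simplified relative to Nova's real _get_possible_cpu_topologies.

import itertools

def possible_topologies(vcpus, max_sockets=65536, max_cores=65536,
                        max_threads=65536):
    """Yield (sockets, cores, threads) triples whose product equals vcpus."""
    for sockets, cores, threads in itertools.product(
            range(1, min(vcpus, max_sockets) + 1),
            range(1, min(vcpus, max_cores) + 1),
            range(1, min(vcpus, max_threads) + 1)):
        if sockets * cores * threads == vcpus:
            yield sockets, cores, threads

# For the 1-vCPU m1.nano flavor in the log there is exactly one candidate.
print(list(possible_topologies(1)))   # [(1, 1, 1)]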
{{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1808.306699] env[63279]: DEBUG oslo_vmware.api [None req-3f0594b7-45ab-4426-9a9f-6d257821bf50 tempest-ServerDiagnosticsNegativeTest-1669012955 tempest-ServerDiagnosticsNegativeTest-1669012955-project-member] Task: {'id': task-2086678, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1808.319893] env[63279]: DEBUG oslo_vmware.api [None req-9e79427e-efa2-427f-9c45-14942fa607b4 tempest-ServersAdminNegativeTestJSON-968304097 tempest-ServersAdminNegativeTestJSON-968304097-project-member] Task: {'id': task-2086677, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1808.391566] env[63279]: DEBUG oslo_concurrency.lockutils [None req-e4b5d649-5bea-45f1-99e3-0ca20d156863 tempest-ServerDiagnosticsTest-1267197074 tempest-ServerDiagnosticsTest-1267197074-project-member] Releasing lock "refresh_cache-ac1d0e8f-446a-4a6d-a916-08f52426396d" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1808.392166] env[63279]: DEBUG nova.compute.manager [None req-e4b5d649-5bea-45f1-99e3-0ca20d156863 tempest-ServerDiagnosticsTest-1267197074 tempest-ServerDiagnosticsTest-1267197074-project-member] [instance: ac1d0e8f-446a-4a6d-a916-08f52426396d] Instance network_info: |[{"id": "c449bb4b-138e-412d-b39f-b2811a4ac7df", "address": "fa:16:3e:39:d1:65", "network": {"id": "1e875730-8e0e-4907-a6d2-776025ed7ab9", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.177", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "a935e86eee0a4c38adfe0367d2097a61", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "32028d02-abaa-4071-bc65-1460f5c772a8", "external-id": "nsx-vlan-transportzone-558", "segmentation_id": 558, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc449bb4b-13", "ovs_interfaceid": "c449bb4b-138e-412d-b39f-b2811a4ac7df", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1808.393358] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-e4b5d649-5bea-45f1-99e3-0ca20d156863 tempest-ServerDiagnosticsTest-1267197074 tempest-ServerDiagnosticsTest-1267197074-project-member] [instance: ac1d0e8f-446a-4a6d-a916-08f52426396d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:39:d1:65', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '32028d02-abaa-4071-bc65-1460f5c772a8', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c449bb4b-138e-412d-b39f-b2811a4ac7df', 'vif_model': 'vmxnet3'}] {{(pid=63279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1808.404825] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-e4b5d649-5bea-45f1-99e3-0ca20d156863 tempest-ServerDiagnosticsTest-1267197074 
tempest-ServerDiagnosticsTest-1267197074-project-member] Creating folder: Project (72509b0561574a20a75dedf64d53d55d). Parent ref: group-v427491. {{(pid=63279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1808.406029] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-cd4447b1-60ca-4cff-a373-05ecec043547 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1808.419862] env[63279]: INFO nova.virt.vmwareapi.vm_util [None req-e4b5d649-5bea-45f1-99e3-0ca20d156863 tempest-ServerDiagnosticsTest-1267197074 tempest-ServerDiagnosticsTest-1267197074-project-member] Created folder: Project (72509b0561574a20a75dedf64d53d55d) in parent group-v427491. [ 1808.423265] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-e4b5d649-5bea-45f1-99e3-0ca20d156863 tempest-ServerDiagnosticsTest-1267197074 tempest-ServerDiagnosticsTest-1267197074-project-member] Creating folder: Instances. Parent ref: group-v427498. {{(pid=63279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1808.423265] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-0ef1760d-7b76-4d4d-a8b4-6fb01de44b3d {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1808.436130] env[63279]: INFO nova.virt.vmwareapi.vm_util [None req-e4b5d649-5bea-45f1-99e3-0ca20d156863 tempest-ServerDiagnosticsTest-1267197074 tempest-ServerDiagnosticsTest-1267197074-project-member] Created folder: Instances in parent group-v427498. [ 1808.436399] env[63279]: DEBUG oslo.service.loopingcall [None req-e4b5d649-5bea-45f1-99e3-0ca20d156863 tempest-ServerDiagnosticsTest-1267197074 tempest-ServerDiagnosticsTest-1267197074-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1808.437731] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ac1d0e8f-446a-4a6d-a916-08f52426396d] Creating VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1808.437731] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-dbe3b6a1-9625-44ed-95ad-395ccbbe9c77 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1808.464046] env[63279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1808.464046] env[63279]: value = "task-2086681" [ 1808.464046] env[63279]: _type = "Task" [ 1808.464046] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1808.472011] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2086681, 'name': CreateVM_Task} progress is 0%. 
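Annotation: before CreateVM_Task, the driver condenses each network_info VIF into the "Instance VIF info" form logged above (network_name, mac_address, network_ref, iface_id, vif_model). An illustrative sketch of that mapping for the NSX OpaqueNetwork case; the field names are copied from the log entry, but the function and the sample dict are not Nova's vm_util code.

def vif_info_from_network_info(vif, vif_model="vmxnet3"):
    """Condense one network_info VIF dict into the driver's VIF info form."""
    details = vif.get("details", {})
    return {
        "network_name": vif["network"]["bridge"],          # e.g. "br-int"
        "mac_address": vif["address"],
        "network_ref": {
            "type": "OpaqueNetwork",
            "network-id": details.get("nsx-logical-switch-id"),
            "network-type": "nsx.LogicalSwitch",
            "use-external-id": True,
        },
        "iface_id": vif["id"],
        "vif_model": vif_model,
    }

# Minimal sample input with only the keys the mapping reads.
sample = {
    "id": "c449bb4b-138e-412d-b39f-b2811a4ac7df",
    "address": "fa:16:3e:39:d1:65",
    "network": {"bridge": "br-int"},
    "details": {"nsx-logical-switch-id": "32028d02-abaa-4071-bc65-1460f5c772a8"},
}
print(vif_info_from_network_info(sample))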
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1808.543938] env[63279]: DEBUG oslo_concurrency.lockutils [None req-96a3c836-4e66-45c5-95bd-3126c6132246 tempest-ServerExternalEventsTest-2060906853 tempest-ServerExternalEventsTest-2060906853-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.395s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1808.543938] env[63279]: DEBUG nova.compute.manager [None req-96a3c836-4e66-45c5-95bd-3126c6132246 tempest-ServerExternalEventsTest-2060906853 tempest-ServerExternalEventsTest-2060906853-project-member] [instance: 24bce28c-fc43-4f17-9800-4d980f6729bc] Start building networks asynchronously for instance. {{(pid=63279) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1808.544275] env[63279]: DEBUG oslo_concurrency.lockutils [None req-c56e5cd3-c5d3-497a-94cf-fa4a1928fd50 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 12.717s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1808.554836] env[63279]: INFO nova.compute.claims [None req-c56e5cd3-c5d3-497a-94cf-fa4a1928fd50 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] [instance: 6388f912-ae70-4e8f-b8e4-ceb02e0f8a51] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1808.807807] env[63279]: DEBUG oslo_vmware.api [None req-3f0594b7-45ab-4426-9a9f-6d257821bf50 tempest-ServerDiagnosticsNegativeTest-1669012955 tempest-ServerDiagnosticsNegativeTest-1669012955-project-member] Task: {'id': task-2086678, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.067852} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1808.808199] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-3f0594b7-45ab-4426-9a9f-6d257821bf50 tempest-ServerDiagnosticsNegativeTest-1669012955 tempest-ServerDiagnosticsNegativeTest-1669012955-project-member] [instance: 4063d5e0-1144-40fa-8ed8-efda16730617] Extended root virtual disk {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1808.810090] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c306bc01-c06d-404f-a3fb-b2a80e65520b {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1808.826865] env[63279]: DEBUG oslo_vmware.api [None req-9e79427e-efa2-427f-9c45-14942fa607b4 tempest-ServersAdminNegativeTestJSON-968304097 tempest-ServersAdminNegativeTestJSON-968304097-project-member] Task: {'id': task-2086677, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.511264} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1808.842534] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-9e79427e-efa2-427f-9c45-14942fa607b4 tempest-ServersAdminNegativeTestJSON-968304097 tempest-ServersAdminNegativeTestJSON-968304097-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] 0224e4ea-c13c-4abd-9626-6038c0bbe4e9/0224e4ea-c13c-4abd-9626-6038c0bbe4e9.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1808.842855] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-9e79427e-efa2-427f-9c45-14942fa607b4 tempest-ServersAdminNegativeTestJSON-968304097 tempest-ServersAdminNegativeTestJSON-968304097-project-member] [instance: 0224e4ea-c13c-4abd-9626-6038c0bbe4e9] Extending root virtual disk to 1048576 {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1808.857029] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-3f0594b7-45ab-4426-9a9f-6d257821bf50 tempest-ServerDiagnosticsNegativeTest-1669012955 tempest-ServerDiagnosticsNegativeTest-1669012955-project-member] [instance: 4063d5e0-1144-40fa-8ed8-efda16730617] Reconfiguring VM instance instance-00000002 to attach disk [datastore1] 4063d5e0-1144-40fa-8ed8-efda16730617/4063d5e0-1144-40fa-8ed8-efda16730617.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1808.857108] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ca40820b-934e-46c4-a289-220eaedfe22e {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1808.859452] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ea0db43d-07ca-4972-87a4-86f8fd8a0ecf {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1808.879425] env[63279]: DEBUG oslo_vmware.api [None req-9e79427e-efa2-427f-9c45-14942fa607b4 tempest-ServersAdminNegativeTestJSON-968304097 tempest-ServersAdminNegativeTestJSON-968304097-project-member] Waiting for the task: (returnval){ [ 1808.879425] env[63279]: value = "task-2086682" [ 1808.879425] env[63279]: _type = "Task" [ 1808.879425] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1808.881756] env[63279]: DEBUG oslo_vmware.api [None req-3f0594b7-45ab-4426-9a9f-6d257821bf50 tempest-ServerDiagnosticsNegativeTest-1669012955 tempest-ServerDiagnosticsNegativeTest-1669012955-project-member] Waiting for the task: (returnval){ [ 1808.881756] env[63279]: value = "task-2086683" [ 1808.881756] env[63279]: _type = "Task" [ 1808.881756] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1808.898435] env[63279]: DEBUG oslo_vmware.api [None req-3f0594b7-45ab-4426-9a9f-6d257821bf50 tempest-ServerDiagnosticsNegativeTest-1669012955 tempest-ServerDiagnosticsNegativeTest-1669012955-project-member] Task: {'id': task-2086683, 'name': ReconfigVM_Task} progress is 10%. 
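Annotation: read together, the tasks above trace the root-disk side of spawn: CopyVirtualDisk_Task clones the cached image vmdk into the instance directory, ExtendVirtualDisk_Task grows it to the flavor's root size, and ReconfigVM_Task attaches it to the VM (Rename and PowerOnVM follow later in the log). A pseudocode-style ordering sketch; only wait_for_task is a real oslo.vmware session method, and the start_* callables are assumed stand-ins for the invoke_api calls that create each task.

# Assumed stand-ins: each start_* callable kicks off the corresponding
# vCenter task and returns its task reference.
def prepare_root_disk(session, start_copy, start_extend, start_reconfig):
    session.wait_for_task(start_copy())      # CopyVirtualDisk_Task: cache -> instance dir
    session.wait_for_task(start_extend())    # ExtendVirtualDisk_Task: grow to root_gb
    session.wait_for_task(start_reconfig())  # ReconfigVM_Task: attach the vmdk to the VM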
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1808.898821] env[63279]: DEBUG oslo_vmware.api [None req-9e79427e-efa2-427f-9c45-14942fa607b4 tempest-ServersAdminNegativeTestJSON-968304097 tempest-ServersAdminNegativeTestJSON-968304097-project-member] Task: {'id': task-2086682, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1808.976883] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2086681, 'name': CreateVM_Task, 'duration_secs': 0.372155} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1808.977072] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ac1d0e8f-446a-4a6d-a916-08f52426396d] Created VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1808.977783] env[63279]: DEBUG oslo_concurrency.lockutils [None req-e4b5d649-5bea-45f1-99e3-0ca20d156863 tempest-ServerDiagnosticsTest-1267197074 tempest-ServerDiagnosticsTest-1267197074-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1808.978188] env[63279]: DEBUG oslo_concurrency.lockutils [None req-e4b5d649-5bea-45f1-99e3-0ca20d156863 tempest-ServerDiagnosticsTest-1267197074 tempest-ServerDiagnosticsTest-1267197074-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1808.978300] env[63279]: DEBUG oslo_concurrency.lockutils [None req-e4b5d649-5bea-45f1-99e3-0ca20d156863 tempest-ServerDiagnosticsTest-1267197074 tempest-ServerDiagnosticsTest-1267197074-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1808.978762] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c527dd8d-e3fa-499f-987d-ce99cccd056f {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1808.985706] env[63279]: DEBUG oslo_vmware.api [None req-e4b5d649-5bea-45f1-99e3-0ca20d156863 tempest-ServerDiagnosticsTest-1267197074 tempest-ServerDiagnosticsTest-1267197074-project-member] Waiting for the task: (returnval){ [ 1808.985706] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]524f10f1-fcb5-4347-24cc-74916a0680d9" [ 1808.985706] env[63279]: _type = "Task" [ 1808.985706] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1808.994873] env[63279]: DEBUG oslo_vmware.api [None req-e4b5d649-5bea-45f1-99e3-0ca20d156863 tempest-ServerDiagnosticsTest-1267197074 tempest-ServerDiagnosticsTest-1267197074-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]524f10f1-fcb5-4347-24cc-74916a0680d9, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1809.067465] env[63279]: DEBUG nova.compute.utils [None req-96a3c836-4e66-45c5-95bd-3126c6132246 tempest-ServerExternalEventsTest-2060906853 tempest-ServerExternalEventsTest-2060906853-project-member] Using /dev/sd instead of None {{(pid=63279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1809.071679] env[63279]: DEBUG nova.compute.manager [None req-96a3c836-4e66-45c5-95bd-3126c6132246 tempest-ServerExternalEventsTest-2060906853 tempest-ServerExternalEventsTest-2060906853-project-member] [instance: 24bce28c-fc43-4f17-9800-4d980f6729bc] Allocating IP information in the background. {{(pid=63279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1809.071679] env[63279]: DEBUG nova.network.neutron [None req-96a3c836-4e66-45c5-95bd-3126c6132246 tempest-ServerExternalEventsTest-2060906853 tempest-ServerExternalEventsTest-2060906853-project-member] [instance: 24bce28c-fc43-4f17-9800-4d980f6729bc] allocate_for_instance() {{(pid=63279) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1809.193604] env[63279]: DEBUG nova.network.neutron [None req-8c276986-9767-495b-b2d0-91b2bfe4ca4e tempest-ServerPasswordTestJSON-1558733645 tempest-ServerPasswordTestJSON-1558733645-project-member] [instance: 21c2bba1-5482-496a-9e2a-f123a94ed48a] Successfully created port: c3f2f036-d3a2-4b32-b33a-516605f2a1b1 {{(pid=63279) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1809.387059] env[63279]: DEBUG nova.policy [None req-96a3c836-4e66-45c5-95bd-3126c6132246 tempest-ServerExternalEventsTest-2060906853 tempest-ServerExternalEventsTest-2060906853-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '80d5f2c396fa4b6a8294ace094005aee', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c54ed03fdab9451b9f9613a306deb381', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63279) authorize /opt/stack/nova/nova/policy.py:192}} [ 1809.405773] env[63279]: DEBUG oslo_vmware.api [None req-9e79427e-efa2-427f-9c45-14942fa607b4 tempest-ServersAdminNegativeTestJSON-968304097 tempest-ServersAdminNegativeTestJSON-968304097-project-member] Task: {'id': task-2086682, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.068879} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1809.406240] env[63279]: DEBUG oslo_vmware.api [None req-3f0594b7-45ab-4426-9a9f-6d257821bf50 tempest-ServerDiagnosticsNegativeTest-1669012955 tempest-ServerDiagnosticsNegativeTest-1669012955-project-member] Task: {'id': task-2086683, 'name': ReconfigVM_Task, 'duration_secs': 0.286218} completed successfully. 
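Annotation: the nova.policy entry above records a failed network:attach_external_network check for a member/reader token; the build continues because the port is on the shared internal network rather than an external one. A hedged standalone sketch of the oslo.policy pattern behind such a check, assuming oslo.policy and oslo.config are installed; the rule default and credential dict here are illustrative, not Nova's registered policy.

from oslo_config import cfg
from oslo_policy import policy

cfg.CONF([], project='example')          # parse an empty config for the demo
enforcer = policy.Enforcer(cfg.CONF)
# Illustrative default: restrict the rule to admins, roughly why the check
# fails for the member/reader credentials seen in the log.
enforcer.register_default(policy.RuleDefault(
    'network:attach_external_network', 'role:admin'))

creds = {'roles': ['member', 'reader'],
         'project_id': 'abb95b37d6914c4f8624f2f924a82f3c'}
# do_raise=False turns a failed check into a False return value, so the
# caller can log it and fall back instead of aborting the build.
allowed = enforcer.authorize('network:attach_external_network', {}, creds,
                             do_raise=False)
print(allowed)  # False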
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1809.406539] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-9e79427e-efa2-427f-9c45-14942fa607b4 tempest-ServersAdminNegativeTestJSON-968304097 tempest-ServersAdminNegativeTestJSON-968304097-project-member] [instance: 0224e4ea-c13c-4abd-9626-6038c0bbe4e9] Extended root virtual disk {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1809.406961] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-3f0594b7-45ab-4426-9a9f-6d257821bf50 tempest-ServerDiagnosticsNegativeTest-1669012955 tempest-ServerDiagnosticsNegativeTest-1669012955-project-member] [instance: 4063d5e0-1144-40fa-8ed8-efda16730617] Reconfigured VM instance instance-00000002 to attach disk [datastore1] 4063d5e0-1144-40fa-8ed8-efda16730617/4063d5e0-1144-40fa-8ed8-efda16730617.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1809.409784] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65fb1c80-e83a-432b-951a-bd443b4e3f3a {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1809.411992] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f4f80fff-975b-43f4-a38d-b5bb12b39a79 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1809.436517] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-9e79427e-efa2-427f-9c45-14942fa607b4 tempest-ServersAdminNegativeTestJSON-968304097 tempest-ServersAdminNegativeTestJSON-968304097-project-member] [instance: 0224e4ea-c13c-4abd-9626-6038c0bbe4e9] Reconfiguring VM instance instance-00000001 to attach disk [datastore1] 0224e4ea-c13c-4abd-9626-6038c0bbe4e9/0224e4ea-c13c-4abd-9626-6038c0bbe4e9.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1809.437791] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7d662ac4-3c84-4ea2-9ef9-9bdbdc3a9c67 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1809.455246] env[63279]: DEBUG oslo_vmware.api [None req-3f0594b7-45ab-4426-9a9f-6d257821bf50 tempest-ServerDiagnosticsNegativeTest-1669012955 tempest-ServerDiagnosticsNegativeTest-1669012955-project-member] Waiting for the task: (returnval){ [ 1809.455246] env[63279]: value = "task-2086684" [ 1809.455246] env[63279]: _type = "Task" [ 1809.455246] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1809.460671] env[63279]: DEBUG oslo_vmware.api [None req-9e79427e-efa2-427f-9c45-14942fa607b4 tempest-ServersAdminNegativeTestJSON-968304097 tempest-ServersAdminNegativeTestJSON-968304097-project-member] Waiting for the task: (returnval){ [ 1809.460671] env[63279]: value = "task-2086685" [ 1809.460671] env[63279]: _type = "Task" [ 1809.460671] env[63279]: } to complete. 
{{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1809.464481] env[63279]: DEBUG oslo_vmware.api [None req-3f0594b7-45ab-4426-9a9f-6d257821bf50 tempest-ServerDiagnosticsNegativeTest-1669012955 tempest-ServerDiagnosticsNegativeTest-1669012955-project-member] Task: {'id': task-2086684, 'name': Rename_Task} progress is 14%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1809.472606] env[63279]: DEBUG oslo_vmware.api [None req-9e79427e-efa2-427f-9c45-14942fa607b4 tempest-ServersAdminNegativeTestJSON-968304097 tempest-ServersAdminNegativeTestJSON-968304097-project-member] Task: {'id': task-2086685, 'name': ReconfigVM_Task} progress is 10%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1809.478450] env[63279]: DEBUG oslo_concurrency.lockutils [None req-271b2613-9c7a-40b6-867e-c4c5c1580416 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Acquiring lock "c05e9aaa-e251-480c-94d6-56c29bb6282d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1809.481285] env[63279]: DEBUG oslo_concurrency.lockutils [None req-271b2613-9c7a-40b6-867e-c4c5c1580416 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Lock "c05e9aaa-e251-480c-94d6-56c29bb6282d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1809.499198] env[63279]: DEBUG oslo_vmware.api [None req-e4b5d649-5bea-45f1-99e3-0ca20d156863 tempest-ServerDiagnosticsTest-1267197074 tempest-ServerDiagnosticsTest-1267197074-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]524f10f1-fcb5-4347-24cc-74916a0680d9, 'name': SearchDatastore_Task, 'duration_secs': 0.012856} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1809.499515] env[63279]: DEBUG oslo_concurrency.lockutils [None req-e4b5d649-5bea-45f1-99e3-0ca20d156863 tempest-ServerDiagnosticsTest-1267197074 tempest-ServerDiagnosticsTest-1267197074-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1809.499772] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-e4b5d649-5bea-45f1-99e3-0ca20d156863 tempest-ServerDiagnosticsTest-1267197074 tempest-ServerDiagnosticsTest-1267197074-project-member] [instance: ac1d0e8f-446a-4a6d-a916-08f52426396d] Processing image 30887889-e45b-4f67-8b3c-16216e594a90 {{(pid=63279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1809.500027] env[63279]: DEBUG oslo_concurrency.lockutils [None req-e4b5d649-5bea-45f1-99e3-0ca20d156863 tempest-ServerDiagnosticsTest-1267197074 tempest-ServerDiagnosticsTest-1267197074-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1809.500175] env[63279]: DEBUG oslo_concurrency.lockutils [None req-e4b5d649-5bea-45f1-99e3-0ca20d156863 tempest-ServerDiagnosticsTest-1267197074 tempest-ServerDiagnosticsTest-1267197074-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1809.500351] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-e4b5d649-5bea-45f1-99e3-0ca20d156863 tempest-ServerDiagnosticsTest-1267197074 tempest-ServerDiagnosticsTest-1267197074-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1809.500677] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f1ff34d6-d5f9-4922-ae32-0af49295aa5a {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1809.510623] env[63279]: DEBUG nova.network.neutron [None req-a59d5ce1-88f2-4b01-b5f9-87a8413baff4 tempest-TenantUsagesTestJSON-1157774739 tempest-TenantUsagesTestJSON-1157774739-project-member] [instance: 8c712d0d-55c2-4a14-b759-9441594211e1] Successfully updated port: a1328084-8103-45cf-a3f8-b825bebe06c0 {{(pid=63279) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1809.510623] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-e4b5d649-5bea-45f1-99e3-0ca20d156863 tempest-ServerDiagnosticsTest-1267197074 tempest-ServerDiagnosticsTest-1267197074-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1809.510623] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-e4b5d649-5bea-45f1-99e3-0ca20d156863 tempest-ServerDiagnosticsTest-1267197074 tempest-ServerDiagnosticsTest-1267197074-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1809.513490] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c1cd4d69-f21a-455b-98ff-a712308f7508 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1809.521989] env[63279]: DEBUG oslo_vmware.api [None req-e4b5d649-5bea-45f1-99e3-0ca20d156863 tempest-ServerDiagnosticsTest-1267197074 tempest-ServerDiagnosticsTest-1267197074-project-member] Waiting for the task: (returnval){ [ 1809.521989] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52af00aa-7319-e650-df94-bd8f023b7547" [ 1809.521989] env[63279]: _type = "Task" [ 1809.521989] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1809.522296] env[63279]: DEBUG oslo_concurrency.lockutils [None req-271b2613-9c7a-40b6-867e-c4c5c1580416 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Acquiring lock "3d4db43f-5784-46e1-9710-f6becec011e2" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1809.522464] env[63279]: DEBUG oslo_concurrency.lockutils [None req-271b2613-9c7a-40b6-867e-c4c5c1580416 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Lock "3d4db43f-5784-46e1-9710-f6becec011e2" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1809.533213] env[63279]: DEBUG oslo_vmware.api [None req-e4b5d649-5bea-45f1-99e3-0ca20d156863 tempest-ServerDiagnosticsTest-1267197074 tempest-ServerDiagnosticsTest-1267197074-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52af00aa-7319-e650-df94-bd8f023b7547, 'name': SearchDatastore_Task, 'duration_secs': 0.008609} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1809.534764] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3b560efd-9208-4674-a47b-e0a00eb33b5c {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1809.544693] env[63279]: DEBUG oslo_vmware.api [None req-e4b5d649-5bea-45f1-99e3-0ca20d156863 tempest-ServerDiagnosticsTest-1267197074 tempest-ServerDiagnosticsTest-1267197074-project-member] Waiting for the task: (returnval){ [ 1809.544693] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52655e0a-f3ca-3d63-cfdb-520b045acfc2" [ 1809.544693] env[63279]: _type = "Task" [ 1809.544693] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1809.559956] env[63279]: DEBUG oslo_vmware.api [None req-e4b5d649-5bea-45f1-99e3-0ca20d156863 tempest-ServerDiagnosticsTest-1267197074 tempest-ServerDiagnosticsTest-1267197074-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52655e0a-f3ca-3d63-cfdb-520b045acfc2, 'name': SearchDatastore_Task, 'duration_secs': 0.009436} completed successfully. 
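Annotation: datastore references such as "[datastore1] devstack-image-cache_base/<image id>/<image id>.vmdk" in the ds_util entries pair a datastore name with a POSIX-style relative path. A tiny illustrative helper for building such strings; the real code uses a richer datastore-path abstraction rather than bare string formatting.

import posixpath

def ds_path(datastore, *parts):
    """Build a '[datastore] rel/path' string like the ones in the log."""
    return "[%s] %s" % (datastore, posixpath.join(*parts))

image_id = "30887889-e45b-4f67-8b3c-16216e594a90"
print(ds_path("datastore1", "devstack-image-cache_base",
              image_id, image_id + ".vmdk"))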
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1809.560296] env[63279]: DEBUG oslo_concurrency.lockutils [None req-e4b5d649-5bea-45f1-99e3-0ca20d156863 tempest-ServerDiagnosticsTest-1267197074 tempest-ServerDiagnosticsTest-1267197074-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1809.560585] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-e4b5d649-5bea-45f1-99e3-0ca20d156863 tempest-ServerDiagnosticsTest-1267197074 tempest-ServerDiagnosticsTest-1267197074-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] ac1d0e8f-446a-4a6d-a916-08f52426396d/ac1d0e8f-446a-4a6d-a916-08f52426396d.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1809.561135] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-302a3d98-4bc3-4a1b-8773-470113538ca4 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1809.568226] env[63279]: DEBUG oslo_vmware.api [None req-e4b5d649-5bea-45f1-99e3-0ca20d156863 tempest-ServerDiagnosticsTest-1267197074 tempest-ServerDiagnosticsTest-1267197074-project-member] Waiting for the task: (returnval){ [ 1809.568226] env[63279]: value = "task-2086686" [ 1809.568226] env[63279]: _type = "Task" [ 1809.568226] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1809.573566] env[63279]: DEBUG nova.compute.manager [None req-96a3c836-4e66-45c5-95bd-3126c6132246 tempest-ServerExternalEventsTest-2060906853 tempest-ServerExternalEventsTest-2060906853-project-member] [instance: 24bce28c-fc43-4f17-9800-4d980f6729bc] Start building block device mappings for instance. {{(pid=63279) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1809.591726] env[63279]: DEBUG oslo_vmware.api [None req-e4b5d649-5bea-45f1-99e3-0ca20d156863 tempest-ServerDiagnosticsTest-1267197074 tempest-ServerDiagnosticsTest-1267197074-project-member] Task: {'id': task-2086686, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1809.887202] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18a28ba2-399e-4c51-82f1-81d323d7ac00 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1809.901144] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65c203c2-bea8-474b-bbe7-033f3e964163 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1809.940608] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf0081c4-c88d-4a95-8394-605e4055ef97 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1809.951811] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dca3bdbf-ace8-4a20-ae1d-8ec80e39d0b1 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1809.969213] env[63279]: DEBUG oslo_vmware.api [None req-3f0594b7-45ab-4426-9a9f-6d257821bf50 tempest-ServerDiagnosticsNegativeTest-1669012955 tempest-ServerDiagnosticsNegativeTest-1669012955-project-member] Task: {'id': task-2086684, 'name': Rename_Task, 'duration_secs': 0.163111} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1809.984146] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-3f0594b7-45ab-4426-9a9f-6d257821bf50 tempest-ServerDiagnosticsNegativeTest-1669012955 tempest-ServerDiagnosticsNegativeTest-1669012955-project-member] [instance: 4063d5e0-1144-40fa-8ed8-efda16730617] Powering on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1809.984146] env[63279]: DEBUG nova.compute.provider_tree [None req-c56e5cd3-c5d3-497a-94cf-fa4a1928fd50 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Inventory has not changed in ProviderTree for provider: 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1809.985597] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f5d362d3-31d8-4215-8cb0-425cf6f7133c {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1809.997075] env[63279]: DEBUG oslo_vmware.api [None req-9e79427e-efa2-427f-9c45-14942fa607b4 tempest-ServersAdminNegativeTestJSON-968304097 tempest-ServersAdminNegativeTestJSON-968304097-project-member] Task: {'id': task-2086685, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1810.000224] env[63279]: DEBUG oslo_vmware.api [None req-3f0594b7-45ab-4426-9a9f-6d257821bf50 tempest-ServerDiagnosticsNegativeTest-1669012955 tempest-ServerDiagnosticsNegativeTest-1669012955-project-member] Waiting for the task: (returnval){ [ 1810.000224] env[63279]: value = "task-2086687" [ 1810.000224] env[63279]: _type = "Task" [ 1810.000224] env[63279]: } to complete. 
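Annotation: the "Inventory has not changed ..." lines come from the scheduler report client diffing the locally computed inventory against what it last pushed for the provider and skipping the update when nothing differs. A conceptual sketch of that comparison using the inventory data shown in the log; Placement's REST API and the ProviderTree cache are not modeled here.

# Inventory records as they appear in the log: resource class -> fields.
current = {
    'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16,
             'step_size': 1, 'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1,
                  'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0},
    'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171,
                'step_size': 1, 'allocation_ratio': 1.0},
}

def inventory_changed(cached, new):
    """True if any resource class or field differs and an update is needed."""
    return cached != new

print(inventory_changed(current, dict(current)))  # False -> skip the update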
{{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1810.014359] env[63279]: DEBUG oslo_concurrency.lockutils [None req-a59d5ce1-88f2-4b01-b5f9-87a8413baff4 tempest-TenantUsagesTestJSON-1157774739 tempest-TenantUsagesTestJSON-1157774739-project-member] Acquiring lock "refresh_cache-8c712d0d-55c2-4a14-b759-9441594211e1" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1810.014359] env[63279]: DEBUG oslo_concurrency.lockutils [None req-a59d5ce1-88f2-4b01-b5f9-87a8413baff4 tempest-TenantUsagesTestJSON-1157774739 tempest-TenantUsagesTestJSON-1157774739-project-member] Acquired lock "refresh_cache-8c712d0d-55c2-4a14-b759-9441594211e1" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1810.014359] env[63279]: DEBUG nova.network.neutron [None req-a59d5ce1-88f2-4b01-b5f9-87a8413baff4 tempest-TenantUsagesTestJSON-1157774739 tempest-TenantUsagesTestJSON-1157774739-project-member] [instance: 8c712d0d-55c2-4a14-b759-9441594211e1] Building network info cache for instance {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1810.014359] env[63279]: DEBUG oslo_vmware.api [None req-3f0594b7-45ab-4426-9a9f-6d257821bf50 tempest-ServerDiagnosticsNegativeTest-1669012955 tempest-ServerDiagnosticsNegativeTest-1669012955-project-member] Task: {'id': task-2086687, 'name': PowerOnVM_Task} progress is 33%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1810.085195] env[63279]: DEBUG oslo_vmware.api [None req-e4b5d649-5bea-45f1-99e3-0ca20d156863 tempest-ServerDiagnosticsTest-1267197074 tempest-ServerDiagnosticsTest-1267197074-project-member] Task: {'id': task-2086686, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.505129} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1810.085195] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-e4b5d649-5bea-45f1-99e3-0ca20d156863 tempest-ServerDiagnosticsTest-1267197074 tempest-ServerDiagnosticsTest-1267197074-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] ac1d0e8f-446a-4a6d-a916-08f52426396d/ac1d0e8f-446a-4a6d-a916-08f52426396d.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1810.085195] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-e4b5d649-5bea-45f1-99e3-0ca20d156863 tempest-ServerDiagnosticsTest-1267197074 tempest-ServerDiagnosticsTest-1267197074-project-member] [instance: ac1d0e8f-446a-4a6d-a916-08f52426396d] Extending root virtual disk to 1048576 {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1810.085195] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-5bb54205-182e-4b1b-9c39-b15253cc9f4b {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1810.092962] env[63279]: DEBUG oslo_vmware.api [None req-e4b5d649-5bea-45f1-99e3-0ca20d156863 tempest-ServerDiagnosticsTest-1267197074 tempest-ServerDiagnosticsTest-1267197074-project-member] Waiting for the task: (returnval){ [ 1810.092962] env[63279]: value = "task-2086688" [ 1810.092962] env[63279]: _type = "Task" [ 1810.092962] env[63279]: } to complete. 
{{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1810.101607] env[63279]: DEBUG oslo_vmware.api [None req-e4b5d649-5bea-45f1-99e3-0ca20d156863 tempest-ServerDiagnosticsTest-1267197074 tempest-ServerDiagnosticsTest-1267197074-project-member] Task: {'id': task-2086688, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1810.153176] env[63279]: DEBUG oslo_concurrency.lockutils [None req-af15306e-e4bd-4235-87e6-bad065fff03b tempest-VolumesAssistedSnapshotsTest-1202308672 tempest-VolumesAssistedSnapshotsTest-1202308672-project-member] Acquiring lock "5d4909ea-396c-45ba-9ff5-acb8576150b3" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1810.153466] env[63279]: DEBUG oslo_concurrency.lockutils [None req-af15306e-e4bd-4235-87e6-bad065fff03b tempest-VolumesAssistedSnapshotsTest-1202308672 tempest-VolumesAssistedSnapshotsTest-1202308672-project-member] Lock "5d4909ea-396c-45ba-9ff5-acb8576150b3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1810.482877] env[63279]: DEBUG oslo_vmware.api [None req-9e79427e-efa2-427f-9c45-14942fa607b4 tempest-ServersAdminNegativeTestJSON-968304097 tempest-ServersAdminNegativeTestJSON-968304097-project-member] Task: {'id': task-2086685, 'name': ReconfigVM_Task, 'duration_secs': 0.584259} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1810.483236] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-9e79427e-efa2-427f-9c45-14942fa607b4 tempest-ServersAdminNegativeTestJSON-968304097 tempest-ServersAdminNegativeTestJSON-968304097-project-member] [instance: 0224e4ea-c13c-4abd-9626-6038c0bbe4e9] Reconfigured VM instance instance-00000001 to attach disk [datastore1] 0224e4ea-c13c-4abd-9626-6038c0bbe4e9/0224e4ea-c13c-4abd-9626-6038c0bbe4e9.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1810.484050] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-3cf194e9-d99a-4290-9f29-f00e3a02ad6a {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1810.492167] env[63279]: DEBUG nova.scheduler.client.report [None req-c56e5cd3-c5d3-497a-94cf-fa4a1928fd50 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Inventory has not changed for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1810.499078] env[63279]: DEBUG oslo_vmware.api [None req-9e79427e-efa2-427f-9c45-14942fa607b4 tempest-ServersAdminNegativeTestJSON-968304097 
tempest-ServersAdminNegativeTestJSON-968304097-project-member] Waiting for the task: (returnval){ [ 1810.499078] env[63279]: value = "task-2086689" [ 1810.499078] env[63279]: _type = "Task" [ 1810.499078] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1810.509380] env[63279]: DEBUG oslo_vmware.api [None req-9e79427e-efa2-427f-9c45-14942fa607b4 tempest-ServersAdminNegativeTestJSON-968304097 tempest-ServersAdminNegativeTestJSON-968304097-project-member] Task: {'id': task-2086689, 'name': Rename_Task} progress is 10%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1810.517302] env[63279]: DEBUG oslo_vmware.api [None req-3f0594b7-45ab-4426-9a9f-6d257821bf50 tempest-ServerDiagnosticsNegativeTest-1669012955 tempest-ServerDiagnosticsNegativeTest-1669012955-project-member] Task: {'id': task-2086687, 'name': PowerOnVM_Task, 'duration_secs': 0.492234} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1810.518424] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-3f0594b7-45ab-4426-9a9f-6d257821bf50 tempest-ServerDiagnosticsNegativeTest-1669012955 tempest-ServerDiagnosticsNegativeTest-1669012955-project-member] [instance: 4063d5e0-1144-40fa-8ed8-efda16730617] Powered on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1810.518995] env[63279]: INFO nova.compute.manager [None req-3f0594b7-45ab-4426-9a9f-6d257821bf50 tempest-ServerDiagnosticsNegativeTest-1669012955 tempest-ServerDiagnosticsNegativeTest-1669012955-project-member] [instance: 4063d5e0-1144-40fa-8ed8-efda16730617] Took 14.15 seconds to spawn the instance on the hypervisor. [ 1810.519150] env[63279]: DEBUG nova.compute.manager [None req-3f0594b7-45ab-4426-9a9f-6d257821bf50 tempest-ServerDiagnosticsNegativeTest-1669012955 tempest-ServerDiagnosticsNegativeTest-1669012955-project-member] [instance: 4063d5e0-1144-40fa-8ed8-efda16730617] Checking state {{(pid=63279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1810.520564] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61b3ea63-229f-4e6f-845d-c8ead4118e99 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1810.594145] env[63279]: DEBUG nova.compute.manager [None req-96a3c836-4e66-45c5-95bd-3126c6132246 tempest-ServerExternalEventsTest-2060906853 tempest-ServerExternalEventsTest-2060906853-project-member] [instance: 24bce28c-fc43-4f17-9800-4d980f6729bc] Start spawning the instance on the hypervisor. {{(pid=63279) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1810.608291] env[63279]: DEBUG oslo_vmware.api [None req-e4b5d649-5bea-45f1-99e3-0ca20d156863 tempest-ServerDiagnosticsTest-1267197074 tempest-ServerDiagnosticsTest-1267197074-project-member] Task: {'id': task-2086688, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.136232} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1810.608746] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-e4b5d649-5bea-45f1-99e3-0ca20d156863 tempest-ServerDiagnosticsTest-1267197074 tempest-ServerDiagnosticsTest-1267197074-project-member] [instance: ac1d0e8f-446a-4a6d-a916-08f52426396d] Extended root virtual disk {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1810.609584] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11cafdb5-2839-4efb-b764-7abb35e0fd1a {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1810.635494] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-e4b5d649-5bea-45f1-99e3-0ca20d156863 tempest-ServerDiagnosticsTest-1267197074 tempest-ServerDiagnosticsTest-1267197074-project-member] [instance: ac1d0e8f-446a-4a6d-a916-08f52426396d] Reconfiguring VM instance instance-00000003 to attach disk [datastore1] ac1d0e8f-446a-4a6d-a916-08f52426396d/ac1d0e8f-446a-4a6d-a916-08f52426396d.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1810.637456] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5bc644bf-507a-44a0-8d8f-831a6e392cfc {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1810.659696] env[63279]: DEBUG oslo_vmware.api [None req-e4b5d649-5bea-45f1-99e3-0ca20d156863 tempest-ServerDiagnosticsTest-1267197074 tempest-ServerDiagnosticsTest-1267197074-project-member] Waiting for the task: (returnval){ [ 1810.659696] env[63279]: value = "task-2086690" [ 1810.659696] env[63279]: _type = "Task" [ 1810.659696] env[63279]: } to complete. 
{{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1810.668021] env[63279]: DEBUG nova.virt.hardware [None req-96a3c836-4e66-45c5-95bd-3126c6132246 tempest-ServerExternalEventsTest-2060906853 tempest-ServerExternalEventsTest-2060906853-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-13T17:30:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-13T17:30:01Z,direct_url=,disk_format='vmdk',id=30887889-e45b-4f67-8b3c-16216e594a90,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a935e86eee0a4c38adfe0367d2097a61',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-13T17:30:02Z,virtual_size=,visibility=), allow threads: False {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1810.668021] env[63279]: DEBUG nova.virt.hardware [None req-96a3c836-4e66-45c5-95bd-3126c6132246 tempest-ServerExternalEventsTest-2060906853 tempest-ServerExternalEventsTest-2060906853-project-member] Flavor limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1810.668021] env[63279]: DEBUG nova.virt.hardware [None req-96a3c836-4e66-45c5-95bd-3126c6132246 tempest-ServerExternalEventsTest-2060906853 tempest-ServerExternalEventsTest-2060906853-project-member] Image limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1810.668264] env[63279]: DEBUG nova.virt.hardware [None req-96a3c836-4e66-45c5-95bd-3126c6132246 tempest-ServerExternalEventsTest-2060906853 tempest-ServerExternalEventsTest-2060906853-project-member] Flavor pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1810.668264] env[63279]: DEBUG nova.virt.hardware [None req-96a3c836-4e66-45c5-95bd-3126c6132246 tempest-ServerExternalEventsTest-2060906853 tempest-ServerExternalEventsTest-2060906853-project-member] Image pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1810.668264] env[63279]: DEBUG nova.virt.hardware [None req-96a3c836-4e66-45c5-95bd-3126c6132246 tempest-ServerExternalEventsTest-2060906853 tempest-ServerExternalEventsTest-2060906853-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1810.668264] env[63279]: DEBUG nova.virt.hardware [None req-96a3c836-4e66-45c5-95bd-3126c6132246 tempest-ServerExternalEventsTest-2060906853 tempest-ServerExternalEventsTest-2060906853-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1810.668264] env[63279]: DEBUG nova.virt.hardware [None req-96a3c836-4e66-45c5-95bd-3126c6132246 tempest-ServerExternalEventsTest-2060906853 tempest-ServerExternalEventsTest-2060906853-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63279) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:475}} [ 1810.668438] env[63279]: DEBUG nova.virt.hardware [None req-96a3c836-4e66-45c5-95bd-3126c6132246 tempest-ServerExternalEventsTest-2060906853 tempest-ServerExternalEventsTest-2060906853-project-member] Got 1 possible topologies {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1810.668438] env[63279]: DEBUG nova.virt.hardware [None req-96a3c836-4e66-45c5-95bd-3126c6132246 tempest-ServerExternalEventsTest-2060906853 tempest-ServerExternalEventsTest-2060906853-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1810.668438] env[63279]: DEBUG nova.virt.hardware [None req-96a3c836-4e66-45c5-95bd-3126c6132246 tempest-ServerExternalEventsTest-2060906853 tempest-ServerExternalEventsTest-2060906853-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1810.669028] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08a3270d-159d-467f-8533-744ea2bf78f5 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1810.678197] env[63279]: DEBUG oslo_vmware.api [None req-e4b5d649-5bea-45f1-99e3-0ca20d156863 tempest-ServerDiagnosticsTest-1267197074 tempest-ServerDiagnosticsTest-1267197074-project-member] Task: {'id': task-2086690, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1810.681565] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6f63f61-74e0-4e89-9a83-8569db646888 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1810.686322] env[63279]: DEBUG nova.network.neutron [None req-a59d5ce1-88f2-4b01-b5f9-87a8413baff4 tempest-TenantUsagesTestJSON-1157774739 tempest-TenantUsagesTestJSON-1157774739-project-member] [instance: 8c712d0d-55c2-4a14-b759-9441594211e1] Instance cache missing network info. {{(pid=63279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1810.927686] env[63279]: DEBUG nova.network.neutron [None req-6931e23b-1e81-4eab-aebf-1b452f1ef607 tempest-DeleteServersAdminTestJSON-778262859 tempest-DeleteServersAdminTestJSON-778262859-project-member] [instance: a301d225-684d-4f88-bc9b-7e02b8115b9d] Successfully updated port: 05cc981a-fb6c-4723-8b86-d58715a9c5e9 {{(pid=63279) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1810.998673] env[63279]: DEBUG oslo_concurrency.lockutils [None req-c56e5cd3-c5d3-497a-94cf-fa4a1928fd50 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.454s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1810.998793] env[63279]: DEBUG nova.compute.manager [None req-c56e5cd3-c5d3-497a-94cf-fa4a1928fd50 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] [instance: 6388f912-ae70-4e8f-b8e4-ceb02e0f8a51] Start building networks asynchronously for instance. 
{{(pid=63279) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1811.002016] env[63279]: DEBUG oslo_concurrency.lockutils [None req-aeaa6e99-ad33-4acc-9a18-53a9725a9078 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.014s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1811.006314] env[63279]: INFO nova.compute.claims [None req-aeaa6e99-ad33-4acc-9a18-53a9725a9078 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] [instance: 5656c853-ac83-47be-83c4-979a9e87ab91] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1811.022828] env[63279]: DEBUG oslo_vmware.api [None req-9e79427e-efa2-427f-9c45-14942fa607b4 tempest-ServersAdminNegativeTestJSON-968304097 tempest-ServersAdminNegativeTestJSON-968304097-project-member] Task: {'id': task-2086689, 'name': Rename_Task, 'duration_secs': 0.153811} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1811.022828] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-9e79427e-efa2-427f-9c45-14942fa607b4 tempest-ServersAdminNegativeTestJSON-968304097 tempest-ServersAdminNegativeTestJSON-968304097-project-member] [instance: 0224e4ea-c13c-4abd-9626-6038c0bbe4e9] Powering on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1811.022828] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ea730af2-f841-4306-8dff-755edb10eb3f {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1811.033138] env[63279]: DEBUG oslo_vmware.api [None req-9e79427e-efa2-427f-9c45-14942fa607b4 tempest-ServersAdminNegativeTestJSON-968304097 tempest-ServersAdminNegativeTestJSON-968304097-project-member] Waiting for the task: (returnval){ [ 1811.033138] env[63279]: value = "task-2086691" [ 1811.033138] env[63279]: _type = "Task" [ 1811.033138] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1811.082085] env[63279]: DEBUG oslo_vmware.api [None req-9e79427e-efa2-427f-9c45-14942fa607b4 tempest-ServersAdminNegativeTestJSON-968304097 tempest-ServersAdminNegativeTestJSON-968304097-project-member] Task: {'id': task-2086691, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1811.089929] env[63279]: INFO nova.compute.manager [None req-3f0594b7-45ab-4426-9a9f-6d257821bf50 tempest-ServerDiagnosticsNegativeTest-1669012955 tempest-ServerDiagnosticsNegativeTest-1669012955-project-member] [instance: 4063d5e0-1144-40fa-8ed8-efda16730617] Took 19.13 seconds to build instance. [ 1811.170325] env[63279]: DEBUG oslo_vmware.api [None req-e4b5d649-5bea-45f1-99e3-0ca20d156863 tempest-ServerDiagnosticsTest-1267197074 tempest-ServerDiagnosticsTest-1267197074-project-member] Task: {'id': task-2086690, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1811.208716] env[63279]: DEBUG nova.compute.manager [req-20eafdc2-39eb-4ecd-ae51-895f8affe586 req-a6d99fb8-972b-48e8-991b-dcf2a62237ed service nova] [instance: ac1d0e8f-446a-4a6d-a916-08f52426396d] Received event network-changed-c449bb4b-138e-412d-b39f-b2811a4ac7df {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 1811.208944] env[63279]: DEBUG nova.compute.manager [req-20eafdc2-39eb-4ecd-ae51-895f8affe586 req-a6d99fb8-972b-48e8-991b-dcf2a62237ed service nova] [instance: ac1d0e8f-446a-4a6d-a916-08f52426396d] Refreshing instance network info cache due to event network-changed-c449bb4b-138e-412d-b39f-b2811a4ac7df. {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11655}} [ 1811.209286] env[63279]: DEBUG oslo_concurrency.lockutils [req-20eafdc2-39eb-4ecd-ae51-895f8affe586 req-a6d99fb8-972b-48e8-991b-dcf2a62237ed service nova] Acquiring lock "refresh_cache-ac1d0e8f-446a-4a6d-a916-08f52426396d" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1811.209516] env[63279]: DEBUG oslo_concurrency.lockutils [req-20eafdc2-39eb-4ecd-ae51-895f8affe586 req-a6d99fb8-972b-48e8-991b-dcf2a62237ed service nova] Acquired lock "refresh_cache-ac1d0e8f-446a-4a6d-a916-08f52426396d" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1811.209647] env[63279]: DEBUG nova.network.neutron [req-20eafdc2-39eb-4ecd-ae51-895f8affe586 req-a6d99fb8-972b-48e8-991b-dcf2a62237ed service nova] [instance: ac1d0e8f-446a-4a6d-a916-08f52426396d] Refreshing network info cache for port c449bb4b-138e-412d-b39f-b2811a4ac7df {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1811.213149] env[63279]: DEBUG nova.network.neutron [None req-96a3c836-4e66-45c5-95bd-3126c6132246 tempest-ServerExternalEventsTest-2060906853 tempest-ServerExternalEventsTest-2060906853-project-member] [instance: 24bce28c-fc43-4f17-9800-4d980f6729bc] Successfully created port: c3997b27-53bf-4f2d-828a-a55447612331 {{(pid=63279) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1811.428968] env[63279]: DEBUG oslo_concurrency.lockutils [None req-6931e23b-1e81-4eab-aebf-1b452f1ef607 tempest-DeleteServersAdminTestJSON-778262859 tempest-DeleteServersAdminTestJSON-778262859-project-member] Acquiring lock "refresh_cache-a301d225-684d-4f88-bc9b-7e02b8115b9d" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1811.429157] env[63279]: DEBUG oslo_concurrency.lockutils [None req-6931e23b-1e81-4eab-aebf-1b452f1ef607 tempest-DeleteServersAdminTestJSON-778262859 tempest-DeleteServersAdminTestJSON-778262859-project-member] Acquired lock "refresh_cache-a301d225-684d-4f88-bc9b-7e02b8115b9d" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1811.429326] env[63279]: DEBUG nova.network.neutron [None req-6931e23b-1e81-4eab-aebf-1b452f1ef607 tempest-DeleteServersAdminTestJSON-778262859 tempest-DeleteServersAdminTestJSON-778262859-project-member] [instance: a301d225-684d-4f88-bc9b-7e02b8115b9d] Building network info cache for instance {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1811.484135] env[63279]: DEBUG nova.network.neutron [None req-a59d5ce1-88f2-4b01-b5f9-87a8413baff4 tempest-TenantUsagesTestJSON-1157774739 
tempest-TenantUsagesTestJSON-1157774739-project-member] [instance: 8c712d0d-55c2-4a14-b759-9441594211e1] Updating instance_info_cache with network_info: [{"id": "a1328084-8103-45cf-a3f8-b825bebe06c0", "address": "fa:16:3e:e0:86:46", "network": {"id": "1e875730-8e0e-4907-a6d2-776025ed7ab9", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.80", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "a935e86eee0a4c38adfe0367d2097a61", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "32028d02-abaa-4071-bc65-1460f5c772a8", "external-id": "nsx-vlan-transportzone-558", "segmentation_id": 558, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa1328084-81", "ovs_interfaceid": "a1328084-8103-45cf-a3f8-b825bebe06c0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1811.506569] env[63279]: DEBUG nova.compute.utils [None req-c56e5cd3-c5d3-497a-94cf-fa4a1928fd50 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Using /dev/sd instead of None {{(pid=63279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1811.508350] env[63279]: DEBUG nova.compute.manager [None req-c56e5cd3-c5d3-497a-94cf-fa4a1928fd50 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] [instance: 6388f912-ae70-4e8f-b8e4-ceb02e0f8a51] Allocating IP information in the background. {{(pid=63279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1811.509224] env[63279]: DEBUG nova.network.neutron [None req-c56e5cd3-c5d3-497a-94cf-fa4a1928fd50 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] [instance: 6388f912-ae70-4e8f-b8e4-ceb02e0f8a51] allocate_for_instance() {{(pid=63279) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1811.548140] env[63279]: DEBUG oslo_vmware.api [None req-9e79427e-efa2-427f-9c45-14942fa607b4 tempest-ServersAdminNegativeTestJSON-968304097 tempest-ServersAdminNegativeTestJSON-968304097-project-member] Task: {'id': task-2086691, 'name': PowerOnVM_Task, 'duration_secs': 0.505825} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1811.548140] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-9e79427e-efa2-427f-9c45-14942fa607b4 tempest-ServersAdminNegativeTestJSON-968304097 tempest-ServersAdminNegativeTestJSON-968304097-project-member] [instance: 0224e4ea-c13c-4abd-9626-6038c0bbe4e9] Powered on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1811.548140] env[63279]: INFO nova.compute.manager [None req-9e79427e-efa2-427f-9c45-14942fa607b4 tempest-ServersAdminNegativeTestJSON-968304097 tempest-ServersAdminNegativeTestJSON-968304097-project-member] [instance: 0224e4ea-c13c-4abd-9626-6038c0bbe4e9] Took 12.87 seconds to spawn the instance on the hypervisor. 
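The entries above trace the spawn path for instance ac1d0e8f-446a-4a6d-a916-08f52426396d: the cached image VMDK is copied (task-2086686), the root disk is extended (task-2086688), the VM is reconfigured to attach the disk (task-2086690), renamed, and finally powered on, with each vCenter task polled via wait_for_task/_poll_task until it reports success and a duration_secs. The sketch below is a minimal, self-contained illustration of that poll-until-complete loop; FakeTask, its refresh() method, and the printed messages are hypothetical stand-ins for the pattern, not the oslo.vmware API itself.

import time
from dataclasses import dataclass

@dataclass
class FakeTask:
    """Hypothetical stand-in for a vCenter task handle."""
    task_id: str
    progress: int = 0
    state: str = "running"  # flips to "success" once progress reaches 100

    def refresh(self) -> None:
        # Pretend each poll advances the task, roughly like the 0% / 33% / 66% steps in the log.
        self.progress = min(100, self.progress + 34)
        if self.progress >= 100:
            self.state = "success"

def wait_for_task(task: FakeTask, poll_interval: float = 0.5, timeout: float = 60.0) -> float:
    """Poll a task until it succeeds; return the elapsed duration in seconds."""
    start = time.monotonic()
    while True:
        task.refresh()
        elapsed = time.monotonic() - start
        if task.state == "success":
            print(f"Task {task.task_id} completed successfully, duration_secs={elapsed:.3f}")
            return elapsed
        print(f"Task {task.task_id} progress is {task.progress}%.")
        if elapsed > timeout:
            raise TimeoutError(f"Task {task.task_id} did not finish within {timeout}s")
        time.sleep(poll_interval)

if __name__ == "__main__":
    wait_for_task(FakeTask("task-2086691"), poll_interval=0.1)

In the real driver the equivalent loop is driven by oslo.vmware polling the vCenter API on a timer, which is why every Task entry in the log alternates between "progress is N%" lines and a final "completed successfully" carrying the measured duration.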
[ 1811.548140] env[63279]: DEBUG nova.compute.manager [None req-9e79427e-efa2-427f-9c45-14942fa607b4 tempest-ServersAdminNegativeTestJSON-968304097 tempest-ServersAdminNegativeTestJSON-968304097-project-member] [instance: 0224e4ea-c13c-4abd-9626-6038c0bbe4e9] Checking state {{(pid=63279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1811.548140] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e77d8aad-2604-466d-b57f-f16c4221b5bf {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1811.593032] env[63279]: DEBUG oslo_concurrency.lockutils [None req-3f0594b7-45ab-4426-9a9f-6d257821bf50 tempest-ServerDiagnosticsNegativeTest-1669012955 tempest-ServerDiagnosticsNegativeTest-1669012955-project-member] Lock "4063d5e0-1144-40fa-8ed8-efda16730617" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 20.647s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1811.672769] env[63279]: DEBUG oslo_vmware.api [None req-e4b5d649-5bea-45f1-99e3-0ca20d156863 tempest-ServerDiagnosticsTest-1267197074 tempest-ServerDiagnosticsTest-1267197074-project-member] Task: {'id': task-2086690, 'name': ReconfigVM_Task, 'duration_secs': 0.729453} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1811.672769] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-e4b5d649-5bea-45f1-99e3-0ca20d156863 tempest-ServerDiagnosticsTest-1267197074 tempest-ServerDiagnosticsTest-1267197074-project-member] [instance: ac1d0e8f-446a-4a6d-a916-08f52426396d] Reconfigured VM instance instance-00000003 to attach disk [datastore1] ac1d0e8f-446a-4a6d-a916-08f52426396d/ac1d0e8f-446a-4a6d-a916-08f52426396d.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1811.673516] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-35ae8b03-f320-4fd2-8c2f-91fbf536a12c {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1811.683263] env[63279]: DEBUG oslo_vmware.api [None req-e4b5d649-5bea-45f1-99e3-0ca20d156863 tempest-ServerDiagnosticsTest-1267197074 tempest-ServerDiagnosticsTest-1267197074-project-member] Waiting for the task: (returnval){ [ 1811.683263] env[63279]: value = "task-2086692" [ 1811.683263] env[63279]: _type = "Task" [ 1811.683263] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1811.692083] env[63279]: DEBUG oslo_vmware.api [None req-e4b5d649-5bea-45f1-99e3-0ca20d156863 tempest-ServerDiagnosticsTest-1267197074 tempest-ServerDiagnosticsTest-1267197074-project-member] Task: {'id': task-2086692, 'name': Rename_Task} progress is 5%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1811.695075] env[63279]: DEBUG nova.policy [None req-c56e5cd3-c5d3-497a-94cf-fa4a1928fd50 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8c9e365ca4f941f19cd9e82676a52acc', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3f6c6f65521a440fb80278bbff2d0ed0', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63279) authorize /opt/stack/nova/nova/policy.py:192}} [ 1811.986792] env[63279]: DEBUG oslo_concurrency.lockutils [None req-a59d5ce1-88f2-4b01-b5f9-87a8413baff4 tempest-TenantUsagesTestJSON-1157774739 tempest-TenantUsagesTestJSON-1157774739-project-member] Releasing lock "refresh_cache-8c712d0d-55c2-4a14-b759-9441594211e1" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1811.987157] env[63279]: DEBUG nova.compute.manager [None req-a59d5ce1-88f2-4b01-b5f9-87a8413baff4 tempest-TenantUsagesTestJSON-1157774739 tempest-TenantUsagesTestJSON-1157774739-project-member] [instance: 8c712d0d-55c2-4a14-b759-9441594211e1] Instance network_info: |[{"id": "a1328084-8103-45cf-a3f8-b825bebe06c0", "address": "fa:16:3e:e0:86:46", "network": {"id": "1e875730-8e0e-4907-a6d2-776025ed7ab9", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.80", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "a935e86eee0a4c38adfe0367d2097a61", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "32028d02-abaa-4071-bc65-1460f5c772a8", "external-id": "nsx-vlan-transportzone-558", "segmentation_id": 558, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa1328084-81", "ovs_interfaceid": "a1328084-8103-45cf-a3f8-b825bebe06c0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1811.987576] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-a59d5ce1-88f2-4b01-b5f9-87a8413baff4 tempest-TenantUsagesTestJSON-1157774739 tempest-TenantUsagesTestJSON-1157774739-project-member] [instance: 8c712d0d-55c2-4a14-b759-9441594211e1] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e0:86:46', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '32028d02-abaa-4071-bc65-1460f5c772a8', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a1328084-8103-45cf-a3f8-b825bebe06c0', 'vif_model': 'vmxnet3'}] {{(pid=63279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1812.001616] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-a59d5ce1-88f2-4b01-b5f9-87a8413baff4 tempest-TenantUsagesTestJSON-1157774739 
tempest-TenantUsagesTestJSON-1157774739-project-member] Creating folder: Project (b0d0ea7b68be4d078398db9a0896410b). Parent ref: group-v427491. {{(pid=63279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1812.002528] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-92c37ce7-f4ef-41c3-9e00-b5a539906176 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1812.010290] env[63279]: DEBUG nova.compute.manager [None req-c56e5cd3-c5d3-497a-94cf-fa4a1928fd50 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] [instance: 6388f912-ae70-4e8f-b8e4-ceb02e0f8a51] Start building block device mappings for instance. {{(pid=63279) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1812.029030] env[63279]: INFO nova.virt.vmwareapi.vm_util [None req-a59d5ce1-88f2-4b01-b5f9-87a8413baff4 tempest-TenantUsagesTestJSON-1157774739 tempest-TenantUsagesTestJSON-1157774739-project-member] Created folder: Project (b0d0ea7b68be4d078398db9a0896410b) in parent group-v427491. [ 1812.030282] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-a59d5ce1-88f2-4b01-b5f9-87a8413baff4 tempest-TenantUsagesTestJSON-1157774739 tempest-TenantUsagesTestJSON-1157774739-project-member] Creating folder: Instances. Parent ref: group-v427501. {{(pid=63279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1812.031718] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-efeffed9-849a-4fba-9df4-3815bcb4e8f9 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1812.039377] env[63279]: INFO nova.virt.vmwareapi.vm_util [None req-a59d5ce1-88f2-4b01-b5f9-87a8413baff4 tempest-TenantUsagesTestJSON-1157774739 tempest-TenantUsagesTestJSON-1157774739-project-member] Created folder: Instances in parent group-v427501. [ 1812.039639] env[63279]: DEBUG oslo.service.loopingcall [None req-a59d5ce1-88f2-4b01-b5f9-87a8413baff4 tempest-TenantUsagesTestJSON-1157774739 tempest-TenantUsagesTestJSON-1157774739-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1812.039835] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8c712d0d-55c2-4a14-b759-9441594211e1] Creating VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1812.040330] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e82f669c-16ee-445b-9626-a29905650696 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1812.066424] env[63279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1812.066424] env[63279]: value = "task-2086695" [ 1812.066424] env[63279]: _type = "Task" [ 1812.066424] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1812.068596] env[63279]: INFO nova.compute.manager [None req-9e79427e-efa2-427f-9c45-14942fa607b4 tempest-ServersAdminNegativeTestJSON-968304097 tempest-ServersAdminNegativeTestJSON-968304097-project-member] [instance: 0224e4ea-c13c-4abd-9626-6038c0bbe4e9] Took 20.08 seconds to build instance. 
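Threaded through these entries is the oslo_concurrency.lockutils bookkeeping that serializes work on shared state: each worker logs "Acquiring lock", then how long it waited once the lock is acquired (15.014s for "compute_resources" above), and on release how long it was held (2.454s for the resource claim, 20.647s and 21.609s for whole _locked_do_build_and_run_instance runs). The snippet below is a minimal sketch of that waited/held accounting using plain threading; the timed_lock helper and its messages are illustrative of the pattern only, not Nova's or lockutils' implementation.

import threading
import time
from contextlib import contextmanager

_locks: dict[str, threading.Lock] = {}
_registry_guard = threading.Lock()

def _get_lock(name: str) -> threading.Lock:
    # One named lock per shared resource, e.g. "compute_resources" or an instance UUID.
    with _registry_guard:
        return _locks.setdefault(name, threading.Lock())

@contextmanager
def timed_lock(name: str, owner: str):
    print(f'Acquiring lock "{name}" by "{owner}"')
    lock = _get_lock(name)
    wait_start = time.monotonic()
    lock.acquire()
    print(f'Lock "{name}" acquired by "{owner}" :: waited {time.monotonic() - wait_start:.3f}s')
    held_start = time.monotonic()
    try:
        yield
    finally:
        held = time.monotonic() - held_start
        lock.release()
        print(f'Lock "{name}" released by "{owner}" :: held {held:.3f}s')

if __name__ == "__main__":
    # Example: serialize a resource claim the way the ResourceTracker entries above do.
    with timed_lock("compute_resources", "ResourceTracker.instance_claim"):
        time.sleep(0.2)  # stand-in for the work done while the lock is held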
[ 1812.080621] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2086695, 'name': CreateVM_Task} progress is 6%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1812.097462] env[63279]: DEBUG nova.compute.manager [None req-9be70fed-855a-424c-af8e-36597da29526 tempest-ImagesOneServerTestJSON-859902932 tempest-ImagesOneServerTestJSON-859902932-project-member] [instance: 7cd673bb-3795-4c6f-a1e0-a8ea0da0a35f] Starting instance... {{(pid=63279) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1812.206695] env[63279]: DEBUG oslo_vmware.api [None req-e4b5d649-5bea-45f1-99e3-0ca20d156863 tempest-ServerDiagnosticsTest-1267197074 tempest-ServerDiagnosticsTest-1267197074-project-member] Task: {'id': task-2086692, 'name': Rename_Task, 'duration_secs': 0.403315} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1812.207076] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-e4b5d649-5bea-45f1-99e3-0ca20d156863 tempest-ServerDiagnosticsTest-1267197074 tempest-ServerDiagnosticsTest-1267197074-project-member] [instance: ac1d0e8f-446a-4a6d-a916-08f52426396d] Powering on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1812.207367] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4f8c1431-0452-4642-90ee-6d4fb33f184a {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1812.217295] env[63279]: DEBUG oslo_vmware.api [None req-e4b5d649-5bea-45f1-99e3-0ca20d156863 tempest-ServerDiagnosticsTest-1267197074 tempest-ServerDiagnosticsTest-1267197074-project-member] Waiting for the task: (returnval){ [ 1812.217295] env[63279]: value = "task-2086696" [ 1812.217295] env[63279]: _type = "Task" [ 1812.217295] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1812.236568] env[63279]: DEBUG oslo_vmware.api [None req-e4b5d649-5bea-45f1-99e3-0ca20d156863 tempest-ServerDiagnosticsTest-1267197074 tempest-ServerDiagnosticsTest-1267197074-project-member] Task: {'id': task-2086696, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1812.237717] env[63279]: DEBUG nova.network.neutron [None req-6931e23b-1e81-4eab-aebf-1b452f1ef607 tempest-DeleteServersAdminTestJSON-778262859 tempest-DeleteServersAdminTestJSON-778262859-project-member] [instance: a301d225-684d-4f88-bc9b-7e02b8115b9d] Instance cache missing network info. 
{{(pid=63279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1812.359172] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc75d5d2-2310-4c72-9aec-26497c27d053 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1812.367548] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc0cbbb7-b230-43d1-9c63-dcbf7131a257 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1812.404151] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ae64995-803b-4bd4-bd77-b1100557c8fd {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1812.412568] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00c83850-f4e1-4b54-a340-656470290e39 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1812.429392] env[63279]: DEBUG nova.compute.provider_tree [None req-aeaa6e99-ad33-4acc-9a18-53a9725a9078 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Inventory has not changed in ProviderTree for provider: 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1812.583442] env[63279]: DEBUG oslo_concurrency.lockutils [None req-9e79427e-efa2-427f-9c45-14942fa607b4 tempest-ServersAdminNegativeTestJSON-968304097 tempest-ServersAdminNegativeTestJSON-968304097-project-member] Lock "0224e4ea-c13c-4abd-9626-6038c0bbe4e9" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 21.609s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1812.583956] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2086695, 'name': CreateVM_Task, 'duration_secs': 0.415873} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1812.585300] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8c712d0d-55c2-4a14-b759-9441594211e1] Created VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1812.585760] env[63279]: DEBUG oslo_concurrency.lockutils [None req-a59d5ce1-88f2-4b01-b5f9-87a8413baff4 tempest-TenantUsagesTestJSON-1157774739 tempest-TenantUsagesTestJSON-1157774739-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1812.585919] env[63279]: DEBUG oslo_concurrency.lockutils [None req-a59d5ce1-88f2-4b01-b5f9-87a8413baff4 tempest-TenantUsagesTestJSON-1157774739 tempest-TenantUsagesTestJSON-1157774739-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1812.590724] env[63279]: DEBUG oslo_concurrency.lockutils [None req-a59d5ce1-88f2-4b01-b5f9-87a8413baff4 tempest-TenantUsagesTestJSON-1157774739 tempest-TenantUsagesTestJSON-1157774739-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1812.591439] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3c4363f8-a709-4d68-8547-d0d396bd1c08 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1812.600928] env[63279]: DEBUG oslo_vmware.api [None req-a59d5ce1-88f2-4b01-b5f9-87a8413baff4 tempest-TenantUsagesTestJSON-1157774739 tempest-TenantUsagesTestJSON-1157774739-project-member] Waiting for the task: (returnval){ [ 1812.600928] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52dfea72-4de9-f69b-7af5-e1eb23ab886c" [ 1812.600928] env[63279]: _type = "Task" [ 1812.600928] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1812.637813] env[63279]: DEBUG oslo_vmware.api [None req-a59d5ce1-88f2-4b01-b5f9-87a8413baff4 tempest-TenantUsagesTestJSON-1157774739 tempest-TenantUsagesTestJSON-1157774739-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52dfea72-4de9-f69b-7af5-e1eb23ab886c, 'name': SearchDatastore_Task, 'duration_secs': 0.008796} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1812.638248] env[63279]: DEBUG oslo_concurrency.lockutils [None req-a59d5ce1-88f2-4b01-b5f9-87a8413baff4 tempest-TenantUsagesTestJSON-1157774739 tempest-TenantUsagesTestJSON-1157774739-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1812.638409] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-a59d5ce1-88f2-4b01-b5f9-87a8413baff4 tempest-TenantUsagesTestJSON-1157774739 tempest-TenantUsagesTestJSON-1157774739-project-member] [instance: 8c712d0d-55c2-4a14-b759-9441594211e1] Processing image 30887889-e45b-4f67-8b3c-16216e594a90 {{(pid=63279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1812.639105] env[63279]: DEBUG oslo_concurrency.lockutils [None req-a59d5ce1-88f2-4b01-b5f9-87a8413baff4 tempest-TenantUsagesTestJSON-1157774739 tempest-TenantUsagesTestJSON-1157774739-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1812.639105] env[63279]: DEBUG oslo_concurrency.lockutils [None req-a59d5ce1-88f2-4b01-b5f9-87a8413baff4 tempest-TenantUsagesTestJSON-1157774739 tempest-TenantUsagesTestJSON-1157774739-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1812.639105] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-a59d5ce1-88f2-4b01-b5f9-87a8413baff4 tempest-TenantUsagesTestJSON-1157774739 tempest-TenantUsagesTestJSON-1157774739-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1812.648517] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3a3b0171-bb66-48c8-a381-aea22a374546 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1812.653975] env[63279]: DEBUG nova.compute.manager [req-cb74a60b-20c1-440a-aea5-a658d3dcd8dd req-533979f1-e3e6-4703-b161-6ef07ea6202d service nova] [instance: a301d225-684d-4f88-bc9b-7e02b8115b9d] Received event network-vif-plugged-05cc981a-fb6c-4723-8b86-d58715a9c5e9 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 1812.654296] env[63279]: DEBUG oslo_concurrency.lockutils [req-cb74a60b-20c1-440a-aea5-a658d3dcd8dd req-533979f1-e3e6-4703-b161-6ef07ea6202d service nova] Acquiring lock "a301d225-684d-4f88-bc9b-7e02b8115b9d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1812.654518] env[63279]: DEBUG oslo_concurrency.lockutils [req-cb74a60b-20c1-440a-aea5-a658d3dcd8dd req-533979f1-e3e6-4703-b161-6ef07ea6202d service nova] Lock "a301d225-684d-4f88-bc9b-7e02b8115b9d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 
1812.654753] env[63279]: DEBUG oslo_concurrency.lockutils [req-cb74a60b-20c1-440a-aea5-a658d3dcd8dd req-533979f1-e3e6-4703-b161-6ef07ea6202d service nova] Lock "a301d225-684d-4f88-bc9b-7e02b8115b9d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1812.654905] env[63279]: DEBUG nova.compute.manager [req-cb74a60b-20c1-440a-aea5-a658d3dcd8dd req-533979f1-e3e6-4703-b161-6ef07ea6202d service nova] [instance: a301d225-684d-4f88-bc9b-7e02b8115b9d] No waiting events found dispatching network-vif-plugged-05cc981a-fb6c-4723-8b86-d58715a9c5e9 {{(pid=63279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1812.655131] env[63279]: WARNING nova.compute.manager [req-cb74a60b-20c1-440a-aea5-a658d3dcd8dd req-533979f1-e3e6-4703-b161-6ef07ea6202d service nova] [instance: a301d225-684d-4f88-bc9b-7e02b8115b9d] Received unexpected event network-vif-plugged-05cc981a-fb6c-4723-8b86-d58715a9c5e9 for instance with vm_state building and task_state spawning. [ 1812.662899] env[63279]: DEBUG oslo_concurrency.lockutils [None req-9be70fed-855a-424c-af8e-36597da29526 tempest-ImagesOneServerTestJSON-859902932 tempest-ImagesOneServerTestJSON-859902932-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1812.667030] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-a59d5ce1-88f2-4b01-b5f9-87a8413baff4 tempest-TenantUsagesTestJSON-1157774739 tempest-TenantUsagesTestJSON-1157774739-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1812.667030] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-a59d5ce1-88f2-4b01-b5f9-87a8413baff4 tempest-TenantUsagesTestJSON-1157774739 tempest-TenantUsagesTestJSON-1157774739-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1812.667030] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4abc43bb-11fc-4e5d-8d30-f46cf4f166fe {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1812.671495] env[63279]: DEBUG oslo_vmware.api [None req-a59d5ce1-88f2-4b01-b5f9-87a8413baff4 tempest-TenantUsagesTestJSON-1157774739 tempest-TenantUsagesTestJSON-1157774739-project-member] Waiting for the task: (returnval){ [ 1812.671495] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52e5e748-dc5a-946e-a34d-c21c9929528d" [ 1812.671495] env[63279]: _type = "Task" [ 1812.671495] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1812.679414] env[63279]: DEBUG oslo_vmware.api [None req-a59d5ce1-88f2-4b01-b5f9-87a8413baff4 tempest-TenantUsagesTestJSON-1157774739 tempest-TenantUsagesTestJSON-1157774739-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52e5e748-dc5a-946e-a34d-c21c9929528d, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1812.732866] env[63279]: DEBUG oslo_vmware.api [None req-e4b5d649-5bea-45f1-99e3-0ca20d156863 tempest-ServerDiagnosticsTest-1267197074 tempest-ServerDiagnosticsTest-1267197074-project-member] Task: {'id': task-2086696, 'name': PowerOnVM_Task} progress is 64%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1812.936023] env[63279]: DEBUG nova.scheduler.client.report [None req-aeaa6e99-ad33-4acc-9a18-53a9725a9078 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Inventory has not changed for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1813.030374] env[63279]: DEBUG nova.compute.manager [None req-c56e5cd3-c5d3-497a-94cf-fa4a1928fd50 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] [instance: 6388f912-ae70-4e8f-b8e4-ceb02e0f8a51] Start spawning the instance on the hypervisor. {{(pid=63279) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1813.065304] env[63279]: DEBUG nova.virt.hardware [None req-c56e5cd3-c5d3-497a-94cf-fa4a1928fd50 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-13T17:30:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-13T17:30:01Z,direct_url=,disk_format='vmdk',id=30887889-e45b-4f67-8b3c-16216e594a90,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a935e86eee0a4c38adfe0367d2097a61',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-13T17:30:02Z,virtual_size=,visibility=), allow threads: False {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1813.065613] env[63279]: DEBUG nova.virt.hardware [None req-c56e5cd3-c5d3-497a-94cf-fa4a1928fd50 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Flavor limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1813.065794] env[63279]: DEBUG nova.virt.hardware [None req-c56e5cd3-c5d3-497a-94cf-fa4a1928fd50 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Image limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1813.066083] env[63279]: DEBUG nova.virt.hardware [None req-c56e5cd3-c5d3-497a-94cf-fa4a1928fd50 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Flavor pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:392}} [ 1813.066320] env[63279]: DEBUG nova.virt.hardware [None req-c56e5cd3-c5d3-497a-94cf-fa4a1928fd50 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Image pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1813.066435] env[63279]: DEBUG nova.virt.hardware [None req-c56e5cd3-c5d3-497a-94cf-fa4a1928fd50 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1813.066623] env[63279]: DEBUG nova.virt.hardware [None req-c56e5cd3-c5d3-497a-94cf-fa4a1928fd50 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1813.066842] env[63279]: DEBUG nova.virt.hardware [None req-c56e5cd3-c5d3-497a-94cf-fa4a1928fd50 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1813.066954] env[63279]: DEBUG nova.virt.hardware [None req-c56e5cd3-c5d3-497a-94cf-fa4a1928fd50 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Got 1 possible topologies {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1813.067155] env[63279]: DEBUG nova.virt.hardware [None req-c56e5cd3-c5d3-497a-94cf-fa4a1928fd50 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1813.067351] env[63279]: DEBUG nova.virt.hardware [None req-c56e5cd3-c5d3-497a-94cf-fa4a1928fd50 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1813.068740] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-847772c6-5a4a-42c7-8916-a5031b13defc {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1813.078798] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd880c6e-92d8-449a-9e6d-3131f4dbd540 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1813.097549] env[63279]: DEBUG nova.compute.manager [None req-11810f0f-6d8f-490b-a1f5-ddff9fda61b5 tempest-ServerDiagnosticsV248Test-703192613 tempest-ServerDiagnosticsV248Test-703192613-project-member] [instance: abd63285-ee3c-4546-b86d-6d4388765d94] Starting instance... 
{{(pid=63279) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1813.143870] env[63279]: DEBUG nova.network.neutron [req-20eafdc2-39eb-4ecd-ae51-895f8affe586 req-a6d99fb8-972b-48e8-991b-dcf2a62237ed service nova] [instance: ac1d0e8f-446a-4a6d-a916-08f52426396d] Updated VIF entry in instance network info cache for port c449bb4b-138e-412d-b39f-b2811a4ac7df. {{(pid=63279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1813.143870] env[63279]: DEBUG nova.network.neutron [req-20eafdc2-39eb-4ecd-ae51-895f8affe586 req-a6d99fb8-972b-48e8-991b-dcf2a62237ed service nova] [instance: ac1d0e8f-446a-4a6d-a916-08f52426396d] Updating instance_info_cache with network_info: [{"id": "c449bb4b-138e-412d-b39f-b2811a4ac7df", "address": "fa:16:3e:39:d1:65", "network": {"id": "1e875730-8e0e-4907-a6d2-776025ed7ab9", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.177", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "a935e86eee0a4c38adfe0367d2097a61", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "32028d02-abaa-4071-bc65-1460f5c772a8", "external-id": "nsx-vlan-transportzone-558", "segmentation_id": 558, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc449bb4b-13", "ovs_interfaceid": "c449bb4b-138e-412d-b39f-b2811a4ac7df", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1813.144422] env[63279]: DEBUG oslo_concurrency.lockutils [None req-99df1493-5715-4262-b84c-33faf3142bfd tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] Acquiring lock "1a55008a-4d8c-403d-a8f4-966aa7346f4c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1813.144536] env[63279]: DEBUG oslo_concurrency.lockutils [None req-99df1493-5715-4262-b84c-33faf3142bfd tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] Lock "1a55008a-4d8c-403d-a8f4-966aa7346f4c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1813.193886] env[63279]: DEBUG oslo_vmware.api [None req-a59d5ce1-88f2-4b01-b5f9-87a8413baff4 tempest-TenantUsagesTestJSON-1157774739 tempest-TenantUsagesTestJSON-1157774739-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52e5e748-dc5a-946e-a34d-c21c9929528d, 'name': SearchDatastore_Task, 'duration_secs': 0.009233} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1813.194775] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5f7d7e59-8ecf-4b06-b2c5-5b9922788878 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1813.201756] env[63279]: DEBUG oslo_vmware.api [None req-a59d5ce1-88f2-4b01-b5f9-87a8413baff4 tempest-TenantUsagesTestJSON-1157774739 tempest-TenantUsagesTestJSON-1157774739-project-member] Waiting for the task: (returnval){ [ 1813.201756] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]5277bfd7-cdef-52a2-8ca5-9b9cb931d5d9" [ 1813.201756] env[63279]: _type = "Task" [ 1813.201756] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1813.224836] env[63279]: DEBUG oslo_vmware.api [None req-a59d5ce1-88f2-4b01-b5f9-87a8413baff4 tempest-TenantUsagesTestJSON-1157774739 tempest-TenantUsagesTestJSON-1157774739-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]5277bfd7-cdef-52a2-8ca5-9b9cb931d5d9, 'name': SearchDatastore_Task, 'duration_secs': 0.010275} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1813.226767] env[63279]: DEBUG nova.network.neutron [None req-6931e23b-1e81-4eab-aebf-1b452f1ef607 tempest-DeleteServersAdminTestJSON-778262859 tempest-DeleteServersAdminTestJSON-778262859-project-member] [instance: a301d225-684d-4f88-bc9b-7e02b8115b9d] Updating instance_info_cache with network_info: [{"id": "05cc981a-fb6c-4723-8b86-d58715a9c5e9", "address": "fa:16:3e:80:3a:dd", "network": {"id": "1e875730-8e0e-4907-a6d2-776025ed7ab9", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.240", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "a935e86eee0a4c38adfe0367d2097a61", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "32028d02-abaa-4071-bc65-1460f5c772a8", "external-id": "nsx-vlan-transportzone-558", "segmentation_id": 558, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap05cc981a-fb", "ovs_interfaceid": "05cc981a-fb6c-4723-8b86-d58715a9c5e9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1813.237876] env[63279]: DEBUG oslo_concurrency.lockutils [None req-a59d5ce1-88f2-4b01-b5f9-87a8413baff4 tempest-TenantUsagesTestJSON-1157774739 tempest-TenantUsagesTestJSON-1157774739-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1813.238271] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-a59d5ce1-88f2-4b01-b5f9-87a8413baff4 tempest-TenantUsagesTestJSON-1157774739 
tempest-TenantUsagesTestJSON-1157774739-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] 8c712d0d-55c2-4a14-b759-9441594211e1/8c712d0d-55c2-4a14-b759-9441594211e1.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1813.238730] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0870992e-d544-4b9d-8138-e752748d8dec {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1813.248639] env[63279]: DEBUG oslo_vmware.api [None req-e4b5d649-5bea-45f1-99e3-0ca20d156863 tempest-ServerDiagnosticsTest-1267197074 tempest-ServerDiagnosticsTest-1267197074-project-member] Task: {'id': task-2086696, 'name': PowerOnVM_Task, 'duration_secs': 0.944398} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1813.249618] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-e4b5d649-5bea-45f1-99e3-0ca20d156863 tempest-ServerDiagnosticsTest-1267197074 tempest-ServerDiagnosticsTest-1267197074-project-member] [instance: ac1d0e8f-446a-4a6d-a916-08f52426396d] Powered on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1813.249869] env[63279]: INFO nova.compute.manager [None req-e4b5d649-5bea-45f1-99e3-0ca20d156863 tempest-ServerDiagnosticsTest-1267197074 tempest-ServerDiagnosticsTest-1267197074-project-member] [instance: ac1d0e8f-446a-4a6d-a916-08f52426396d] Took 12.21 seconds to spawn the instance on the hypervisor. [ 1813.250101] env[63279]: DEBUG nova.compute.manager [None req-e4b5d649-5bea-45f1-99e3-0ca20d156863 tempest-ServerDiagnosticsTest-1267197074 tempest-ServerDiagnosticsTest-1267197074-project-member] [instance: ac1d0e8f-446a-4a6d-a916-08f52426396d] Checking state {{(pid=63279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1813.250466] env[63279]: DEBUG oslo_vmware.api [None req-a59d5ce1-88f2-4b01-b5f9-87a8413baff4 tempest-TenantUsagesTestJSON-1157774739 tempest-TenantUsagesTestJSON-1157774739-project-member] Waiting for the task: (returnval){ [ 1813.250466] env[63279]: value = "task-2086697" [ 1813.250466] env[63279]: _type = "Task" [ 1813.250466] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1813.251580] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-360b963a-c518-4e00-a9f3-655e80cd61ad {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1813.440869] env[63279]: DEBUG oslo_concurrency.lockutils [None req-aeaa6e99-ad33-4acc-9a18-53a9725a9078 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.439s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1813.441683] env[63279]: DEBUG nova.compute.manager [None req-aeaa6e99-ad33-4acc-9a18-53a9725a9078 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] [instance: 5656c853-ac83-47be-83c4-979a9e87ab91] Start building networks asynchronously for instance. 
{{(pid=63279) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1813.448080] env[63279]: DEBUG oslo_concurrency.lockutils [None req-e5b6a16c-1913-4848-8b42-b00fb552edc7 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.868s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1813.449812] env[63279]: INFO nova.compute.claims [None req-e5b6a16c-1913-4848-8b42-b00fb552edc7 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] [instance: 4a9088e0-2992-4b18-8be9-6bc70633369b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1813.506277] env[63279]: DEBUG nova.network.neutron [None req-8c276986-9767-495b-b2d0-91b2bfe4ca4e tempest-ServerPasswordTestJSON-1558733645 tempest-ServerPasswordTestJSON-1558733645-project-member] [instance: 21c2bba1-5482-496a-9e2a-f123a94ed48a] Successfully updated port: c3f2f036-d3a2-4b32-b33a-516605f2a1b1 {{(pid=63279) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1813.554081] env[63279]: DEBUG nova.network.neutron [None req-c56e5cd3-c5d3-497a-94cf-fa4a1928fd50 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] [instance: 6388f912-ae70-4e8f-b8e4-ceb02e0f8a51] Successfully created port: bc9f3899-95c1-4e79-b121-03c9a2c0bc44 {{(pid=63279) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1813.633093] env[63279]: DEBUG oslo_concurrency.lockutils [None req-11810f0f-6d8f-490b-a1f5-ddff9fda61b5 tempest-ServerDiagnosticsV248Test-703192613 tempest-ServerDiagnosticsV248Test-703192613-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1813.646228] env[63279]: DEBUG oslo_concurrency.lockutils [req-20eafdc2-39eb-4ecd-ae51-895f8affe586 req-a6d99fb8-972b-48e8-991b-dcf2a62237ed service nova] Releasing lock "refresh_cache-ac1d0e8f-446a-4a6d-a916-08f52426396d" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1813.646514] env[63279]: DEBUG nova.compute.manager [req-20eafdc2-39eb-4ecd-ae51-895f8affe586 req-a6d99fb8-972b-48e8-991b-dcf2a62237ed service nova] [instance: 8c712d0d-55c2-4a14-b759-9441594211e1] Received event network-vif-plugged-a1328084-8103-45cf-a3f8-b825bebe06c0 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 1813.646767] env[63279]: DEBUG oslo_concurrency.lockutils [req-20eafdc2-39eb-4ecd-ae51-895f8affe586 req-a6d99fb8-972b-48e8-991b-dcf2a62237ed service nova] Acquiring lock "8c712d0d-55c2-4a14-b759-9441594211e1-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1813.646920] env[63279]: DEBUG oslo_concurrency.lockutils [req-20eafdc2-39eb-4ecd-ae51-895f8affe586 req-a6d99fb8-972b-48e8-991b-dcf2a62237ed service nova] Lock "8c712d0d-55c2-4a14-b759-9441594211e1-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1813.647112] env[63279]: DEBUG 
oslo_concurrency.lockutils [req-20eafdc2-39eb-4ecd-ae51-895f8affe586 req-a6d99fb8-972b-48e8-991b-dcf2a62237ed service nova] Lock "8c712d0d-55c2-4a14-b759-9441594211e1-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1813.647281] env[63279]: DEBUG nova.compute.manager [req-20eafdc2-39eb-4ecd-ae51-895f8affe586 req-a6d99fb8-972b-48e8-991b-dcf2a62237ed service nova] [instance: 8c712d0d-55c2-4a14-b759-9441594211e1] No waiting events found dispatching network-vif-plugged-a1328084-8103-45cf-a3f8-b825bebe06c0 {{(pid=63279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1813.647446] env[63279]: WARNING nova.compute.manager [req-20eafdc2-39eb-4ecd-ae51-895f8affe586 req-a6d99fb8-972b-48e8-991b-dcf2a62237ed service nova] [instance: 8c712d0d-55c2-4a14-b759-9441594211e1] Received unexpected event network-vif-plugged-a1328084-8103-45cf-a3f8-b825bebe06c0 for instance with vm_state building and task_state spawning. [ 1813.739968] env[63279]: DEBUG oslo_concurrency.lockutils [None req-6931e23b-1e81-4eab-aebf-1b452f1ef607 tempest-DeleteServersAdminTestJSON-778262859 tempest-DeleteServersAdminTestJSON-778262859-project-member] Releasing lock "refresh_cache-a301d225-684d-4f88-bc9b-7e02b8115b9d" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1813.740434] env[63279]: DEBUG nova.compute.manager [None req-6931e23b-1e81-4eab-aebf-1b452f1ef607 tempest-DeleteServersAdminTestJSON-778262859 tempest-DeleteServersAdminTestJSON-778262859-project-member] [instance: a301d225-684d-4f88-bc9b-7e02b8115b9d] Instance network_info: |[{"id": "05cc981a-fb6c-4723-8b86-d58715a9c5e9", "address": "fa:16:3e:80:3a:dd", "network": {"id": "1e875730-8e0e-4907-a6d2-776025ed7ab9", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.240", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "a935e86eee0a4c38adfe0367d2097a61", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "32028d02-abaa-4071-bc65-1460f5c772a8", "external-id": "nsx-vlan-transportzone-558", "segmentation_id": 558, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap05cc981a-fb", "ovs_interfaceid": "05cc981a-fb6c-4723-8b86-d58715a9c5e9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1813.740805] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-6931e23b-1e81-4eab-aebf-1b452f1ef607 tempest-DeleteServersAdminTestJSON-778262859 tempest-DeleteServersAdminTestJSON-778262859-project-member] [instance: a301d225-684d-4f88-bc9b-7e02b8115b9d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:80:3a:dd', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '32028d02-abaa-4071-bc65-1460f5c772a8', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '05cc981a-fb6c-4723-8b86-d58715a9c5e9', 
'vif_model': 'vmxnet3'}] {{(pid=63279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1813.750330] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-6931e23b-1e81-4eab-aebf-1b452f1ef607 tempest-DeleteServersAdminTestJSON-778262859 tempest-DeleteServersAdminTestJSON-778262859-project-member] Creating folder: Project (930a294f790e4ba58a1295ce3240f10a). Parent ref: group-v427491. {{(pid=63279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1813.750733] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ba407c0a-d15f-428f-b166-c7b5082ecfeb {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1813.764457] env[63279]: DEBUG oslo_vmware.api [None req-a59d5ce1-88f2-4b01-b5f9-87a8413baff4 tempest-TenantUsagesTestJSON-1157774739 tempest-TenantUsagesTestJSON-1157774739-project-member] Task: {'id': task-2086697, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1813.765952] env[63279]: INFO nova.virt.vmwareapi.vm_util [None req-6931e23b-1e81-4eab-aebf-1b452f1ef607 tempest-DeleteServersAdminTestJSON-778262859 tempest-DeleteServersAdminTestJSON-778262859-project-member] Created folder: Project (930a294f790e4ba58a1295ce3240f10a) in parent group-v427491. [ 1813.766186] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-6931e23b-1e81-4eab-aebf-1b452f1ef607 tempest-DeleteServersAdminTestJSON-778262859 tempest-DeleteServersAdminTestJSON-778262859-project-member] Creating folder: Instances. Parent ref: group-v427504. {{(pid=63279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1813.766445] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6dd24a6e-6811-4082-b97e-701e1cf9b250 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1813.779453] env[63279]: INFO nova.virt.vmwareapi.vm_util [None req-6931e23b-1e81-4eab-aebf-1b452f1ef607 tempest-DeleteServersAdminTestJSON-778262859 tempest-DeleteServersAdminTestJSON-778262859-project-member] Created folder: Instances in parent group-v427504. [ 1813.779453] env[63279]: DEBUG oslo.service.loopingcall [None req-6931e23b-1e81-4eab-aebf-1b452f1ef607 tempest-DeleteServersAdminTestJSON-778262859 tempest-DeleteServersAdminTestJSON-778262859-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1813.780206] env[63279]: INFO nova.compute.manager [None req-e4b5d649-5bea-45f1-99e3-0ca20d156863 tempest-ServerDiagnosticsTest-1267197074 tempest-ServerDiagnosticsTest-1267197074-project-member] [instance: ac1d0e8f-446a-4a6d-a916-08f52426396d] Took 21.36 seconds to build instance. 
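The SearchDatastore_Task and CopyVirtualDisk_Task entries above follow oslo.vmware's wait_for_task flow: the API call returns a task reference, the session polls it (the "progress is N%" lines emitted from _poll_task), and once the task reports success the "completed successfully" line with its duration_secs is logged. The sketch below shows only that polling shape; it assumes a hypothetical get_task_info callable and TaskFailed exception for illustration and is not the oslo.vmware session implementation.

import time


class TaskFailed(Exception):
    """Raised when the polled task ends in an error state (illustrative only)."""


def wait_for_task(get_task_info, poll_interval=0.5, timeout=300.0):
    # get_task_info is assumed to return an object with .state
    # ('queued' | 'running' | 'success' | 'error'), .progress (0-100)
    # and .error, mirroring the task fields that appear in the log above.
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        info = get_task_info()
        if info.state == "success":
            return info                      # caller logs "completed successfully"
        if info.state == "error":
            raise TaskFailed(info.error)
        # each "progress is N%" line corresponds to one pass through this branch
        time.sleep(poll_interval)
    raise TimeoutError("task did not complete within %.0fs" % timeout)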
[ 1813.781628] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a301d225-684d-4f88-bc9b-7e02b8115b9d] Creating VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1813.781628] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e2c45834-2f01-4d98-a326-93ca7f131325 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1813.804357] env[63279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1813.804357] env[63279]: value = "task-2086700" [ 1813.804357] env[63279]: _type = "Task" [ 1813.804357] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1813.817271] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2086700, 'name': CreateVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1813.957950] env[63279]: DEBUG nova.compute.utils [None req-aeaa6e99-ad33-4acc-9a18-53a9725a9078 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Using /dev/sd instead of None {{(pid=63279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1813.965709] env[63279]: DEBUG nova.compute.manager [None req-aeaa6e99-ad33-4acc-9a18-53a9725a9078 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] [instance: 5656c853-ac83-47be-83c4-979a9e87ab91] Allocating IP information in the background. {{(pid=63279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1813.965709] env[63279]: DEBUG nova.network.neutron [None req-aeaa6e99-ad33-4acc-9a18-53a9725a9078 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] [instance: 5656c853-ac83-47be-83c4-979a9e87ab91] allocate_for_instance() {{(pid=63279) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1814.009320] env[63279]: DEBUG oslo_concurrency.lockutils [None req-8c276986-9767-495b-b2d0-91b2bfe4ca4e tempest-ServerPasswordTestJSON-1558733645 tempest-ServerPasswordTestJSON-1558733645-project-member] Acquiring lock "refresh_cache-21c2bba1-5482-496a-9e2a-f123a94ed48a" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1814.009470] env[63279]: DEBUG oslo_concurrency.lockutils [None req-8c276986-9767-495b-b2d0-91b2bfe4ca4e tempest-ServerPasswordTestJSON-1558733645 tempest-ServerPasswordTestJSON-1558733645-project-member] Acquired lock "refresh_cache-21c2bba1-5482-496a-9e2a-f123a94ed48a" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1814.009627] env[63279]: DEBUG nova.network.neutron [None req-8c276986-9767-495b-b2d0-91b2bfe4ca4e tempest-ServerPasswordTestJSON-1558733645 tempest-ServerPasswordTestJSON-1558733645-project-member] [instance: 21c2bba1-5482-496a-9e2a-f123a94ed48a] Building network info cache for instance {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1814.271496] env[63279]: DEBUG oslo_vmware.api [None req-a59d5ce1-88f2-4b01-b5f9-87a8413baff4 tempest-TenantUsagesTestJSON-1157774739 tempest-TenantUsagesTestJSON-1157774739-project-member] Task: {'id': task-2086697, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.619066} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1814.271496] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-a59d5ce1-88f2-4b01-b5f9-87a8413baff4 tempest-TenantUsagesTestJSON-1157774739 tempest-TenantUsagesTestJSON-1157774739-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] 8c712d0d-55c2-4a14-b759-9441594211e1/8c712d0d-55c2-4a14-b759-9441594211e1.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1814.271496] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-a59d5ce1-88f2-4b01-b5f9-87a8413baff4 tempest-TenantUsagesTestJSON-1157774739 tempest-TenantUsagesTestJSON-1157774739-project-member] [instance: 8c712d0d-55c2-4a14-b759-9441594211e1] Extending root virtual disk to 1048576 {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1814.271496] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a9b70647-e556-4f22-b493-9cf70900f92e {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1814.282553] env[63279]: DEBUG oslo_vmware.api [None req-a59d5ce1-88f2-4b01-b5f9-87a8413baff4 tempest-TenantUsagesTestJSON-1157774739 tempest-TenantUsagesTestJSON-1157774739-project-member] Waiting for the task: (returnval){ [ 1814.282553] env[63279]: value = "task-2086701" [ 1814.282553] env[63279]: _type = "Task" [ 1814.282553] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1814.297943] env[63279]: DEBUG oslo_vmware.api [None req-a59d5ce1-88f2-4b01-b5f9-87a8413baff4 tempest-TenantUsagesTestJSON-1157774739 tempest-TenantUsagesTestJSON-1157774739-project-member] Task: {'id': task-2086701, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1814.298476] env[63279]: DEBUG oslo_concurrency.lockutils [None req-e4b5d649-5bea-45f1-99e3-0ca20d156863 tempest-ServerDiagnosticsTest-1267197074 tempest-ServerDiagnosticsTest-1267197074-project-member] Lock "ac1d0e8f-446a-4a6d-a916-08f52426396d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 22.884s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1814.317866] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2086700, 'name': CreateVM_Task, 'duration_secs': 0.491733} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1814.319579] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a301d225-684d-4f88-bc9b-7e02b8115b9d] Created VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1814.320326] env[63279]: DEBUG oslo_concurrency.lockutils [None req-6931e23b-1e81-4eab-aebf-1b452f1ef607 tempest-DeleteServersAdminTestJSON-778262859 tempest-DeleteServersAdminTestJSON-778262859-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1814.320504] env[63279]: DEBUG oslo_concurrency.lockutils [None req-6931e23b-1e81-4eab-aebf-1b452f1ef607 tempest-DeleteServersAdminTestJSON-778262859 tempest-DeleteServersAdminTestJSON-778262859-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1814.321204] env[63279]: DEBUG oslo_concurrency.lockutils [None req-6931e23b-1e81-4eab-aebf-1b452f1ef607 tempest-DeleteServersAdminTestJSON-778262859 tempest-DeleteServersAdminTestJSON-778262859-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1814.321551] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-84d07bd7-ed38-4e0e-ac61-5c6518150685 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1814.328977] env[63279]: DEBUG oslo_vmware.api [None req-6931e23b-1e81-4eab-aebf-1b452f1ef607 tempest-DeleteServersAdminTestJSON-778262859 tempest-DeleteServersAdminTestJSON-778262859-project-member] Waiting for the task: (returnval){ [ 1814.328977] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52cb507b-2038-e709-c016-8f88cb6dda53" [ 1814.328977] env[63279]: _type = "Task" [ 1814.328977] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1814.339441] env[63279]: DEBUG nova.policy [None req-aeaa6e99-ad33-4acc-9a18-53a9725a9078 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a111f7e262b04ab9b1bc14397ce09b08', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '0b41258d6a444b558b3f3256f2f7d6eb', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63279) authorize /opt/stack/nova/nova/policy.py:192}} [ 1814.344086] env[63279]: DEBUG oslo_vmware.api [None req-6931e23b-1e81-4eab-aebf-1b452f1ef607 tempest-DeleteServersAdminTestJSON-778262859 tempest-DeleteServersAdminTestJSON-778262859-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52cb507b-2038-e709-c016-8f88cb6dda53, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1814.470224] env[63279]: DEBUG nova.compute.manager [None req-aeaa6e99-ad33-4acc-9a18-53a9725a9078 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] [instance: 5656c853-ac83-47be-83c4-979a9e87ab91] Start building block device mappings for instance. {{(pid=63279) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1814.643931] env[63279]: DEBUG nova.network.neutron [None req-8c276986-9767-495b-b2d0-91b2bfe4ca4e tempest-ServerPasswordTestJSON-1558733645 tempest-ServerPasswordTestJSON-1558733645-project-member] [instance: 21c2bba1-5482-496a-9e2a-f123a94ed48a] Instance cache missing network info. {{(pid=63279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1814.796060] env[63279]: DEBUG oslo_vmware.api [None req-a59d5ce1-88f2-4b01-b5f9-87a8413baff4 tempest-TenantUsagesTestJSON-1157774739 tempest-TenantUsagesTestJSON-1157774739-project-member] Task: {'id': task-2086701, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.073332} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1814.798872] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-a59d5ce1-88f2-4b01-b5f9-87a8413baff4 tempest-TenantUsagesTestJSON-1157774739 tempest-TenantUsagesTestJSON-1157774739-project-member] [instance: 8c712d0d-55c2-4a14-b759-9441594211e1] Extended root virtual disk {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1814.802751] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-209d8f8d-2e3a-4c75-8091-59d4d327a79f {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1814.805525] env[63279]: DEBUG nova.compute.manager [None req-a7076d91-54cd-478d-8de7-9cdce51faa84 tempest-AttachInterfacesUnderV243Test-18748603 tempest-AttachInterfacesUnderV243Test-18748603-project-member] [instance: fcc5a636-554f-424e-a604-a8e7bd7cf574] Starting instance... 
{{(pid=63279) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1814.835436] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-a59d5ce1-88f2-4b01-b5f9-87a8413baff4 tempest-TenantUsagesTestJSON-1157774739 tempest-TenantUsagesTestJSON-1157774739-project-member] [instance: 8c712d0d-55c2-4a14-b759-9441594211e1] Reconfiguring VM instance instance-00000004 to attach disk [datastore1] 8c712d0d-55c2-4a14-b759-9441594211e1/8c712d0d-55c2-4a14-b759-9441594211e1.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1814.836838] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e5893c1d-58a0-4013-b93e-4022af1a82af {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1814.852730] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5d7a840-2f8e-46e3-84af-aff9f4420dc1 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1814.874877] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab2177fb-1277-4145-be1d-b609c0fd107e {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1814.876776] env[63279]: DEBUG oslo_vmware.api [None req-a59d5ce1-88f2-4b01-b5f9-87a8413baff4 tempest-TenantUsagesTestJSON-1157774739 tempest-TenantUsagesTestJSON-1157774739-project-member] Waiting for the task: (returnval){ [ 1814.876776] env[63279]: value = "task-2086702" [ 1814.876776] env[63279]: _type = "Task" [ 1814.876776] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1814.885440] env[63279]: DEBUG oslo_vmware.api [None req-6931e23b-1e81-4eab-aebf-1b452f1ef607 tempest-DeleteServersAdminTestJSON-778262859 tempest-DeleteServersAdminTestJSON-778262859-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52cb507b-2038-e709-c016-8f88cb6dda53, 'name': SearchDatastore_Task, 'duration_secs': 0.013093} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1814.913671] env[63279]: DEBUG oslo_concurrency.lockutils [None req-6931e23b-1e81-4eab-aebf-1b452f1ef607 tempest-DeleteServersAdminTestJSON-778262859 tempest-DeleteServersAdminTestJSON-778262859-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1814.914047] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-6931e23b-1e81-4eab-aebf-1b452f1ef607 tempest-DeleteServersAdminTestJSON-778262859 tempest-DeleteServersAdminTestJSON-778262859-project-member] [instance: a301d225-684d-4f88-bc9b-7e02b8115b9d] Processing image 30887889-e45b-4f67-8b3c-16216e594a90 {{(pid=63279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1814.915267] env[63279]: DEBUG oslo_concurrency.lockutils [None req-6931e23b-1e81-4eab-aebf-1b452f1ef607 tempest-DeleteServersAdminTestJSON-778262859 tempest-DeleteServersAdminTestJSON-778262859-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1814.915267] env[63279]: DEBUG oslo_concurrency.lockutils [None req-6931e23b-1e81-4eab-aebf-1b452f1ef607 tempest-DeleteServersAdminTestJSON-778262859 tempest-DeleteServersAdminTestJSON-778262859-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1814.915267] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-6931e23b-1e81-4eab-aebf-1b452f1ef607 tempest-DeleteServersAdminTestJSON-778262859 tempest-DeleteServersAdminTestJSON-778262859-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1814.921868] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-07cc25b3-9f91-4416-bd1c-86ef241cac7f {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1814.924064] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f138f533-e19b-4d2e-a44f-3421ec19fc84 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1814.927035] env[63279]: DEBUG oslo_vmware.api [None req-a59d5ce1-88f2-4b01-b5f9-87a8413baff4 tempest-TenantUsagesTestJSON-1157774739 tempest-TenantUsagesTestJSON-1157774739-project-member] Task: {'id': task-2086702, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1814.933193] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e847971-87ec-4840-a711-8efd30639794 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1814.940837] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-6931e23b-1e81-4eab-aebf-1b452f1ef607 tempest-DeleteServersAdminTestJSON-778262859 tempest-DeleteServersAdminTestJSON-778262859-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1814.940837] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-6931e23b-1e81-4eab-aebf-1b452f1ef607 tempest-DeleteServersAdminTestJSON-778262859 tempest-DeleteServersAdminTestJSON-778262859-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1814.950597] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-42fe051e-b062-445f-9a71-3e0a95b38db8 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1814.954411] env[63279]: DEBUG nova.compute.provider_tree [None req-e5b6a16c-1913-4848-8b42-b00fb552edc7 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Inventory has not changed in ProviderTree for provider: 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1814.960689] env[63279]: DEBUG oslo_vmware.api [None req-6931e23b-1e81-4eab-aebf-1b452f1ef607 tempest-DeleteServersAdminTestJSON-778262859 tempest-DeleteServersAdminTestJSON-778262859-project-member] Waiting for the task: (returnval){ [ 1814.960689] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52c139c0-9a7a-e3c6-e7ab-17da525bfd74" [ 1814.960689] env[63279]: _type = "Task" [ 1814.960689] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1814.968835] env[63279]: DEBUG oslo_vmware.api [None req-6931e23b-1e81-4eab-aebf-1b452f1ef607 tempest-DeleteServersAdminTestJSON-778262859 tempest-DeleteServersAdminTestJSON-778262859-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52c139c0-9a7a-e3c6-e7ab-17da525bfd74, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1815.042170] env[63279]: DEBUG nova.network.neutron [None req-96a3c836-4e66-45c5-95bd-3126c6132246 tempest-ServerExternalEventsTest-2060906853 tempest-ServerExternalEventsTest-2060906853-project-member] [instance: 24bce28c-fc43-4f17-9800-4d980f6729bc] Successfully updated port: c3997b27-53bf-4f2d-828a-a55447612331 {{(pid=63279) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1815.132308] env[63279]: DEBUG nova.network.neutron [None req-8c276986-9767-495b-b2d0-91b2bfe4ca4e tempest-ServerPasswordTestJSON-1558733645 tempest-ServerPasswordTestJSON-1558733645-project-member] [instance: 21c2bba1-5482-496a-9e2a-f123a94ed48a] Updating instance_info_cache with network_info: [{"id": "c3f2f036-d3a2-4b32-b33a-516605f2a1b1", "address": "fa:16:3e:a0:0e:b7", "network": {"id": "aad6915d-e456-4136-9ba0-33423379f7e8", "bridge": "br-int", "label": "tempest-ServerPasswordTestJSON-781920893-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "abb95b37d6914c4f8624f2f924a82f3c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f2c424c9-6446-4b2a-af8c-4d9c29117c39", "external-id": "nsx-vlan-transportzone-437", "segmentation_id": 437, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc3f2f036-d3", "ovs_interfaceid": "c3f2f036-d3a2-4b32-b33a-516605f2a1b1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1815.336334] env[63279]: DEBUG oslo_concurrency.lockutils [None req-a7076d91-54cd-478d-8de7-9cdce51faa84 tempest-AttachInterfacesUnderV243Test-18748603 tempest-AttachInterfacesUnderV243Test-18748603-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1815.389659] env[63279]: DEBUG oslo_vmware.api [None req-a59d5ce1-88f2-4b01-b5f9-87a8413baff4 tempest-TenantUsagesTestJSON-1157774739 tempest-TenantUsagesTestJSON-1157774739-project-member] Task: {'id': task-2086702, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1815.458021] env[63279]: DEBUG nova.scheduler.client.report [None req-e5b6a16c-1913-4848-8b42-b00fb552edc7 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Inventory has not changed for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1815.471861] env[63279]: DEBUG oslo_vmware.api [None req-6931e23b-1e81-4eab-aebf-1b452f1ef607 tempest-DeleteServersAdminTestJSON-778262859 tempest-DeleteServersAdminTestJSON-778262859-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52c139c0-9a7a-e3c6-e7ab-17da525bfd74, 'name': SearchDatastore_Task, 'duration_secs': 0.026887} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1815.473665] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d751b7b3-b670-43f8-954b-108bb8877148 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1815.477864] env[63279]: DEBUG nova.compute.manager [None req-aeaa6e99-ad33-4acc-9a18-53a9725a9078 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] [instance: 5656c853-ac83-47be-83c4-979a9e87ab91] Start spawning the instance on the hypervisor. {{(pid=63279) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1815.482431] env[63279]: DEBUG oslo_vmware.api [None req-6931e23b-1e81-4eab-aebf-1b452f1ef607 tempest-DeleteServersAdminTestJSON-778262859 tempest-DeleteServersAdminTestJSON-778262859-project-member] Waiting for the task: (returnval){ [ 1815.482431] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]521cfe87-125e-0777-0de7-f16dfc38c6a2" [ 1815.482431] env[63279]: _type = "Task" [ 1815.482431] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1815.492154] env[63279]: DEBUG oslo_vmware.api [None req-6931e23b-1e81-4eab-aebf-1b452f1ef607 tempest-DeleteServersAdminTestJSON-778262859 tempest-DeleteServersAdminTestJSON-778262859-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]521cfe87-125e-0777-0de7-f16dfc38c6a2, 'name': SearchDatastore_Task, 'duration_secs': 0.011194} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1815.492448] env[63279]: DEBUG oslo_concurrency.lockutils [None req-6931e23b-1e81-4eab-aebf-1b452f1ef607 tempest-DeleteServersAdminTestJSON-778262859 tempest-DeleteServersAdminTestJSON-778262859-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1815.492757] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-6931e23b-1e81-4eab-aebf-1b452f1ef607 tempest-DeleteServersAdminTestJSON-778262859 tempest-DeleteServersAdminTestJSON-778262859-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] a301d225-684d-4f88-bc9b-7e02b8115b9d/a301d225-684d-4f88-bc9b-7e02b8115b9d.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1815.493163] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6e8762f0-dfd2-4a2c-a8aa-78a8f2dd88e5 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1815.500115] env[63279]: DEBUG oslo_vmware.api [None req-6931e23b-1e81-4eab-aebf-1b452f1ef607 tempest-DeleteServersAdminTestJSON-778262859 tempest-DeleteServersAdminTestJSON-778262859-project-member] Waiting for the task: (returnval){ [ 1815.500115] env[63279]: value = "task-2086703" [ 1815.500115] env[63279]: _type = "Task" [ 1815.500115] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1815.511824] env[63279]: DEBUG oslo_vmware.api [None req-6931e23b-1e81-4eab-aebf-1b452f1ef607 tempest-DeleteServersAdminTestJSON-778262859 tempest-DeleteServersAdminTestJSON-778262859-project-member] Task: {'id': task-2086703, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1815.515762] env[63279]: DEBUG nova.virt.hardware [None req-aeaa6e99-ad33-4acc-9a18-53a9725a9078 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-13T17:30:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-13T17:30:01Z,direct_url=,disk_format='vmdk',id=30887889-e45b-4f67-8b3c-16216e594a90,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a935e86eee0a4c38adfe0367d2097a61',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-13T17:30:02Z,virtual_size=,visibility=), allow threads: False {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1815.517581] env[63279]: DEBUG nova.virt.hardware [None req-aeaa6e99-ad33-4acc-9a18-53a9725a9078 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Flavor limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1815.517581] env[63279]: DEBUG nova.virt.hardware [None req-aeaa6e99-ad33-4acc-9a18-53a9725a9078 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Image limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1815.517581] env[63279]: DEBUG nova.virt.hardware [None req-aeaa6e99-ad33-4acc-9a18-53a9725a9078 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Flavor pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1815.517581] env[63279]: DEBUG nova.virt.hardware [None req-aeaa6e99-ad33-4acc-9a18-53a9725a9078 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Image pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1815.517581] env[63279]: DEBUG nova.virt.hardware [None req-aeaa6e99-ad33-4acc-9a18-53a9725a9078 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1815.517917] env[63279]: DEBUG nova.virt.hardware [None req-aeaa6e99-ad33-4acc-9a18-53a9725a9078 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1815.517917] env[63279]: DEBUG nova.virt.hardware [None req-aeaa6e99-ad33-4acc-9a18-53a9725a9078 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1815.517917] env[63279]: DEBUG nova.virt.hardware [None 
req-aeaa6e99-ad33-4acc-9a18-53a9725a9078 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Got 1 possible topologies {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1815.517917] env[63279]: DEBUG nova.virt.hardware [None req-aeaa6e99-ad33-4acc-9a18-53a9725a9078 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1815.518098] env[63279]: DEBUG nova.virt.hardware [None req-aeaa6e99-ad33-4acc-9a18-53a9725a9078 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1815.519329] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-adcf9594-0a7e-4af0-9a34-37bf0515a5ec {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1815.527365] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b39d373-384d-4376-97ba-22bfe6e8bb34 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1815.544038] env[63279]: DEBUG oslo_concurrency.lockutils [None req-96a3c836-4e66-45c5-95bd-3126c6132246 tempest-ServerExternalEventsTest-2060906853 tempest-ServerExternalEventsTest-2060906853-project-member] Acquiring lock "refresh_cache-24bce28c-fc43-4f17-9800-4d980f6729bc" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1815.547419] env[63279]: DEBUG oslo_concurrency.lockutils [None req-96a3c836-4e66-45c5-95bd-3126c6132246 tempest-ServerExternalEventsTest-2060906853 tempest-ServerExternalEventsTest-2060906853-project-member] Acquired lock "refresh_cache-24bce28c-fc43-4f17-9800-4d980f6729bc" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1815.547419] env[63279]: DEBUG nova.network.neutron [None req-96a3c836-4e66-45c5-95bd-3126c6132246 tempest-ServerExternalEventsTest-2060906853 tempest-ServerExternalEventsTest-2060906853-project-member] [instance: 24bce28c-fc43-4f17-9800-4d980f6729bc] Building network info cache for instance {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1815.634741] env[63279]: DEBUG oslo_concurrency.lockutils [None req-8c276986-9767-495b-b2d0-91b2bfe4ca4e tempest-ServerPasswordTestJSON-1558733645 tempest-ServerPasswordTestJSON-1558733645-project-member] Releasing lock "refresh_cache-21c2bba1-5482-496a-9e2a-f123a94ed48a" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1815.635112] env[63279]: DEBUG nova.compute.manager [None req-8c276986-9767-495b-b2d0-91b2bfe4ca4e tempest-ServerPasswordTestJSON-1558733645 tempest-ServerPasswordTestJSON-1558733645-project-member] [instance: 21c2bba1-5482-496a-9e2a-f123a94ed48a] Instance network_info: |[{"id": "c3f2f036-d3a2-4b32-b33a-516605f2a1b1", "address": "fa:16:3e:a0:0e:b7", "network": {"id": "aad6915d-e456-4136-9ba0-33423379f7e8", "bridge": "br-int", "label": "tempest-ServerPasswordTestJSON-781920893-network", "subnets": [{"cidr": "192.168.128.0/28", 
"dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "abb95b37d6914c4f8624f2f924a82f3c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f2c424c9-6446-4b2a-af8c-4d9c29117c39", "external-id": "nsx-vlan-transportzone-437", "segmentation_id": 437, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc3f2f036-d3", "ovs_interfaceid": "c3f2f036-d3a2-4b32-b33a-516605f2a1b1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1815.635543] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-8c276986-9767-495b-b2d0-91b2bfe4ca4e tempest-ServerPasswordTestJSON-1558733645 tempest-ServerPasswordTestJSON-1558733645-project-member] [instance: 21c2bba1-5482-496a-9e2a-f123a94ed48a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a0:0e:b7', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f2c424c9-6446-4b2a-af8c-4d9c29117c39', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c3f2f036-d3a2-4b32-b33a-516605f2a1b1', 'vif_model': 'vmxnet3'}] {{(pid=63279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1815.643727] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-8c276986-9767-495b-b2d0-91b2bfe4ca4e tempest-ServerPasswordTestJSON-1558733645 tempest-ServerPasswordTestJSON-1558733645-project-member] Creating folder: Project (abb95b37d6914c4f8624f2f924a82f3c). Parent ref: group-v427491. {{(pid=63279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1815.644200] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5f2b25ac-54d9-4c9e-83c6-7e76e11490d1 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1815.655020] env[63279]: INFO nova.virt.vmwareapi.vm_util [None req-8c276986-9767-495b-b2d0-91b2bfe4ca4e tempest-ServerPasswordTestJSON-1558733645 tempest-ServerPasswordTestJSON-1558733645-project-member] Created folder: Project (abb95b37d6914c4f8624f2f924a82f3c) in parent group-v427491. [ 1815.655443] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-8c276986-9767-495b-b2d0-91b2bfe4ca4e tempest-ServerPasswordTestJSON-1558733645 tempest-ServerPasswordTestJSON-1558733645-project-member] Creating folder: Instances. Parent ref: group-v427507. {{(pid=63279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1815.655617] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f29ef3c8-2278-49d8-b516-bb4d85e32611 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1815.666140] env[63279]: INFO nova.virt.vmwareapi.vm_util [None req-8c276986-9767-495b-b2d0-91b2bfe4ca4e tempest-ServerPasswordTestJSON-1558733645 tempest-ServerPasswordTestJSON-1558733645-project-member] Created folder: Instances in parent group-v427507. 
[ 1815.666140] env[63279]: DEBUG oslo.service.loopingcall [None req-8c276986-9767-495b-b2d0-91b2bfe4ca4e tempest-ServerPasswordTestJSON-1558733645 tempest-ServerPasswordTestJSON-1558733645-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1815.666384] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 21c2bba1-5482-496a-9e2a-f123a94ed48a] Creating VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1815.666608] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-4c8a5ca3-4053-41e4-a6fe-a8933a5d6a0d {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1815.689636] env[63279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1815.689636] env[63279]: value = "task-2086706" [ 1815.689636] env[63279]: _type = "Task" [ 1815.689636] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1815.697850] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2086706, 'name': CreateVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1815.890366] env[63279]: DEBUG oslo_vmware.api [None req-a59d5ce1-88f2-4b01-b5f9-87a8413baff4 tempest-TenantUsagesTestJSON-1157774739 tempest-TenantUsagesTestJSON-1157774739-project-member] Task: {'id': task-2086702, 'name': ReconfigVM_Task, 'duration_secs': 0.753875} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1815.890687] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-a59d5ce1-88f2-4b01-b5f9-87a8413baff4 tempest-TenantUsagesTestJSON-1157774739 tempest-TenantUsagesTestJSON-1157774739-project-member] [instance: 8c712d0d-55c2-4a14-b759-9441594211e1] Reconfigured VM instance instance-00000004 to attach disk [datastore1] 8c712d0d-55c2-4a14-b759-9441594211e1/8c712d0d-55c2-4a14-b759-9441594211e1.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1815.891661] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-69dd5cde-eafa-48ed-8339-da15dea12dd4 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1815.899736] env[63279]: DEBUG oslo_vmware.api [None req-a59d5ce1-88f2-4b01-b5f9-87a8413baff4 tempest-TenantUsagesTestJSON-1157774739 tempest-TenantUsagesTestJSON-1157774739-project-member] Waiting for the task: (returnval){ [ 1815.899736] env[63279]: value = "task-2086707" [ 1815.899736] env[63279]: _type = "Task" [ 1815.899736] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1815.911360] env[63279]: DEBUG oslo_vmware.api [None req-a59d5ce1-88f2-4b01-b5f9-87a8413baff4 tempest-TenantUsagesTestJSON-1157774739 tempest-TenantUsagesTestJSON-1157774739-project-member] Task: {'id': task-2086707, 'name': Rename_Task} progress is 5%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1815.966444] env[63279]: DEBUG oslo_concurrency.lockutils [None req-e5b6a16c-1913-4848-8b42-b00fb552edc7 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.520s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1815.967094] env[63279]: DEBUG nova.compute.manager [None req-e5b6a16c-1913-4848-8b42-b00fb552edc7 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] [instance: 4a9088e0-2992-4b18-8be9-6bc70633369b] Start building networks asynchronously for instance. {{(pid=63279) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1815.973132] env[63279]: DEBUG oslo_concurrency.lockutils [None req-9be70fed-855a-424c-af8e-36597da29526 tempest-ImagesOneServerTestJSON-859902932 tempest-ImagesOneServerTestJSON-859902932-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 3.310s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1815.975170] env[63279]: INFO nova.compute.claims [None req-9be70fed-855a-424c-af8e-36597da29526 tempest-ImagesOneServerTestJSON-859902932 tempest-ImagesOneServerTestJSON-859902932-project-member] [instance: 7cd673bb-3795-4c6f-a1e0-a8ea0da0a35f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1816.013363] env[63279]: DEBUG oslo_vmware.api [None req-6931e23b-1e81-4eab-aebf-1b452f1ef607 tempest-DeleteServersAdminTestJSON-778262859 tempest-DeleteServersAdminTestJSON-778262859-project-member] Task: {'id': task-2086703, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1816.120146] env[63279]: DEBUG nova.network.neutron [None req-96a3c836-4e66-45c5-95bd-3126c6132246 tempest-ServerExternalEventsTest-2060906853 tempest-ServerExternalEventsTest-2060906853-project-member] [instance: 24bce28c-fc43-4f17-9800-4d980f6729bc] Instance cache missing network info. {{(pid=63279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1816.203363] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2086706, 'name': CreateVM_Task, 'duration_secs': 0.459958} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1816.206666] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 21c2bba1-5482-496a-9e2a-f123a94ed48a] Created VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1816.207654] env[63279]: DEBUG oslo_concurrency.lockutils [None req-8c276986-9767-495b-b2d0-91b2bfe4ca4e tempest-ServerPasswordTestJSON-1558733645 tempest-ServerPasswordTestJSON-1558733645-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1816.207791] env[63279]: DEBUG oslo_concurrency.lockutils [None req-8c276986-9767-495b-b2d0-91b2bfe4ca4e tempest-ServerPasswordTestJSON-1558733645 tempest-ServerPasswordTestJSON-1558733645-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1816.208153] env[63279]: DEBUG oslo_concurrency.lockutils [None req-8c276986-9767-495b-b2d0-91b2bfe4ca4e tempest-ServerPasswordTestJSON-1558733645 tempest-ServerPasswordTestJSON-1558733645-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1816.209470] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c8460248-afd7-421d-8b40-8dbaadc0885f {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1816.215850] env[63279]: DEBUG oslo_vmware.api [None req-8c276986-9767-495b-b2d0-91b2bfe4ca4e tempest-ServerPasswordTestJSON-1558733645 tempest-ServerPasswordTestJSON-1558733645-project-member] Waiting for the task: (returnval){ [ 1816.215850] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52921619-b12f-b357-dead-467273d32104" [ 1816.215850] env[63279]: _type = "Task" [ 1816.215850] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1816.227452] env[63279]: DEBUG oslo_vmware.api [None req-8c276986-9767-495b-b2d0-91b2bfe4ca4e tempest-ServerPasswordTestJSON-1558733645 tempest-ServerPasswordTestJSON-1558733645-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52921619-b12f-b357-dead-467273d32104, 'name': SearchDatastore_Task, 'duration_secs': 0.008968} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1816.227936] env[63279]: DEBUG oslo_concurrency.lockutils [None req-8c276986-9767-495b-b2d0-91b2bfe4ca4e tempest-ServerPasswordTestJSON-1558733645 tempest-ServerPasswordTestJSON-1558733645-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1816.228195] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-8c276986-9767-495b-b2d0-91b2bfe4ca4e tempest-ServerPasswordTestJSON-1558733645 tempest-ServerPasswordTestJSON-1558733645-project-member] [instance: 21c2bba1-5482-496a-9e2a-f123a94ed48a] Processing image 30887889-e45b-4f67-8b3c-16216e594a90 {{(pid=63279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1816.228427] env[63279]: DEBUG oslo_concurrency.lockutils [None req-8c276986-9767-495b-b2d0-91b2bfe4ca4e tempest-ServerPasswordTestJSON-1558733645 tempest-ServerPasswordTestJSON-1558733645-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1816.228571] env[63279]: DEBUG oslo_concurrency.lockutils [None req-8c276986-9767-495b-b2d0-91b2bfe4ca4e tempest-ServerPasswordTestJSON-1558733645 tempest-ServerPasswordTestJSON-1558733645-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1816.229288] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-8c276986-9767-495b-b2d0-91b2bfe4ca4e tempest-ServerPasswordTestJSON-1558733645 tempest-ServerPasswordTestJSON-1558733645-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1816.229288] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c609d357-fcd5-438d-8b42-b80a9844a912 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1816.240946] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-8c276986-9767-495b-b2d0-91b2bfe4ca4e tempest-ServerPasswordTestJSON-1558733645 tempest-ServerPasswordTestJSON-1558733645-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1816.241215] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-8c276986-9767-495b-b2d0-91b2bfe4ca4e tempest-ServerPasswordTestJSON-1558733645 tempest-ServerPasswordTestJSON-1558733645-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1816.242068] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7376bba7-76cc-4273-b3d2-3c4c02184390 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1816.247438] env[63279]: DEBUG oslo_vmware.api [None req-8c276986-9767-495b-b2d0-91b2bfe4ca4e tempest-ServerPasswordTestJSON-1558733645 tempest-ServerPasswordTestJSON-1558733645-project-member] Waiting for the task: (returnval){ [ 1816.247438] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]522a6478-f182-a551-34a1-167ff80b5096" [ 1816.247438] env[63279]: _type = "Task" [ 1816.247438] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1816.255824] env[63279]: DEBUG oslo_vmware.api [None req-8c276986-9767-495b-b2d0-91b2bfe4ca4e tempest-ServerPasswordTestJSON-1558733645 tempest-ServerPasswordTestJSON-1558733645-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]522a6478-f182-a551-34a1-167ff80b5096, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1816.347225] env[63279]: DEBUG nova.compute.manager [req-a91ab7d9-2e1c-49c3-b204-de7c0a88c888 req-2346fcca-2ec2-47dc-a509-8abc7b82e986 service nova] [instance: 8c712d0d-55c2-4a14-b759-9441594211e1] Received event network-changed-a1328084-8103-45cf-a3f8-b825bebe06c0 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 1816.348340] env[63279]: DEBUG nova.compute.manager [req-a91ab7d9-2e1c-49c3-b204-de7c0a88c888 req-2346fcca-2ec2-47dc-a509-8abc7b82e986 service nova] [instance: 8c712d0d-55c2-4a14-b759-9441594211e1] Refreshing instance network info cache due to event network-changed-a1328084-8103-45cf-a3f8-b825bebe06c0. {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11655}} [ 1816.348812] env[63279]: DEBUG oslo_concurrency.lockutils [req-a91ab7d9-2e1c-49c3-b204-de7c0a88c888 req-2346fcca-2ec2-47dc-a509-8abc7b82e986 service nova] Acquiring lock "refresh_cache-8c712d0d-55c2-4a14-b759-9441594211e1" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1816.349225] env[63279]: DEBUG oslo_concurrency.lockutils [req-a91ab7d9-2e1c-49c3-b204-de7c0a88c888 req-2346fcca-2ec2-47dc-a509-8abc7b82e986 service nova] Acquired lock "refresh_cache-8c712d0d-55c2-4a14-b759-9441594211e1" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1816.350028] env[63279]: DEBUG nova.network.neutron [req-a91ab7d9-2e1c-49c3-b204-de7c0a88c888 req-2346fcca-2ec2-47dc-a509-8abc7b82e986 service nova] [instance: 8c712d0d-55c2-4a14-b759-9441594211e1] Refreshing network info cache for port a1328084-8103-45cf-a3f8-b825bebe06c0 {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1816.413104] env[63279]: DEBUG oslo_vmware.api [None req-a59d5ce1-88f2-4b01-b5f9-87a8413baff4 tempest-TenantUsagesTestJSON-1157774739 tempest-TenantUsagesTestJSON-1157774739-project-member] Task: {'id': task-2086707, 'name': Rename_Task, 'duration_secs': 0.216588} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1816.413380] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-a59d5ce1-88f2-4b01-b5f9-87a8413baff4 tempest-TenantUsagesTestJSON-1157774739 tempest-TenantUsagesTestJSON-1157774739-project-member] [instance: 8c712d0d-55c2-4a14-b759-9441594211e1] Powering on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1816.413640] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-7ff876d6-4d73-43f0-ab7d-67f1107dac66 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1816.420523] env[63279]: DEBUG oslo_vmware.api [None req-a59d5ce1-88f2-4b01-b5f9-87a8413baff4 tempest-TenantUsagesTestJSON-1157774739 tempest-TenantUsagesTestJSON-1157774739-project-member] Waiting for the task: (returnval){ [ 1816.420523] env[63279]: value = "task-2086708" [ 1816.420523] env[63279]: _type = "Task" [ 1816.420523] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1816.431210] env[63279]: DEBUG oslo_vmware.api [None req-a59d5ce1-88f2-4b01-b5f9-87a8413baff4 tempest-TenantUsagesTestJSON-1157774739 tempest-TenantUsagesTestJSON-1157774739-project-member] Task: {'id': task-2086708, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1816.480061] env[63279]: DEBUG nova.compute.utils [None req-e5b6a16c-1913-4848-8b42-b00fb552edc7 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Using /dev/sd instead of None {{(pid=63279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1816.483601] env[63279]: DEBUG nova.compute.manager [None req-e5b6a16c-1913-4848-8b42-b00fb552edc7 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] [instance: 4a9088e0-2992-4b18-8be9-6bc70633369b] Allocating IP information in the background. {{(pid=63279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1816.483787] env[63279]: DEBUG nova.network.neutron [None req-e5b6a16c-1913-4848-8b42-b00fb552edc7 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] [instance: 4a9088e0-2992-4b18-8be9-6bc70633369b] allocate_for_instance() {{(pid=63279) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1816.516273] env[63279]: DEBUG oslo_vmware.api [None req-6931e23b-1e81-4eab-aebf-1b452f1ef607 tempest-DeleteServersAdminTestJSON-778262859 tempest-DeleteServersAdminTestJSON-778262859-project-member] Task: {'id': task-2086703, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.514217} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1816.516273] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-6931e23b-1e81-4eab-aebf-1b452f1ef607 tempest-DeleteServersAdminTestJSON-778262859 tempest-DeleteServersAdminTestJSON-778262859-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] a301d225-684d-4f88-bc9b-7e02b8115b9d/a301d225-684d-4f88-bc9b-7e02b8115b9d.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1816.516273] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-6931e23b-1e81-4eab-aebf-1b452f1ef607 tempest-DeleteServersAdminTestJSON-778262859 tempest-DeleteServersAdminTestJSON-778262859-project-member] [instance: a301d225-684d-4f88-bc9b-7e02b8115b9d] Extending root virtual disk to 1048576 {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1816.516273] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e661b180-6819-4519-938f-fe977b183d95 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1816.520235] env[63279]: DEBUG nova.network.neutron [None req-96a3c836-4e66-45c5-95bd-3126c6132246 tempest-ServerExternalEventsTest-2060906853 tempest-ServerExternalEventsTest-2060906853-project-member] [instance: 24bce28c-fc43-4f17-9800-4d980f6729bc] Updating instance_info_cache with network_info: [{"id": "c3997b27-53bf-4f2d-828a-a55447612331", "address": "fa:16:3e:86:fe:67", "network": {"id": "1e875730-8e0e-4907-a6d2-776025ed7ab9", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.17", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "a935e86eee0a4c38adfe0367d2097a61", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "32028d02-abaa-4071-bc65-1460f5c772a8", "external-id": "nsx-vlan-transportzone-558", "segmentation_id": 558, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc3997b27-53", "ovs_interfaceid": "c3997b27-53bf-4f2d-828a-a55447612331", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1816.524218] env[63279]: DEBUG oslo_vmware.api [None req-6931e23b-1e81-4eab-aebf-1b452f1ef607 tempest-DeleteServersAdminTestJSON-778262859 tempest-DeleteServersAdminTestJSON-778262859-project-member] Waiting for the task: (returnval){ [ 1816.524218] env[63279]: value = "task-2086709" [ 1816.524218] env[63279]: _type = "Task" [ 1816.524218] env[63279]: } to complete. 
{{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1816.535213] env[63279]: DEBUG oslo_vmware.api [None req-6931e23b-1e81-4eab-aebf-1b452f1ef607 tempest-DeleteServersAdminTestJSON-778262859 tempest-DeleteServersAdminTestJSON-778262859-project-member] Task: {'id': task-2086709, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1816.632485] env[63279]: DEBUG nova.policy [None req-e5b6a16c-1913-4848-8b42-b00fb552edc7 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8c9e365ca4f941f19cd9e82676a52acc', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3f6c6f65521a440fb80278bbff2d0ed0', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63279) authorize /opt/stack/nova/nova/policy.py:192}} [ 1816.726191] env[63279]: DEBUG nova.network.neutron [None req-aeaa6e99-ad33-4acc-9a18-53a9725a9078 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] [instance: 5656c853-ac83-47be-83c4-979a9e87ab91] Successfully created port: 2cc4a33a-bd88-4aec-a588-7c821bebf971 {{(pid=63279) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1816.758255] env[63279]: DEBUG oslo_vmware.api [None req-8c276986-9767-495b-b2d0-91b2bfe4ca4e tempest-ServerPasswordTestJSON-1558733645 tempest-ServerPasswordTestJSON-1558733645-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]522a6478-f182-a551-34a1-167ff80b5096, 'name': SearchDatastore_Task, 'duration_secs': 0.00848} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1816.759071] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4059bbff-75c9-49dc-b68c-4ae3c74e6dbe {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1816.765325] env[63279]: DEBUG oslo_vmware.api [None req-8c276986-9767-495b-b2d0-91b2bfe4ca4e tempest-ServerPasswordTestJSON-1558733645 tempest-ServerPasswordTestJSON-1558733645-project-member] Waiting for the task: (returnval){ [ 1816.765325] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]5268c801-59b0-0499-21c6-714974881a19" [ 1816.765325] env[63279]: _type = "Task" [ 1816.765325] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1816.774353] env[63279]: DEBUG oslo_vmware.api [None req-8c276986-9767-495b-b2d0-91b2bfe4ca4e tempest-ServerPasswordTestJSON-1558733645 tempest-ServerPasswordTestJSON-1558733645-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]5268c801-59b0-0499-21c6-714974881a19, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1816.872135] env[63279]: DEBUG oslo_concurrency.lockutils [None req-7fcc8714-6322-470e-8fb8-cd968acaa101 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Acquiring lock "41654a82-af5e-463e-80f9-86ba13a5ad2e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1816.872555] env[63279]: DEBUG oslo_concurrency.lockutils [None req-7fcc8714-6322-470e-8fb8-cd968acaa101 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Lock "41654a82-af5e-463e-80f9-86ba13a5ad2e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1816.932426] env[63279]: DEBUG oslo_vmware.api [None req-a59d5ce1-88f2-4b01-b5f9-87a8413baff4 tempest-TenantUsagesTestJSON-1157774739 tempest-TenantUsagesTestJSON-1157774739-project-member] Task: {'id': task-2086708, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1816.987481] env[63279]: DEBUG nova.compute.manager [None req-e5b6a16c-1913-4848-8b42-b00fb552edc7 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] [instance: 4a9088e0-2992-4b18-8be9-6bc70633369b] Start building block device mappings for instance. {{(pid=63279) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1817.028017] env[63279]: DEBUG oslo_concurrency.lockutils [None req-96a3c836-4e66-45c5-95bd-3126c6132246 tempest-ServerExternalEventsTest-2060906853 tempest-ServerExternalEventsTest-2060906853-project-member] Releasing lock "refresh_cache-24bce28c-fc43-4f17-9800-4d980f6729bc" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1817.028017] env[63279]: DEBUG nova.compute.manager [None req-96a3c836-4e66-45c5-95bd-3126c6132246 tempest-ServerExternalEventsTest-2060906853 tempest-ServerExternalEventsTest-2060906853-project-member] [instance: 24bce28c-fc43-4f17-9800-4d980f6729bc] Instance network_info: |[{"id": "c3997b27-53bf-4f2d-828a-a55447612331", "address": "fa:16:3e:86:fe:67", "network": {"id": "1e875730-8e0e-4907-a6d2-776025ed7ab9", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.17", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "a935e86eee0a4c38adfe0367d2097a61", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "32028d02-abaa-4071-bc65-1460f5c772a8", "external-id": "nsx-vlan-transportzone-558", "segmentation_id": 558, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc3997b27-53", "ovs_interfaceid": "c3997b27-53bf-4f2d-828a-a55447612331", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, 
"delegate_create": true, "meta": {}}]| {{(pid=63279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1817.028213] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-96a3c836-4e66-45c5-95bd-3126c6132246 tempest-ServerExternalEventsTest-2060906853 tempest-ServerExternalEventsTest-2060906853-project-member] [instance: 24bce28c-fc43-4f17-9800-4d980f6729bc] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:86:fe:67', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '32028d02-abaa-4071-bc65-1460f5c772a8', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c3997b27-53bf-4f2d-828a-a55447612331', 'vif_model': 'vmxnet3'}] {{(pid=63279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1817.034901] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-96a3c836-4e66-45c5-95bd-3126c6132246 tempest-ServerExternalEventsTest-2060906853 tempest-ServerExternalEventsTest-2060906853-project-member] Creating folder: Project (c54ed03fdab9451b9f9613a306deb381). Parent ref: group-v427491. {{(pid=63279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1817.042799] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d2f2355e-c034-4825-aa8b-6c7472d7150a {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1817.052027] env[63279]: DEBUG oslo_vmware.api [None req-6931e23b-1e81-4eab-aebf-1b452f1ef607 tempest-DeleteServersAdminTestJSON-778262859 tempest-DeleteServersAdminTestJSON-778262859-project-member] Task: {'id': task-2086709, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.083086} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1817.052334] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-6931e23b-1e81-4eab-aebf-1b452f1ef607 tempest-DeleteServersAdminTestJSON-778262859 tempest-DeleteServersAdminTestJSON-778262859-project-member] [instance: a301d225-684d-4f88-bc9b-7e02b8115b9d] Extended root virtual disk {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1817.053268] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ce69c1d-4450-44ba-9efa-7ad7f78c567d {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1817.058926] env[63279]: INFO nova.virt.vmwareapi.vm_util [None req-96a3c836-4e66-45c5-95bd-3126c6132246 tempest-ServerExternalEventsTest-2060906853 tempest-ServerExternalEventsTest-2060906853-project-member] Created folder: Project (c54ed03fdab9451b9f9613a306deb381) in parent group-v427491. [ 1817.058982] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-96a3c836-4e66-45c5-95bd-3126c6132246 tempest-ServerExternalEventsTest-2060906853 tempest-ServerExternalEventsTest-2060906853-project-member] Creating folder: Instances. Parent ref: group-v427510. 
{{(pid=63279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1817.059588] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f9d80178-248a-4c46-956b-c6b5cb7efe41 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1817.081175] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-6931e23b-1e81-4eab-aebf-1b452f1ef607 tempest-DeleteServersAdminTestJSON-778262859 tempest-DeleteServersAdminTestJSON-778262859-project-member] [instance: a301d225-684d-4f88-bc9b-7e02b8115b9d] Reconfiguring VM instance instance-00000005 to attach disk [datastore1] a301d225-684d-4f88-bc9b-7e02b8115b9d/a301d225-684d-4f88-bc9b-7e02b8115b9d.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1817.083551] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-90275409-9e6d-40a4-a744-4716dda6b755 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1817.102156] env[63279]: INFO nova.virt.vmwareapi.vm_util [None req-96a3c836-4e66-45c5-95bd-3126c6132246 tempest-ServerExternalEventsTest-2060906853 tempest-ServerExternalEventsTest-2060906853-project-member] Created folder: Instances in parent group-v427510. [ 1817.102295] env[63279]: DEBUG oslo.service.loopingcall [None req-96a3c836-4e66-45c5-95bd-3126c6132246 tempest-ServerExternalEventsTest-2060906853 tempest-ServerExternalEventsTest-2060906853-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1817.102913] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 24bce28c-fc43-4f17-9800-4d980f6729bc] Creating VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1817.103190] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-baf0aaea-6058-4644-8a00-693c849a0d19 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1817.122062] env[63279]: DEBUG oslo_vmware.api [None req-6931e23b-1e81-4eab-aebf-1b452f1ef607 tempest-DeleteServersAdminTestJSON-778262859 tempest-DeleteServersAdminTestJSON-778262859-project-member] Waiting for the task: (returnval){ [ 1817.122062] env[63279]: value = "task-2086712" [ 1817.122062] env[63279]: _type = "Task" [ 1817.122062] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1817.127195] env[63279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1817.127195] env[63279]: value = "task-2086713" [ 1817.127195] env[63279]: _type = "Task" [ 1817.127195] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1817.143247] env[63279]: DEBUG oslo_vmware.api [None req-6931e23b-1e81-4eab-aebf-1b452f1ef607 tempest-DeleteServersAdminTestJSON-778262859 tempest-DeleteServersAdminTestJSON-778262859-project-member] Task: {'id': task-2086712, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1817.143247] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2086713, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1817.282539] env[63279]: DEBUG oslo_vmware.api [None req-8c276986-9767-495b-b2d0-91b2bfe4ca4e tempest-ServerPasswordTestJSON-1558733645 tempest-ServerPasswordTestJSON-1558733645-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]5268c801-59b0-0499-21c6-714974881a19, 'name': SearchDatastore_Task, 'duration_secs': 0.03181} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1817.282539] env[63279]: DEBUG oslo_concurrency.lockutils [None req-8c276986-9767-495b-b2d0-91b2bfe4ca4e tempest-ServerPasswordTestJSON-1558733645 tempest-ServerPasswordTestJSON-1558733645-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1817.282908] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-8c276986-9767-495b-b2d0-91b2bfe4ca4e tempest-ServerPasswordTestJSON-1558733645 tempest-ServerPasswordTestJSON-1558733645-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] 21c2bba1-5482-496a-9e2a-f123a94ed48a/21c2bba1-5482-496a-9e2a-f123a94ed48a.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1817.282999] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b30433a6-be6f-4553-8784-87d7488050af {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1817.293544] env[63279]: DEBUG oslo_vmware.api [None req-8c276986-9767-495b-b2d0-91b2bfe4ca4e tempest-ServerPasswordTestJSON-1558733645 tempest-ServerPasswordTestJSON-1558733645-project-member] Waiting for the task: (returnval){ [ 1817.293544] env[63279]: value = "task-2086714" [ 1817.293544] env[63279]: _type = "Task" [ 1817.293544] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1817.303079] env[63279]: DEBUG oslo_vmware.api [None req-8c276986-9767-495b-b2d0-91b2bfe4ca4e tempest-ServerPasswordTestJSON-1558733645 tempest-ServerPasswordTestJSON-1558733645-project-member] Task: {'id': task-2086714, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1817.403712] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5f6447d-7892-4270-be91-488f8fee2bbf {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1817.411637] env[63279]: DEBUG nova.compute.manager [req-b6b6ca24-82d7-45b7-b8c1-768b9c7f67f9 req-9c83b2fa-5237-4959-9650-d9b49b925830 service nova] [instance: a301d225-684d-4f88-bc9b-7e02b8115b9d] Received event network-changed-05cc981a-fb6c-4723-8b86-d58715a9c5e9 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 1817.411637] env[63279]: DEBUG nova.compute.manager [req-b6b6ca24-82d7-45b7-b8c1-768b9c7f67f9 req-9c83b2fa-5237-4959-9650-d9b49b925830 service nova] [instance: a301d225-684d-4f88-bc9b-7e02b8115b9d] Refreshing instance network info cache due to event network-changed-05cc981a-fb6c-4723-8b86-d58715a9c5e9. {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11655}} [ 1817.413807] env[63279]: DEBUG oslo_concurrency.lockutils [req-b6b6ca24-82d7-45b7-b8c1-768b9c7f67f9 req-9c83b2fa-5237-4959-9650-d9b49b925830 service nova] Acquiring lock "refresh_cache-a301d225-684d-4f88-bc9b-7e02b8115b9d" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1817.413807] env[63279]: DEBUG oslo_concurrency.lockutils [req-b6b6ca24-82d7-45b7-b8c1-768b9c7f67f9 req-9c83b2fa-5237-4959-9650-d9b49b925830 service nova] Acquired lock "refresh_cache-a301d225-684d-4f88-bc9b-7e02b8115b9d" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1817.413807] env[63279]: DEBUG nova.network.neutron [req-b6b6ca24-82d7-45b7-b8c1-768b9c7f67f9 req-9c83b2fa-5237-4959-9650-d9b49b925830 service nova] [instance: a301d225-684d-4f88-bc9b-7e02b8115b9d] Refreshing network info cache for port 05cc981a-fb6c-4723-8b86-d58715a9c5e9 {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1817.421409] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7cacfb54-832c-471b-9dfa-b1a0cfaa8379 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1817.454863] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3b05bcf-8709-42d1-a26f-5a474122f7cd {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1817.466914] env[63279]: DEBUG oslo_vmware.api [None req-a59d5ce1-88f2-4b01-b5f9-87a8413baff4 tempest-TenantUsagesTestJSON-1157774739 tempest-TenantUsagesTestJSON-1157774739-project-member] Task: {'id': task-2086708, 'name': PowerOnVM_Task, 'duration_secs': 1.005162} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1817.470321] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-a59d5ce1-88f2-4b01-b5f9-87a8413baff4 tempest-TenantUsagesTestJSON-1157774739 tempest-TenantUsagesTestJSON-1157774739-project-member] [instance: 8c712d0d-55c2-4a14-b759-9441594211e1] Powered on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1817.470443] env[63279]: INFO nova.compute.manager [None req-a59d5ce1-88f2-4b01-b5f9-87a8413baff4 tempest-TenantUsagesTestJSON-1157774739 tempest-TenantUsagesTestJSON-1157774739-project-member] [instance: 8c712d0d-55c2-4a14-b759-9441594211e1] Took 14.03 seconds to spawn the instance on the hypervisor. [ 1817.470715] env[63279]: DEBUG nova.compute.manager [None req-a59d5ce1-88f2-4b01-b5f9-87a8413baff4 tempest-TenantUsagesTestJSON-1157774739 tempest-TenantUsagesTestJSON-1157774739-project-member] [instance: 8c712d0d-55c2-4a14-b759-9441594211e1] Checking state {{(pid=63279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1817.472413] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-386f1507-0e9f-4f79-8391-94374d84ca75 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1817.477235] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c179af1e-205c-4c53-9c62-7666448e8486 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1817.510865] env[63279]: DEBUG nova.compute.provider_tree [None req-9be70fed-855a-424c-af8e-36597da29526 tempest-ImagesOneServerTestJSON-859902932 tempest-ImagesOneServerTestJSON-859902932-project-member] Inventory has not changed in ProviderTree for provider: 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1817.638293] env[63279]: DEBUG oslo_vmware.api [None req-6931e23b-1e81-4eab-aebf-1b452f1ef607 tempest-DeleteServersAdminTestJSON-778262859 tempest-DeleteServersAdminTestJSON-778262859-project-member] Task: {'id': task-2086712, 'name': ReconfigVM_Task, 'duration_secs': 0.405143} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1817.641978] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-6931e23b-1e81-4eab-aebf-1b452f1ef607 tempest-DeleteServersAdminTestJSON-778262859 tempest-DeleteServersAdminTestJSON-778262859-project-member] [instance: a301d225-684d-4f88-bc9b-7e02b8115b9d] Reconfigured VM instance instance-00000005 to attach disk [datastore1] a301d225-684d-4f88-bc9b-7e02b8115b9d/a301d225-684d-4f88-bc9b-7e02b8115b9d.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1817.642626] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2086713, 'name': CreateVM_Task} progress is 99%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1817.642849] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-fcfc6f68-e307-4f92-a726-c7dcad8102af {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1817.652064] env[63279]: DEBUG oslo_vmware.api [None req-6931e23b-1e81-4eab-aebf-1b452f1ef607 tempest-DeleteServersAdminTestJSON-778262859 tempest-DeleteServersAdminTestJSON-778262859-project-member] Waiting for the task: (returnval){ [ 1817.652064] env[63279]: value = "task-2086715" [ 1817.652064] env[63279]: _type = "Task" [ 1817.652064] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1817.660926] env[63279]: DEBUG oslo_vmware.api [None req-6931e23b-1e81-4eab-aebf-1b452f1ef607 tempest-DeleteServersAdminTestJSON-778262859 tempest-DeleteServersAdminTestJSON-778262859-project-member] Task: {'id': task-2086715, 'name': Rename_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1817.811328] env[63279]: DEBUG oslo_vmware.api [None req-8c276986-9767-495b-b2d0-91b2bfe4ca4e tempest-ServerPasswordTestJSON-1558733645 tempest-ServerPasswordTestJSON-1558733645-project-member] Task: {'id': task-2086714, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1817.905970] env[63279]: DEBUG nova.network.neutron [req-a91ab7d9-2e1c-49c3-b204-de7c0a88c888 req-2346fcca-2ec2-47dc-a509-8abc7b82e986 service nova] [instance: 8c712d0d-55c2-4a14-b759-9441594211e1] Updated VIF entry in instance network info cache for port a1328084-8103-45cf-a3f8-b825bebe06c0. 
{{(pid=63279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1817.906394] env[63279]: DEBUG nova.network.neutron [req-a91ab7d9-2e1c-49c3-b204-de7c0a88c888 req-2346fcca-2ec2-47dc-a509-8abc7b82e986 service nova] [instance: 8c712d0d-55c2-4a14-b759-9441594211e1] Updating instance_info_cache with network_info: [{"id": "a1328084-8103-45cf-a3f8-b825bebe06c0", "address": "fa:16:3e:e0:86:46", "network": {"id": "1e875730-8e0e-4907-a6d2-776025ed7ab9", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.80", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "a935e86eee0a4c38adfe0367d2097a61", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "32028d02-abaa-4071-bc65-1460f5c772a8", "external-id": "nsx-vlan-transportzone-558", "segmentation_id": 558, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa1328084-81", "ovs_interfaceid": "a1328084-8103-45cf-a3f8-b825bebe06c0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1818.020057] env[63279]: DEBUG nova.compute.manager [None req-e5b6a16c-1913-4848-8b42-b00fb552edc7 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] [instance: 4a9088e0-2992-4b18-8be9-6bc70633369b] Start spawning the instance on the hypervisor. {{(pid=63279) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1818.023997] env[63279]: DEBUG nova.scheduler.client.report [None req-9be70fed-855a-424c-af8e-36597da29526 tempest-ImagesOneServerTestJSON-859902932 tempest-ImagesOneServerTestJSON-859902932-project-member] Inventory has not changed for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1818.032353] env[63279]: INFO nova.compute.manager [None req-a59d5ce1-88f2-4b01-b5f9-87a8413baff4 tempest-TenantUsagesTestJSON-1157774739 tempest-TenantUsagesTestJSON-1157774739-project-member] [instance: 8c712d0d-55c2-4a14-b759-9441594211e1] Took 24.48 seconds to build instance. 
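The inventory reported above for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 is what the scheduler places new instances against. A small sketch of the arithmetic, assuming the usual Placement rule that schedulable capacity is (total - reserved) * allocation_ratio, with the values copied from the log entry above:

# Illustrative arithmetic only: schedulable capacity per resource class for
# the inventory record shown above, using (total - reserved) * allocation_ratio.
inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0,
                  'min_unit': 1, 'max_unit': 16,    'step_size': 1},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0,
                  'min_unit': 1, 'max_unit': 65530, 'step_size': 1},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0,
                  'min_unit': 1, 'max_unit': 171,   'step_size': 1},
}

for rc, inv in inventory.items():
    capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
    print(f"{rc}: schedulable capacity ~ {capacity:g}")
# VCPU: 192, MEMORY_MB: 196078, DISK_GB: 400 for the provider shown above.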
[ 1818.058036] env[63279]: DEBUG nova.virt.hardware [None req-e5b6a16c-1913-4848-8b42-b00fb552edc7 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-13T17:30:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-13T17:30:01Z,direct_url=,disk_format='vmdk',id=30887889-e45b-4f67-8b3c-16216e594a90,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a935e86eee0a4c38adfe0367d2097a61',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-13T17:30:02Z,virtual_size=,visibility=), allow threads: False {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1818.058507] env[63279]: DEBUG nova.virt.hardware [None req-e5b6a16c-1913-4848-8b42-b00fb552edc7 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Flavor limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1818.058507] env[63279]: DEBUG nova.virt.hardware [None req-e5b6a16c-1913-4848-8b42-b00fb552edc7 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Image limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1818.058621] env[63279]: DEBUG nova.virt.hardware [None req-e5b6a16c-1913-4848-8b42-b00fb552edc7 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Flavor pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1818.058793] env[63279]: DEBUG nova.virt.hardware [None req-e5b6a16c-1913-4848-8b42-b00fb552edc7 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Image pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1818.058921] env[63279]: DEBUG nova.virt.hardware [None req-e5b6a16c-1913-4848-8b42-b00fb552edc7 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1818.061328] env[63279]: DEBUG nova.virt.hardware [None req-e5b6a16c-1913-4848-8b42-b00fb552edc7 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1818.061328] env[63279]: DEBUG nova.virt.hardware [None req-e5b6a16c-1913-4848-8b42-b00fb552edc7 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1818.061328] env[63279]: DEBUG nova.virt.hardware [None req-e5b6a16c-1913-4848-8b42-b00fb552edc7 tempest-ServersAdminTestJSON-1578757811 
tempest-ServersAdminTestJSON-1578757811-project-member] Got 1 possible topologies {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1818.061328] env[63279]: DEBUG nova.virt.hardware [None req-e5b6a16c-1913-4848-8b42-b00fb552edc7 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1818.061328] env[63279]: DEBUG nova.virt.hardware [None req-e5b6a16c-1913-4848-8b42-b00fb552edc7 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1818.064032] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c551743-aca6-4213-b4df-2b85bd0013a0 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1818.073053] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb7b03da-5e97-49de-be52-1b59068ed21a {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1818.108892] env[63279]: DEBUG nova.network.neutron [None req-e5b6a16c-1913-4848-8b42-b00fb552edc7 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] [instance: 4a9088e0-2992-4b18-8be9-6bc70633369b] Successfully created port: 63a3f09b-45d0-46eb-a197-186f1ec415d6 {{(pid=63279) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1818.145914] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2086713, 'name': CreateVM_Task, 'duration_secs': 0.572092} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1818.146138] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 24bce28c-fc43-4f17-9800-4d980f6729bc] Created VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1818.146853] env[63279]: DEBUG oslo_concurrency.lockutils [None req-96a3c836-4e66-45c5-95bd-3126c6132246 tempest-ServerExternalEventsTest-2060906853 tempest-ServerExternalEventsTest-2060906853-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1818.147026] env[63279]: DEBUG oslo_concurrency.lockutils [None req-96a3c836-4e66-45c5-95bd-3126c6132246 tempest-ServerExternalEventsTest-2060906853 tempest-ServerExternalEventsTest-2060906853-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1818.147350] env[63279]: DEBUG oslo_concurrency.lockutils [None req-96a3c836-4e66-45c5-95bd-3126c6132246 tempest-ServerExternalEventsTest-2060906853 tempest-ServerExternalEventsTest-2060906853-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1818.147864] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c8d3a5bb-1469-44cf-94fa-90caedd1dc9d {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1818.158450] env[63279]: DEBUG oslo_vmware.api [None req-96a3c836-4e66-45c5-95bd-3126c6132246 tempest-ServerExternalEventsTest-2060906853 tempest-ServerExternalEventsTest-2060906853-project-member] Waiting for the task: (returnval){ [ 1818.158450] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52bc614c-0893-11b7-27be-922a4d436cbb" [ 1818.158450] env[63279]: _type = "Task" [ 1818.158450] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1818.170166] env[63279]: DEBUG oslo_vmware.api [None req-6931e23b-1e81-4eab-aebf-1b452f1ef607 tempest-DeleteServersAdminTestJSON-778262859 tempest-DeleteServersAdminTestJSON-778262859-project-member] Task: {'id': task-2086715, 'name': Rename_Task, 'duration_secs': 0.287682} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1818.170624] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-6931e23b-1e81-4eab-aebf-1b452f1ef607 tempest-DeleteServersAdminTestJSON-778262859 tempest-DeleteServersAdminTestJSON-778262859-project-member] [instance: a301d225-684d-4f88-bc9b-7e02b8115b9d] Powering on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1818.170624] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5c3ae4d5-78db-4df5-96ae-fedc9282d8c9 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1818.177590] env[63279]: DEBUG oslo_vmware.api [None req-96a3c836-4e66-45c5-95bd-3126c6132246 tempest-ServerExternalEventsTest-2060906853 tempest-ServerExternalEventsTest-2060906853-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52bc614c-0893-11b7-27be-922a4d436cbb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1818.181118] env[63279]: DEBUG oslo_vmware.api [None req-6931e23b-1e81-4eab-aebf-1b452f1ef607 tempest-DeleteServersAdminTestJSON-778262859 tempest-DeleteServersAdminTestJSON-778262859-project-member] Waiting for the task: (returnval){ [ 1818.181118] env[63279]: value = "task-2086716" [ 1818.181118] env[63279]: _type = "Task" [ 1818.181118] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1818.193420] env[63279]: DEBUG oslo_vmware.api [None req-6931e23b-1e81-4eab-aebf-1b452f1ef607 tempest-DeleteServersAdminTestJSON-778262859 tempest-DeleteServersAdminTestJSON-778262859-project-member] Task: {'id': task-2086716, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1818.307968] env[63279]: DEBUG oslo_vmware.api [None req-8c276986-9767-495b-b2d0-91b2bfe4ca4e tempest-ServerPasswordTestJSON-1558733645 tempest-ServerPasswordTestJSON-1558733645-project-member] Task: {'id': task-2086714, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.690529} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1818.308255] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-8c276986-9767-495b-b2d0-91b2bfe4ca4e tempest-ServerPasswordTestJSON-1558733645 tempest-ServerPasswordTestJSON-1558733645-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] 21c2bba1-5482-496a-9e2a-f123a94ed48a/21c2bba1-5482-496a-9e2a-f123a94ed48a.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1818.308461] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-8c276986-9767-495b-b2d0-91b2bfe4ca4e tempest-ServerPasswordTestJSON-1558733645 tempest-ServerPasswordTestJSON-1558733645-project-member] [instance: 21c2bba1-5482-496a-9e2a-f123a94ed48a] Extending root virtual disk to 1048576 {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1818.308711] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-acfdfeb4-711a-44c8-a93f-75212dca2d2f {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1818.316057] env[63279]: DEBUG oslo_vmware.api [None req-8c276986-9767-495b-b2d0-91b2bfe4ca4e tempest-ServerPasswordTestJSON-1558733645 tempest-ServerPasswordTestJSON-1558733645-project-member] Waiting for the task: (returnval){ [ 1818.316057] env[63279]: value = "task-2086717" [ 1818.316057] env[63279]: _type = "Task" [ 1818.316057] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1818.328872] env[63279]: DEBUG oslo_vmware.api [None req-8c276986-9767-495b-b2d0-91b2bfe4ca4e tempest-ServerPasswordTestJSON-1558733645 tempest-ServerPasswordTestJSON-1558733645-project-member] Task: {'id': task-2086717, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1818.413606] env[63279]: DEBUG oslo_concurrency.lockutils [req-a91ab7d9-2e1c-49c3-b204-de7c0a88c888 req-2346fcca-2ec2-47dc-a509-8abc7b82e986 service nova] Releasing lock "refresh_cache-8c712d0d-55c2-4a14-b759-9441594211e1" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1818.413763] env[63279]: DEBUG nova.compute.manager [req-a91ab7d9-2e1c-49c3-b204-de7c0a88c888 req-2346fcca-2ec2-47dc-a509-8abc7b82e986 service nova] [instance: 21c2bba1-5482-496a-9e2a-f123a94ed48a] Received event network-vif-plugged-c3f2f036-d3a2-4b32-b33a-516605f2a1b1 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 1818.413937] env[63279]: DEBUG oslo_concurrency.lockutils [req-a91ab7d9-2e1c-49c3-b204-de7c0a88c888 req-2346fcca-2ec2-47dc-a509-8abc7b82e986 service nova] Acquiring lock "21c2bba1-5482-496a-9e2a-f123a94ed48a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1818.414141] env[63279]: DEBUG oslo_concurrency.lockutils [req-a91ab7d9-2e1c-49c3-b204-de7c0a88c888 req-2346fcca-2ec2-47dc-a509-8abc7b82e986 service nova] Lock "21c2bba1-5482-496a-9e2a-f123a94ed48a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1818.414303] env[63279]: DEBUG oslo_concurrency.lockutils [req-a91ab7d9-2e1c-49c3-b204-de7c0a88c888 req-2346fcca-2ec2-47dc-a509-8abc7b82e986 service nova] Lock "21c2bba1-5482-496a-9e2a-f123a94ed48a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1818.414476] env[63279]: DEBUG nova.compute.manager [req-a91ab7d9-2e1c-49c3-b204-de7c0a88c888 req-2346fcca-2ec2-47dc-a509-8abc7b82e986 service nova] [instance: 21c2bba1-5482-496a-9e2a-f123a94ed48a] No waiting events found dispatching network-vif-plugged-c3f2f036-d3a2-4b32-b33a-516605f2a1b1 {{(pid=63279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1818.414647] env[63279]: WARNING nova.compute.manager [req-a91ab7d9-2e1c-49c3-b204-de7c0a88c888 req-2346fcca-2ec2-47dc-a509-8abc7b82e986 service nova] [instance: 21c2bba1-5482-496a-9e2a-f123a94ed48a] Received unexpected event network-vif-plugged-c3f2f036-d3a2-4b32-b33a-516605f2a1b1 for instance with vm_state building and task_state spawning. 
[ 1818.471090] env[63279]: DEBUG nova.network.neutron [None req-c56e5cd3-c5d3-497a-94cf-fa4a1928fd50 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] [instance: 6388f912-ae70-4e8f-b8e4-ceb02e0f8a51] Successfully updated port: bc9f3899-95c1-4e79-b121-03c9a2c0bc44 {{(pid=63279) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1818.534308] env[63279]: DEBUG oslo_concurrency.lockutils [None req-9be70fed-855a-424c-af8e-36597da29526 tempest-ImagesOneServerTestJSON-859902932 tempest-ImagesOneServerTestJSON-859902932-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.560s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1818.534945] env[63279]: DEBUG nova.compute.manager [None req-9be70fed-855a-424c-af8e-36597da29526 tempest-ImagesOneServerTestJSON-859902932 tempest-ImagesOneServerTestJSON-859902932-project-member] [instance: 7cd673bb-3795-4c6f-a1e0-a8ea0da0a35f] Start building networks asynchronously for instance. {{(pid=63279) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1818.542299] env[63279]: DEBUG oslo_concurrency.lockutils [None req-11810f0f-6d8f-490b-a1f5-ddff9fda61b5 tempest-ServerDiagnosticsV248Test-703192613 tempest-ServerDiagnosticsV248Test-703192613-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 4.909s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1818.542299] env[63279]: INFO nova.compute.claims [None req-11810f0f-6d8f-490b-a1f5-ddff9fda61b5 tempest-ServerDiagnosticsV248Test-703192613 tempest-ServerDiagnosticsV248Test-703192613-project-member] [instance: abd63285-ee3c-4546-b86d-6d4388765d94] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1818.548682] env[63279]: DEBUG oslo_concurrency.lockutils [None req-a59d5ce1-88f2-4b01-b5f9-87a8413baff4 tempest-TenantUsagesTestJSON-1157774739 tempest-TenantUsagesTestJSON-1157774739-project-member] Lock "8c712d0d-55c2-4a14-b759-9441594211e1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 26.011s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1818.673184] env[63279]: DEBUG oslo_vmware.api [None req-96a3c836-4e66-45c5-95bd-3126c6132246 tempest-ServerExternalEventsTest-2060906853 tempest-ServerExternalEventsTest-2060906853-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52bc614c-0893-11b7-27be-922a4d436cbb, 'name': SearchDatastore_Task, 'duration_secs': 0.025621} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1818.673576] env[63279]: DEBUG oslo_concurrency.lockutils [None req-96a3c836-4e66-45c5-95bd-3126c6132246 tempest-ServerExternalEventsTest-2060906853 tempest-ServerExternalEventsTest-2060906853-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1818.673941] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-96a3c836-4e66-45c5-95bd-3126c6132246 tempest-ServerExternalEventsTest-2060906853 tempest-ServerExternalEventsTest-2060906853-project-member] [instance: 24bce28c-fc43-4f17-9800-4d980f6729bc] Processing image 30887889-e45b-4f67-8b3c-16216e594a90 {{(pid=63279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1818.674649] env[63279]: DEBUG oslo_concurrency.lockutils [None req-96a3c836-4e66-45c5-95bd-3126c6132246 tempest-ServerExternalEventsTest-2060906853 tempest-ServerExternalEventsTest-2060906853-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1818.674899] env[63279]: DEBUG oslo_concurrency.lockutils [None req-96a3c836-4e66-45c5-95bd-3126c6132246 tempest-ServerExternalEventsTest-2060906853 tempest-ServerExternalEventsTest-2060906853-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1818.675218] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-96a3c836-4e66-45c5-95bd-3126c6132246 tempest-ServerExternalEventsTest-2060906853 tempest-ServerExternalEventsTest-2060906853-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1818.675718] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f79b6744-98c6-4112-8951-fac4dc028af6 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1818.690070] env[63279]: DEBUG oslo_vmware.api [None req-6931e23b-1e81-4eab-aebf-1b452f1ef607 tempest-DeleteServersAdminTestJSON-778262859 tempest-DeleteServersAdminTestJSON-778262859-project-member] Task: {'id': task-2086716, 'name': PowerOnVM_Task} progress is 71%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1818.691596] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-96a3c836-4e66-45c5-95bd-3126c6132246 tempest-ServerExternalEventsTest-2060906853 tempest-ServerExternalEventsTest-2060906853-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1818.691846] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-96a3c836-4e66-45c5-95bd-3126c6132246 tempest-ServerExternalEventsTest-2060906853 tempest-ServerExternalEventsTest-2060906853-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1818.692490] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e51203e7-778f-4210-8c2a-edace6953ecf {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1818.698842] env[63279]: DEBUG oslo_vmware.api [None req-96a3c836-4e66-45c5-95bd-3126c6132246 tempest-ServerExternalEventsTest-2060906853 tempest-ServerExternalEventsTest-2060906853-project-member] Waiting for the task: (returnval){ [ 1818.698842] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]522d5a70-b2dd-ba59-fffb-c9cf72d8d10f" [ 1818.698842] env[63279]: _type = "Task" [ 1818.698842] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1818.709573] env[63279]: DEBUG oslo_vmware.api [None req-96a3c836-4e66-45c5-95bd-3126c6132246 tempest-ServerExternalEventsTest-2060906853 tempest-ServerExternalEventsTest-2060906853-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]522d5a70-b2dd-ba59-fffb-c9cf72d8d10f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1818.808122] env[63279]: DEBUG nova.network.neutron [req-b6b6ca24-82d7-45b7-b8c1-768b9c7f67f9 req-9c83b2fa-5237-4959-9650-d9b49b925830 service nova] [instance: a301d225-684d-4f88-bc9b-7e02b8115b9d] Updated VIF entry in instance network info cache for port 05cc981a-fb6c-4723-8b86-d58715a9c5e9. {{(pid=63279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1818.808800] env[63279]: DEBUG nova.network.neutron [req-b6b6ca24-82d7-45b7-b8c1-768b9c7f67f9 req-9c83b2fa-5237-4959-9650-d9b49b925830 service nova] [instance: a301d225-684d-4f88-bc9b-7e02b8115b9d] Updating instance_info_cache with network_info: [{"id": "05cc981a-fb6c-4723-8b86-d58715a9c5e9", "address": "fa:16:3e:80:3a:dd", "network": {"id": "1e875730-8e0e-4907-a6d2-776025ed7ab9", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.240", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "a935e86eee0a4c38adfe0367d2097a61", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "32028d02-abaa-4071-bc65-1460f5c772a8", "external-id": "nsx-vlan-transportzone-558", "segmentation_id": 558, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap05cc981a-fb", "ovs_interfaceid": "05cc981a-fb6c-4723-8b86-d58715a9c5e9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1818.833041] env[63279]: DEBUG oslo_vmware.api [None req-8c276986-9767-495b-b2d0-91b2bfe4ca4e tempest-ServerPasswordTestJSON-1558733645 tempest-ServerPasswordTestJSON-1558733645-project-member] Task: {'id': task-2086717, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.078007} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1818.834128] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-8c276986-9767-495b-b2d0-91b2bfe4ca4e tempest-ServerPasswordTestJSON-1558733645 tempest-ServerPasswordTestJSON-1558733645-project-member] [instance: 21c2bba1-5482-496a-9e2a-f123a94ed48a] Extended root virtual disk {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1818.834992] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed94ff1a-a1c1-4473-99d7-596359fb3990 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1818.867142] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-8c276986-9767-495b-b2d0-91b2bfe4ca4e tempest-ServerPasswordTestJSON-1558733645 tempest-ServerPasswordTestJSON-1558733645-project-member] [instance: 21c2bba1-5482-496a-9e2a-f123a94ed48a] Reconfiguring VM instance instance-00000006 to attach disk [datastore1] 21c2bba1-5482-496a-9e2a-f123a94ed48a/21c2bba1-5482-496a-9e2a-f123a94ed48a.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1818.867697] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fdc129d2-3a68-480f-8b8f-2d1f2a64d958 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1818.891333] env[63279]: DEBUG oslo_vmware.api [None req-8c276986-9767-495b-b2d0-91b2bfe4ca4e tempest-ServerPasswordTestJSON-1558733645 tempest-ServerPasswordTestJSON-1558733645-project-member] Waiting for the task: (returnval){ [ 1818.891333] env[63279]: value = "task-2086718" [ 1818.891333] env[63279]: _type = "Task" [ 1818.891333] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1818.902428] env[63279]: DEBUG oslo_vmware.api [None req-8c276986-9767-495b-b2d0-91b2bfe4ca4e tempest-ServerPasswordTestJSON-1558733645 tempest-ServerPasswordTestJSON-1558733645-project-member] Task: {'id': task-2086718, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1818.978018] env[63279]: DEBUG oslo_concurrency.lockutils [None req-c56e5cd3-c5d3-497a-94cf-fa4a1928fd50 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Acquiring lock "refresh_cache-6388f912-ae70-4e8f-b8e4-ceb02e0f8a51" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1818.978018] env[63279]: DEBUG oslo_concurrency.lockutils [None req-c56e5cd3-c5d3-497a-94cf-fa4a1928fd50 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Acquired lock "refresh_cache-6388f912-ae70-4e8f-b8e4-ceb02e0f8a51" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1818.978018] env[63279]: DEBUG nova.network.neutron [None req-c56e5cd3-c5d3-497a-94cf-fa4a1928fd50 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] [instance: 6388f912-ae70-4e8f-b8e4-ceb02e0f8a51] Building network info cache for instance {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1819.050816] env[63279]: DEBUG nova.compute.utils [None req-9be70fed-855a-424c-af8e-36597da29526 tempest-ImagesOneServerTestJSON-859902932 tempest-ImagesOneServerTestJSON-859902932-project-member] Using /dev/sd instead of None {{(pid=63279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1819.052505] env[63279]: DEBUG nova.compute.manager [None req-9be70fed-855a-424c-af8e-36597da29526 tempest-ImagesOneServerTestJSON-859902932 tempest-ImagesOneServerTestJSON-859902932-project-member] [instance: 7cd673bb-3795-4c6f-a1e0-a8ea0da0a35f] Allocating IP information in the background. {{(pid=63279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1819.052804] env[63279]: DEBUG nova.network.neutron [None req-9be70fed-855a-424c-af8e-36597da29526 tempest-ImagesOneServerTestJSON-859902932 tempest-ImagesOneServerTestJSON-859902932-project-member] [instance: 7cd673bb-3795-4c6f-a1e0-a8ea0da0a35f] allocate_for_instance() {{(pid=63279) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1819.057162] env[63279]: DEBUG nova.compute.manager [None req-271b2613-9c7a-40b6-867e-c4c5c1580416 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] [instance: c05e9aaa-e251-480c-94d6-56c29bb6282d] Starting instance... {{(pid=63279) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1819.191659] env[63279]: DEBUG oslo_vmware.api [None req-6931e23b-1e81-4eab-aebf-1b452f1ef607 tempest-DeleteServersAdminTestJSON-778262859 tempest-DeleteServersAdminTestJSON-778262859-project-member] Task: {'id': task-2086716, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1819.204530] env[63279]: DEBUG nova.policy [None req-9be70fed-855a-424c-af8e-36597da29526 tempest-ImagesOneServerTestJSON-859902932 tempest-ImagesOneServerTestJSON-859902932-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '4b2de1fc937048838f56d697c7fe73ae', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'afdc0a67b163421aafe94f0392a66fa8', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63279) authorize /opt/stack/nova/nova/policy.py:192}} [ 1819.214050] env[63279]: DEBUG oslo_vmware.api [None req-96a3c836-4e66-45c5-95bd-3126c6132246 tempest-ServerExternalEventsTest-2060906853 tempest-ServerExternalEventsTest-2060906853-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]522d5a70-b2dd-ba59-fffb-c9cf72d8d10f, 'name': SearchDatastore_Task, 'duration_secs': 0.040388} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1819.214918] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e3bdfb74-d183-494f-8194-0bef4ab61366 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1819.223213] env[63279]: DEBUG oslo_vmware.api [None req-96a3c836-4e66-45c5-95bd-3126c6132246 tempest-ServerExternalEventsTest-2060906853 tempest-ServerExternalEventsTest-2060906853-project-member] Waiting for the task: (returnval){ [ 1819.223213] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52f5f327-fc72-00bb-430e-7cef72acd58f" [ 1819.223213] env[63279]: _type = "Task" [ 1819.223213] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1819.239413] env[63279]: DEBUG oslo_vmware.api [None req-96a3c836-4e66-45c5-95bd-3126c6132246 tempest-ServerExternalEventsTest-2060906853 tempest-ServerExternalEventsTest-2060906853-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52f5f327-fc72-00bb-430e-7cef72acd58f, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1819.313090] env[63279]: DEBUG oslo_concurrency.lockutils [req-b6b6ca24-82d7-45b7-b8c1-768b9c7f67f9 req-9c83b2fa-5237-4959-9650-d9b49b925830 service nova] Releasing lock "refresh_cache-a301d225-684d-4f88-bc9b-7e02b8115b9d" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1819.313425] env[63279]: DEBUG nova.compute.manager [req-b6b6ca24-82d7-45b7-b8c1-768b9c7f67f9 req-9c83b2fa-5237-4959-9650-d9b49b925830 service nova] [instance: 24bce28c-fc43-4f17-9800-4d980f6729bc] Received event network-vif-plugged-c3997b27-53bf-4f2d-828a-a55447612331 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 1819.313710] env[63279]: DEBUG oslo_concurrency.lockutils [req-b6b6ca24-82d7-45b7-b8c1-768b9c7f67f9 req-9c83b2fa-5237-4959-9650-d9b49b925830 service nova] Acquiring lock "24bce28c-fc43-4f17-9800-4d980f6729bc-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1819.313946] env[63279]: DEBUG oslo_concurrency.lockutils [req-b6b6ca24-82d7-45b7-b8c1-768b9c7f67f9 req-9c83b2fa-5237-4959-9650-d9b49b925830 service nova] Lock "24bce28c-fc43-4f17-9800-4d980f6729bc-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1819.314143] env[63279]: DEBUG oslo_concurrency.lockutils [req-b6b6ca24-82d7-45b7-b8c1-768b9c7f67f9 req-9c83b2fa-5237-4959-9650-d9b49b925830 service nova] Lock "24bce28c-fc43-4f17-9800-4d980f6729bc-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1819.314367] env[63279]: DEBUG nova.compute.manager [req-b6b6ca24-82d7-45b7-b8c1-768b9c7f67f9 req-9c83b2fa-5237-4959-9650-d9b49b925830 service nova] [instance: 24bce28c-fc43-4f17-9800-4d980f6729bc] No waiting events found dispatching network-vif-plugged-c3997b27-53bf-4f2d-828a-a55447612331 {{(pid=63279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1819.316375] env[63279]: WARNING nova.compute.manager [req-b6b6ca24-82d7-45b7-b8c1-768b9c7f67f9 req-9c83b2fa-5237-4959-9650-d9b49b925830 service nova] [instance: 24bce28c-fc43-4f17-9800-4d980f6729bc] Received unexpected event network-vif-plugged-c3997b27-53bf-4f2d-828a-a55447612331 for instance with vm_state building and task_state spawning. [ 1819.406579] env[63279]: DEBUG oslo_vmware.api [None req-8c276986-9767-495b-b2d0-91b2bfe4ca4e tempest-ServerPasswordTestJSON-1558733645 tempest-ServerPasswordTestJSON-1558733645-project-member] Task: {'id': task-2086718, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1819.495730] env[63279]: DEBUG oslo_concurrency.lockutils [None req-62274c4c-fa27-46a5-a0f4-00e5f6b30c55 tempest-ServerDiagnosticsNegativeTest-1669012955 tempest-ServerDiagnosticsNegativeTest-1669012955-project-member] Acquiring lock "4063d5e0-1144-40fa-8ed8-efda16730617" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1819.495730] env[63279]: DEBUG oslo_concurrency.lockutils [None req-62274c4c-fa27-46a5-a0f4-00e5f6b30c55 tempest-ServerDiagnosticsNegativeTest-1669012955 tempest-ServerDiagnosticsNegativeTest-1669012955-project-member] Lock "4063d5e0-1144-40fa-8ed8-efda16730617" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1819.495730] env[63279]: DEBUG oslo_concurrency.lockutils [None req-62274c4c-fa27-46a5-a0f4-00e5f6b30c55 tempest-ServerDiagnosticsNegativeTest-1669012955 tempest-ServerDiagnosticsNegativeTest-1669012955-project-member] Acquiring lock "4063d5e0-1144-40fa-8ed8-efda16730617-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1819.495730] env[63279]: DEBUG oslo_concurrency.lockutils [None req-62274c4c-fa27-46a5-a0f4-00e5f6b30c55 tempest-ServerDiagnosticsNegativeTest-1669012955 tempest-ServerDiagnosticsNegativeTest-1669012955-project-member] Lock "4063d5e0-1144-40fa-8ed8-efda16730617-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1819.495981] env[63279]: DEBUG oslo_concurrency.lockutils [None req-62274c4c-fa27-46a5-a0f4-00e5f6b30c55 tempest-ServerDiagnosticsNegativeTest-1669012955 tempest-ServerDiagnosticsNegativeTest-1669012955-project-member] Lock "4063d5e0-1144-40fa-8ed8-efda16730617-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1819.496442] env[63279]: INFO nova.compute.manager [None req-62274c4c-fa27-46a5-a0f4-00e5f6b30c55 tempest-ServerDiagnosticsNegativeTest-1669012955 tempest-ServerDiagnosticsNegativeTest-1669012955-project-member] [instance: 4063d5e0-1144-40fa-8ed8-efda16730617] Terminating instance [ 1819.560859] env[63279]: DEBUG nova.compute.manager [None req-9be70fed-855a-424c-af8e-36597da29526 tempest-ImagesOneServerTestJSON-859902932 tempest-ImagesOneServerTestJSON-859902932-project-member] [instance: 7cd673bb-3795-4c6f-a1e0-a8ea0da0a35f] Start building block device mappings for instance. {{(pid=63279) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1819.578724] env[63279]: DEBUG nova.network.neutron [None req-c56e5cd3-c5d3-497a-94cf-fa4a1928fd50 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] [instance: 6388f912-ae70-4e8f-b8e4-ceb02e0f8a51] Instance cache missing network info. 
{{(pid=63279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1819.597953] env[63279]: DEBUG oslo_concurrency.lockutils [None req-271b2613-9c7a-40b6-867e-c4c5c1580416 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1819.698469] env[63279]: DEBUG oslo_vmware.api [None req-6931e23b-1e81-4eab-aebf-1b452f1ef607 tempest-DeleteServersAdminTestJSON-778262859 tempest-DeleteServersAdminTestJSON-778262859-project-member] Task: {'id': task-2086716, 'name': PowerOnVM_Task, 'duration_secs': 1.110592} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1819.699297] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-6931e23b-1e81-4eab-aebf-1b452f1ef607 tempest-DeleteServersAdminTestJSON-778262859 tempest-DeleteServersAdminTestJSON-778262859-project-member] [instance: a301d225-684d-4f88-bc9b-7e02b8115b9d] Powered on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1819.699297] env[63279]: INFO nova.compute.manager [None req-6931e23b-1e81-4eab-aebf-1b452f1ef607 tempest-DeleteServersAdminTestJSON-778262859 tempest-DeleteServersAdminTestJSON-778262859-project-member] [instance: a301d225-684d-4f88-bc9b-7e02b8115b9d] Took 13.92 seconds to spawn the instance on the hypervisor. [ 1819.699583] env[63279]: DEBUG nova.compute.manager [None req-6931e23b-1e81-4eab-aebf-1b452f1ef607 tempest-DeleteServersAdminTestJSON-778262859 tempest-DeleteServersAdminTestJSON-778262859-project-member] [instance: a301d225-684d-4f88-bc9b-7e02b8115b9d] Checking state {{(pid=63279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1819.700774] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7829288-7d60-443c-823b-682b0bb1eeec {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1819.747803] env[63279]: DEBUG oslo_vmware.api [None req-96a3c836-4e66-45c5-95bd-3126c6132246 tempest-ServerExternalEventsTest-2060906853 tempest-ServerExternalEventsTest-2060906853-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52f5f327-fc72-00bb-430e-7cef72acd58f, 'name': SearchDatastore_Task, 'duration_secs': 0.019353} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1819.749657] env[63279]: DEBUG oslo_concurrency.lockutils [None req-96a3c836-4e66-45c5-95bd-3126c6132246 tempest-ServerExternalEventsTest-2060906853 tempest-ServerExternalEventsTest-2060906853-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1819.749657] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-96a3c836-4e66-45c5-95bd-3126c6132246 tempest-ServerExternalEventsTest-2060906853 tempest-ServerExternalEventsTest-2060906853-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] 24bce28c-fc43-4f17-9800-4d980f6729bc/24bce28c-fc43-4f17-9800-4d980f6729bc.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1819.749657] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-30977048-e54c-4361-a886-aabf0f4e39c4 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1819.756511] env[63279]: DEBUG oslo_vmware.api [None req-96a3c836-4e66-45c5-95bd-3126c6132246 tempest-ServerExternalEventsTest-2060906853 tempest-ServerExternalEventsTest-2060906853-project-member] Waiting for the task: (returnval){ [ 1819.756511] env[63279]: value = "task-2086719" [ 1819.756511] env[63279]: _type = "Task" [ 1819.756511] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1819.766986] env[63279]: DEBUG oslo_vmware.api [None req-96a3c836-4e66-45c5-95bd-3126c6132246 tempest-ServerExternalEventsTest-2060906853 tempest-ServerExternalEventsTest-2060906853-project-member] Task: {'id': task-2086719, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1819.905257] env[63279]: DEBUG oslo_vmware.api [None req-8c276986-9767-495b-b2d0-91b2bfe4ca4e tempest-ServerPasswordTestJSON-1558733645 tempest-ServerPasswordTestJSON-1558733645-project-member] Task: {'id': task-2086718, 'name': ReconfigVM_Task, 'duration_secs': 0.521802} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1819.905680] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-8c276986-9767-495b-b2d0-91b2bfe4ca4e tempest-ServerPasswordTestJSON-1558733645 tempest-ServerPasswordTestJSON-1558733645-project-member] [instance: 21c2bba1-5482-496a-9e2a-f123a94ed48a] Reconfigured VM instance instance-00000006 to attach disk [datastore1] 21c2bba1-5482-496a-9e2a-f123a94ed48a/21c2bba1-5482-496a-9e2a-f123a94ed48a.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1819.906265] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-84231194-2c3d-4975-8846-36bf1b9bef1b {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1819.916144] env[63279]: DEBUG oslo_vmware.api [None req-8c276986-9767-495b-b2d0-91b2bfe4ca4e tempest-ServerPasswordTestJSON-1558733645 tempest-ServerPasswordTestJSON-1558733645-project-member] Waiting for the task: (returnval){ [ 1819.916144] env[63279]: value = "task-2086720" [ 1819.916144] env[63279]: _type = "Task" [ 1819.916144] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1819.925674] env[63279]: DEBUG oslo_vmware.api [None req-8c276986-9767-495b-b2d0-91b2bfe4ca4e tempest-ServerPasswordTestJSON-1558733645 tempest-ServerPasswordTestJSON-1558733645-project-member] Task: {'id': task-2086720, 'name': Rename_Task} progress is 5%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1819.945775] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-422fd5ad-455b-405c-a9ac-ce296bd2b15e {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1819.954093] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-619ff851-dff2-47cb-ab51-7a2204fed38a {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1819.989083] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1a7c706-6ba8-40fe-8c07-880c6ba866b8 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1819.997543] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dcefd82e-78c3-4eb8-a19d-82421fc1ce77 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1820.004155] env[63279]: DEBUG nova.compute.manager [None req-62274c4c-fa27-46a5-a0f4-00e5f6b30c55 tempest-ServerDiagnosticsNegativeTest-1669012955 tempest-ServerDiagnosticsNegativeTest-1669012955-project-member] [instance: 4063d5e0-1144-40fa-8ed8-efda16730617] Start destroying the instance on the hypervisor. 
{{(pid=63279) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1820.004155] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-62274c4c-fa27-46a5-a0f4-00e5f6b30c55 tempest-ServerDiagnosticsNegativeTest-1669012955 tempest-ServerDiagnosticsNegativeTest-1669012955-project-member] [instance: 4063d5e0-1144-40fa-8ed8-efda16730617] Destroying instance {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1820.004155] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ba7319c-f0b3-40fa-b197-611656c073fe {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1820.019261] env[63279]: DEBUG nova.compute.provider_tree [None req-11810f0f-6d8f-490b-a1f5-ddff9fda61b5 tempest-ServerDiagnosticsV248Test-703192613 tempest-ServerDiagnosticsV248Test-703192613-project-member] Updating inventory in ProviderTree for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1820.023995] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-62274c4c-fa27-46a5-a0f4-00e5f6b30c55 tempest-ServerDiagnosticsNegativeTest-1669012955 tempest-ServerDiagnosticsNegativeTest-1669012955-project-member] [instance: 4063d5e0-1144-40fa-8ed8-efda16730617] Powering off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1820.023995] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-234f51fd-9980-4ed0-919d-f2698b6741ef {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1820.033450] env[63279]: DEBUG oslo_vmware.api [None req-62274c4c-fa27-46a5-a0f4-00e5f6b30c55 tempest-ServerDiagnosticsNegativeTest-1669012955 tempest-ServerDiagnosticsNegativeTest-1669012955-project-member] Waiting for the task: (returnval){ [ 1820.033450] env[63279]: value = "task-2086721" [ 1820.033450] env[63279]: _type = "Task" [ 1820.033450] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1820.043691] env[63279]: DEBUG oslo_vmware.api [None req-62274c4c-fa27-46a5-a0f4-00e5f6b30c55 tempest-ServerDiagnosticsNegativeTest-1669012955 tempest-ServerDiagnosticsNegativeTest-1669012955-project-member] Task: {'id': task-2086721, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1820.227775] env[63279]: INFO nova.compute.manager [None req-6931e23b-1e81-4eab-aebf-1b452f1ef607 tempest-DeleteServersAdminTestJSON-778262859 tempest-DeleteServersAdminTestJSON-778262859-project-member] [instance: a301d225-684d-4f88-bc9b-7e02b8115b9d] Took 26.60 seconds to build instance. [ 1820.273352] env[63279]: DEBUG oslo_vmware.api [None req-96a3c836-4e66-45c5-95bd-3126c6132246 tempest-ServerExternalEventsTest-2060906853 tempest-ServerExternalEventsTest-2060906853-project-member] Task: {'id': task-2086719, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1820.428475] env[63279]: DEBUG oslo_vmware.api [None req-8c276986-9767-495b-b2d0-91b2bfe4ca4e tempest-ServerPasswordTestJSON-1558733645 tempest-ServerPasswordTestJSON-1558733645-project-member] Task: {'id': task-2086720, 'name': Rename_Task, 'duration_secs': 0.20352} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1820.429128] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-8c276986-9767-495b-b2d0-91b2bfe4ca4e tempest-ServerPasswordTestJSON-1558733645 tempest-ServerPasswordTestJSON-1558733645-project-member] [instance: 21c2bba1-5482-496a-9e2a-f123a94ed48a] Powering on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1820.429549] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8edff11d-a492-4dc0-9b10-b681e0c35a8e {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1820.436820] env[63279]: DEBUG oslo_vmware.api [None req-8c276986-9767-495b-b2d0-91b2bfe4ca4e tempest-ServerPasswordTestJSON-1558733645 tempest-ServerPasswordTestJSON-1558733645-project-member] Waiting for the task: (returnval){ [ 1820.436820] env[63279]: value = "task-2086722" [ 1820.436820] env[63279]: _type = "Task" [ 1820.436820] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1820.438268] env[63279]: DEBUG nova.network.neutron [None req-c56e5cd3-c5d3-497a-94cf-fa4a1928fd50 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] [instance: 6388f912-ae70-4e8f-b8e4-ceb02e0f8a51] Updating instance_info_cache with network_info: [{"id": "bc9f3899-95c1-4e79-b121-03c9a2c0bc44", "address": "fa:16:3e:9a:65:be", "network": {"id": "548d80cd-fb6c-47fc-8c1d-036889987399", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-219167599-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3f6c6f65521a440fb80278bbff2d0ed0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "678ebbe4-4c53-4eaf-a689-93981310f37d", "external-id": "nsx-vlan-transportzone-443", "segmentation_id": 443, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbc9f3899-95", "ovs_interfaceid": "bc9f3899-95c1-4e79-b121-03c9a2c0bc44", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1820.443870] env[63279]: DEBUG nova.network.neutron [None req-9be70fed-855a-424c-af8e-36597da29526 tempest-ImagesOneServerTestJSON-859902932 tempest-ImagesOneServerTestJSON-859902932-project-member] [instance: 7cd673bb-3795-4c6f-a1e0-a8ea0da0a35f] Successfully created port: dc489c36-2ef9-4f47-aadb-4f6503bb9d1b {{(pid=63279) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 
1820.453394] env[63279]: DEBUG oslo_vmware.api [None req-8c276986-9767-495b-b2d0-91b2bfe4ca4e tempest-ServerPasswordTestJSON-1558733645 tempest-ServerPasswordTestJSON-1558733645-project-member] Task: {'id': task-2086722, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1820.492680] env[63279]: DEBUG nova.network.neutron [None req-aeaa6e99-ad33-4acc-9a18-53a9725a9078 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] [instance: 5656c853-ac83-47be-83c4-979a9e87ab91] Successfully updated port: 2cc4a33a-bd88-4aec-a588-7c821bebf971 {{(pid=63279) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1820.545090] env[63279]: DEBUG oslo_vmware.api [None req-62274c4c-fa27-46a5-a0f4-00e5f6b30c55 tempest-ServerDiagnosticsNegativeTest-1669012955 tempest-ServerDiagnosticsNegativeTest-1669012955-project-member] Task: {'id': task-2086721, 'name': PowerOffVM_Task, 'duration_secs': 0.337068} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1820.545863] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-62274c4c-fa27-46a5-a0f4-00e5f6b30c55 tempest-ServerDiagnosticsNegativeTest-1669012955 tempest-ServerDiagnosticsNegativeTest-1669012955-project-member] [instance: 4063d5e0-1144-40fa-8ed8-efda16730617] Powered off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1820.545863] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-62274c4c-fa27-46a5-a0f4-00e5f6b30c55 tempest-ServerDiagnosticsNegativeTest-1669012955 tempest-ServerDiagnosticsNegativeTest-1669012955-project-member] [instance: 4063d5e0-1144-40fa-8ed8-efda16730617] Unregistering the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1820.545863] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1b66427f-45ad-4af9-8fca-cfabc59d8c95 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1820.552398] env[63279]: ERROR nova.scheduler.client.report [None req-11810f0f-6d8f-490b-a1f5-ddff9fda61b5 tempest-ServerDiagnosticsV248Test-703192613 tempest-ServerDiagnosticsV248Test-703192613-project-member] [req-f890fe22-c59b-4ef9-b597-0b05cc5913dc] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 0ba7c625-a0fc-4d3c-b804-196d00f00137. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-f890fe22-c59b-4ef9-b597-0b05cc5913dc"}]} [ 1820.576581] env[63279]: DEBUG nova.scheduler.client.report [None req-11810f0f-6d8f-490b-a1f5-ddff9fda61b5 tempest-ServerDiagnosticsV248Test-703192613 tempest-ServerDiagnosticsV248Test-703192613-project-member] Refreshing inventories for resource provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1820.583398] env[63279]: DEBUG nova.compute.manager [None req-9be70fed-855a-424c-af8e-36597da29526 tempest-ImagesOneServerTestJSON-859902932 tempest-ImagesOneServerTestJSON-859902932-project-member] [instance: 7cd673bb-3795-4c6f-a1e0-a8ea0da0a35f] Start spawning the instance on the hypervisor. {{(pid=63279) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1820.594171] env[63279]: DEBUG nova.scheduler.client.report [None req-11810f0f-6d8f-490b-a1f5-ddff9fda61b5 tempest-ServerDiagnosticsV248Test-703192613 tempest-ServerDiagnosticsV248Test-703192613-project-member] Updating ProviderTree inventory for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1820.594448] env[63279]: DEBUG nova.compute.provider_tree [None req-11810f0f-6d8f-490b-a1f5-ddff9fda61b5 tempest-ServerDiagnosticsV248Test-703192613 tempest-ServerDiagnosticsV248Test-703192613-project-member] Updating inventory in ProviderTree for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1820.610601] env[63279]: DEBUG nova.scheduler.client.report [None req-11810f0f-6d8f-490b-a1f5-ddff9fda61b5 tempest-ServerDiagnosticsV248Test-703192613 tempest-ServerDiagnosticsV248Test-703192613-project-member] Refreshing aggregate associations for resource provider 0ba7c625-a0fc-4d3c-b804-196d00f00137, aggregates: None {{(pid=63279) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1820.624627] env[63279]: DEBUG nova.virt.hardware [None req-9be70fed-855a-424c-af8e-36597da29526 tempest-ImagesOneServerTestJSON-859902932 tempest-ImagesOneServerTestJSON-859902932-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-13T17:30:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta 
ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-13T17:30:01Z,direct_url=,disk_format='vmdk',id=30887889-e45b-4f67-8b3c-16216e594a90,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a935e86eee0a4c38adfe0367d2097a61',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-13T17:30:02Z,virtual_size=,visibility=), allow threads: False {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1820.624823] env[63279]: DEBUG nova.virt.hardware [None req-9be70fed-855a-424c-af8e-36597da29526 tempest-ImagesOneServerTestJSON-859902932 tempest-ImagesOneServerTestJSON-859902932-project-member] Flavor limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1820.625026] env[63279]: DEBUG nova.virt.hardware [None req-9be70fed-855a-424c-af8e-36597da29526 tempest-ImagesOneServerTestJSON-859902932 tempest-ImagesOneServerTestJSON-859902932-project-member] Image limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1820.625105] env[63279]: DEBUG nova.virt.hardware [None req-9be70fed-855a-424c-af8e-36597da29526 tempest-ImagesOneServerTestJSON-859902932 tempest-ImagesOneServerTestJSON-859902932-project-member] Flavor pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1820.625244] env[63279]: DEBUG nova.virt.hardware [None req-9be70fed-855a-424c-af8e-36597da29526 tempest-ImagesOneServerTestJSON-859902932 tempest-ImagesOneServerTestJSON-859902932-project-member] Image pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1820.625789] env[63279]: DEBUG nova.virt.hardware [None req-9be70fed-855a-424c-af8e-36597da29526 tempest-ImagesOneServerTestJSON-859902932 tempest-ImagesOneServerTestJSON-859902932-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1820.626102] env[63279]: DEBUG nova.virt.hardware [None req-9be70fed-855a-424c-af8e-36597da29526 tempest-ImagesOneServerTestJSON-859902932 tempest-ImagesOneServerTestJSON-859902932-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1820.626408] env[63279]: DEBUG nova.virt.hardware [None req-9be70fed-855a-424c-af8e-36597da29526 tempest-ImagesOneServerTestJSON-859902932 tempest-ImagesOneServerTestJSON-859902932-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1820.626483] env[63279]: DEBUG nova.virt.hardware [None req-9be70fed-855a-424c-af8e-36597da29526 tempest-ImagesOneServerTestJSON-859902932 tempest-ImagesOneServerTestJSON-859902932-project-member] Got 1 possible topologies {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1820.626617] env[63279]: DEBUG nova.virt.hardware [None req-9be70fed-855a-424c-af8e-36597da29526 tempest-ImagesOneServerTestJSON-859902932 tempest-ImagesOneServerTestJSON-859902932-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1820.626792] 
env[63279]: DEBUG nova.virt.hardware [None req-9be70fed-855a-424c-af8e-36597da29526 tempest-ImagesOneServerTestJSON-859902932 tempest-ImagesOneServerTestJSON-859902932-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1820.627695] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5640cee1-9cd9-4eb7-b169-8e230349f7e7 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1820.636823] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-475b0550-d0ab-47d0-bb8b-a03bbd393b55 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1820.643110] env[63279]: DEBUG nova.scheduler.client.report [None req-11810f0f-6d8f-490b-a1f5-ddff9fda61b5 tempest-ServerDiagnosticsV248Test-703192613 tempest-ServerDiagnosticsV248Test-703192613-project-member] Refreshing trait associations for resource provider 0ba7c625-a0fc-4d3c-b804-196d00f00137, traits: COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK {{(pid=63279) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1820.647348] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-62274c4c-fa27-46a5-a0f4-00e5f6b30c55 tempest-ServerDiagnosticsNegativeTest-1669012955 tempest-ServerDiagnosticsNegativeTest-1669012955-project-member] [instance: 4063d5e0-1144-40fa-8ed8-efda16730617] Unregistered the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1820.647719] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-62274c4c-fa27-46a5-a0f4-00e5f6b30c55 tempest-ServerDiagnosticsNegativeTest-1669012955 tempest-ServerDiagnosticsNegativeTest-1669012955-project-member] [instance: 4063d5e0-1144-40fa-8ed8-efda16730617] Deleting contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1820.647820] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-62274c4c-fa27-46a5-a0f4-00e5f6b30c55 tempest-ServerDiagnosticsNegativeTest-1669012955 tempest-ServerDiagnosticsNegativeTest-1669012955-project-member] Deleting the datastore file [datastore1] 4063d5e0-1144-40fa-8ed8-efda16730617 {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1820.649236] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-bafeaf90-8b37-4402-a64a-26f2291d0b74 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1820.667218] env[63279]: DEBUG oslo_vmware.api [None req-62274c4c-fa27-46a5-a0f4-00e5f6b30c55 tempest-ServerDiagnosticsNegativeTest-1669012955 tempest-ServerDiagnosticsNegativeTest-1669012955-project-member] Waiting for the task: (returnval){ [ 1820.667218] env[63279]: value = "task-2086724" [ 1820.667218] env[63279]: _type = "Task" [ 1820.667218] env[63279]: } to complete. 
{{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1820.681746] env[63279]: DEBUG oslo_vmware.api [None req-62274c4c-fa27-46a5-a0f4-00e5f6b30c55 tempest-ServerDiagnosticsNegativeTest-1669012955 tempest-ServerDiagnosticsNegativeTest-1669012955-project-member] Task: {'id': task-2086724, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1820.683106] env[63279]: DEBUG nova.compute.manager [None req-8023bc91-633f-41ec-be29-30458ba4a599 tempest-ServerDiagnosticsTest-1323391275 tempest-ServerDiagnosticsTest-1323391275-project-admin] [instance: ac1d0e8f-446a-4a6d-a916-08f52426396d] Checking state {{(pid=63279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1820.684283] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-216dad85-665a-4699-b760-36e06e9e7033 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1820.692218] env[63279]: INFO nova.compute.manager [None req-8023bc91-633f-41ec-be29-30458ba4a599 tempest-ServerDiagnosticsTest-1323391275 tempest-ServerDiagnosticsTest-1323391275-project-admin] [instance: ac1d0e8f-446a-4a6d-a916-08f52426396d] Retrieving diagnostics [ 1820.695286] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-458c1e70-6091-43f9-be58-025d9c156fb0 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1820.734812] env[63279]: DEBUG oslo_concurrency.lockutils [None req-6931e23b-1e81-4eab-aebf-1b452f1ef607 tempest-DeleteServersAdminTestJSON-778262859 tempest-DeleteServersAdminTestJSON-778262859-project-member] Lock "a301d225-684d-4f88-bc9b-7e02b8115b9d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 28.111s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1820.758362] env[63279]: DEBUG oslo_concurrency.lockutils [None req-38d597a6-cdfb-4da8-926a-02d3447e2334 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Acquiring lock "00fb1bc8-2f70-4fa3-a6a2-54fd38ba89c7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1820.758597] env[63279]: DEBUG oslo_concurrency.lockutils [None req-38d597a6-cdfb-4da8-926a-02d3447e2334 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Lock "00fb1bc8-2f70-4fa3-a6a2-54fd38ba89c7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1820.768500] env[63279]: DEBUG oslo_vmware.api [None req-96a3c836-4e66-45c5-95bd-3126c6132246 tempest-ServerExternalEventsTest-2060906853 tempest-ServerExternalEventsTest-2060906853-project-member] Task: {'id': task-2086719, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.633454} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1820.771263] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-96a3c836-4e66-45c5-95bd-3126c6132246 tempest-ServerExternalEventsTest-2060906853 tempest-ServerExternalEventsTest-2060906853-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] 24bce28c-fc43-4f17-9800-4d980f6729bc/24bce28c-fc43-4f17-9800-4d980f6729bc.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1820.771517] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-96a3c836-4e66-45c5-95bd-3126c6132246 tempest-ServerExternalEventsTest-2060906853 tempest-ServerExternalEventsTest-2060906853-project-member] [instance: 24bce28c-fc43-4f17-9800-4d980f6729bc] Extending root virtual disk to 1048576 {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1820.772129] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-4835f29d-474f-4d4f-b0db-ff191c07c7b7 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1820.779489] env[63279]: DEBUG oslo_vmware.api [None req-96a3c836-4e66-45c5-95bd-3126c6132246 tempest-ServerExternalEventsTest-2060906853 tempest-ServerExternalEventsTest-2060906853-project-member] Waiting for the task: (returnval){ [ 1820.779489] env[63279]: value = "task-2086725" [ 1820.779489] env[63279]: _type = "Task" [ 1820.779489] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1820.790646] env[63279]: DEBUG oslo_vmware.api [None req-96a3c836-4e66-45c5-95bd-3126c6132246 tempest-ServerExternalEventsTest-2060906853 tempest-ServerExternalEventsTest-2060906853-project-member] Task: {'id': task-2086725, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1820.948759] env[63279]: DEBUG oslo_concurrency.lockutils [None req-c56e5cd3-c5d3-497a-94cf-fa4a1928fd50 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Releasing lock "refresh_cache-6388f912-ae70-4e8f-b8e4-ceb02e0f8a51" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1820.949084] env[63279]: DEBUG nova.compute.manager [None req-c56e5cd3-c5d3-497a-94cf-fa4a1928fd50 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] [instance: 6388f912-ae70-4e8f-b8e4-ceb02e0f8a51] Instance network_info: |[{"id": "bc9f3899-95c1-4e79-b121-03c9a2c0bc44", "address": "fa:16:3e:9a:65:be", "network": {"id": "548d80cd-fb6c-47fc-8c1d-036889987399", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-219167599-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3f6c6f65521a440fb80278bbff2d0ed0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "678ebbe4-4c53-4eaf-a689-93981310f37d", "external-id": "nsx-vlan-transportzone-443", "segmentation_id": 443, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbc9f3899-95", "ovs_interfaceid": "bc9f3899-95c1-4e79-b121-03c9a2c0bc44", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1820.949376] env[63279]: DEBUG oslo_vmware.api [None req-8c276986-9767-495b-b2d0-91b2bfe4ca4e tempest-ServerPasswordTestJSON-1558733645 tempest-ServerPasswordTestJSON-1558733645-project-member] Task: {'id': task-2086722, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1820.954558] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-c56e5cd3-c5d3-497a-94cf-fa4a1928fd50 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] [instance: 6388f912-ae70-4e8f-b8e4-ceb02e0f8a51] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:9a:65:be', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '678ebbe4-4c53-4eaf-a689-93981310f37d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'bc9f3899-95c1-4e79-b121-03c9a2c0bc44', 'vif_model': 'vmxnet3'}] {{(pid=63279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1820.969064] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-c56e5cd3-c5d3-497a-94cf-fa4a1928fd50 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Creating folder: Project (3f6c6f65521a440fb80278bbff2d0ed0). Parent ref: group-v427491. 
{{(pid=63279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1820.969689] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-86b35a86-8e96-4bfe-b6c4-61777bc392f1 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1820.985582] env[63279]: INFO nova.virt.vmwareapi.vm_util [None req-c56e5cd3-c5d3-497a-94cf-fa4a1928fd50 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Created folder: Project (3f6c6f65521a440fb80278bbff2d0ed0) in parent group-v427491. [ 1820.985582] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-c56e5cd3-c5d3-497a-94cf-fa4a1928fd50 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Creating folder: Instances. Parent ref: group-v427513. {{(pid=63279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1820.985582] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-dd686fef-ba6e-4b09-b6b7-36dfa053843c {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1820.996615] env[63279]: INFO nova.virt.vmwareapi.vm_util [None req-c56e5cd3-c5d3-497a-94cf-fa4a1928fd50 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Created folder: Instances in parent group-v427513. [ 1820.996615] env[63279]: DEBUG oslo.service.loopingcall [None req-c56e5cd3-c5d3-497a-94cf-fa4a1928fd50 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1820.996615] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6388f912-ae70-4e8f-b8e4-ceb02e0f8a51] Creating VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1820.996615] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-70412c98-0ac1-42c8-bf41-a9bd8d2d5861 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1821.015872] env[63279]: DEBUG oslo_concurrency.lockutils [None req-aeaa6e99-ad33-4acc-9a18-53a9725a9078 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Acquiring lock "refresh_cache-5656c853-ac83-47be-83c4-979a9e87ab91" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1821.016059] env[63279]: DEBUG oslo_concurrency.lockutils [None req-aeaa6e99-ad33-4acc-9a18-53a9725a9078 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Acquired lock "refresh_cache-5656c853-ac83-47be-83c4-979a9e87ab91" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1821.016769] env[63279]: DEBUG nova.network.neutron [None req-aeaa6e99-ad33-4acc-9a18-53a9725a9078 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] [instance: 5656c853-ac83-47be-83c4-979a9e87ab91] Building network info cache for instance {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1821.023864] env[63279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1821.023864] env[63279]: 
value = "task-2086728" [ 1821.023864] env[63279]: _type = "Task" [ 1821.023864] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1821.036459] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2086728, 'name': CreateVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1821.038886] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1648ab88-8e51-4362-8b16-8237f67b47b9 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1821.048630] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac239093-0770-48e6-91ec-807359e4cc2a {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1821.092213] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19ae1a6a-7290-4212-b8c2-b49dd2a5f641 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1821.100321] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f38d53a0-1218-4a40-9a40-9e2500ca716c {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1821.115636] env[63279]: DEBUG nova.compute.provider_tree [None req-11810f0f-6d8f-490b-a1f5-ddff9fda61b5 tempest-ServerDiagnosticsV248Test-703192613 tempest-ServerDiagnosticsV248Test-703192613-project-member] Updating inventory in ProviderTree for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1821.179265] env[63279]: DEBUG oslo_vmware.api [None req-62274c4c-fa27-46a5-a0f4-00e5f6b30c55 tempest-ServerDiagnosticsNegativeTest-1669012955 tempest-ServerDiagnosticsNegativeTest-1669012955-project-member] Task: {'id': task-2086724, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1821.237754] env[63279]: DEBUG nova.compute.manager [None req-271b2613-9c7a-40b6-867e-c4c5c1580416 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] [instance: 3d4db43f-5784-46e1-9710-f6becec011e2] Starting instance... {{(pid=63279) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1821.294808] env[63279]: DEBUG oslo_vmware.api [None req-96a3c836-4e66-45c5-95bd-3126c6132246 tempest-ServerExternalEventsTest-2060906853 tempest-ServerExternalEventsTest-2060906853-project-member] Task: {'id': task-2086725, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.068106} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1821.294808] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-96a3c836-4e66-45c5-95bd-3126c6132246 tempest-ServerExternalEventsTest-2060906853 tempest-ServerExternalEventsTest-2060906853-project-member] [instance: 24bce28c-fc43-4f17-9800-4d980f6729bc] Extended root virtual disk {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1821.295122] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-188e7d4c-6e55-431a-ba55-c9b9adbfa3c5 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1821.326299] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-96a3c836-4e66-45c5-95bd-3126c6132246 tempest-ServerExternalEventsTest-2060906853 tempest-ServerExternalEventsTest-2060906853-project-member] [instance: 24bce28c-fc43-4f17-9800-4d980f6729bc] Reconfiguring VM instance instance-00000007 to attach disk [datastore1] 24bce28c-fc43-4f17-9800-4d980f6729bc/24bce28c-fc43-4f17-9800-4d980f6729bc.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1821.328029] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c3bafcdb-2f31-4eba-9339-645be5bcc80b {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1821.348482] env[63279]: DEBUG oslo_vmware.api [None req-96a3c836-4e66-45c5-95bd-3126c6132246 tempest-ServerExternalEventsTest-2060906853 tempest-ServerExternalEventsTest-2060906853-project-member] Waiting for the task: (returnval){ [ 1821.348482] env[63279]: value = "task-2086729" [ 1821.348482] env[63279]: _type = "Task" [ 1821.348482] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1821.359057] env[63279]: DEBUG oslo_vmware.api [None req-96a3c836-4e66-45c5-95bd-3126c6132246 tempest-ServerExternalEventsTest-2060906853 tempest-ServerExternalEventsTest-2060906853-project-member] Task: {'id': task-2086729, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1821.451415] env[63279]: DEBUG oslo_vmware.api [None req-8c276986-9767-495b-b2d0-91b2bfe4ca4e tempest-ServerPasswordTestJSON-1558733645 tempest-ServerPasswordTestJSON-1558733645-project-member] Task: {'id': task-2086722, 'name': PowerOnVM_Task} progress is 88%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1821.535363] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2086728, 'name': CreateVM_Task} progress is 99%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1821.602697] env[63279]: DEBUG nova.compute.manager [req-c8ba2a16-d7ba-46fd-8c02-f3abe0b6b11e req-d9d3e7aa-b9b5-402d-ac46-f3b81a16045c service nova] [instance: 21c2bba1-5482-496a-9e2a-f123a94ed48a] Received event network-changed-c3f2f036-d3a2-4b32-b33a-516605f2a1b1 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 1821.602915] env[63279]: DEBUG nova.compute.manager [req-c8ba2a16-d7ba-46fd-8c02-f3abe0b6b11e req-d9d3e7aa-b9b5-402d-ac46-f3b81a16045c service nova] [instance: 21c2bba1-5482-496a-9e2a-f123a94ed48a] Refreshing instance network info cache due to event network-changed-c3f2f036-d3a2-4b32-b33a-516605f2a1b1. {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11655}} [ 1821.606704] env[63279]: DEBUG oslo_concurrency.lockutils [req-c8ba2a16-d7ba-46fd-8c02-f3abe0b6b11e req-d9d3e7aa-b9b5-402d-ac46-f3b81a16045c service nova] Acquiring lock "refresh_cache-21c2bba1-5482-496a-9e2a-f123a94ed48a" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1821.606924] env[63279]: DEBUG oslo_concurrency.lockutils [req-c8ba2a16-d7ba-46fd-8c02-f3abe0b6b11e req-d9d3e7aa-b9b5-402d-ac46-f3b81a16045c service nova] Acquired lock "refresh_cache-21c2bba1-5482-496a-9e2a-f123a94ed48a" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1821.607265] env[63279]: DEBUG nova.network.neutron [req-c8ba2a16-d7ba-46fd-8c02-f3abe0b6b11e req-d9d3e7aa-b9b5-402d-ac46-f3b81a16045c service nova] [instance: 21c2bba1-5482-496a-9e2a-f123a94ed48a] Refreshing network info cache for port c3f2f036-d3a2-4b32-b33a-516605f2a1b1 {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1821.616180] env[63279]: DEBUG nova.network.neutron [None req-aeaa6e99-ad33-4acc-9a18-53a9725a9078 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] [instance: 5656c853-ac83-47be-83c4-979a9e87ab91] Instance cache missing network info. 
{{(pid=63279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1821.675617] env[63279]: DEBUG nova.scheduler.client.report [None req-11810f0f-6d8f-490b-a1f5-ddff9fda61b5 tempest-ServerDiagnosticsV248Test-703192613 tempest-ServerDiagnosticsV248Test-703192613-project-member] Updated inventory for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 with generation 25 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1821.675889] env[63279]: DEBUG nova.compute.provider_tree [None req-11810f0f-6d8f-490b-a1f5-ddff9fda61b5 tempest-ServerDiagnosticsV248Test-703192613 tempest-ServerDiagnosticsV248Test-703192613-project-member] Updating resource provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 generation from 25 to 26 during operation: update_inventory {{(pid=63279) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1821.677022] env[63279]: DEBUG nova.compute.provider_tree [None req-11810f0f-6d8f-490b-a1f5-ddff9fda61b5 tempest-ServerDiagnosticsV248Test-703192613 tempest-ServerDiagnosticsV248Test-703192613-project-member] Updating inventory in ProviderTree for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1821.685813] env[63279]: DEBUG oslo_vmware.api [None req-62274c4c-fa27-46a5-a0f4-00e5f6b30c55 tempest-ServerDiagnosticsNegativeTest-1669012955 tempest-ServerDiagnosticsNegativeTest-1669012955-project-member] Task: {'id': task-2086724, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.607558} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1821.686102] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-62274c4c-fa27-46a5-a0f4-00e5f6b30c55 tempest-ServerDiagnosticsNegativeTest-1669012955 tempest-ServerDiagnosticsNegativeTest-1669012955-project-member] Deleted the datastore file {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1821.686316] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-62274c4c-fa27-46a5-a0f4-00e5f6b30c55 tempest-ServerDiagnosticsNegativeTest-1669012955 tempest-ServerDiagnosticsNegativeTest-1669012955-project-member] [instance: 4063d5e0-1144-40fa-8ed8-efda16730617] Deleted contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1821.686503] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-62274c4c-fa27-46a5-a0f4-00e5f6b30c55 tempest-ServerDiagnosticsNegativeTest-1669012955 tempest-ServerDiagnosticsNegativeTest-1669012955-project-member] [instance: 4063d5e0-1144-40fa-8ed8-efda16730617] Instance destroyed {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1821.686696] env[63279]: INFO nova.compute.manager [None req-62274c4c-fa27-46a5-a0f4-00e5f6b30c55 tempest-ServerDiagnosticsNegativeTest-1669012955 tempest-ServerDiagnosticsNegativeTest-1669012955-project-member] [instance: 4063d5e0-1144-40fa-8ed8-efda16730617] Took 1.68 seconds to destroy the instance on the hypervisor. [ 1821.686964] env[63279]: DEBUG oslo.service.loopingcall [None req-62274c4c-fa27-46a5-a0f4-00e5f6b30c55 tempest-ServerDiagnosticsNegativeTest-1669012955 tempest-ServerDiagnosticsNegativeTest-1669012955-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1821.687177] env[63279]: DEBUG nova.compute.manager [-] [instance: 4063d5e0-1144-40fa-8ed8-efda16730617] Deallocating network for instance {{(pid=63279) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1821.687272] env[63279]: DEBUG nova.network.neutron [-] [instance: 4063d5e0-1144-40fa-8ed8-efda16730617] deallocate_for_instance() {{(pid=63279) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1821.761274] env[63279]: DEBUG oslo_concurrency.lockutils [None req-271b2613-9c7a-40b6-867e-c4c5c1580416 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1821.862132] env[63279]: DEBUG oslo_vmware.api [None req-96a3c836-4e66-45c5-95bd-3126c6132246 tempest-ServerExternalEventsTest-2060906853 tempest-ServerExternalEventsTest-2060906853-project-member] Task: {'id': task-2086729, 'name': ReconfigVM_Task, 'duration_secs': 0.421388} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1821.862520] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-96a3c836-4e66-45c5-95bd-3126c6132246 tempest-ServerExternalEventsTest-2060906853 tempest-ServerExternalEventsTest-2060906853-project-member] [instance: 24bce28c-fc43-4f17-9800-4d980f6729bc] Reconfigured VM instance instance-00000007 to attach disk [datastore1] 24bce28c-fc43-4f17-9800-4d980f6729bc/24bce28c-fc43-4f17-9800-4d980f6729bc.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1821.863329] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ec0abe6e-de6a-4ead-9248-89de50e83823 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1821.871589] env[63279]: DEBUG oslo_vmware.api [None req-96a3c836-4e66-45c5-95bd-3126c6132246 tempest-ServerExternalEventsTest-2060906853 tempest-ServerExternalEventsTest-2060906853-project-member] Waiting for the task: (returnval){ [ 1821.871589] env[63279]: value = "task-2086730" [ 1821.871589] env[63279]: _type = "Task" [ 1821.871589] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1821.881771] env[63279]: DEBUG oslo_vmware.api [None req-96a3c836-4e66-45c5-95bd-3126c6132246 tempest-ServerExternalEventsTest-2060906853 tempest-ServerExternalEventsTest-2060906853-project-member] Task: {'id': task-2086730, 'name': Rename_Task} progress is 5%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1821.953166] env[63279]: DEBUG oslo_vmware.api [None req-8c276986-9767-495b-b2d0-91b2bfe4ca4e tempest-ServerPasswordTestJSON-1558733645 tempest-ServerPasswordTestJSON-1558733645-project-member] Task: {'id': task-2086722, 'name': PowerOnVM_Task, 'duration_secs': 1.337302} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1821.953473] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-8c276986-9767-495b-b2d0-91b2bfe4ca4e tempest-ServerPasswordTestJSON-1558733645 tempest-ServerPasswordTestJSON-1558733645-project-member] [instance: 21c2bba1-5482-496a-9e2a-f123a94ed48a] Powered on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1821.954119] env[63279]: INFO nova.compute.manager [None req-8c276986-9767-495b-b2d0-91b2bfe4ca4e tempest-ServerPasswordTestJSON-1558733645 tempest-ServerPasswordTestJSON-1558733645-project-member] [instance: 21c2bba1-5482-496a-9e2a-f123a94ed48a] Took 13.77 seconds to spawn the instance on the hypervisor. 
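The records above repeat the same vSphere task pattern: a task is submitted (CopyVirtualDisk_Task, DeleteDatastoreFile_Task, ExtendVirtualDisk_Task, ReconfigVM_Task, PowerOnVM_Task), the API layer logs "Waiting for the task ... to complete", and the task is then polled until it reports "completed successfully" together with a duration_secs. The sketch below illustrates that loop only; it is not oslo.vmware's actual wait_for_task, and get_task_info is a hypothetical callable assumed to return (state, progress, error) for a task id.

import time

def wait_for_task(task_id, get_task_info, poll_interval=0.5, timeout=300.0):
    # Poll a submitted task until it succeeds, fails, or times out.
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        state, progress, error = get_task_info(task_id)
        print(f"Task {task_id}: state={state}, progress={progress}%")
        if state == "success":
            return
        if state == "error":
            raise RuntimeError(f"Task {task_id} failed: {error}")
        time.sleep(poll_interval)  # still queued or running, keep polling
    raise TimeoutError(f"Task {task_id} did not complete within {timeout}s")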
[ 1821.954119] env[63279]: DEBUG nova.compute.manager [None req-8c276986-9767-495b-b2d0-91b2bfe4ca4e tempest-ServerPasswordTestJSON-1558733645 tempest-ServerPasswordTestJSON-1558733645-project-member] [instance: 21c2bba1-5482-496a-9e2a-f123a94ed48a] Checking state {{(pid=63279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1821.954847] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06a28d89-cd45-4050-9032-07b01c460d35 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1822.016310] env[63279]: DEBUG nova.network.neutron [None req-e5b6a16c-1913-4848-8b42-b00fb552edc7 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] [instance: 4a9088e0-2992-4b18-8be9-6bc70633369b] Successfully updated port: 63a3f09b-45d0-46eb-a197-186f1ec415d6 {{(pid=63279) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1822.042507] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2086728, 'name': CreateVM_Task} progress is 99%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1822.119201] env[63279]: DEBUG nova.network.neutron [None req-aeaa6e99-ad33-4acc-9a18-53a9725a9078 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] [instance: 5656c853-ac83-47be-83c4-979a9e87ab91] Updating instance_info_cache with network_info: [{"id": "2cc4a33a-bd88-4aec-a588-7c821bebf971", "address": "fa:16:3e:de:77:2e", "network": {"id": "1e875730-8e0e-4907-a6d2-776025ed7ab9", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "a935e86eee0a4c38adfe0367d2097a61", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "32028d02-abaa-4071-bc65-1460f5c772a8", "external-id": "nsx-vlan-transportzone-558", "segmentation_id": 558, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2cc4a33a-bd", "ovs_interfaceid": "2cc4a33a-bd88-4aec-a588-7c821bebf971", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1822.185653] env[63279]: DEBUG oslo_concurrency.lockutils [None req-11810f0f-6d8f-490b-a1f5-ddff9fda61b5 tempest-ServerDiagnosticsV248Test-703192613 tempest-ServerDiagnosticsV248Test-703192613-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.646s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1822.186245] env[63279]: DEBUG nova.compute.manager [None req-11810f0f-6d8f-490b-a1f5-ddff9fda61b5 tempest-ServerDiagnosticsV248Test-703192613 tempest-ServerDiagnosticsV248Test-703192613-project-member] [instance: abd63285-ee3c-4546-b86d-6d4388765d94] Start building networks asynchronously for instance. 
{{(pid=63279) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1822.193024] env[63279]: DEBUG oslo_concurrency.lockutils [None req-a7076d91-54cd-478d-8de7-9cdce51faa84 tempest-AttachInterfacesUnderV243Test-18748603 tempest-AttachInterfacesUnderV243Test-18748603-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 6.854s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1822.193024] env[63279]: INFO nova.compute.claims [None req-a7076d91-54cd-478d-8de7-9cdce51faa84 tempest-AttachInterfacesUnderV243Test-18748603 tempest-AttachInterfacesUnderV243Test-18748603-project-member] [instance: fcc5a636-554f-424e-a604-a8e7bd7cf574] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1822.383911] env[63279]: DEBUG oslo_vmware.api [None req-96a3c836-4e66-45c5-95bd-3126c6132246 tempest-ServerExternalEventsTest-2060906853 tempest-ServerExternalEventsTest-2060906853-project-member] Task: {'id': task-2086730, 'name': Rename_Task, 'duration_secs': 0.151907} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1822.385345] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-96a3c836-4e66-45c5-95bd-3126c6132246 tempest-ServerExternalEventsTest-2060906853 tempest-ServerExternalEventsTest-2060906853-project-member] [instance: 24bce28c-fc43-4f17-9800-4d980f6729bc] Powering on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1822.385669] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2d99244c-02af-4d7e-9841-8046ba817277 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1822.392959] env[63279]: DEBUG oslo_vmware.api [None req-96a3c836-4e66-45c5-95bd-3126c6132246 tempest-ServerExternalEventsTest-2060906853 tempest-ServerExternalEventsTest-2060906853-project-member] Waiting for the task: (returnval){ [ 1822.392959] env[63279]: value = "task-2086731" [ 1822.392959] env[63279]: _type = "Task" [ 1822.392959] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1822.402860] env[63279]: DEBUG oslo_vmware.api [None req-96a3c836-4e66-45c5-95bd-3126c6132246 tempest-ServerExternalEventsTest-2060906853 tempest-ServerExternalEventsTest-2060906853-project-member] Task: {'id': task-2086731, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1822.475916] env[63279]: INFO nova.compute.manager [None req-8c276986-9767-495b-b2d0-91b2bfe4ca4e tempest-ServerPasswordTestJSON-1558733645 tempest-ServerPasswordTestJSON-1558733645-project-member] [instance: 21c2bba1-5482-496a-9e2a-f123a94ed48a] Took 27.94 seconds to build instance. 
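The 409 at the top of this section ("resource provider generation conflict", code placement.concurrent_update) and the later generation bump from 25 to 26 during update_inventory illustrate Placement's optimistic concurrency: every inventory write carries the provider generation the caller last saw, a stale generation is rejected with 409, and the caller refreshes its view and retries. A minimal sketch of that retry shape follows; get_provider and put_inventories are hypothetical stand-ins for the Placement GET/PUT calls, not the report client's real methods.

class GenerationConflict(Exception):
    # Assumed to be raised by put_inventories() on an HTTP 409 generation conflict.
    pass

def set_inventory(rp_uuid, inventories, get_provider, put_inventories, retries=3):
    for _ in range(retries):
        generation = get_provider(rp_uuid)["generation"]  # refresh our view of the provider
        try:
            # Conditional write: only applied if `generation` is still current.
            return put_inventories(rp_uuid, {
                "resource_provider_generation": generation,
                "inventories": inventories,
            })
        except GenerationConflict:
            continue  # another writer bumped the generation; refresh and retry
    raise RuntimeError(f"could not update inventory for {rp_uuid}")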
[ 1822.516489] env[63279]: DEBUG oslo_concurrency.lockutils [None req-e5b6a16c-1913-4848-8b42-b00fb552edc7 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Acquiring lock "refresh_cache-4a9088e0-2992-4b18-8be9-6bc70633369b" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1822.518665] env[63279]: DEBUG oslo_concurrency.lockutils [None req-e5b6a16c-1913-4848-8b42-b00fb552edc7 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Acquired lock "refresh_cache-4a9088e0-2992-4b18-8be9-6bc70633369b" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1822.518665] env[63279]: DEBUG nova.network.neutron [None req-e5b6a16c-1913-4848-8b42-b00fb552edc7 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] [instance: 4a9088e0-2992-4b18-8be9-6bc70633369b] Building network info cache for instance {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1822.543397] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2086728, 'name': CreateVM_Task} progress is 99%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1822.625807] env[63279]: DEBUG oslo_concurrency.lockutils [None req-aeaa6e99-ad33-4acc-9a18-53a9725a9078 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Releasing lock "refresh_cache-5656c853-ac83-47be-83c4-979a9e87ab91" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1822.625807] env[63279]: DEBUG nova.compute.manager [None req-aeaa6e99-ad33-4acc-9a18-53a9725a9078 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] [instance: 5656c853-ac83-47be-83c4-979a9e87ab91] Instance network_info: |[{"id": "2cc4a33a-bd88-4aec-a588-7c821bebf971", "address": "fa:16:3e:de:77:2e", "network": {"id": "1e875730-8e0e-4907-a6d2-776025ed7ab9", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "a935e86eee0a4c38adfe0367d2097a61", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "32028d02-abaa-4071-bc65-1460f5c772a8", "external-id": "nsx-vlan-transportzone-558", "segmentation_id": 558, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2cc4a33a-bd", "ovs_interfaceid": "2cc4a33a-bd88-4aec-a588-7c821bebf971", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1822.626131] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-aeaa6e99-ad33-4acc-9a18-53a9725a9078 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] [instance: 5656c853-ac83-47be-83c4-979a9e87ab91] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:de:77:2e', 'network_ref': 
{'type': 'OpaqueNetwork', 'network-id': '32028d02-abaa-4071-bc65-1460f5c772a8', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '2cc4a33a-bd88-4aec-a588-7c821bebf971', 'vif_model': 'vmxnet3'}] {{(pid=63279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1822.634902] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-aeaa6e99-ad33-4acc-9a18-53a9725a9078 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Creating folder: Project (0b41258d6a444b558b3f3256f2f7d6eb). Parent ref: group-v427491. {{(pid=63279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1822.635244] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-1dd658af-5d60-4335-896d-fd2733a57701 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1822.646959] env[63279]: INFO nova.virt.vmwareapi.vm_util [None req-aeaa6e99-ad33-4acc-9a18-53a9725a9078 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Created folder: Project (0b41258d6a444b558b3f3256f2f7d6eb) in parent group-v427491. [ 1822.647172] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-aeaa6e99-ad33-4acc-9a18-53a9725a9078 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Creating folder: Instances. Parent ref: group-v427516. {{(pid=63279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1822.647415] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-2c6442cb-38e5-47f8-b560-e53adbefc313 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1822.656897] env[63279]: INFO nova.virt.vmwareapi.vm_util [None req-aeaa6e99-ad33-4acc-9a18-53a9725a9078 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Created folder: Instances in parent group-v427516. [ 1822.658251] env[63279]: DEBUG oslo.service.loopingcall [None req-aeaa6e99-ad33-4acc-9a18-53a9725a9078 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1822.658251] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5656c853-ac83-47be-83c4-979a9e87ab91] Creating VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1822.658251] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5840a3c1-c2b0-4fc6-92d5-603ea3c6d399 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1822.681626] env[63279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1822.681626] env[63279]: value = "task-2086734" [ 1822.681626] env[63279]: _type = "Task" [ 1822.681626] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1822.689941] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2086734, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1822.697694] env[63279]: DEBUG nova.compute.utils [None req-11810f0f-6d8f-490b-a1f5-ddff9fda61b5 tempest-ServerDiagnosticsV248Test-703192613 tempest-ServerDiagnosticsV248Test-703192613-project-member] Using /dev/sd instead of None {{(pid=63279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1822.704188] env[63279]: DEBUG nova.compute.manager [None req-11810f0f-6d8f-490b-a1f5-ddff9fda61b5 tempest-ServerDiagnosticsV248Test-703192613 tempest-ServerDiagnosticsV248Test-703192613-project-member] [instance: abd63285-ee3c-4546-b86d-6d4388765d94] Not allocating networking since 'none' was specified. {{(pid=63279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 1822.724022] env[63279]: DEBUG nova.compute.manager [req-372bb5e2-2b9a-43ef-8f00-15c88ba9729f req-a8861263-a3bc-41b3-b857-148b3a7770f6 service nova] [instance: 24bce28c-fc43-4f17-9800-4d980f6729bc] Received event network-changed-c3997b27-53bf-4f2d-828a-a55447612331 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 1822.724253] env[63279]: DEBUG nova.compute.manager [req-372bb5e2-2b9a-43ef-8f00-15c88ba9729f req-a8861263-a3bc-41b3-b857-148b3a7770f6 service nova] [instance: 24bce28c-fc43-4f17-9800-4d980f6729bc] Refreshing instance network info cache due to event network-changed-c3997b27-53bf-4f2d-828a-a55447612331. {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11655}} [ 1822.724466] env[63279]: DEBUG oslo_concurrency.lockutils [req-372bb5e2-2b9a-43ef-8f00-15c88ba9729f req-a8861263-a3bc-41b3-b857-148b3a7770f6 service nova] Acquiring lock "refresh_cache-24bce28c-fc43-4f17-9800-4d980f6729bc" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1822.724614] env[63279]: DEBUG oslo_concurrency.lockutils [req-372bb5e2-2b9a-43ef-8f00-15c88ba9729f req-a8861263-a3bc-41b3-b857-148b3a7770f6 service nova] Acquired lock "refresh_cache-24bce28c-fc43-4f17-9800-4d980f6729bc" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1822.724933] env[63279]: DEBUG nova.network.neutron [req-372bb5e2-2b9a-43ef-8f00-15c88ba9729f req-a8861263-a3bc-41b3-b857-148b3a7770f6 service nova] [instance: 24bce28c-fc43-4f17-9800-4d980f6729bc] Refreshing network info cache for port c3997b27-53bf-4f2d-828a-a55447612331 {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1822.769538] env[63279]: DEBUG oslo_concurrency.lockutils [None req-1366226e-68a1-4af4-bafa-76a748070601 tempest-ServersAdminNegativeTestJSON-968304097 tempest-ServersAdminNegativeTestJSON-968304097-project-member] Acquiring lock "32e84715-0345-4171-abb7-c034a501347e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1822.770009] env[63279]: DEBUG oslo_concurrency.lockutils [None req-1366226e-68a1-4af4-bafa-76a748070601 tempest-ServersAdminNegativeTestJSON-968304097 tempest-ServersAdminNegativeTestJSON-968304097-project-member] Lock "32e84715-0345-4171-abb7-c034a501347e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1822.903256] 
env[63279]: DEBUG oslo_vmware.api [None req-96a3c836-4e66-45c5-95bd-3126c6132246 tempest-ServerExternalEventsTest-2060906853 tempest-ServerExternalEventsTest-2060906853-project-member] Task: {'id': task-2086731, 'name': PowerOnVM_Task} progress is 88%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1822.978589] env[63279]: DEBUG oslo_concurrency.lockutils [None req-8c276986-9767-495b-b2d0-91b2bfe4ca4e tempest-ServerPasswordTestJSON-1558733645 tempest-ServerPasswordTestJSON-1558733645-project-member] Lock "21c2bba1-5482-496a-9e2a-f123a94ed48a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 29.455s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1823.035644] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2086728, 'name': CreateVM_Task, 'duration_secs': 1.549924} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1823.035830] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6388f912-ae70-4e8f-b8e4-ceb02e0f8a51] Created VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1823.036096] env[63279]: DEBUG nova.network.neutron [-] [instance: 4063d5e0-1144-40fa-8ed8-efda16730617] Updating instance_info_cache with network_info: [] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1823.037659] env[63279]: DEBUG oslo_concurrency.lockutils [None req-c56e5cd3-c5d3-497a-94cf-fa4a1928fd50 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1823.037991] env[63279]: DEBUG oslo_concurrency.lockutils [None req-c56e5cd3-c5d3-497a-94cf-fa4a1928fd50 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1823.038169] env[63279]: DEBUG oslo_concurrency.lockutils [None req-c56e5cd3-c5d3-497a-94cf-fa4a1928fd50 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1823.038655] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9691f924-9197-4a15-b650-1697c9bc4406 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1823.044910] env[63279]: DEBUG oslo_vmware.api [None req-c56e5cd3-c5d3-497a-94cf-fa4a1928fd50 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Waiting for the task: (returnval){ [ 1823.044910] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52775be9-df41-897c-a30b-d2d5daad502c" [ 1823.044910] env[63279]: _type = "Task" [ 1823.044910] env[63279]: } to complete. 
{{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1823.052561] env[63279]: DEBUG oslo_vmware.api [None req-c56e5cd3-c5d3-497a-94cf-fa4a1928fd50 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52775be9-df41-897c-a30b-d2d5daad502c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1823.068998] env[63279]: DEBUG nova.network.neutron [None req-e5b6a16c-1913-4848-8b42-b00fb552edc7 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] [instance: 4a9088e0-2992-4b18-8be9-6bc70633369b] Instance cache missing network info. {{(pid=63279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1823.078394] env[63279]: DEBUG nova.network.neutron [req-c8ba2a16-d7ba-46fd-8c02-f3abe0b6b11e req-d9d3e7aa-b9b5-402d-ac46-f3b81a16045c service nova] [instance: 21c2bba1-5482-496a-9e2a-f123a94ed48a] Updated VIF entry in instance network info cache for port c3f2f036-d3a2-4b32-b33a-516605f2a1b1. {{(pid=63279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1823.080757] env[63279]: DEBUG nova.network.neutron [req-c8ba2a16-d7ba-46fd-8c02-f3abe0b6b11e req-d9d3e7aa-b9b5-402d-ac46-f3b81a16045c service nova] [instance: 21c2bba1-5482-496a-9e2a-f123a94ed48a] Updating instance_info_cache with network_info: [{"id": "c3f2f036-d3a2-4b32-b33a-516605f2a1b1", "address": "fa:16:3e:a0:0e:b7", "network": {"id": "aad6915d-e456-4136-9ba0-33423379f7e8", "bridge": "br-int", "label": "tempest-ServerPasswordTestJSON-781920893-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "abb95b37d6914c4f8624f2f924a82f3c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f2c424c9-6446-4b2a-af8c-4d9c29117c39", "external-id": "nsx-vlan-transportzone-437", "segmentation_id": 437, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc3f2f036-d3", "ovs_interfaceid": "c3f2f036-d3a2-4b32-b33a-516605f2a1b1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1823.191916] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2086734, 'name': CreateVM_Task} progress is 99%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1823.204814] env[63279]: DEBUG nova.compute.manager [None req-11810f0f-6d8f-490b-a1f5-ddff9fda61b5 tempest-ServerDiagnosticsV248Test-703192613 tempest-ServerDiagnosticsV248Test-703192613-project-member] [instance: abd63285-ee3c-4546-b86d-6d4388765d94] Start building block device mappings for instance. 
{{(pid=63279) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1823.332124] env[63279]: DEBUG nova.network.neutron [None req-e5b6a16c-1913-4848-8b42-b00fb552edc7 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] [instance: 4a9088e0-2992-4b18-8be9-6bc70633369b] Updating instance_info_cache with network_info: [{"id": "63a3f09b-45d0-46eb-a197-186f1ec415d6", "address": "fa:16:3e:71:bc:13", "network": {"id": "548d80cd-fb6c-47fc-8c1d-036889987399", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-219167599-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3f6c6f65521a440fb80278bbff2d0ed0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "678ebbe4-4c53-4eaf-a689-93981310f37d", "external-id": "nsx-vlan-transportzone-443", "segmentation_id": 443, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap63a3f09b-45", "ovs_interfaceid": "63a3f09b-45d0-46eb-a197-186f1ec415d6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1823.407170] env[63279]: DEBUG oslo_vmware.api [None req-96a3c836-4e66-45c5-95bd-3126c6132246 tempest-ServerExternalEventsTest-2060906853 tempest-ServerExternalEventsTest-2060906853-project-member] Task: {'id': task-2086731, 'name': PowerOnVM_Task, 'duration_secs': 0.780248} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1823.410829] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-96a3c836-4e66-45c5-95bd-3126c6132246 tempest-ServerExternalEventsTest-2060906853 tempest-ServerExternalEventsTest-2060906853-project-member] [instance: 24bce28c-fc43-4f17-9800-4d980f6729bc] Powered on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1823.411106] env[63279]: INFO nova.compute.manager [None req-96a3c836-4e66-45c5-95bd-3126c6132246 tempest-ServerExternalEventsTest-2060906853 tempest-ServerExternalEventsTest-2060906853-project-member] [instance: 24bce28c-fc43-4f17-9800-4d980f6729bc] Took 12.82 seconds to spawn the instance on the hypervisor. 
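Several records in this section trace the image-cache flow for a root disk: a lock on "[datastore1] devstack-image-cache_base/<image-id>" is taken, SearchDatastore_Task checks whether the cached VMDK exists, and the base image is then copied into the instance directory (for example the 24bce28c-fc43-4f17-9800-4d980f6729bc root disk) before being extended. The sketch below is a local-filesystem analogy of that pattern under a per-image lock, not the datastore-backed code in nova.virt.vmwareapi; fetch_image is a hypothetical downloader.

import os
import shutil
import threading
from collections import defaultdict

_image_locks = defaultdict(threading.Lock)  # one lock per cached base image

def ensure_root_disk(image_id, instance_uuid, fetch_image, cache_dir, instance_dir):
    # Copy a cached base image into the instance directory, fetching it once if missing.
    cached = os.path.join(cache_dir, image_id, image_id + ".vmdk")
    with _image_locks[image_id]:        # serialize cache population per image
        if not os.path.exists(cached):  # roughly what SearchDatastore_Task decides
            os.makedirs(os.path.dirname(cached), exist_ok=True)
            fetch_image(image_id, cached)
    os.makedirs(instance_dir, exist_ok=True)
    root = os.path.join(instance_dir, instance_uuid + ".vmdk")
    shutil.copyfile(cached, root)       # roughly CopyVirtualDisk_Task
    return root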
[ 1823.411545] env[63279]: DEBUG nova.compute.manager [None req-96a3c836-4e66-45c5-95bd-3126c6132246 tempest-ServerExternalEventsTest-2060906853 tempest-ServerExternalEventsTest-2060906853-project-member] [instance: 24bce28c-fc43-4f17-9800-4d980f6729bc] Checking state {{(pid=63279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1823.412346] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a55aa02-f2ec-423d-87db-7bc7288bc7fd {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1823.484719] env[63279]: DEBUG nova.compute.manager [None req-af15306e-e4bd-4235-87e6-bad065fff03b tempest-VolumesAssistedSnapshotsTest-1202308672 tempest-VolumesAssistedSnapshotsTest-1202308672-project-member] [instance: 5d4909ea-396c-45ba-9ff5-acb8576150b3] Starting instance... {{(pid=63279) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1823.539771] env[63279]: INFO nova.compute.manager [-] [instance: 4063d5e0-1144-40fa-8ed8-efda16730617] Took 1.85 seconds to deallocate network for instance. [ 1823.565142] env[63279]: DEBUG oslo_vmware.api [None req-c56e5cd3-c5d3-497a-94cf-fa4a1928fd50 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52775be9-df41-897c-a30b-d2d5daad502c, 'name': SearchDatastore_Task, 'duration_secs': 0.009411} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1823.565142] env[63279]: DEBUG oslo_concurrency.lockutils [None req-c56e5cd3-c5d3-497a-94cf-fa4a1928fd50 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1823.565142] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-c56e5cd3-c5d3-497a-94cf-fa4a1928fd50 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] [instance: 6388f912-ae70-4e8f-b8e4-ceb02e0f8a51] Processing image 30887889-e45b-4f67-8b3c-16216e594a90 {{(pid=63279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1823.565142] env[63279]: DEBUG oslo_concurrency.lockutils [None req-c56e5cd3-c5d3-497a-94cf-fa4a1928fd50 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1823.565323] env[63279]: DEBUG oslo_concurrency.lockutils [None req-c56e5cd3-c5d3-497a-94cf-fa4a1928fd50 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1823.565323] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-c56e5cd3-c5d3-497a-94cf-fa4a1928fd50 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Creating directory with path [datastore1] 
devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1823.565323] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-aab5eec7-cbd7-42c8-8e29-6d4592d4634f {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1823.577995] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-c56e5cd3-c5d3-497a-94cf-fa4a1928fd50 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1823.578286] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-c56e5cd3-c5d3-497a-94cf-fa4a1928fd50 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1823.579228] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-834e5014-8e8c-4867-90ac-2b62b28d74ff {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1823.583559] env[63279]: DEBUG oslo_concurrency.lockutils [req-c8ba2a16-d7ba-46fd-8c02-f3abe0b6b11e req-d9d3e7aa-b9b5-402d-ac46-f3b81a16045c service nova] Releasing lock "refresh_cache-21c2bba1-5482-496a-9e2a-f123a94ed48a" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1823.583559] env[63279]: DEBUG nova.compute.manager [req-c8ba2a16-d7ba-46fd-8c02-f3abe0b6b11e req-d9d3e7aa-b9b5-402d-ac46-f3b81a16045c service nova] [instance: 6388f912-ae70-4e8f-b8e4-ceb02e0f8a51] Received event network-vif-plugged-bc9f3899-95c1-4e79-b121-03c9a2c0bc44 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 1823.583559] env[63279]: DEBUG oslo_concurrency.lockutils [req-c8ba2a16-d7ba-46fd-8c02-f3abe0b6b11e req-d9d3e7aa-b9b5-402d-ac46-f3b81a16045c service nova] Acquiring lock "6388f912-ae70-4e8f-b8e4-ceb02e0f8a51-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1823.583559] env[63279]: DEBUG oslo_concurrency.lockutils [req-c8ba2a16-d7ba-46fd-8c02-f3abe0b6b11e req-d9d3e7aa-b9b5-402d-ac46-f3b81a16045c service nova] Lock "6388f912-ae70-4e8f-b8e4-ceb02e0f8a51-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1823.583559] env[63279]: DEBUG oslo_concurrency.lockutils [req-c8ba2a16-d7ba-46fd-8c02-f3abe0b6b11e req-d9d3e7aa-b9b5-402d-ac46-f3b81a16045c service nova] Lock "6388f912-ae70-4e8f-b8e4-ceb02e0f8a51-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1823.583836] env[63279]: DEBUG nova.compute.manager [req-c8ba2a16-d7ba-46fd-8c02-f3abe0b6b11e req-d9d3e7aa-b9b5-402d-ac46-f3b81a16045c service nova] [instance: 6388f912-ae70-4e8f-b8e4-ceb02e0f8a51] No waiting events found dispatching network-vif-plugged-bc9f3899-95c1-4e79-b121-03c9a2c0bc44 {{(pid=63279) 
pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1823.583836] env[63279]: WARNING nova.compute.manager [req-c8ba2a16-d7ba-46fd-8c02-f3abe0b6b11e req-d9d3e7aa-b9b5-402d-ac46-f3b81a16045c service nova] [instance: 6388f912-ae70-4e8f-b8e4-ceb02e0f8a51] Received unexpected event network-vif-plugged-bc9f3899-95c1-4e79-b121-03c9a2c0bc44 for instance with vm_state building and task_state spawning. [ 1823.589400] env[63279]: DEBUG oslo_vmware.api [None req-c56e5cd3-c5d3-497a-94cf-fa4a1928fd50 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Waiting for the task: (returnval){ [ 1823.589400] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]5264824e-ef36-1fd8-53ca-0fbbc015cc95" [ 1823.589400] env[63279]: _type = "Task" [ 1823.589400] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1823.597789] env[63279]: DEBUG oslo_vmware.api [None req-c56e5cd3-c5d3-497a-94cf-fa4a1928fd50 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]5264824e-ef36-1fd8-53ca-0fbbc015cc95, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1823.650037] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc0b7cd0-fddb-4433-9950-3fdd6bc0cfb8 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1823.657862] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-102e7e4d-515c-44c6-bbd2-ed47d3dd0cec {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1823.698032] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1078df97-1b39-4eef-8ea4-47d002a5b028 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1823.709481] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2086734, 'name': CreateVM_Task, 'duration_secs': 0.546271} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1823.711254] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c075f736-c6bf-4442-88cc-90be1a343b0e {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1823.715765] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5656c853-ac83-47be-83c4-979a9e87ab91] Created VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1823.723360] env[63279]: DEBUG oslo_concurrency.lockutils [None req-aeaa6e99-ad33-4acc-9a18-53a9725a9078 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1823.723360] env[63279]: DEBUG oslo_concurrency.lockutils [None req-aeaa6e99-ad33-4acc-9a18-53a9725a9078 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1823.723360] env[63279]: DEBUG oslo_concurrency.lockutils [None req-aeaa6e99-ad33-4acc-9a18-53a9725a9078 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1823.724620] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-662aff7e-367c-45c6-9845-7a5931576eba {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1823.738158] env[63279]: DEBUG nova.compute.provider_tree [None req-a7076d91-54cd-478d-8de7-9cdce51faa84 tempest-AttachInterfacesUnderV243Test-18748603 tempest-AttachInterfacesUnderV243Test-18748603-project-member] Inventory has not changed in ProviderTree for provider: 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1823.742910] env[63279]: DEBUG oslo_vmware.api [None req-aeaa6e99-ad33-4acc-9a18-53a9725a9078 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Waiting for the task: (returnval){ [ 1823.742910] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52abca0e-6290-d4f2-e937-9153a2706543" [ 1823.742910] env[63279]: _type = "Task" [ 1823.742910] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1823.750930] env[63279]: DEBUG oslo_vmware.api [None req-aeaa6e99-ad33-4acc-9a18-53a9725a9078 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52abca0e-6290-d4f2-e937-9153a2706543, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1823.808159] env[63279]: DEBUG oslo_concurrency.lockutils [None req-d9e47f76-071e-40ea-b4aa-ee775cfca50d tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Acquiring lock "5d4be656-defe-4332-b97e-e88b107ca4a1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1823.808382] env[63279]: DEBUG oslo_concurrency.lockutils [None req-d9e47f76-071e-40ea-b4aa-ee775cfca50d tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Lock "5d4be656-defe-4332-b97e-e88b107ca4a1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1823.832439] env[63279]: DEBUG oslo_concurrency.lockutils [None req-fb63d81c-1d8d-4e24-b42b-e37e673a6c54 tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] Acquiring lock "357f08c9-4de9-4b84-8384-6bf130872f40" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1823.832680] env[63279]: DEBUG oslo_concurrency.lockutils [None req-fb63d81c-1d8d-4e24-b42b-e37e673a6c54 tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] Lock "357f08c9-4de9-4b84-8384-6bf130872f40" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1823.834173] env[63279]: DEBUG oslo_concurrency.lockutils [None req-e5b6a16c-1913-4848-8b42-b00fb552edc7 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Releasing lock "refresh_cache-4a9088e0-2992-4b18-8be9-6bc70633369b" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1823.834456] env[63279]: DEBUG nova.compute.manager [None req-e5b6a16c-1913-4848-8b42-b00fb552edc7 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] [instance: 4a9088e0-2992-4b18-8be9-6bc70633369b] Instance network_info: |[{"id": "63a3f09b-45d0-46eb-a197-186f1ec415d6", "address": "fa:16:3e:71:bc:13", "network": {"id": "548d80cd-fb6c-47fc-8c1d-036889987399", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-219167599-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3f6c6f65521a440fb80278bbff2d0ed0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "678ebbe4-4c53-4eaf-a689-93981310f37d", "external-id": 
"nsx-vlan-transportzone-443", "segmentation_id": 443, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap63a3f09b-45", "ovs_interfaceid": "63a3f09b-45d0-46eb-a197-186f1ec415d6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1823.835035] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-e5b6a16c-1913-4848-8b42-b00fb552edc7 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] [instance: 4a9088e0-2992-4b18-8be9-6bc70633369b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:71:bc:13', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '678ebbe4-4c53-4eaf-a689-93981310f37d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '63a3f09b-45d0-46eb-a197-186f1ec415d6', 'vif_model': 'vmxnet3'}] {{(pid=63279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1823.842636] env[63279]: DEBUG oslo.service.loopingcall [None req-e5b6a16c-1913-4848-8b42-b00fb552edc7 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1823.843150] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4a9088e0-2992-4b18-8be9-6bc70633369b] Creating VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1823.843404] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-09830204-f82f-45ed-9763-5d814464285f {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1823.865383] env[63279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1823.865383] env[63279]: value = "task-2086735" [ 1823.865383] env[63279]: _type = "Task" [ 1823.865383] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1823.873939] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2086735, 'name': CreateVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1823.937328] env[63279]: INFO nova.compute.manager [None req-96a3c836-4e66-45c5-95bd-3126c6132246 tempest-ServerExternalEventsTest-2060906853 tempest-ServerExternalEventsTest-2060906853-project-member] [instance: 24bce28c-fc43-4f17-9800-4d980f6729bc] Took 29.27 seconds to build instance. 
[ 1824.005462] env[63279]: DEBUG oslo_concurrency.lockutils [None req-af15306e-e4bd-4235-87e6-bad065fff03b tempest-VolumesAssistedSnapshotsTest-1202308672 tempest-VolumesAssistedSnapshotsTest-1202308672-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1824.050433] env[63279]: DEBUG oslo_concurrency.lockutils [None req-62274c4c-fa27-46a5-a0f4-00e5f6b30c55 tempest-ServerDiagnosticsNegativeTest-1669012955 tempest-ServerDiagnosticsNegativeTest-1669012955-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1824.100035] env[63279]: DEBUG oslo_vmware.api [None req-c56e5cd3-c5d3-497a-94cf-fa4a1928fd50 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]5264824e-ef36-1fd8-53ca-0fbbc015cc95, 'name': SearchDatastore_Task, 'duration_secs': 0.022889} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1824.100769] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c1678707-a457-4c6d-9395-c5bb3624bacc {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1824.110362] env[63279]: DEBUG oslo_vmware.api [None req-c56e5cd3-c5d3-497a-94cf-fa4a1928fd50 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Waiting for the task: (returnval){ [ 1824.110362] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]5219efcb-7cd2-5382-bf08-2a332453c548" [ 1824.110362] env[63279]: _type = "Task" [ 1824.110362] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1824.118425] env[63279]: DEBUG oslo_vmware.api [None req-c56e5cd3-c5d3-497a-94cf-fa4a1928fd50 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]5219efcb-7cd2-5382-bf08-2a332453c548, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1824.193433] env[63279]: DEBUG nova.network.neutron [None req-9be70fed-855a-424c-af8e-36597da29526 tempest-ImagesOneServerTestJSON-859902932 tempest-ImagesOneServerTestJSON-859902932-project-member] [instance: 7cd673bb-3795-4c6f-a1e0-a8ea0da0a35f] Successfully updated port: dc489c36-2ef9-4f47-aadb-4f6503bb9d1b {{(pid=63279) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1824.226730] env[63279]: DEBUG nova.compute.manager [None req-11810f0f-6d8f-490b-a1f5-ddff9fda61b5 tempest-ServerDiagnosticsV248Test-703192613 tempest-ServerDiagnosticsV248Test-703192613-project-member] [instance: abd63285-ee3c-4546-b86d-6d4388765d94] Start spawning the instance on the hypervisor. 
{{(pid=63279) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1824.241986] env[63279]: DEBUG nova.scheduler.client.report [None req-a7076d91-54cd-478d-8de7-9cdce51faa84 tempest-AttachInterfacesUnderV243Test-18748603 tempest-AttachInterfacesUnderV243Test-18748603-project-member] Inventory has not changed for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1824.263814] env[63279]: DEBUG nova.virt.hardware [None req-11810f0f-6d8f-490b-a1f5-ddff9fda61b5 tempest-ServerDiagnosticsV248Test-703192613 tempest-ServerDiagnosticsV248Test-703192613-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-13T17:30:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-13T17:30:01Z,direct_url=,disk_format='vmdk',id=30887889-e45b-4f67-8b3c-16216e594a90,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a935e86eee0a4c38adfe0367d2097a61',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-13T17:30:02Z,virtual_size=,visibility=), allow threads: False {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1824.264461] env[63279]: DEBUG nova.virt.hardware [None req-11810f0f-6d8f-490b-a1f5-ddff9fda61b5 tempest-ServerDiagnosticsV248Test-703192613 tempest-ServerDiagnosticsV248Test-703192613-project-member] Flavor limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1824.264461] env[63279]: DEBUG nova.virt.hardware [None req-11810f0f-6d8f-490b-a1f5-ddff9fda61b5 tempest-ServerDiagnosticsV248Test-703192613 tempest-ServerDiagnosticsV248Test-703192613-project-member] Image limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1824.264461] env[63279]: DEBUG nova.virt.hardware [None req-11810f0f-6d8f-490b-a1f5-ddff9fda61b5 tempest-ServerDiagnosticsV248Test-703192613 tempest-ServerDiagnosticsV248Test-703192613-project-member] Flavor pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1824.264650] env[63279]: DEBUG nova.virt.hardware [None req-11810f0f-6d8f-490b-a1f5-ddff9fda61b5 tempest-ServerDiagnosticsV248Test-703192613 tempest-ServerDiagnosticsV248Test-703192613-project-member] Image pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1824.264710] env[63279]: DEBUG nova.virt.hardware [None req-11810f0f-6d8f-490b-a1f5-ddff9fda61b5 tempest-ServerDiagnosticsV248Test-703192613 tempest-ServerDiagnosticsV248Test-703192613-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} 
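The nova.virt.hardware lines here (and the "possible topologies" result that follows just below) pick a guest CPU topology for the one-vCPU m1.nano flavor: with no flavor or image limits set, the maxima default to 65536 per dimension, and the only valid topology is 1 socket x 1 core x 1 thread. A simplified sketch of that enumeration step, assuming the constraint is simply sockets*cores*threads == vcpus within the maxima (Nova's real code also applies preference ordering):

```python
import itertools


def possible_cpu_topologies(vcpus, max_sockets=65536, max_cores=65536,
                            max_threads=65536):
    """Enumerate (sockets, cores, threads) triples that use exactly
    `vcpus` CPUs and stay within the per-dimension maxima. Simplified
    reconstruction of the step logged by nova.virt.hardware."""
    return [
        (sockets, cores, threads)
        for sockets, cores, threads in itertools.product(
            range(1, min(vcpus, max_sockets) + 1),
            range(1, min(vcpus, max_cores) + 1),
            range(1, min(vcpus, max_threads) + 1))
        if sockets * cores * threads == vcpus
    ]


# m1.nano has vcpus=1, so only (1, 1, 1) survives -- matching the single
# VirtCPUTopology(cores=1,sockets=1,threads=1) reported just below.
print(possible_cpu_topologies(1))  # [(1, 1, 1)]
```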
[ 1824.264939] env[63279]: DEBUG nova.virt.hardware [None req-11810f0f-6d8f-490b-a1f5-ddff9fda61b5 tempest-ServerDiagnosticsV248Test-703192613 tempest-ServerDiagnosticsV248Test-703192613-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1824.265324] env[63279]: DEBUG nova.virt.hardware [None req-11810f0f-6d8f-490b-a1f5-ddff9fda61b5 tempest-ServerDiagnosticsV248Test-703192613 tempest-ServerDiagnosticsV248Test-703192613-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1824.265529] env[63279]: DEBUG nova.virt.hardware [None req-11810f0f-6d8f-490b-a1f5-ddff9fda61b5 tempest-ServerDiagnosticsV248Test-703192613 tempest-ServerDiagnosticsV248Test-703192613-project-member] Got 1 possible topologies {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1824.265728] env[63279]: DEBUG nova.virt.hardware [None req-11810f0f-6d8f-490b-a1f5-ddff9fda61b5 tempest-ServerDiagnosticsV248Test-703192613 tempest-ServerDiagnosticsV248Test-703192613-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1824.265977] env[63279]: DEBUG nova.virt.hardware [None req-11810f0f-6d8f-490b-a1f5-ddff9fda61b5 tempest-ServerDiagnosticsV248Test-703192613 tempest-ServerDiagnosticsV248Test-703192613-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1824.266843] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4c28b24-fa2c-4264-9f3c-900f9ef99b24 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1824.275313] env[63279]: DEBUG oslo_vmware.api [None req-aeaa6e99-ad33-4acc-9a18-53a9725a9078 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52abca0e-6290-d4f2-e937-9153a2706543, 'name': SearchDatastore_Task, 'duration_secs': 0.016885} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1824.276027] env[63279]: DEBUG oslo_concurrency.lockutils [None req-aeaa6e99-ad33-4acc-9a18-53a9725a9078 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1824.276288] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-aeaa6e99-ad33-4acc-9a18-53a9725a9078 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] [instance: 5656c853-ac83-47be-83c4-979a9e87ab91] Processing image 30887889-e45b-4f67-8b3c-16216e594a90 {{(pid=63279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1824.276740] env[63279]: DEBUG oslo_concurrency.lockutils [None req-aeaa6e99-ad33-4acc-9a18-53a9725a9078 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1824.281075] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c0b199c-1f18-4656-8641-f6e0eb5fd38e {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1824.299356] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-11810f0f-6d8f-490b-a1f5-ddff9fda61b5 tempest-ServerDiagnosticsV248Test-703192613 tempest-ServerDiagnosticsV248Test-703192613-project-member] [instance: abd63285-ee3c-4546-b86d-6d4388765d94] Instance VIF info [] {{(pid=63279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1824.305717] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-11810f0f-6d8f-490b-a1f5-ddff9fda61b5 tempest-ServerDiagnosticsV248Test-703192613 tempest-ServerDiagnosticsV248Test-703192613-project-member] Creating folder: Project (55786c06fb7b42eb8e68e9a333f84102). Parent ref: group-v427491. {{(pid=63279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1824.306304] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f1c6cec0-f9b8-4e91-8635-3b8c45d47c49 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1824.321263] env[63279]: INFO nova.virt.vmwareapi.vm_util [None req-11810f0f-6d8f-490b-a1f5-ddff9fda61b5 tempest-ServerDiagnosticsV248Test-703192613 tempest-ServerDiagnosticsV248Test-703192613-project-member] Created folder: Project (55786c06fb7b42eb8e68e9a333f84102) in parent group-v427491. [ 1824.321263] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-11810f0f-6d8f-490b-a1f5-ddff9fda61b5 tempest-ServerDiagnosticsV248Test-703192613 tempest-ServerDiagnosticsV248Test-703192613-project-member] Creating folder: Instances. Parent ref: group-v427520. 
{{(pid=63279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1824.321263] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5a048d26-dd01-4f34-8521-c4393185858b {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1824.332242] env[63279]: INFO nova.virt.vmwareapi.vm_util [None req-11810f0f-6d8f-490b-a1f5-ddff9fda61b5 tempest-ServerDiagnosticsV248Test-703192613 tempest-ServerDiagnosticsV248Test-703192613-project-member] Created folder: Instances in parent group-v427520. [ 1824.332552] env[63279]: DEBUG oslo.service.loopingcall [None req-11810f0f-6d8f-490b-a1f5-ddff9fda61b5 tempest-ServerDiagnosticsV248Test-703192613 tempest-ServerDiagnosticsV248Test-703192613-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1824.332824] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: abd63285-ee3c-4546-b86d-6d4388765d94] Creating VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1824.333092] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0f57421b-f01d-451b-9ab6-b790cff2133d {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1824.350602] env[63279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1824.350602] env[63279]: value = "task-2086738" [ 1824.350602] env[63279]: _type = "Task" [ 1824.350602] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1824.359449] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2086738, 'name': CreateVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1824.376536] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2086735, 'name': CreateVM_Task} progress is 99%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1824.394859] env[63279]: DEBUG nova.network.neutron [req-372bb5e2-2b9a-43ef-8f00-15c88ba9729f req-a8861263-a3bc-41b3-b857-148b3a7770f6 service nova] [instance: 24bce28c-fc43-4f17-9800-4d980f6729bc] Updated VIF entry in instance network info cache for port c3997b27-53bf-4f2d-828a-a55447612331. 
{{(pid=63279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1824.395284] env[63279]: DEBUG nova.network.neutron [req-372bb5e2-2b9a-43ef-8f00-15c88ba9729f req-a8861263-a3bc-41b3-b857-148b3a7770f6 service nova] [instance: 24bce28c-fc43-4f17-9800-4d980f6729bc] Updating instance_info_cache with network_info: [{"id": "c3997b27-53bf-4f2d-828a-a55447612331", "address": "fa:16:3e:86:fe:67", "network": {"id": "1e875730-8e0e-4907-a6d2-776025ed7ab9", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.17", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "a935e86eee0a4c38adfe0367d2097a61", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "32028d02-abaa-4071-bc65-1460f5c772a8", "external-id": "nsx-vlan-transportzone-558", "segmentation_id": 558, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc3997b27-53", "ovs_interfaceid": "c3997b27-53bf-4f2d-828a-a55447612331", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1824.434351] env[63279]: DEBUG oslo_concurrency.lockutils [None req-0b6dec82-a075-43f4-a12b-6738f27bbf78 tempest-ServerDiagnosticsTest-1267197074 tempest-ServerDiagnosticsTest-1267197074-project-member] Acquiring lock "ac1d0e8f-446a-4a6d-a916-08f52426396d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1824.434723] env[63279]: DEBUG oslo_concurrency.lockutils [None req-0b6dec82-a075-43f4-a12b-6738f27bbf78 tempest-ServerDiagnosticsTest-1267197074 tempest-ServerDiagnosticsTest-1267197074-project-member] Lock "ac1d0e8f-446a-4a6d-a916-08f52426396d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1824.435028] env[63279]: DEBUG oslo_concurrency.lockutils [None req-0b6dec82-a075-43f4-a12b-6738f27bbf78 tempest-ServerDiagnosticsTest-1267197074 tempest-ServerDiagnosticsTest-1267197074-project-member] Acquiring lock "ac1d0e8f-446a-4a6d-a916-08f52426396d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1824.435301] env[63279]: DEBUG oslo_concurrency.lockutils [None req-0b6dec82-a075-43f4-a12b-6738f27bbf78 tempest-ServerDiagnosticsTest-1267197074 tempest-ServerDiagnosticsTest-1267197074-project-member] Lock "ac1d0e8f-446a-4a6d-a916-08f52426396d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1824.435551] env[63279]: DEBUG oslo_concurrency.lockutils [None req-0b6dec82-a075-43f4-a12b-6738f27bbf78 tempest-ServerDiagnosticsTest-1267197074 
tempest-ServerDiagnosticsTest-1267197074-project-member] Lock "ac1d0e8f-446a-4a6d-a916-08f52426396d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1824.438356] env[63279]: INFO nova.compute.manager [None req-0b6dec82-a075-43f4-a12b-6738f27bbf78 tempest-ServerDiagnosticsTest-1267197074 tempest-ServerDiagnosticsTest-1267197074-project-member] [instance: ac1d0e8f-446a-4a6d-a916-08f52426396d] Terminating instance [ 1824.440506] env[63279]: DEBUG oslo_concurrency.lockutils [None req-96a3c836-4e66-45c5-95bd-3126c6132246 tempest-ServerExternalEventsTest-2060906853 tempest-ServerExternalEventsTest-2060906853-project-member] Lock "24bce28c-fc43-4f17-9800-4d980f6729bc" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 30.778s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1824.623695] env[63279]: DEBUG oslo_vmware.api [None req-c56e5cd3-c5d3-497a-94cf-fa4a1928fd50 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]5219efcb-7cd2-5382-bf08-2a332453c548, 'name': SearchDatastore_Task, 'duration_secs': 0.010023} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1824.624254] env[63279]: DEBUG oslo_concurrency.lockutils [None req-c56e5cd3-c5d3-497a-94cf-fa4a1928fd50 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1824.624254] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-c56e5cd3-c5d3-497a-94cf-fa4a1928fd50 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] 6388f912-ae70-4e8f-b8e4-ceb02e0f8a51/6388f912-ae70-4e8f-b8e4-ceb02e0f8a51.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1824.624523] env[63279]: DEBUG oslo_concurrency.lockutils [None req-aeaa6e99-ad33-4acc-9a18-53a9725a9078 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1824.624707] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-aeaa6e99-ad33-4acc-9a18-53a9725a9078 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1824.624912] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c637c519-1ada-478c-ba43-c25cc59f9998 {{(pid=63279) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1824.627720] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7c91b27c-fc36-4a69-905d-edde20176817 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1824.634890] env[63279]: DEBUG oslo_vmware.api [None req-c56e5cd3-c5d3-497a-94cf-fa4a1928fd50 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Waiting for the task: (returnval){ [ 1824.634890] env[63279]: value = "task-2086739" [ 1824.634890] env[63279]: _type = "Task" [ 1824.634890] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1824.641018] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-aeaa6e99-ad33-4acc-9a18-53a9725a9078 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1824.641398] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-aeaa6e99-ad33-4acc-9a18-53a9725a9078 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1824.644151] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-66d0b1c5-89a9-434d-9558-770fbf636f09 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1824.653815] env[63279]: DEBUG oslo_vmware.api [None req-c56e5cd3-c5d3-497a-94cf-fa4a1928fd50 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Task: {'id': task-2086739, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1824.658346] env[63279]: DEBUG oslo_vmware.api [None req-aeaa6e99-ad33-4acc-9a18-53a9725a9078 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Waiting for the task: (returnval){ [ 1824.658346] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]521283c2-87e8-9b6a-d37e-023b60706b37" [ 1824.658346] env[63279]: _type = "Task" [ 1824.658346] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1824.667358] env[63279]: DEBUG oslo_vmware.api [None req-aeaa6e99-ad33-4acc-9a18-53a9725a9078 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]521283c2-87e8-9b6a-d37e-023b60706b37, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1824.696416] env[63279]: DEBUG oslo_concurrency.lockutils [None req-9be70fed-855a-424c-af8e-36597da29526 tempest-ImagesOneServerTestJSON-859902932 tempest-ImagesOneServerTestJSON-859902932-project-member] Acquiring lock "refresh_cache-7cd673bb-3795-4c6f-a1e0-a8ea0da0a35f" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1824.696416] env[63279]: DEBUG oslo_concurrency.lockutils [None req-9be70fed-855a-424c-af8e-36597da29526 tempest-ImagesOneServerTestJSON-859902932 tempest-ImagesOneServerTestJSON-859902932-project-member] Acquired lock "refresh_cache-7cd673bb-3795-4c6f-a1e0-a8ea0da0a35f" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1824.696702] env[63279]: DEBUG nova.network.neutron [None req-9be70fed-855a-424c-af8e-36597da29526 tempest-ImagesOneServerTestJSON-859902932 tempest-ImagesOneServerTestJSON-859902932-project-member] [instance: 7cd673bb-3795-4c6f-a1e0-a8ea0da0a35f] Building network info cache for instance {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1824.753255] env[63279]: DEBUG oslo_concurrency.lockutils [None req-a7076d91-54cd-478d-8de7-9cdce51faa84 tempest-AttachInterfacesUnderV243Test-18748603 tempest-AttachInterfacesUnderV243Test-18748603-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.564s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1824.753830] env[63279]: DEBUG nova.compute.manager [None req-a7076d91-54cd-478d-8de7-9cdce51faa84 tempest-AttachInterfacesUnderV243Test-18748603 tempest-AttachInterfacesUnderV243Test-18748603-project-member] [instance: fcc5a636-554f-424e-a604-a8e7bd7cf574] Start building networks asynchronously for instance. {{(pid=63279) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1824.756971] env[63279]: DEBUG oslo_concurrency.lockutils [None req-271b2613-9c7a-40b6-867e-c4c5c1580416 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 5.159s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1824.758785] env[63279]: INFO nova.compute.claims [None req-271b2613-9c7a-40b6-867e-c4c5c1580416 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] [instance: c05e9aaa-e251-480c-94d6-56c29bb6282d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1824.860817] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2086738, 'name': CreateVM_Task, 'duration_secs': 0.321511} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1824.864021] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: abd63285-ee3c-4546-b86d-6d4388765d94] Created VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1824.864021] env[63279]: DEBUG oslo_concurrency.lockutils [None req-11810f0f-6d8f-490b-a1f5-ddff9fda61b5 tempest-ServerDiagnosticsV248Test-703192613 tempest-ServerDiagnosticsV248Test-703192613-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1824.864021] env[63279]: DEBUG oslo_concurrency.lockutils [None req-11810f0f-6d8f-490b-a1f5-ddff9fda61b5 tempest-ServerDiagnosticsV248Test-703192613 tempest-ServerDiagnosticsV248Test-703192613-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1824.864021] env[63279]: DEBUG oslo_concurrency.lockutils [None req-11810f0f-6d8f-490b-a1f5-ddff9fda61b5 tempest-ServerDiagnosticsV248Test-703192613 tempest-ServerDiagnosticsV248Test-703192613-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1824.864021] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0ebb5591-2b24-4b28-8261-b2521490cb13 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1824.870324] env[63279]: DEBUG oslo_vmware.api [None req-11810f0f-6d8f-490b-a1f5-ddff9fda61b5 tempest-ServerDiagnosticsV248Test-703192613 tempest-ServerDiagnosticsV248Test-703192613-project-member] Waiting for the task: (returnval){ [ 1824.870324] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52d3c99e-ea34-05cc-c485-8b6da7c3d468" [ 1824.870324] env[63279]: _type = "Task" [ 1824.870324] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1824.876984] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2086735, 'name': CreateVM_Task} progress is 99%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1824.884380] env[63279]: DEBUG oslo_vmware.api [None req-11810f0f-6d8f-490b-a1f5-ddff9fda61b5 tempest-ServerDiagnosticsV248Test-703192613 tempest-ServerDiagnosticsV248Test-703192613-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52d3c99e-ea34-05cc-c485-8b6da7c3d468, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1824.901149] env[63279]: DEBUG oslo_concurrency.lockutils [req-372bb5e2-2b9a-43ef-8f00-15c88ba9729f req-a8861263-a3bc-41b3-b857-148b3a7770f6 service nova] Releasing lock "refresh_cache-24bce28c-fc43-4f17-9800-4d980f6729bc" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1824.944224] env[63279]: DEBUG nova.compute.manager [None req-99df1493-5715-4262-b84c-33faf3142bfd tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] [instance: 1a55008a-4d8c-403d-a8f4-966aa7346f4c] Starting instance... {{(pid=63279) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1824.950242] env[63279]: DEBUG nova.compute.manager [None req-0b6dec82-a075-43f4-a12b-6738f27bbf78 tempest-ServerDiagnosticsTest-1267197074 tempest-ServerDiagnosticsTest-1267197074-project-member] [instance: ac1d0e8f-446a-4a6d-a916-08f52426396d] Start destroying the instance on the hypervisor. {{(pid=63279) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1824.950242] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-0b6dec82-a075-43f4-a12b-6738f27bbf78 tempest-ServerDiagnosticsTest-1267197074 tempest-ServerDiagnosticsTest-1267197074-project-member] [instance: ac1d0e8f-446a-4a6d-a916-08f52426396d] Destroying instance {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1824.950242] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2604ce1-b87e-4783-b72a-48e233bce7c4 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1824.962434] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-0b6dec82-a075-43f4-a12b-6738f27bbf78 tempest-ServerDiagnosticsTest-1267197074 tempest-ServerDiagnosticsTest-1267197074-project-member] [instance: ac1d0e8f-446a-4a6d-a916-08f52426396d] Powering off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1824.962720] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-bc469dc9-f8a4-43b5-a35c-558b0136114d {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1824.976023] env[63279]: DEBUG oslo_vmware.api [None req-0b6dec82-a075-43f4-a12b-6738f27bbf78 tempest-ServerDiagnosticsTest-1267197074 tempest-ServerDiagnosticsTest-1267197074-project-member] Waiting for the task: (returnval){ [ 1824.976023] env[63279]: value = "task-2086740" [ 1824.976023] env[63279]: _type = "Task" [ 1824.976023] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1824.986225] env[63279]: DEBUG oslo_vmware.api [None req-0b6dec82-a075-43f4-a12b-6738f27bbf78 tempest-ServerDiagnosticsTest-1267197074 tempest-ServerDiagnosticsTest-1267197074-project-member] Task: {'id': task-2086740, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1825.145076] env[63279]: DEBUG oslo_vmware.api [None req-c56e5cd3-c5d3-497a-94cf-fa4a1928fd50 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Task: {'id': task-2086739, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1825.172227] env[63279]: DEBUG oslo_vmware.api [None req-aeaa6e99-ad33-4acc-9a18-53a9725a9078 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]521283c2-87e8-9b6a-d37e-023b60706b37, 'name': SearchDatastore_Task, 'duration_secs': 0.01038} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1825.173096] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3173cdc0-d931-405e-a661-357390ebf42c {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1825.181300] env[63279]: DEBUG oslo_vmware.api [None req-aeaa6e99-ad33-4acc-9a18-53a9725a9078 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Waiting for the task: (returnval){ [ 1825.181300] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52bdb9a8-25d0-72c7-7940-8a9f86329fec" [ 1825.181300] env[63279]: _type = "Task" [ 1825.181300] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1825.191141] env[63279]: DEBUG oslo_vmware.api [None req-aeaa6e99-ad33-4acc-9a18-53a9725a9078 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52bdb9a8-25d0-72c7-7940-8a9f86329fec, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1825.265810] env[63279]: DEBUG nova.compute.utils [None req-a7076d91-54cd-478d-8de7-9cdce51faa84 tempest-AttachInterfacesUnderV243Test-18748603 tempest-AttachInterfacesUnderV243Test-18748603-project-member] Using /dev/sd instead of None {{(pid=63279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1825.274942] env[63279]: DEBUG nova.network.neutron [None req-9be70fed-855a-424c-af8e-36597da29526 tempest-ImagesOneServerTestJSON-859902932 tempest-ImagesOneServerTestJSON-859902932-project-member] [instance: 7cd673bb-3795-4c6f-a1e0-a8ea0da0a35f] Instance cache missing network info. {{(pid=63279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1825.277440] env[63279]: DEBUG nova.compute.manager [None req-a7076d91-54cd-478d-8de7-9cdce51faa84 tempest-AttachInterfacesUnderV243Test-18748603 tempest-AttachInterfacesUnderV243Test-18748603-project-member] [instance: fcc5a636-554f-424e-a604-a8e7bd7cf574] Allocating IP information in the background. {{(pid=63279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1825.277440] env[63279]: DEBUG nova.network.neutron [None req-a7076d91-54cd-478d-8de7-9cdce51faa84 tempest-AttachInterfacesUnderV243Test-18748603 tempest-AttachInterfacesUnderV243Test-18748603-project-member] [instance: fcc5a636-554f-424e-a604-a8e7bd7cf574] allocate_for_instance() {{(pid=63279) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1825.399557] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2086735, 'name': CreateVM_Task} progress is 99%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1825.400329] env[63279]: DEBUG oslo_vmware.api [None req-11810f0f-6d8f-490b-a1f5-ddff9fda61b5 tempest-ServerDiagnosticsV248Test-703192613 tempest-ServerDiagnosticsV248Test-703192613-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52d3c99e-ea34-05cc-c485-8b6da7c3d468, 'name': SearchDatastore_Task, 'duration_secs': 0.017806} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1825.407180] env[63279]: DEBUG oslo_concurrency.lockutils [None req-11810f0f-6d8f-490b-a1f5-ddff9fda61b5 tempest-ServerDiagnosticsV248Test-703192613 tempest-ServerDiagnosticsV248Test-703192613-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1825.407517] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-11810f0f-6d8f-490b-a1f5-ddff9fda61b5 tempest-ServerDiagnosticsV248Test-703192613 tempest-ServerDiagnosticsV248Test-703192613-project-member] [instance: abd63285-ee3c-4546-b86d-6d4388765d94] Processing image 30887889-e45b-4f67-8b3c-16216e594a90 {{(pid=63279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1825.407736] env[63279]: DEBUG oslo_concurrency.lockutils [None req-11810f0f-6d8f-490b-a1f5-ddff9fda61b5 tempest-ServerDiagnosticsV248Test-703192613 tempest-ServerDiagnosticsV248Test-703192613-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1825.413159] env[63279]: DEBUG nova.compute.manager [req-85d4e5bd-e2bb-465a-a568-79c1512952c6 req-fa1bf5ba-44fc-4852-8448-a74819dc7daa service nova] [instance: 6388f912-ae70-4e8f-b8e4-ceb02e0f8a51] Received event network-changed-bc9f3899-95c1-4e79-b121-03c9a2c0bc44 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 1825.413159] env[63279]: DEBUG nova.compute.manager [req-85d4e5bd-e2bb-465a-a568-79c1512952c6 req-fa1bf5ba-44fc-4852-8448-a74819dc7daa service nova] [instance: 6388f912-ae70-4e8f-b8e4-ceb02e0f8a51] Refreshing instance network info cache due to event network-changed-bc9f3899-95c1-4e79-b121-03c9a2c0bc44. 
{{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11655}} [ 1825.413159] env[63279]: DEBUG oslo_concurrency.lockutils [req-85d4e5bd-e2bb-465a-a568-79c1512952c6 req-fa1bf5ba-44fc-4852-8448-a74819dc7daa service nova] Acquiring lock "refresh_cache-6388f912-ae70-4e8f-b8e4-ceb02e0f8a51" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1825.413159] env[63279]: DEBUG oslo_concurrency.lockutils [req-85d4e5bd-e2bb-465a-a568-79c1512952c6 req-fa1bf5ba-44fc-4852-8448-a74819dc7daa service nova] Acquired lock "refresh_cache-6388f912-ae70-4e8f-b8e4-ceb02e0f8a51" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1825.413159] env[63279]: DEBUG nova.network.neutron [req-85d4e5bd-e2bb-465a-a568-79c1512952c6 req-fa1bf5ba-44fc-4852-8448-a74819dc7daa service nova] [instance: 6388f912-ae70-4e8f-b8e4-ceb02e0f8a51] Refreshing network info cache for port bc9f3899-95c1-4e79-b121-03c9a2c0bc44 {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1825.416644] env[63279]: DEBUG nova.policy [None req-a7076d91-54cd-478d-8de7-9cdce51faa84 tempest-AttachInterfacesUnderV243Test-18748603 tempest-AttachInterfacesUnderV243Test-18748603-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '77406eb52a284e74940702e43de18783', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'dfdf69a97cf54d5cb8c4fb1c59b6a5d0', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63279) authorize /opt/stack/nova/nova/policy.py:192}} [ 1825.483796] env[63279]: DEBUG oslo_vmware.api [None req-0b6dec82-a075-43f4-a12b-6738f27bbf78 tempest-ServerDiagnosticsTest-1267197074 tempest-ServerDiagnosticsTest-1267197074-project-member] Task: {'id': task-2086740, 'name': PowerOffVM_Task, 'duration_secs': 0.496472} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1825.484947] env[63279]: DEBUG oslo_concurrency.lockutils [None req-99df1493-5715-4262-b84c-33faf3142bfd tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1825.485684] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-0b6dec82-a075-43f4-a12b-6738f27bbf78 tempest-ServerDiagnosticsTest-1267197074 tempest-ServerDiagnosticsTest-1267197074-project-member] [instance: ac1d0e8f-446a-4a6d-a916-08f52426396d] Powered off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1825.485921] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-0b6dec82-a075-43f4-a12b-6738f27bbf78 tempest-ServerDiagnosticsTest-1267197074 tempest-ServerDiagnosticsTest-1267197074-project-member] [instance: ac1d0e8f-446a-4a6d-a916-08f52426396d] Unregistering the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1825.491997] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-94d6844d-742e-44df-b6d2-594de135e324 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1825.580174] env[63279]: DEBUG oslo_concurrency.lockutils [None req-1c67a5c6-4ab3-4770-b276-ecfec0564f8f tempest-ServersTestManualDisk-725019117 tempest-ServersTestManualDisk-725019117-project-member] Acquiring lock "ecedded1-7169-49a4-8a9e-2fe4086db986" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1825.580555] env[63279]: DEBUG oslo_concurrency.lockutils [None req-1c67a5c6-4ab3-4770-b276-ecfec0564f8f tempest-ServersTestManualDisk-725019117 tempest-ServersTestManualDisk-725019117-project-member] Lock "ecedded1-7169-49a4-8a9e-2fe4086db986" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1825.585990] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-0b6dec82-a075-43f4-a12b-6738f27bbf78 tempest-ServerDiagnosticsTest-1267197074 tempest-ServerDiagnosticsTest-1267197074-project-member] [instance: ac1d0e8f-446a-4a6d-a916-08f52426396d] Unregistered the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1825.586267] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-0b6dec82-a075-43f4-a12b-6738f27bbf78 tempest-ServerDiagnosticsTest-1267197074 tempest-ServerDiagnosticsTest-1267197074-project-member] [instance: ac1d0e8f-446a-4a6d-a916-08f52426396d] Deleting contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1825.586838] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-0b6dec82-a075-43f4-a12b-6738f27bbf78 tempest-ServerDiagnosticsTest-1267197074 tempest-ServerDiagnosticsTest-1267197074-project-member] Deleting the datastore file [datastore1] ac1d0e8f-446a-4a6d-a916-08f52426396d {{(pid=63279) file_delete 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1825.587563] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ad51dec5-1721-43a0-9f3a-22f0f3e18a1b {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1825.596664] env[63279]: DEBUG oslo_vmware.api [None req-0b6dec82-a075-43f4-a12b-6738f27bbf78 tempest-ServerDiagnosticsTest-1267197074 tempest-ServerDiagnosticsTest-1267197074-project-member] Waiting for the task: (returnval){ [ 1825.596664] env[63279]: value = "task-2086742" [ 1825.596664] env[63279]: _type = "Task" [ 1825.596664] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1825.608466] env[63279]: DEBUG oslo_vmware.api [None req-0b6dec82-a075-43f4-a12b-6738f27bbf78 tempest-ServerDiagnosticsTest-1267197074 tempest-ServerDiagnosticsTest-1267197074-project-member] Task: {'id': task-2086742, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1825.657847] env[63279]: DEBUG oslo_vmware.api [None req-c56e5cd3-c5d3-497a-94cf-fa4a1928fd50 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Task: {'id': task-2086739, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.752575} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1825.658114] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-c56e5cd3-c5d3-497a-94cf-fa4a1928fd50 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] 6388f912-ae70-4e8f-b8e4-ceb02e0f8a51/6388f912-ae70-4e8f-b8e4-ceb02e0f8a51.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1825.658325] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-c56e5cd3-c5d3-497a-94cf-fa4a1928fd50 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] [instance: 6388f912-ae70-4e8f-b8e4-ceb02e0f8a51] Extending root virtual disk to 1048576 {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1825.658767] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-9dd96873-f843-402d-80b8-6b98094a19fc {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1825.667474] env[63279]: DEBUG oslo_vmware.api [None req-c56e5cd3-c5d3-497a-94cf-fa4a1928fd50 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Waiting for the task: (returnval){ [ 1825.667474] env[63279]: value = "task-2086743" [ 1825.667474] env[63279]: _type = "Task" [ 1825.667474] env[63279]: } to complete. 
{{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1825.673134] env[63279]: DEBUG nova.network.neutron [None req-9be70fed-855a-424c-af8e-36597da29526 tempest-ImagesOneServerTestJSON-859902932 tempest-ImagesOneServerTestJSON-859902932-project-member] [instance: 7cd673bb-3795-4c6f-a1e0-a8ea0da0a35f] Updating instance_info_cache with network_info: [{"id": "dc489c36-2ef9-4f47-aadb-4f6503bb9d1b", "address": "fa:16:3e:37:b8:f9", "network": {"id": "6baee8f1-a2e9-4c94-b06b-d371b1fd5ecd", "bridge": "br-int", "label": "tempest-ImagesOneServerTestJSON-669727083-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "afdc0a67b163421aafe94f0392a66fa8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c0d5204b-f60e-4830-84c8-2fe246c28202", "external-id": "nsx-vlan-transportzone-104", "segmentation_id": 104, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdc489c36-2e", "ovs_interfaceid": "dc489c36-2ef9-4f47-aadb-4f6503bb9d1b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1825.680635] env[63279]: DEBUG oslo_vmware.api [None req-c56e5cd3-c5d3-497a-94cf-fa4a1928fd50 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Task: {'id': task-2086743, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1825.695405] env[63279]: DEBUG oslo_vmware.api [None req-aeaa6e99-ad33-4acc-9a18-53a9725a9078 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52bdb9a8-25d0-72c7-7940-8a9f86329fec, 'name': SearchDatastore_Task, 'duration_secs': 0.068101} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1825.695648] env[63279]: DEBUG oslo_concurrency.lockutils [None req-aeaa6e99-ad33-4acc-9a18-53a9725a9078 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1825.695923] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-aeaa6e99-ad33-4acc-9a18-53a9725a9078 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] 5656c853-ac83-47be-83c4-979a9e87ab91/5656c853-ac83-47be-83c4-979a9e87ab91.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1825.696979] env[63279]: DEBUG oslo_concurrency.lockutils [None req-11810f0f-6d8f-490b-a1f5-ddff9fda61b5 tempest-ServerDiagnosticsV248Test-703192613 tempest-ServerDiagnosticsV248Test-703192613-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1825.696979] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-11810f0f-6d8f-490b-a1f5-ddff9fda61b5 tempest-ServerDiagnosticsV248Test-703192613 tempest-ServerDiagnosticsV248Test-703192613-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1825.696979] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7ac93868-f004-4a75-9632-ee3715771af9 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1825.698974] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f8c10cba-ab16-4083-8743-528330921563 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1825.705999] env[63279]: DEBUG oslo_vmware.api [None req-aeaa6e99-ad33-4acc-9a18-53a9725a9078 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Waiting for the task: (returnval){ [ 1825.705999] env[63279]: value = "task-2086744" [ 1825.705999] env[63279]: _type = "Task" [ 1825.705999] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1825.711323] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-11810f0f-6d8f-490b-a1f5-ddff9fda61b5 tempest-ServerDiagnosticsV248Test-703192613 tempest-ServerDiagnosticsV248Test-703192613-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1825.711323] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-11810f0f-6d8f-490b-a1f5-ddff9fda61b5 tempest-ServerDiagnosticsV248Test-703192613 tempest-ServerDiagnosticsV248Test-703192613-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1825.716291] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7c907c95-d37b-4ae4-ba1e-b7fbbe359cab {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1825.717732] env[63279]: DEBUG oslo_vmware.api [None req-aeaa6e99-ad33-4acc-9a18-53a9725a9078 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Task: {'id': task-2086744, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1825.721203] env[63279]: DEBUG oslo_vmware.api [None req-11810f0f-6d8f-490b-a1f5-ddff9fda61b5 tempest-ServerDiagnosticsV248Test-703192613 tempest-ServerDiagnosticsV248Test-703192613-project-member] Waiting for the task: (returnval){ [ 1825.721203] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52c76102-34af-ff0c-4f66-889aec8cdbf9" [ 1825.721203] env[63279]: _type = "Task" [ 1825.721203] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1825.731283] env[63279]: DEBUG oslo_vmware.api [None req-11810f0f-6d8f-490b-a1f5-ddff9fda61b5 tempest-ServerDiagnosticsV248Test-703192613 tempest-ServerDiagnosticsV248Test-703192613-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52c76102-34af-ff0c-4f66-889aec8cdbf9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1825.777446] env[63279]: DEBUG nova.compute.manager [None req-a7076d91-54cd-478d-8de7-9cdce51faa84 tempest-AttachInterfacesUnderV243Test-18748603 tempest-AttachInterfacesUnderV243Test-18748603-project-member] [instance: fcc5a636-554f-424e-a604-a8e7bd7cf574] Start building block device mappings for instance. {{(pid=63279) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1825.883358] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2086735, 'name': CreateVM_Task, 'duration_secs': 1.529783} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1825.883481] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4a9088e0-2992-4b18-8be9-6bc70633369b] Created VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1825.886128] env[63279]: DEBUG oslo_concurrency.lockutils [None req-e5b6a16c-1913-4848-8b42-b00fb552edc7 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1825.886254] env[63279]: DEBUG oslo_concurrency.lockutils [None req-e5b6a16c-1913-4848-8b42-b00fb552edc7 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1825.886838] env[63279]: DEBUG oslo_concurrency.lockutils [None req-e5b6a16c-1913-4848-8b42-b00fb552edc7 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1825.886891] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a98ccf07-c223-479d-83b9-802c68aa6515 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1825.892562] env[63279]: DEBUG oslo_vmware.api [None req-e5b6a16c-1913-4848-8b42-b00fb552edc7 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Waiting for the task: (returnval){ [ 1825.892562] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]5262f657-c70c-358a-6851-fb85220caf73" [ 1825.892562] env[63279]: _type = "Task" [ 1825.892562] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1825.901430] env[63279]: DEBUG oslo_vmware.api [None req-e5b6a16c-1913-4848-8b42-b00fb552edc7 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]5262f657-c70c-358a-6851-fb85220caf73, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1826.110378] env[63279]: DEBUG oslo_vmware.api [None req-0b6dec82-a075-43f4-a12b-6738f27bbf78 tempest-ServerDiagnosticsTest-1267197074 tempest-ServerDiagnosticsTest-1267197074-project-member] Task: {'id': task-2086742, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.233601} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1826.110666] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-0b6dec82-a075-43f4-a12b-6738f27bbf78 tempest-ServerDiagnosticsTest-1267197074 tempest-ServerDiagnosticsTest-1267197074-project-member] Deleted the datastore file {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1826.110855] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-0b6dec82-a075-43f4-a12b-6738f27bbf78 tempest-ServerDiagnosticsTest-1267197074 tempest-ServerDiagnosticsTest-1267197074-project-member] [instance: ac1d0e8f-446a-4a6d-a916-08f52426396d] Deleted contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1826.111046] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-0b6dec82-a075-43f4-a12b-6738f27bbf78 tempest-ServerDiagnosticsTest-1267197074 tempest-ServerDiagnosticsTest-1267197074-project-member] [instance: ac1d0e8f-446a-4a6d-a916-08f52426396d] Instance destroyed {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1826.111226] env[63279]: INFO nova.compute.manager [None req-0b6dec82-a075-43f4-a12b-6738f27bbf78 tempest-ServerDiagnosticsTest-1267197074 tempest-ServerDiagnosticsTest-1267197074-project-member] [instance: ac1d0e8f-446a-4a6d-a916-08f52426396d] Took 1.16 seconds to destroy the instance on the hypervisor. [ 1826.111914] env[63279]: DEBUG oslo.service.loopingcall [None req-0b6dec82-a075-43f4-a12b-6738f27bbf78 tempest-ServerDiagnosticsTest-1267197074 tempest-ServerDiagnosticsTest-1267197074-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1826.111914] env[63279]: DEBUG nova.compute.manager [-] [instance: ac1d0e8f-446a-4a6d-a916-08f52426396d] Deallocating network for instance {{(pid=63279) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1826.111914] env[63279]: DEBUG nova.network.neutron [-] [instance: ac1d0e8f-446a-4a6d-a916-08f52426396d] deallocate_for_instance() {{(pid=63279) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1826.134373] env[63279]: DEBUG nova.network.neutron [None req-a7076d91-54cd-478d-8de7-9cdce51faa84 tempest-AttachInterfacesUnderV243Test-18748603 tempest-AttachInterfacesUnderV243Test-18748603-project-member] [instance: fcc5a636-554f-424e-a604-a8e7bd7cf574] Successfully created port: e1063b89-be88-474a-a2ec-b61eb11cf9fe {{(pid=63279) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1826.151995] env[63279]: DEBUG oslo_concurrency.lockutils [None req-87178c67-9fe7-4e5b-83de-4d1d9bf7ef39 tempest-DeleteServersAdminTestJSON-181817188 tempest-DeleteServersAdminTestJSON-181817188-project-admin] Acquiring lock "a301d225-684d-4f88-bc9b-7e02b8115b9d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1826.152300] env[63279]: DEBUG oslo_concurrency.lockutils [None req-87178c67-9fe7-4e5b-83de-4d1d9bf7ef39 tempest-DeleteServersAdminTestJSON-181817188 tempest-DeleteServersAdminTestJSON-181817188-project-admin] Lock "a301d225-684d-4f88-bc9b-7e02b8115b9d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1826.152561] env[63279]: DEBUG oslo_concurrency.lockutils [None req-87178c67-9fe7-4e5b-83de-4d1d9bf7ef39 tempest-DeleteServersAdminTestJSON-181817188 tempest-DeleteServersAdminTestJSON-181817188-project-admin] Acquiring lock "a301d225-684d-4f88-bc9b-7e02b8115b9d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1826.152769] env[63279]: DEBUG oslo_concurrency.lockutils [None req-87178c67-9fe7-4e5b-83de-4d1d9bf7ef39 tempest-DeleteServersAdminTestJSON-181817188 tempest-DeleteServersAdminTestJSON-181817188-project-admin] Lock "a301d225-684d-4f88-bc9b-7e02b8115b9d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1826.152946] env[63279]: DEBUG oslo_concurrency.lockutils [None req-87178c67-9fe7-4e5b-83de-4d1d9bf7ef39 tempest-DeleteServersAdminTestJSON-181817188 tempest-DeleteServersAdminTestJSON-181817188-project-admin] Lock "a301d225-684d-4f88-bc9b-7e02b8115b9d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1826.156118] env[63279]: INFO nova.compute.manager [None req-87178c67-9fe7-4e5b-83de-4d1d9bf7ef39 tempest-DeleteServersAdminTestJSON-181817188 tempest-DeleteServersAdminTestJSON-181817188-project-admin] [instance: a301d225-684d-4f88-bc9b-7e02b8115b9d] Terminating 
instance [ 1826.176789] env[63279]: DEBUG oslo_concurrency.lockutils [None req-9be70fed-855a-424c-af8e-36597da29526 tempest-ImagesOneServerTestJSON-859902932 tempest-ImagesOneServerTestJSON-859902932-project-member] Releasing lock "refresh_cache-7cd673bb-3795-4c6f-a1e0-a8ea0da0a35f" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1826.177161] env[63279]: DEBUG nova.compute.manager [None req-9be70fed-855a-424c-af8e-36597da29526 tempest-ImagesOneServerTestJSON-859902932 tempest-ImagesOneServerTestJSON-859902932-project-member] [instance: 7cd673bb-3795-4c6f-a1e0-a8ea0da0a35f] Instance network_info: |[{"id": "dc489c36-2ef9-4f47-aadb-4f6503bb9d1b", "address": "fa:16:3e:37:b8:f9", "network": {"id": "6baee8f1-a2e9-4c94-b06b-d371b1fd5ecd", "bridge": "br-int", "label": "tempest-ImagesOneServerTestJSON-669727083-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "afdc0a67b163421aafe94f0392a66fa8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c0d5204b-f60e-4830-84c8-2fe246c28202", "external-id": "nsx-vlan-transportzone-104", "segmentation_id": 104, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdc489c36-2e", "ovs_interfaceid": "dc489c36-2ef9-4f47-aadb-4f6503bb9d1b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1826.181215] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-9be70fed-855a-424c-af8e-36597da29526 tempest-ImagesOneServerTestJSON-859902932 tempest-ImagesOneServerTestJSON-859902932-project-member] [instance: 7cd673bb-3795-4c6f-a1e0-a8ea0da0a35f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:37:b8:f9', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c0d5204b-f60e-4830-84c8-2fe246c28202', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'dc489c36-2ef9-4f47-aadb-4f6503bb9d1b', 'vif_model': 'vmxnet3'}] {{(pid=63279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1826.189242] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-9be70fed-855a-424c-af8e-36597da29526 tempest-ImagesOneServerTestJSON-859902932 tempest-ImagesOneServerTestJSON-859902932-project-member] Creating folder: Project (afdc0a67b163421aafe94f0392a66fa8). Parent ref: group-v427491. {{(pid=63279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1826.189626] env[63279]: DEBUG oslo_vmware.api [None req-c56e5cd3-c5d3-497a-94cf-fa4a1928fd50 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Task: {'id': task-2086743, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.077198} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1826.193303] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c3a82de8-6c4b-4167-9ddc-c46076febab6 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1826.195716] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-c56e5cd3-c5d3-497a-94cf-fa4a1928fd50 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] [instance: 6388f912-ae70-4e8f-b8e4-ceb02e0f8a51] Extended root virtual disk {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1826.196955] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2835fe7c-6bb0-4d7a-8050-1875a26e2e4b {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1826.225740] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-c56e5cd3-c5d3-497a-94cf-fa4a1928fd50 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] [instance: 6388f912-ae70-4e8f-b8e4-ceb02e0f8a51] Reconfiguring VM instance instance-00000008 to attach disk [datastore1] 6388f912-ae70-4e8f-b8e4-ceb02e0f8a51/6388f912-ae70-4e8f-b8e4-ceb02e0f8a51.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1826.232434] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2b45b3ed-1ed2-4e34-9f0b-77c9b4ec8985 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1826.264850] env[63279]: DEBUG oslo_vmware.api [None req-aeaa6e99-ad33-4acc-9a18-53a9725a9078 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Task: {'id': task-2086744, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1826.265653] env[63279]: DEBUG oslo_vmware.api [None req-c56e5cd3-c5d3-497a-94cf-fa4a1928fd50 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Waiting for the task: (returnval){ [ 1826.265653] env[63279]: value = "task-2086746" [ 1826.265653] env[63279]: _type = "Task" [ 1826.265653] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1826.267480] env[63279]: INFO nova.virt.vmwareapi.vm_util [None req-9be70fed-855a-424c-af8e-36597da29526 tempest-ImagesOneServerTestJSON-859902932 tempest-ImagesOneServerTestJSON-859902932-project-member] Created folder: Project (afdc0a67b163421aafe94f0392a66fa8) in parent group-v427491. [ 1826.268398] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-9be70fed-855a-424c-af8e-36597da29526 tempest-ImagesOneServerTestJSON-859902932 tempest-ImagesOneServerTestJSON-859902932-project-member] Creating folder: Instances. Parent ref: group-v427523. 
{{(pid=63279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1826.271702] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-81574ff6-c766-48c7-b9a2-5e3a6e03b442 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1826.279299] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ceafa906-443b-4029-b8a1-643d3f98dd1c {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1826.282097] env[63279]: DEBUG oslo_vmware.api [None req-11810f0f-6d8f-490b-a1f5-ddff9fda61b5 tempest-ServerDiagnosticsV248Test-703192613 tempest-ServerDiagnosticsV248Test-703192613-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52c76102-34af-ff0c-4f66-889aec8cdbf9, 'name': SearchDatastore_Task, 'duration_secs': 0.019952} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1826.284426] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fbe33ff2-1810-4c1a-bf57-2c06af80a6a7 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1826.296732] env[63279]: DEBUG oslo_vmware.api [None req-c56e5cd3-c5d3-497a-94cf-fa4a1928fd50 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Task: {'id': task-2086746, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1826.299286] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-757416f4-9633-45be-921e-dce7e9b8ffc1 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1826.303068] env[63279]: INFO nova.virt.vmwareapi.vm_util [None req-9be70fed-855a-424c-af8e-36597da29526 tempest-ImagesOneServerTestJSON-859902932 tempest-ImagesOneServerTestJSON-859902932-project-member] Created folder: Instances in parent group-v427523. [ 1826.303068] env[63279]: DEBUG oslo.service.loopingcall [None req-9be70fed-855a-424c-af8e-36597da29526 tempest-ImagesOneServerTestJSON-859902932 tempest-ImagesOneServerTestJSON-859902932-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1826.304754] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7cd673bb-3795-4c6f-a1e0-a8ea0da0a35f] Creating VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1826.304861] env[63279]: DEBUG oslo_vmware.api [None req-11810f0f-6d8f-490b-a1f5-ddff9fda61b5 tempest-ServerDiagnosticsV248Test-703192613 tempest-ServerDiagnosticsV248Test-703192613-project-member] Waiting for the task: (returnval){ [ 1826.304861] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]520650b7-7ede-2ab4-f344-0227767bb308" [ 1826.304861] env[63279]: _type = "Task" [ 1826.304861] env[63279]: } to complete. 
{{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1826.305533] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8b62a3cf-2fa7-4fc1-a91a-b681062e277e {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1826.351316] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37265d30-81e7-407e-b207-5cf915865b80 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1826.355900] env[63279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1826.355900] env[63279]: value = "task-2086748" [ 1826.355900] env[63279]: _type = "Task" [ 1826.355900] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1826.359930] env[63279]: DEBUG oslo_vmware.api [None req-11810f0f-6d8f-490b-a1f5-ddff9fda61b5 tempest-ServerDiagnosticsV248Test-703192613 tempest-ServerDiagnosticsV248Test-703192613-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]520650b7-7ede-2ab4-f344-0227767bb308, 'name': SearchDatastore_Task, 'duration_secs': 0.01681} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1826.365174] env[63279]: DEBUG oslo_concurrency.lockutils [None req-11810f0f-6d8f-490b-a1f5-ddff9fda61b5 tempest-ServerDiagnosticsV248Test-703192613 tempest-ServerDiagnosticsV248Test-703192613-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1826.365469] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-11810f0f-6d8f-490b-a1f5-ddff9fda61b5 tempest-ServerDiagnosticsV248Test-703192613 tempest-ServerDiagnosticsV248Test-703192613-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] abd63285-ee3c-4546-b86d-6d4388765d94/abd63285-ee3c-4546-b86d-6d4388765d94.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1826.366047] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-804a28a1-050a-4de1-91ae-8f599018f71f {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1826.368801] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-013787b3-f82d-4ab1-a30f-627973ee9c35 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1826.379503] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2086748, 'name': CreateVM_Task} progress is 6%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1826.388624] env[63279]: DEBUG oslo_vmware.api [None req-11810f0f-6d8f-490b-a1f5-ddff9fda61b5 tempest-ServerDiagnosticsV248Test-703192613 tempest-ServerDiagnosticsV248Test-703192613-project-member] Waiting for the task: (returnval){ [ 1826.388624] env[63279]: value = "task-2086749" [ 1826.388624] env[63279]: _type = "Task" [ 1826.388624] env[63279]: } to complete. 
{{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1826.388944] env[63279]: DEBUG nova.compute.provider_tree [None req-271b2613-9c7a-40b6-867e-c4c5c1580416 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Inventory has not changed in ProviderTree for provider: 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1826.405345] env[63279]: DEBUG oslo_concurrency.lockutils [None req-4eb26c5f-1d6c-44a0-9365-feefd9e1d5a1 tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Acquiring lock "ff2f355a-9687-4491-b243-6133e4b7b866" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1826.405557] env[63279]: DEBUG oslo_concurrency.lockutils [None req-4eb26c5f-1d6c-44a0-9365-feefd9e1d5a1 tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Lock "ff2f355a-9687-4491-b243-6133e4b7b866" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1826.410221] env[63279]: DEBUG oslo_vmware.api [None req-11810f0f-6d8f-490b-a1f5-ddff9fda61b5 tempest-ServerDiagnosticsV248Test-703192613 tempest-ServerDiagnosticsV248Test-703192613-project-member] Task: {'id': task-2086749, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1826.413840] env[63279]: DEBUG oslo_vmware.api [None req-e5b6a16c-1913-4848-8b42-b00fb552edc7 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]5262f657-c70c-358a-6851-fb85220caf73, 'name': SearchDatastore_Task, 'duration_secs': 0.010172} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1826.414511] env[63279]: DEBUG oslo_concurrency.lockutils [None req-e5b6a16c-1913-4848-8b42-b00fb552edc7 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1826.414984] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-e5b6a16c-1913-4848-8b42-b00fb552edc7 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] [instance: 4a9088e0-2992-4b18-8be9-6bc70633369b] Processing image 30887889-e45b-4f67-8b3c-16216e594a90 {{(pid=63279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1826.414984] env[63279]: DEBUG oslo_concurrency.lockutils [None req-e5b6a16c-1913-4848-8b42-b00fb552edc7 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1826.415103] env[63279]: DEBUG oslo_concurrency.lockutils [None req-e5b6a16c-1913-4848-8b42-b00fb552edc7 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1826.415218] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-e5b6a16c-1913-4848-8b42-b00fb552edc7 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1826.415701] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-04056520-75a7-4cd3-bba3-85b692b3eb68 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1826.426291] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-e5b6a16c-1913-4848-8b42-b00fb552edc7 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1826.426603] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-e5b6a16c-1913-4848-8b42-b00fb552edc7 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1826.427302] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-216d499f-d632-4b65-962e-702e0af21a01 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1826.434433] env[63279]: DEBUG oslo_vmware.api [None req-e5b6a16c-1913-4848-8b42-b00fb552edc7 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Waiting for the task: (returnval){ [ 1826.434433] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]525109f2-b769-3cf8-bdc4-03d61c186084" [ 1826.434433] env[63279]: _type = "Task" [ 1826.434433] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1826.443372] env[63279]: DEBUG oslo_vmware.api [None req-e5b6a16c-1913-4848-8b42-b00fb552edc7 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]525109f2-b769-3cf8-bdc4-03d61c186084, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1826.482566] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._sync_power_states {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1826.662013] env[63279]: DEBUG nova.compute.manager [None req-87178c67-9fe7-4e5b-83de-4d1d9bf7ef39 tempest-DeleteServersAdminTestJSON-181817188 tempest-DeleteServersAdminTestJSON-181817188-project-admin] [instance: a301d225-684d-4f88-bc9b-7e02b8115b9d] Start destroying the instance on the hypervisor. 
{{(pid=63279) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1826.662405] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-87178c67-9fe7-4e5b-83de-4d1d9bf7ef39 tempest-DeleteServersAdminTestJSON-181817188 tempest-DeleteServersAdminTestJSON-181817188-project-admin] [instance: a301d225-684d-4f88-bc9b-7e02b8115b9d] Destroying instance {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1826.663677] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c6065f1-0e19-44ee-9c8a-a02ca6f4fd5a {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1826.679559] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-87178c67-9fe7-4e5b-83de-4d1d9bf7ef39 tempest-DeleteServersAdminTestJSON-181817188 tempest-DeleteServersAdminTestJSON-181817188-project-admin] [instance: a301d225-684d-4f88-bc9b-7e02b8115b9d] Powering off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1826.679919] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c466fe07-1d74-4c7f-aa24-460716c8dbd3 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1826.688982] env[63279]: DEBUG oslo_vmware.api [None req-87178c67-9fe7-4e5b-83de-4d1d9bf7ef39 tempest-DeleteServersAdminTestJSON-181817188 tempest-DeleteServersAdminTestJSON-181817188-project-admin] Waiting for the task: (returnval){ [ 1826.688982] env[63279]: value = "task-2086750" [ 1826.688982] env[63279]: _type = "Task" [ 1826.688982] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1826.703590] env[63279]: DEBUG oslo_vmware.api [None req-87178c67-9fe7-4e5b-83de-4d1d9bf7ef39 tempest-DeleteServersAdminTestJSON-181817188 tempest-DeleteServersAdminTestJSON-181817188-project-admin] Task: {'id': task-2086750, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1826.731105] env[63279]: DEBUG oslo_vmware.api [None req-aeaa6e99-ad33-4acc-9a18-53a9725a9078 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Task: {'id': task-2086744, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.575515} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1826.731801] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-aeaa6e99-ad33-4acc-9a18-53a9725a9078 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] 5656c853-ac83-47be-83c4-979a9e87ab91/5656c853-ac83-47be-83c4-979a9e87ab91.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1826.732338] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-aeaa6e99-ad33-4acc-9a18-53a9725a9078 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] [instance: 5656c853-ac83-47be-83c4-979a9e87ab91] Extending root virtual disk to 1048576 {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1826.732338] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-524daa76-4d3b-4aaa-b6d6-06696a761a34 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1826.745118] env[63279]: DEBUG oslo_vmware.api [None req-aeaa6e99-ad33-4acc-9a18-53a9725a9078 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Waiting for the task: (returnval){ [ 1826.745118] env[63279]: value = "task-2086751" [ 1826.745118] env[63279]: _type = "Task" [ 1826.745118] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1826.752101] env[63279]: DEBUG oslo_vmware.api [None req-aeaa6e99-ad33-4acc-9a18-53a9725a9078 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Task: {'id': task-2086751, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1826.765602] env[63279]: DEBUG nova.network.neutron [req-85d4e5bd-e2bb-465a-a568-79c1512952c6 req-fa1bf5ba-44fc-4852-8448-a74819dc7daa service nova] [instance: 6388f912-ae70-4e8f-b8e4-ceb02e0f8a51] Updated VIF entry in instance network info cache for port bc9f3899-95c1-4e79-b121-03c9a2c0bc44. 
{{(pid=63279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1826.766017] env[63279]: DEBUG nova.network.neutron [req-85d4e5bd-e2bb-465a-a568-79c1512952c6 req-fa1bf5ba-44fc-4852-8448-a74819dc7daa service nova] [instance: 6388f912-ae70-4e8f-b8e4-ceb02e0f8a51] Updating instance_info_cache with network_info: [{"id": "bc9f3899-95c1-4e79-b121-03c9a2c0bc44", "address": "fa:16:3e:9a:65:be", "network": {"id": "548d80cd-fb6c-47fc-8c1d-036889987399", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-219167599-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3f6c6f65521a440fb80278bbff2d0ed0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "678ebbe4-4c53-4eaf-a689-93981310f37d", "external-id": "nsx-vlan-transportzone-443", "segmentation_id": 443, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbc9f3899-95", "ovs_interfaceid": "bc9f3899-95c1-4e79-b121-03c9a2c0bc44", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1826.779444] env[63279]: DEBUG oslo_vmware.api [None req-c56e5cd3-c5d3-497a-94cf-fa4a1928fd50 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Task: {'id': task-2086746, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1826.799089] env[63279]: DEBUG nova.compute.manager [None req-a7076d91-54cd-478d-8de7-9cdce51faa84 tempest-AttachInterfacesUnderV243Test-18748603 tempest-AttachInterfacesUnderV243Test-18748603-project-member] [instance: fcc5a636-554f-424e-a604-a8e7bd7cf574] Start spawning the instance on the hypervisor. {{(pid=63279) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1826.870523] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2086748, 'name': CreateVM_Task} progress is 25%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1826.894046] env[63279]: DEBUG nova.scheduler.client.report [None req-271b2613-9c7a-40b6-867e-c4c5c1580416 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Inventory has not changed for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1826.914910] env[63279]: DEBUG oslo_vmware.api [None req-11810f0f-6d8f-490b-a1f5-ddff9fda61b5 tempest-ServerDiagnosticsV248Test-703192613 tempest-ServerDiagnosticsV248Test-703192613-project-member] Task: {'id': task-2086749, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1826.948068] env[63279]: DEBUG oslo_vmware.api [None req-e5b6a16c-1913-4848-8b42-b00fb552edc7 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]525109f2-b769-3cf8-bdc4-03d61c186084, 'name': SearchDatastore_Task, 'duration_secs': 0.013524} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1826.949279] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2794fc95-9ff9-4e84-a279-d08c19ba169e {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1826.960438] env[63279]: DEBUG oslo_vmware.api [None req-e5b6a16c-1913-4848-8b42-b00fb552edc7 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Waiting for the task: (returnval){ [ 1826.960438] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52bb1151-99fa-2f61-9b17-3c24f3c42e16" [ 1826.960438] env[63279]: _type = "Task" [ 1826.960438] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1826.971490] env[63279]: DEBUG oslo_vmware.api [None req-e5b6a16c-1913-4848-8b42-b00fb552edc7 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52bb1151-99fa-2f61-9b17-3c24f3c42e16, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1826.978496] env[63279]: DEBUG nova.virt.hardware [None req-a7076d91-54cd-478d-8de7-9cdce51faa84 tempest-AttachInterfacesUnderV243Test-18748603 tempest-AttachInterfacesUnderV243Test-18748603-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-13T17:30:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-13T17:30:01Z,direct_url=,disk_format='vmdk',id=30887889-e45b-4f67-8b3c-16216e594a90,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a935e86eee0a4c38adfe0367d2097a61',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-13T17:30:02Z,virtual_size=,visibility=), allow threads: False {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1826.978496] env[63279]: DEBUG nova.virt.hardware [None req-a7076d91-54cd-478d-8de7-9cdce51faa84 tempest-AttachInterfacesUnderV243Test-18748603 tempest-AttachInterfacesUnderV243Test-18748603-project-member] Flavor limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1826.978784] env[63279]: DEBUG nova.virt.hardware [None req-a7076d91-54cd-478d-8de7-9cdce51faa84 tempest-AttachInterfacesUnderV243Test-18748603 tempest-AttachInterfacesUnderV243Test-18748603-project-member] Image limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1826.979065] env[63279]: DEBUG nova.virt.hardware [None req-a7076d91-54cd-478d-8de7-9cdce51faa84 tempest-AttachInterfacesUnderV243Test-18748603 tempest-AttachInterfacesUnderV243Test-18748603-project-member] Flavor pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1826.979341] env[63279]: DEBUG nova.virt.hardware [None req-a7076d91-54cd-478d-8de7-9cdce51faa84 tempest-AttachInterfacesUnderV243Test-18748603 tempest-AttachInterfacesUnderV243Test-18748603-project-member] Image pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1826.979559] env[63279]: DEBUG nova.virt.hardware [None req-a7076d91-54cd-478d-8de7-9cdce51faa84 tempest-AttachInterfacesUnderV243Test-18748603 tempest-AttachInterfacesUnderV243Test-18748603-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1826.980172] env[63279]: DEBUG nova.virt.hardware [None req-a7076d91-54cd-478d-8de7-9cdce51faa84 tempest-AttachInterfacesUnderV243Test-18748603 tempest-AttachInterfacesUnderV243Test-18748603-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1826.980172] env[63279]: DEBUG nova.virt.hardware [None req-a7076d91-54cd-478d-8de7-9cdce51faa84 tempest-AttachInterfacesUnderV243Test-18748603 tempest-AttachInterfacesUnderV243Test-18748603-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63279) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1826.980331] env[63279]: DEBUG nova.virt.hardware [None req-a7076d91-54cd-478d-8de7-9cdce51faa84 tempest-AttachInterfacesUnderV243Test-18748603 tempest-AttachInterfacesUnderV243Test-18748603-project-member] Got 1 possible topologies {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1826.980496] env[63279]: DEBUG nova.virt.hardware [None req-a7076d91-54cd-478d-8de7-9cdce51faa84 tempest-AttachInterfacesUnderV243Test-18748603 tempest-AttachInterfacesUnderV243Test-18748603-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1826.980716] env[63279]: DEBUG nova.virt.hardware [None req-a7076d91-54cd-478d-8de7-9cdce51faa84 tempest-AttachInterfacesUnderV243Test-18748603 tempest-AttachInterfacesUnderV243Test-18748603-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1826.981762] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-817cd6be-f788-45f1-b777-ff4312daf530 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1826.988595] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Getting list of instances from cluster (obj){ [ 1826.988595] env[63279]: value = "domain-c8" [ 1826.988595] env[63279]: _type = "ClusterComputeResource" [ 1826.988595] env[63279]: } {{(pid=63279) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 1826.993140] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c5dfa0c-1c70-41a5-9d00-328cbcd20d48 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1827.002756] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c053cb40-7bdc-469b-98f9-6ce2e24e1cb6 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1827.025537] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Got total of 9 instances {{(pid=63279) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 1827.025537] env[63279]: WARNING nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] While synchronizing instance power states, found 14 instances in the database and 9 instances on the hypervisor. 
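The nova.virt.hardware records above trace CPU topology selection for the m1.nano flavor: the flavor and image set no limits or preferences (0:0:0), the limits therefore default to 65536 sockets/cores/threads, and for a single vCPU the only split whose product equals the vCPU count is 1:1:1, which then becomes the sole "possible" and "sorted desired" topology. A minimal Python sketch of that enumeration, under those assumptions, is below; it illustrates the idea only and is not Nova's actual _get_possible_cpu_topologies code, and VirtCPUTopology here is a stand-in namedtuple rather than the real Nova object.

# Simplified illustration of the topology enumeration traced above
# (1 vCPU, limits sockets=cores=threads=65536 -> only 1:1:1 fits).
from collections import namedtuple

VirtCPUTopology = namedtuple("VirtCPUTopology", "sockets cores threads")

def possible_topologies(vcpus, max_sockets, max_cores, max_threads):
    """Yield every (sockets, cores, threads) split that uses all vcpus."""
    for s in range(1, min(vcpus, max_sockets) + 1):
        for c in range(1, min(vcpus, max_cores) + 1):
            for t in range(1, min(vcpus, max_threads) + 1):
                if s * c * t == vcpus:
                    yield VirtCPUTopology(s, c, t)

# "Build topologies for 1 vcpu(s) 1:1:1" -> exactly one candidate:
print(list(possible_topologies(1, 65536, 65536, 65536)))
# [VirtCPUTopology(sockets=1, cores=1, threads=1)]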
[ 1827.025537] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Triggering sync for uuid 0224e4ea-c13c-4abd-9626-6038c0bbe4e9 {{(pid=63279) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10857}} [ 1827.025537] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Triggering sync for uuid 4063d5e0-1144-40fa-8ed8-efda16730617 {{(pid=63279) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10857}} [ 1827.025537] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Triggering sync for uuid ac1d0e8f-446a-4a6d-a916-08f52426396d {{(pid=63279) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10857}} [ 1827.025537] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Triggering sync for uuid 8c712d0d-55c2-4a14-b759-9441594211e1 {{(pid=63279) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10857}} [ 1827.025537] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Triggering sync for uuid a301d225-684d-4f88-bc9b-7e02b8115b9d {{(pid=63279) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10857}} [ 1827.025943] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Triggering sync for uuid 21c2bba1-5482-496a-9e2a-f123a94ed48a {{(pid=63279) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10857}} [ 1827.025943] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Triggering sync for uuid 24bce28c-fc43-4f17-9800-4d980f6729bc {{(pid=63279) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10857}} [ 1827.025943] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Triggering sync for uuid 6388f912-ae70-4e8f-b8e4-ceb02e0f8a51 {{(pid=63279) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10857}} [ 1827.025943] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Triggering sync for uuid 5656c853-ac83-47be-83c4-979a9e87ab91 {{(pid=63279) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10857}} [ 1827.025943] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Triggering sync for uuid 4a9088e0-2992-4b18-8be9-6bc70633369b {{(pid=63279) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10857}} [ 1827.025943] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Triggering sync for uuid 7cd673bb-3795-4c6f-a1e0-a8ea0da0a35f {{(pid=63279) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10857}} [ 1827.025943] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Triggering sync for uuid abd63285-ee3c-4546-b86d-6d4388765d94 {{(pid=63279) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10857}} [ 1827.026321] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Triggering sync for uuid fcc5a636-554f-424e-a604-a8e7bd7cf574 {{(pid=63279) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10857}} [ 1827.026321] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Triggering sync for uuid c05e9aaa-e251-480c-94d6-56c29bb6282d {{(pid=63279) _sync_power_states 
/opt/stack/nova/nova/compute/manager.py:10857}} [ 1827.034272] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Acquiring lock "0224e4ea-c13c-4abd-9626-6038c0bbe4e9" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1827.034631] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Lock "0224e4ea-c13c-4abd-9626-6038c0bbe4e9" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.001s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1827.035021] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Acquiring lock "4063d5e0-1144-40fa-8ed8-efda16730617" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1827.035409] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Acquiring lock "ac1d0e8f-446a-4a6d-a916-08f52426396d" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1827.035690] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Acquiring lock "8c712d0d-55c2-4a14-b759-9441594211e1" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1827.035987] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Lock "8c712d0d-55c2-4a14-b759-9441594211e1" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1827.036418] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Acquiring lock "a301d225-684d-4f88-bc9b-7e02b8115b9d" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1827.036757] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Acquiring lock "21c2bba1-5482-496a-9e2a-f123a94ed48a" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1827.036965] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Lock "21c2bba1-5482-496a-9e2a-f123a94ed48a" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1827.039017] env[63279]: DEBUG 
oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Acquiring lock "24bce28c-fc43-4f17-9800-4d980f6729bc" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1827.039017] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Lock "24bce28c-fc43-4f17-9800-4d980f6729bc" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1827.039017] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Acquiring lock "6388f912-ae70-4e8f-b8e4-ceb02e0f8a51" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1827.039017] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Acquiring lock "5656c853-ac83-47be-83c4-979a9e87ab91" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1827.039312] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Acquiring lock "4a9088e0-2992-4b18-8be9-6bc70633369b" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1827.039312] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Acquiring lock "7cd673bb-3795-4c6f-a1e0-a8ea0da0a35f" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1827.039312] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Acquiring lock "abd63285-ee3c-4546-b86d-6d4388765d94" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1827.039312] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Acquiring lock "fcc5a636-554f-424e-a604-a8e7bd7cf574" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1827.039543] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Acquiring lock "c05e9aaa-e251-480c-94d6-56c29bb6282d" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1827.041154] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-ee928cc0-1f65-4af0-b730-7f86a5b6714f {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1827.044884] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92ecc1e7-2317-474d-b42b-4cff8f8948b1 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1827.049917] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9180a1b3-0899-49c0-a9bd-5e39398a8b94 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1827.056197] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a00e076-7223-46a4-9342-8fd655340eae {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1827.158990] env[63279]: DEBUG nova.network.neutron [-] [instance: ac1d0e8f-446a-4a6d-a916-08f52426396d] Updating instance_info_cache with network_info: [] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1827.201897] env[63279]: DEBUG oslo_vmware.api [None req-87178c67-9fe7-4e5b-83de-4d1d9bf7ef39 tempest-DeleteServersAdminTestJSON-181817188 tempest-DeleteServersAdminTestJSON-181817188-project-admin] Task: {'id': task-2086750, 'name': PowerOffVM_Task, 'duration_secs': 0.457527} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1827.205027] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-87178c67-9fe7-4e5b-83de-4d1d9bf7ef39 tempest-DeleteServersAdminTestJSON-181817188 tempest-DeleteServersAdminTestJSON-181817188-project-admin] [instance: a301d225-684d-4f88-bc9b-7e02b8115b9d] Powered off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1827.205027] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-87178c67-9fe7-4e5b-83de-4d1d9bf7ef39 tempest-DeleteServersAdminTestJSON-181817188 tempest-DeleteServersAdminTestJSON-181817188-project-admin] [instance: a301d225-684d-4f88-bc9b-7e02b8115b9d] Unregistering the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1827.205027] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e367c087-fd90-40d2-9b0a-1fec498ffc43 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1827.207035] env[63279]: DEBUG nova.compute.manager [req-7e0e0fff-b30a-4bb7-964b-e785eb164a75 req-119cf302-3a15-42ff-94d5-f84b57197c32 service nova] [instance: 4a9088e0-2992-4b18-8be9-6bc70633369b] Received event network-vif-plugged-63a3f09b-45d0-46eb-a197-186f1ec415d6 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 1827.209096] env[63279]: DEBUG oslo_concurrency.lockutils [req-7e0e0fff-b30a-4bb7-964b-e785eb164a75 req-119cf302-3a15-42ff-94d5-f84b57197c32 service nova] Acquiring lock "4a9088e0-2992-4b18-8be9-6bc70633369b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1827.212842] env[63279]: DEBUG oslo_concurrency.lockutils [req-7e0e0fff-b30a-4bb7-964b-e785eb164a75 req-119cf302-3a15-42ff-94d5-f84b57197c32 service nova] Lock 
"4a9088e0-2992-4b18-8be9-6bc70633369b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1827.212842] env[63279]: DEBUG oslo_concurrency.lockutils [req-7e0e0fff-b30a-4bb7-964b-e785eb164a75 req-119cf302-3a15-42ff-94d5-f84b57197c32 service nova] Lock "4a9088e0-2992-4b18-8be9-6bc70633369b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.004s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1827.212842] env[63279]: DEBUG nova.compute.manager [req-7e0e0fff-b30a-4bb7-964b-e785eb164a75 req-119cf302-3a15-42ff-94d5-f84b57197c32 service nova] [instance: 4a9088e0-2992-4b18-8be9-6bc70633369b] No waiting events found dispatching network-vif-plugged-63a3f09b-45d0-46eb-a197-186f1ec415d6 {{(pid=63279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1827.212842] env[63279]: WARNING nova.compute.manager [req-7e0e0fff-b30a-4bb7-964b-e785eb164a75 req-119cf302-3a15-42ff-94d5-f84b57197c32 service nova] [instance: 4a9088e0-2992-4b18-8be9-6bc70633369b] Received unexpected event network-vif-plugged-63a3f09b-45d0-46eb-a197-186f1ec415d6 for instance with vm_state building and task_state spawning. [ 1827.213203] env[63279]: DEBUG nova.compute.manager [req-7e0e0fff-b30a-4bb7-964b-e785eb164a75 req-119cf302-3a15-42ff-94d5-f84b57197c32 service nova] [instance: 4a9088e0-2992-4b18-8be9-6bc70633369b] Received event network-changed-63a3f09b-45d0-46eb-a197-186f1ec415d6 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 1827.213203] env[63279]: DEBUG nova.compute.manager [req-7e0e0fff-b30a-4bb7-964b-e785eb164a75 req-119cf302-3a15-42ff-94d5-f84b57197c32 service nova] [instance: 4a9088e0-2992-4b18-8be9-6bc70633369b] Refreshing instance network info cache due to event network-changed-63a3f09b-45d0-46eb-a197-186f1ec415d6. {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11655}} [ 1827.213382] env[63279]: DEBUG oslo_concurrency.lockutils [req-7e0e0fff-b30a-4bb7-964b-e785eb164a75 req-119cf302-3a15-42ff-94d5-f84b57197c32 service nova] Acquiring lock "refresh_cache-4a9088e0-2992-4b18-8be9-6bc70633369b" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1827.213531] env[63279]: DEBUG oslo_concurrency.lockutils [req-7e0e0fff-b30a-4bb7-964b-e785eb164a75 req-119cf302-3a15-42ff-94d5-f84b57197c32 service nova] Acquired lock "refresh_cache-4a9088e0-2992-4b18-8be9-6bc70633369b" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1827.213766] env[63279]: DEBUG nova.network.neutron [req-7e0e0fff-b30a-4bb7-964b-e785eb164a75 req-119cf302-3a15-42ff-94d5-f84b57197c32 service nova] [instance: 4a9088e0-2992-4b18-8be9-6bc70633369b] Refreshing network info cache for port 63a3f09b-45d0-46eb-a197-186f1ec415d6 {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1827.253366] env[63279]: DEBUG oslo_vmware.api [None req-aeaa6e99-ad33-4acc-9a18-53a9725a9078 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Task: {'id': task-2086751, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.363917} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1827.253657] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-aeaa6e99-ad33-4acc-9a18-53a9725a9078 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] [instance: 5656c853-ac83-47be-83c4-979a9e87ab91] Extended root virtual disk {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1827.254556] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea212738-73fa-46ee-a666-221d062ad9ac {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1827.280849] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-aeaa6e99-ad33-4acc-9a18-53a9725a9078 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] [instance: 5656c853-ac83-47be-83c4-979a9e87ab91] Reconfiguring VM instance instance-00000009 to attach disk [datastore1] 5656c853-ac83-47be-83c4-979a9e87ab91/5656c853-ac83-47be-83c4-979a9e87ab91.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1827.285454] env[63279]: DEBUG oslo_concurrency.lockutils [req-85d4e5bd-e2bb-465a-a568-79c1512952c6 req-fa1bf5ba-44fc-4852-8448-a74819dc7daa service nova] Releasing lock "refresh_cache-6388f912-ae70-4e8f-b8e4-ceb02e0f8a51" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1827.285907] env[63279]: DEBUG nova.compute.manager [req-85d4e5bd-e2bb-465a-a568-79c1512952c6 req-fa1bf5ba-44fc-4852-8448-a74819dc7daa service nova] [instance: 5656c853-ac83-47be-83c4-979a9e87ab91] Received event network-vif-plugged-2cc4a33a-bd88-4aec-a588-7c821bebf971 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 1827.286265] env[63279]: DEBUG oslo_concurrency.lockutils [req-85d4e5bd-e2bb-465a-a568-79c1512952c6 req-fa1bf5ba-44fc-4852-8448-a74819dc7daa service nova] Acquiring lock "5656c853-ac83-47be-83c4-979a9e87ab91-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1827.286626] env[63279]: DEBUG oslo_concurrency.lockutils [req-85d4e5bd-e2bb-465a-a568-79c1512952c6 req-fa1bf5ba-44fc-4852-8448-a74819dc7daa service nova] Lock "5656c853-ac83-47be-83c4-979a9e87ab91-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1827.286902] env[63279]: DEBUG oslo_concurrency.lockutils [req-85d4e5bd-e2bb-465a-a568-79c1512952c6 req-fa1bf5ba-44fc-4852-8448-a74819dc7daa service nova] Lock "5656c853-ac83-47be-83c4-979a9e87ab91-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1827.287236] env[63279]: DEBUG nova.compute.manager [req-85d4e5bd-e2bb-465a-a568-79c1512952c6 req-fa1bf5ba-44fc-4852-8448-a74819dc7daa service nova] [instance: 5656c853-ac83-47be-83c4-979a9e87ab91] No waiting events found dispatching network-vif-plugged-2cc4a33a-bd88-4aec-a588-7c821bebf971 {{(pid=63279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1827.287515] 
env[63279]: WARNING nova.compute.manager [req-85d4e5bd-e2bb-465a-a568-79c1512952c6 req-fa1bf5ba-44fc-4852-8448-a74819dc7daa service nova] [instance: 5656c853-ac83-47be-83c4-979a9e87ab91] Received unexpected event network-vif-plugged-2cc4a33a-bd88-4aec-a588-7c821bebf971 for instance with vm_state building and task_state spawning. [ 1827.287820] env[63279]: DEBUG nova.compute.manager [req-85d4e5bd-e2bb-465a-a568-79c1512952c6 req-fa1bf5ba-44fc-4852-8448-a74819dc7daa service nova] [instance: 5656c853-ac83-47be-83c4-979a9e87ab91] Received event network-changed-2cc4a33a-bd88-4aec-a588-7c821bebf971 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 1827.288121] env[63279]: DEBUG nova.compute.manager [req-85d4e5bd-e2bb-465a-a568-79c1512952c6 req-fa1bf5ba-44fc-4852-8448-a74819dc7daa service nova] [instance: 5656c853-ac83-47be-83c4-979a9e87ab91] Refreshing instance network info cache due to event network-changed-2cc4a33a-bd88-4aec-a588-7c821bebf971. {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11655}} [ 1827.289263] env[63279]: DEBUG oslo_concurrency.lockutils [req-85d4e5bd-e2bb-465a-a568-79c1512952c6 req-fa1bf5ba-44fc-4852-8448-a74819dc7daa service nova] Acquiring lock "refresh_cache-5656c853-ac83-47be-83c4-979a9e87ab91" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1827.289623] env[63279]: DEBUG oslo_concurrency.lockutils [req-85d4e5bd-e2bb-465a-a568-79c1512952c6 req-fa1bf5ba-44fc-4852-8448-a74819dc7daa service nova] Acquired lock "refresh_cache-5656c853-ac83-47be-83c4-979a9e87ab91" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1827.290536] env[63279]: DEBUG nova.network.neutron [req-85d4e5bd-e2bb-465a-a568-79c1512952c6 req-fa1bf5ba-44fc-4852-8448-a74819dc7daa service nova] [instance: 5656c853-ac83-47be-83c4-979a9e87ab91] Refreshing network info cache for port 2cc4a33a-bd88-4aec-a588-7c821bebf971 {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1827.292832] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-61a4bf01-3f8d-4d6c-bc71-a9d41470f688 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1827.310131] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-87178c67-9fe7-4e5b-83de-4d1d9bf7ef39 tempest-DeleteServersAdminTestJSON-181817188 tempest-DeleteServersAdminTestJSON-181817188-project-admin] [instance: a301d225-684d-4f88-bc9b-7e02b8115b9d] Unregistered the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1827.311935] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-87178c67-9fe7-4e5b-83de-4d1d9bf7ef39 tempest-DeleteServersAdminTestJSON-181817188 tempest-DeleteServersAdminTestJSON-181817188-project-admin] [instance: a301d225-684d-4f88-bc9b-7e02b8115b9d] Deleting contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1827.311935] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-87178c67-9fe7-4e5b-83de-4d1d9bf7ef39 tempest-DeleteServersAdminTestJSON-181817188 tempest-DeleteServersAdminTestJSON-181817188-project-admin] Deleting the datastore file [datastore1] a301d225-684d-4f88-bc9b-7e02b8115b9d {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1827.311935] env[63279]: DEBUG oslo_vmware.service [-] 
Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7b9e6269-82eb-4bfc-b3ba-6485b57ee3a7 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1827.318244] env[63279]: DEBUG oslo_vmware.api [None req-c56e5cd3-c5d3-497a-94cf-fa4a1928fd50 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Task: {'id': task-2086746, 'name': ReconfigVM_Task, 'duration_secs': 0.900865} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1827.319202] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-c56e5cd3-c5d3-497a-94cf-fa4a1928fd50 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] [instance: 6388f912-ae70-4e8f-b8e4-ceb02e0f8a51] Reconfigured VM instance instance-00000008 to attach disk [datastore1] 6388f912-ae70-4e8f-b8e4-ceb02e0f8a51/6388f912-ae70-4e8f-b8e4-ceb02e0f8a51.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1827.319919] env[63279]: DEBUG oslo_vmware.api [None req-aeaa6e99-ad33-4acc-9a18-53a9725a9078 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Waiting for the task: (returnval){ [ 1827.319919] env[63279]: value = "task-2086754" [ 1827.319919] env[63279]: _type = "Task" [ 1827.319919] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1827.320138] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a61da55e-f3e4-42f0-b3eb-182edc5ebb50 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1827.326782] env[63279]: DEBUG oslo_vmware.api [None req-87178c67-9fe7-4e5b-83de-4d1d9bf7ef39 tempest-DeleteServersAdminTestJSON-181817188 tempest-DeleteServersAdminTestJSON-181817188-project-admin] Waiting for the task: (returnval){ [ 1827.326782] env[63279]: value = "task-2086755" [ 1827.326782] env[63279]: _type = "Task" [ 1827.326782] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1827.336150] env[63279]: DEBUG oslo_vmware.api [None req-aeaa6e99-ad33-4acc-9a18-53a9725a9078 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Task: {'id': task-2086754, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1827.336437] env[63279]: DEBUG oslo_vmware.api [None req-c56e5cd3-c5d3-497a-94cf-fa4a1928fd50 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Waiting for the task: (returnval){ [ 1827.336437] env[63279]: value = "task-2086756" [ 1827.336437] env[63279]: _type = "Task" [ 1827.336437] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1827.343168] env[63279]: DEBUG oslo_vmware.api [None req-87178c67-9fe7-4e5b-83de-4d1d9bf7ef39 tempest-DeleteServersAdminTestJSON-181817188 tempest-DeleteServersAdminTestJSON-181817188-project-admin] Task: {'id': task-2086755, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1827.349271] env[63279]: DEBUG oslo_vmware.api [None req-c56e5cd3-c5d3-497a-94cf-fa4a1928fd50 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Task: {'id': task-2086756, 'name': Rename_Task} progress is 14%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1827.372029] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2086748, 'name': CreateVM_Task, 'duration_secs': 0.849976} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1827.372294] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7cd673bb-3795-4c6f-a1e0-a8ea0da0a35f] Created VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1827.372974] env[63279]: DEBUG oslo_concurrency.lockutils [None req-9be70fed-855a-424c-af8e-36597da29526 tempest-ImagesOneServerTestJSON-859902932 tempest-ImagesOneServerTestJSON-859902932-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1827.373180] env[63279]: DEBUG oslo_concurrency.lockutils [None req-9be70fed-855a-424c-af8e-36597da29526 tempest-ImagesOneServerTestJSON-859902932 tempest-ImagesOneServerTestJSON-859902932-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1827.373573] env[63279]: DEBUG oslo_concurrency.lockutils [None req-9be70fed-855a-424c-af8e-36597da29526 tempest-ImagesOneServerTestJSON-859902932 tempest-ImagesOneServerTestJSON-859902932-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1827.374237] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-964a018a-1207-4e96-9905-dd7632486f72 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1827.378988] env[63279]: DEBUG oslo_vmware.api [None req-9be70fed-855a-424c-af8e-36597da29526 tempest-ImagesOneServerTestJSON-859902932 tempest-ImagesOneServerTestJSON-859902932-project-member] Waiting for the task: (returnval){ [ 1827.378988] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]521c4143-e9ef-7a2f-b171-e80f81bbde4c" [ 1827.378988] env[63279]: _type = "Task" [ 1827.378988] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1827.387865] env[63279]: DEBUG oslo_vmware.api [None req-9be70fed-855a-424c-af8e-36597da29526 tempest-ImagesOneServerTestJSON-859902932 tempest-ImagesOneServerTestJSON-859902932-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]521c4143-e9ef-7a2f-b171-e80f81bbde4c, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1827.402405] env[63279]: DEBUG oslo_vmware.api [None req-11810f0f-6d8f-490b-a1f5-ddff9fda61b5 tempest-ServerDiagnosticsV248Test-703192613 tempest-ServerDiagnosticsV248Test-703192613-project-member] Task: {'id': task-2086749, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.727428} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1827.402698] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-11810f0f-6d8f-490b-a1f5-ddff9fda61b5 tempest-ServerDiagnosticsV248Test-703192613 tempest-ServerDiagnosticsV248Test-703192613-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] abd63285-ee3c-4546-b86d-6d4388765d94/abd63285-ee3c-4546-b86d-6d4388765d94.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1827.402931] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-11810f0f-6d8f-490b-a1f5-ddff9fda61b5 tempest-ServerDiagnosticsV248Test-703192613 tempest-ServerDiagnosticsV248Test-703192613-project-member] [instance: abd63285-ee3c-4546-b86d-6d4388765d94] Extending root virtual disk to 1048576 {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1827.403313] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f1baa90c-9562-469b-9416-a8dcf5da344e {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1827.407711] env[63279]: DEBUG oslo_concurrency.lockutils [None req-271b2613-9c7a-40b6-867e-c4c5c1580416 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.651s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1827.408514] env[63279]: DEBUG nova.compute.manager [None req-271b2613-9c7a-40b6-867e-c4c5c1580416 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] [instance: c05e9aaa-e251-480c-94d6-56c29bb6282d] Start building networks asynchronously for instance. 
{{(pid=63279) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1827.412288] env[63279]: DEBUG oslo_concurrency.lockutils [None req-271b2613-9c7a-40b6-867e-c4c5c1580416 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 5.651s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1827.413824] env[63279]: INFO nova.compute.claims [None req-271b2613-9c7a-40b6-867e-c4c5c1580416 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] [instance: 3d4db43f-5784-46e1-9710-f6becec011e2] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1827.416861] env[63279]: DEBUG oslo_vmware.api [None req-11810f0f-6d8f-490b-a1f5-ddff9fda61b5 tempest-ServerDiagnosticsV248Test-703192613 tempest-ServerDiagnosticsV248Test-703192613-project-member] Waiting for the task: (returnval){ [ 1827.416861] env[63279]: value = "task-2086757" [ 1827.416861] env[63279]: _type = "Task" [ 1827.416861] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1827.429229] env[63279]: DEBUG oslo_vmware.api [None req-11810f0f-6d8f-490b-a1f5-ddff9fda61b5 tempest-ServerDiagnosticsV248Test-703192613 tempest-ServerDiagnosticsV248Test-703192613-project-member] Task: {'id': task-2086757, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1827.473246] env[63279]: DEBUG oslo_vmware.api [None req-e5b6a16c-1913-4848-8b42-b00fb552edc7 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52bb1151-99fa-2f61-9b17-3c24f3c42e16, 'name': SearchDatastore_Task, 'duration_secs': 0.057373} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1827.473771] env[63279]: DEBUG oslo_concurrency.lockutils [None req-e5b6a16c-1913-4848-8b42-b00fb552edc7 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1827.474161] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-e5b6a16c-1913-4848-8b42-b00fb552edc7 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] 4a9088e0-2992-4b18-8be9-6bc70633369b/4a9088e0-2992-4b18-8be9-6bc70633369b.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1827.474635] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c961afe2-5152-4b59-b7fb-91541d6293bd {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1827.483038] env[63279]: DEBUG oslo_vmware.api [None req-e5b6a16c-1913-4848-8b42-b00fb552edc7 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Waiting for the task: (returnval){ [ 1827.483038] env[63279]: value = "task-2086758" [ 1827.483038] env[63279]: _type = "Task" [ 1827.483038] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1827.492340] env[63279]: DEBUG oslo_vmware.api [None req-e5b6a16c-1913-4848-8b42-b00fb552edc7 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Task: {'id': task-2086758, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1827.575984] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Lock "24bce28c-fc43-4f17-9800-4d980f6729bc" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.538s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1827.576349] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Lock "0224e4ea-c13c-4abd-9626-6038c0bbe4e9" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.542s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1827.579171] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Lock "21c2bba1-5482-496a-9e2a-f123a94ed48a" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.540s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1827.586421] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Lock "8c712d0d-55c2-4a14-b759-9441594211e1" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.550s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1827.665669] env[63279]: INFO nova.compute.manager [-] [instance: ac1d0e8f-446a-4a6d-a916-08f52426396d] Took 1.55 seconds to deallocate network for instance. [ 1827.843531] env[63279]: DEBUG oslo_vmware.api [None req-87178c67-9fe7-4e5b-83de-4d1d9bf7ef39 tempest-DeleteServersAdminTestJSON-181817188 tempest-DeleteServersAdminTestJSON-181817188-project-admin] Task: {'id': task-2086755, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.415455} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1827.843812] env[63279]: DEBUG oslo_vmware.api [None req-aeaa6e99-ad33-4acc-9a18-53a9725a9078 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Task: {'id': task-2086754, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1827.849568] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-87178c67-9fe7-4e5b-83de-4d1d9bf7ef39 tempest-DeleteServersAdminTestJSON-181817188 tempest-DeleteServersAdminTestJSON-181817188-project-admin] Deleted the datastore file {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1827.849868] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-87178c67-9fe7-4e5b-83de-4d1d9bf7ef39 tempest-DeleteServersAdminTestJSON-181817188 tempest-DeleteServersAdminTestJSON-181817188-project-admin] [instance: a301d225-684d-4f88-bc9b-7e02b8115b9d] Deleted contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1827.850137] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-87178c67-9fe7-4e5b-83de-4d1d9bf7ef39 tempest-DeleteServersAdminTestJSON-181817188 tempest-DeleteServersAdminTestJSON-181817188-project-admin] [instance: a301d225-684d-4f88-bc9b-7e02b8115b9d] Instance destroyed {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1827.850391] env[63279]: INFO nova.compute.manager [None req-87178c67-9fe7-4e5b-83de-4d1d9bf7ef39 tempest-DeleteServersAdminTestJSON-181817188 tempest-DeleteServersAdminTestJSON-181817188-project-admin] [instance: a301d225-684d-4f88-bc9b-7e02b8115b9d] Took 1.19 seconds to destroy the instance on the hypervisor. [ 1827.850770] env[63279]: DEBUG oslo.service.loopingcall [None req-87178c67-9fe7-4e5b-83de-4d1d9bf7ef39 tempest-DeleteServersAdminTestJSON-181817188 tempest-DeleteServersAdminTestJSON-181817188-project-admin] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1827.850943] env[63279]: DEBUG nova.compute.manager [-] [instance: a301d225-684d-4f88-bc9b-7e02b8115b9d] Deallocating network for instance {{(pid=63279) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1827.851051] env[63279]: DEBUG nova.network.neutron [-] [instance: a301d225-684d-4f88-bc9b-7e02b8115b9d] deallocate_for_instance() {{(pid=63279) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1827.869051] env[63279]: DEBUG oslo_concurrency.lockutils [None req-3e128472-436c-4de9-a273-dfc54e42830f tempest-ServersV294TestFqdnHostnames-110312040 tempest-ServersV294TestFqdnHostnames-110312040-project-member] Acquiring lock "32dbef6d-d314-4fa6-972a-e7b1f22eb11d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1827.869051] env[63279]: DEBUG oslo_concurrency.lockutils [None req-3e128472-436c-4de9-a273-dfc54e42830f tempest-ServersV294TestFqdnHostnames-110312040 tempest-ServersV294TestFqdnHostnames-110312040-project-member] Lock "32dbef6d-d314-4fa6-972a-e7b1f22eb11d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1827.869051] env[63279]: DEBUG oslo_vmware.api [None req-c56e5cd3-c5d3-497a-94cf-fa4a1928fd50 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Task: {'id': task-2086756, 'name': Rename_Task, 'duration_secs': 0.202108} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1827.869051] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-c56e5cd3-c5d3-497a-94cf-fa4a1928fd50 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] [instance: 6388f912-ae70-4e8f-b8e4-ceb02e0f8a51] Powering on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1827.869279] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6b816268-badd-4ff7-ae38-c98f518f4afd {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1827.875504] env[63279]: DEBUG oslo_vmware.api [None req-c56e5cd3-c5d3-497a-94cf-fa4a1928fd50 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Waiting for the task: (returnval){ [ 1827.875504] env[63279]: value = "task-2086762" [ 1827.875504] env[63279]: _type = "Task" [ 1827.875504] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1827.886153] env[63279]: DEBUG oslo_vmware.api [None req-c56e5cd3-c5d3-497a-94cf-fa4a1928fd50 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Task: {'id': task-2086762, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1827.891968] env[63279]: DEBUG oslo_vmware.api [None req-9be70fed-855a-424c-af8e-36597da29526 tempest-ImagesOneServerTestJSON-859902932 tempest-ImagesOneServerTestJSON-859902932-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]521c4143-e9ef-7a2f-b171-e80f81bbde4c, 'name': SearchDatastore_Task, 'duration_secs': 0.012641} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1827.894431] env[63279]: DEBUG oslo_concurrency.lockutils [None req-9be70fed-855a-424c-af8e-36597da29526 tempest-ImagesOneServerTestJSON-859902932 tempest-ImagesOneServerTestJSON-859902932-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1827.894673] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-9be70fed-855a-424c-af8e-36597da29526 tempest-ImagesOneServerTestJSON-859902932 tempest-ImagesOneServerTestJSON-859902932-project-member] [instance: 7cd673bb-3795-4c6f-a1e0-a8ea0da0a35f] Processing image 30887889-e45b-4f67-8b3c-16216e594a90 {{(pid=63279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1827.894902] env[63279]: DEBUG oslo_concurrency.lockutils [None req-9be70fed-855a-424c-af8e-36597da29526 tempest-ImagesOneServerTestJSON-859902932 tempest-ImagesOneServerTestJSON-859902932-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1827.895067] env[63279]: DEBUG oslo_concurrency.lockutils [None req-9be70fed-855a-424c-af8e-36597da29526 tempest-ImagesOneServerTestJSON-859902932 tempest-ImagesOneServerTestJSON-859902932-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1827.895283] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-9be70fed-855a-424c-af8e-36597da29526 tempest-ImagesOneServerTestJSON-859902932 tempest-ImagesOneServerTestJSON-859902932-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1827.895565] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-dd8d8828-e923-4fee-893d-697e5cc27113 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1827.905420] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-9be70fed-855a-424c-af8e-36597da29526 tempest-ImagesOneServerTestJSON-859902932 tempest-ImagesOneServerTestJSON-859902932-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1827.905634] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-9be70fed-855a-424c-af8e-36597da29526 tempest-ImagesOneServerTestJSON-859902932 tempest-ImagesOneServerTestJSON-859902932-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1827.906455] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ad5f3bba-04dd-4417-9c91-130445e675d4 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1827.912751] env[63279]: DEBUG oslo_vmware.api [None req-9be70fed-855a-424c-af8e-36597da29526 tempest-ImagesOneServerTestJSON-859902932 tempest-ImagesOneServerTestJSON-859902932-project-member] Waiting for the task: (returnval){ [ 1827.912751] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]5283e908-6429-d549-a365-6a7ef95aaee3" [ 1827.912751] env[63279]: _type = "Task" [ 1827.912751] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1827.918330] env[63279]: DEBUG nova.compute.utils [None req-271b2613-9c7a-40b6-867e-c4c5c1580416 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Using /dev/sd instead of None {{(pid=63279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1827.923419] env[63279]: DEBUG nova.compute.manager [None req-271b2613-9c7a-40b6-867e-c4c5c1580416 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] [instance: c05e9aaa-e251-480c-94d6-56c29bb6282d] Allocating IP information in the background. {{(pid=63279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1827.923601] env[63279]: DEBUG nova.network.neutron [None req-271b2613-9c7a-40b6-867e-c4c5c1580416 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] [instance: c05e9aaa-e251-480c-94d6-56c29bb6282d] allocate_for_instance() {{(pid=63279) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1827.937093] env[63279]: DEBUG oslo_vmware.api [None req-9be70fed-855a-424c-af8e-36597da29526 tempest-ImagesOneServerTestJSON-859902932 tempest-ImagesOneServerTestJSON-859902932-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]5283e908-6429-d549-a365-6a7ef95aaee3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1827.943468] env[63279]: DEBUG oslo_vmware.api [None req-11810f0f-6d8f-490b-a1f5-ddff9fda61b5 tempest-ServerDiagnosticsV248Test-703192613 tempest-ServerDiagnosticsV248Test-703192613-project-member] Task: {'id': task-2086757, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.144426} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1827.943743] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-11810f0f-6d8f-490b-a1f5-ddff9fda61b5 tempest-ServerDiagnosticsV248Test-703192613 tempest-ServerDiagnosticsV248Test-703192613-project-member] [instance: abd63285-ee3c-4546-b86d-6d4388765d94] Extended root virtual disk {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1827.945285] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cfaf897a-05a6-4eb0-8720-05eee26ac5bb {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1827.967965] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-11810f0f-6d8f-490b-a1f5-ddff9fda61b5 tempest-ServerDiagnosticsV248Test-703192613 tempest-ServerDiagnosticsV248Test-703192613-project-member] [instance: abd63285-ee3c-4546-b86d-6d4388765d94] Reconfiguring VM instance instance-0000000c to attach disk [datastore1] abd63285-ee3c-4546-b86d-6d4388765d94/abd63285-ee3c-4546-b86d-6d4388765d94.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1827.972181] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-038141cd-2b5f-4e45-9184-eec1c93c6a10 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1828.001919] env[63279]: DEBUG oslo_vmware.api [None req-e5b6a16c-1913-4848-8b42-b00fb552edc7 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Task: {'id': task-2086758, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1828.003718] env[63279]: DEBUG oslo_vmware.api [None req-11810f0f-6d8f-490b-a1f5-ddff9fda61b5 tempest-ServerDiagnosticsV248Test-703192613 tempest-ServerDiagnosticsV248Test-703192613-project-member] Waiting for the task: (returnval){ [ 1828.003718] env[63279]: value = "task-2086763" [ 1828.003718] env[63279]: _type = "Task" [ 1828.003718] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1828.007717] env[63279]: DEBUG nova.policy [None req-271b2613-9c7a-40b6-867e-c4c5c1580416 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '37e32e1b25d1432aadda8ab95694aca0', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '70c8735ebd5740c4b8b4d0cf8635da71', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63279) authorize /opt/stack/nova/nova/policy.py:192}} [ 1828.014203] env[63279]: DEBUG oslo_vmware.api [None req-11810f0f-6d8f-490b-a1f5-ddff9fda61b5 tempest-ServerDiagnosticsV248Test-703192613 tempest-ServerDiagnosticsV248Test-703192613-project-member] Task: {'id': task-2086763, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1828.157566] env[63279]: DEBUG nova.network.neutron [req-7e0e0fff-b30a-4bb7-964b-e785eb164a75 req-119cf302-3a15-42ff-94d5-f84b57197c32 service nova] [instance: 4a9088e0-2992-4b18-8be9-6bc70633369b] Updated VIF entry in instance network info cache for port 63a3f09b-45d0-46eb-a197-186f1ec415d6. {{(pid=63279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1828.158009] env[63279]: DEBUG nova.network.neutron [req-7e0e0fff-b30a-4bb7-964b-e785eb164a75 req-119cf302-3a15-42ff-94d5-f84b57197c32 service nova] [instance: 4a9088e0-2992-4b18-8be9-6bc70633369b] Updating instance_info_cache with network_info: [{"id": "63a3f09b-45d0-46eb-a197-186f1ec415d6", "address": "fa:16:3e:71:bc:13", "network": {"id": "548d80cd-fb6c-47fc-8c1d-036889987399", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-219167599-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3f6c6f65521a440fb80278bbff2d0ed0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "678ebbe4-4c53-4eaf-a689-93981310f37d", "external-id": "nsx-vlan-transportzone-443", "segmentation_id": 443, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap63a3f09b-45", "ovs_interfaceid": "63a3f09b-45d0-46eb-a197-186f1ec415d6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1828.172247] env[63279]: DEBUG oslo_concurrency.lockutils [None req-0b6dec82-a075-43f4-a12b-6738f27bbf78 tempest-ServerDiagnosticsTest-1267197074 tempest-ServerDiagnosticsTest-1267197074-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1828.335690] env[63279]: DEBUG oslo_vmware.api [None req-aeaa6e99-ad33-4acc-9a18-53a9725a9078 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Task: {'id': task-2086754, 'name': ReconfigVM_Task, 'duration_secs': 0.76296} completed successfully. 
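The network_info payload cached above is a list of VIF dictionaries: port id, MAC address, devname, and a nested network with subnets and fixed IPs. A small self-contained example of walking that structure, using an abbreviated entry modeled on the one for port 63a3f09b:

    # Walk an (abbreviated) network_info entry like the one cached above.
    network_info = [{
        "id": "63a3f09b-45d0-46eb-a197-186f1ec415d6",
        "address": "fa:16:3e:71:bc:13",
        "devname": "tap63a3f09b-45",
        "network": {
            "bridge": "br-int",
            "subnets": [{
                "cidr": "192.168.128.0/28",
                "ips": [{"address": "192.168.128.9", "type": "fixed"}],
            }],
        },
    }]

    for vif in network_info:
        fixed_ips = [ip["address"]
                     for subnet in vif["network"]["subnets"]
                     for ip in subnet["ips"] if ip["type"] == "fixed"]
        print(vif["devname"], vif["address"], fixed_ips)
    # -> tap63a3f09b-45 fa:16:3e:71:bc:13 ['192.168.128.9']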
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1828.335960] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-aeaa6e99-ad33-4acc-9a18-53a9725a9078 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] [instance: 5656c853-ac83-47be-83c4-979a9e87ab91] Reconfigured VM instance instance-00000009 to attach disk [datastore1] 5656c853-ac83-47be-83c4-979a9e87ab91/5656c853-ac83-47be-83c4-979a9e87ab91.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1828.336716] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-6cba1054-d5ee-460f-9d3e-986e515198e1 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1828.348136] env[63279]: DEBUG oslo_vmware.api [None req-aeaa6e99-ad33-4acc-9a18-53a9725a9078 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Waiting for the task: (returnval){ [ 1828.348136] env[63279]: value = "task-2086764" [ 1828.348136] env[63279]: _type = "Task" [ 1828.348136] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1828.364240] env[63279]: DEBUG oslo_vmware.api [None req-aeaa6e99-ad33-4acc-9a18-53a9725a9078 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Task: {'id': task-2086764, 'name': Rename_Task} progress is 6%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1828.366370] env[63279]: DEBUG nova.network.neutron [req-85d4e5bd-e2bb-465a-a568-79c1512952c6 req-fa1bf5ba-44fc-4852-8448-a74819dc7daa service nova] [instance: 5656c853-ac83-47be-83c4-979a9e87ab91] Updated VIF entry in instance network info cache for port 2cc4a33a-bd88-4aec-a588-7c821bebf971. 
{{(pid=63279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1828.366750] env[63279]: DEBUG nova.network.neutron [req-85d4e5bd-e2bb-465a-a568-79c1512952c6 req-fa1bf5ba-44fc-4852-8448-a74819dc7daa service nova] [instance: 5656c853-ac83-47be-83c4-979a9e87ab91] Updating instance_info_cache with network_info: [{"id": "2cc4a33a-bd88-4aec-a588-7c821bebf971", "address": "fa:16:3e:de:77:2e", "network": {"id": "1e875730-8e0e-4907-a6d2-776025ed7ab9", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "a935e86eee0a4c38adfe0367d2097a61", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "32028d02-abaa-4071-bc65-1460f5c772a8", "external-id": "nsx-vlan-transportzone-558", "segmentation_id": 558, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2cc4a33a-bd", "ovs_interfaceid": "2cc4a33a-bd88-4aec-a588-7c821bebf971", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1828.387447] env[63279]: DEBUG oslo_vmware.api [None req-c56e5cd3-c5d3-497a-94cf-fa4a1928fd50 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Task: {'id': task-2086762, 'name': PowerOnVM_Task} progress is 89%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1828.424933] env[63279]: DEBUG nova.compute.manager [None req-271b2613-9c7a-40b6-867e-c4c5c1580416 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] [instance: c05e9aaa-e251-480c-94d6-56c29bb6282d] Start building block device mappings for instance. {{(pid=63279) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1828.428072] env[63279]: DEBUG oslo_vmware.api [None req-9be70fed-855a-424c-af8e-36597da29526 tempest-ImagesOneServerTestJSON-859902932 tempest-ImagesOneServerTestJSON-859902932-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]5283e908-6429-d549-a365-6a7ef95aaee3, 'name': SearchDatastore_Task, 'duration_secs': 0.025804} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1828.433282] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a1c31671-afa3-43df-9a47-81df48390cd5 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1828.437933] env[63279]: DEBUG oslo_vmware.api [None req-9be70fed-855a-424c-af8e-36597da29526 tempest-ImagesOneServerTestJSON-859902932 tempest-ImagesOneServerTestJSON-859902932-project-member] Waiting for the task: (returnval){ [ 1828.437933] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52bedc84-e467-bc9c-36da-c72ec31cb158" [ 1828.437933] env[63279]: _type = "Task" [ 1828.437933] env[63279]: } to complete. 
{{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1828.453477] env[63279]: DEBUG oslo_vmware.api [None req-9be70fed-855a-424c-af8e-36597da29526 tempest-ImagesOneServerTestJSON-859902932 tempest-ImagesOneServerTestJSON-859902932-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52bedc84-e467-bc9c-36da-c72ec31cb158, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1828.503607] env[63279]: DEBUG oslo_vmware.api [None req-e5b6a16c-1913-4848-8b42-b00fb552edc7 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Task: {'id': task-2086758, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.916433} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1828.504200] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-e5b6a16c-1913-4848-8b42-b00fb552edc7 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] 4a9088e0-2992-4b18-8be9-6bc70633369b/4a9088e0-2992-4b18-8be9-6bc70633369b.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1828.504513] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-e5b6a16c-1913-4848-8b42-b00fb552edc7 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] [instance: 4a9088e0-2992-4b18-8be9-6bc70633369b] Extending root virtual disk to 1048576 {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1828.507652] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-4c58a5e6-d131-4503-afe7-f0869933b9d1 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1828.516348] env[63279]: DEBUG oslo_vmware.api [None req-11810f0f-6d8f-490b-a1f5-ddff9fda61b5 tempest-ServerDiagnosticsV248Test-703192613 tempest-ServerDiagnosticsV248Test-703192613-project-member] Task: {'id': task-2086763, 'name': ReconfigVM_Task, 'duration_secs': 0.464028} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1828.518145] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-11810f0f-6d8f-490b-a1f5-ddff9fda61b5 tempest-ServerDiagnosticsV248Test-703192613 tempest-ServerDiagnosticsV248Test-703192613-project-member] [instance: abd63285-ee3c-4546-b86d-6d4388765d94] Reconfigured VM instance instance-0000000c to attach disk [datastore1] abd63285-ee3c-4546-b86d-6d4388765d94/abd63285-ee3c-4546-b86d-6d4388765d94.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1828.518662] env[63279]: DEBUG oslo_vmware.api [None req-e5b6a16c-1913-4848-8b42-b00fb552edc7 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Waiting for the task: (returnval){ [ 1828.518662] env[63279]: value = "task-2086765" [ 1828.518662] env[63279]: _type = "Task" [ 1828.518662] env[63279]: } to complete. 
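The CopyVirtualDisk_Task above moves the cached base image from devstack-image-cache_base into the instance's own datastore directory, after which the root disk is extended to the flavor size. Datastore locations of the form "[datastore1] dir/file.vmdk" can be built with oslo.vmware's DatastorePath helper; a short sketch, with the ids taken from the log but the usage otherwise illustrative:

    # Building "[datastore] path" strings like the source/destination of the
    # copy above; the helper is oslo.vmware's DatastorePath.
    from oslo_vmware.objects import datastore as ds_obj

    image_id = '30887889-e45b-4f67-8b3c-16216e594a90'        # cached base image
    instance_uuid = '4a9088e0-2992-4b18-8be9-6bc70633369b'   # target instance

    src = ds_obj.DatastorePath('datastore1', 'devstack-image-cache_base',
                               image_id, image_id + '.vmdk')
    dst = ds_obj.DatastorePath('datastore1', instance_uuid,
                               instance_uuid + '.vmdk')
    print(src)   # [datastore1] devstack-image-cache_base/3088.../3088....vmdk
    print(dst)   # [datastore1] 4a9088e0-.../4a9088e0-....vmdk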
{{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1828.518932] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-651b0b4a-74a5-416c-92c0-c917f2711aed {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1828.532270] env[63279]: DEBUG oslo_vmware.api [None req-e5b6a16c-1913-4848-8b42-b00fb552edc7 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Task: {'id': task-2086765, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1828.533736] env[63279]: DEBUG oslo_vmware.api [None req-11810f0f-6d8f-490b-a1f5-ddff9fda61b5 tempest-ServerDiagnosticsV248Test-703192613 tempest-ServerDiagnosticsV248Test-703192613-project-member] Waiting for the task: (returnval){ [ 1828.533736] env[63279]: value = "task-2086766" [ 1828.533736] env[63279]: _type = "Task" [ 1828.533736] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1828.543999] env[63279]: DEBUG oslo_vmware.api [None req-11810f0f-6d8f-490b-a1f5-ddff9fda61b5 tempest-ServerDiagnosticsV248Test-703192613 tempest-ServerDiagnosticsV248Test-703192613-project-member] Task: {'id': task-2086766, 'name': Rename_Task} progress is 5%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1828.627984] env[63279]: DEBUG oslo_concurrency.lockutils [None req-90efc3b8-a656-4eb0-8ac7-af93c9b6bce9 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Acquiring lock "2f5e22f6-ba70-4848-965b-eb1553115323" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1828.628598] env[63279]: DEBUG oslo_concurrency.lockutils [None req-90efc3b8-a656-4eb0-8ac7-af93c9b6bce9 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Lock "2f5e22f6-ba70-4848-965b-eb1553115323" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1828.661896] env[63279]: DEBUG oslo_concurrency.lockutils [req-7e0e0fff-b30a-4bb7-964b-e785eb164a75 req-119cf302-3a15-42ff-94d5-f84b57197c32 service nova] Releasing lock "refresh_cache-4a9088e0-2992-4b18-8be9-6bc70633369b" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1828.662609] env[63279]: DEBUG nova.compute.manager [req-7e0e0fff-b30a-4bb7-964b-e785eb164a75 req-119cf302-3a15-42ff-94d5-f84b57197c32 service nova] [instance: 7cd673bb-3795-4c6f-a1e0-a8ea0da0a35f] Received event network-vif-plugged-dc489c36-2ef9-4f47-aadb-4f6503bb9d1b {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 1828.662609] env[63279]: DEBUG oslo_concurrency.lockutils [req-7e0e0fff-b30a-4bb7-964b-e785eb164a75 req-119cf302-3a15-42ff-94d5-f84b57197c32 service nova] Acquiring lock "7cd673bb-3795-4c6f-a1e0-a8ea0da0a35f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63279) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1828.662609] env[63279]: DEBUG oslo_concurrency.lockutils [req-7e0e0fff-b30a-4bb7-964b-e785eb164a75 req-119cf302-3a15-42ff-94d5-f84b57197c32 service nova] Lock "7cd673bb-3795-4c6f-a1e0-a8ea0da0a35f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1828.663133] env[63279]: DEBUG oslo_concurrency.lockutils [req-7e0e0fff-b30a-4bb7-964b-e785eb164a75 req-119cf302-3a15-42ff-94d5-f84b57197c32 service nova] Lock "7cd673bb-3795-4c6f-a1e0-a8ea0da0a35f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1828.663133] env[63279]: DEBUG nova.compute.manager [req-7e0e0fff-b30a-4bb7-964b-e785eb164a75 req-119cf302-3a15-42ff-94d5-f84b57197c32 service nova] [instance: 7cd673bb-3795-4c6f-a1e0-a8ea0da0a35f] No waiting events found dispatching network-vif-plugged-dc489c36-2ef9-4f47-aadb-4f6503bb9d1b {{(pid=63279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1828.663133] env[63279]: WARNING nova.compute.manager [req-7e0e0fff-b30a-4bb7-964b-e785eb164a75 req-119cf302-3a15-42ff-94d5-f84b57197c32 service nova] [instance: 7cd673bb-3795-4c6f-a1e0-a8ea0da0a35f] Received unexpected event network-vif-plugged-dc489c36-2ef9-4f47-aadb-4f6503bb9d1b for instance with vm_state building and task_state spawning. [ 1828.663378] env[63279]: DEBUG nova.compute.manager [req-7e0e0fff-b30a-4bb7-964b-e785eb164a75 req-119cf302-3a15-42ff-94d5-f84b57197c32 service nova] [instance: 7cd673bb-3795-4c6f-a1e0-a8ea0da0a35f] Received event network-changed-dc489c36-2ef9-4f47-aadb-4f6503bb9d1b {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 1828.663938] env[63279]: DEBUG nova.compute.manager [req-7e0e0fff-b30a-4bb7-964b-e785eb164a75 req-119cf302-3a15-42ff-94d5-f84b57197c32 service nova] [instance: 7cd673bb-3795-4c6f-a1e0-a8ea0da0a35f] Refreshing instance network info cache due to event network-changed-dc489c36-2ef9-4f47-aadb-4f6503bb9d1b. 
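The "No waiting events found dispatching network-vif-plugged" and "Received unexpected event" records show the external-event handshake: a spawning instance may register interest in a Neutron event, and the handler either wakes that waiter or reports the event as unexpected. The toy model below illustrates the idea only; it is not Nova's implementation, and the uuid and event name are copied from the log purely as sample values.

    # Toy model of the external-event handshake (NOT Nova's implementation):
    # a waiter is registered before the operation that triggers the event,
    # and the handler either wakes it or reports the event as unexpected.
    import threading

    _pending = {}   # (instance_uuid, event_name) -> threading.Event

    def prepare_for_event(instance_uuid, event_name):
        ev = threading.Event()
        _pending[(instance_uuid, event_name)] = ev
        return ev

    def dispatch_external_event(instance_uuid, event_name):
        waiter = _pending.pop((instance_uuid, event_name), None)
        if waiter is None:
            print('Received unexpected event %s' % event_name)
        else:
            waiter.set()

    ev = prepare_for_event('7cd673bb-3795-4c6f-a1e0-a8ea0da0a35f',
                           'network-vif-plugged-dc489c36')
    dispatch_external_event('7cd673bb-3795-4c6f-a1e0-a8ea0da0a35f',
                            'network-vif-plugged-dc489c36')
    ev.wait(timeout=300)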
{{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11655}} [ 1828.664203] env[63279]: DEBUG oslo_concurrency.lockutils [req-7e0e0fff-b30a-4bb7-964b-e785eb164a75 req-119cf302-3a15-42ff-94d5-f84b57197c32 service nova] Acquiring lock "refresh_cache-7cd673bb-3795-4c6f-a1e0-a8ea0da0a35f" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1828.664430] env[63279]: DEBUG oslo_concurrency.lockutils [req-7e0e0fff-b30a-4bb7-964b-e785eb164a75 req-119cf302-3a15-42ff-94d5-f84b57197c32 service nova] Acquired lock "refresh_cache-7cd673bb-3795-4c6f-a1e0-a8ea0da0a35f" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1828.664515] env[63279]: DEBUG nova.network.neutron [req-7e0e0fff-b30a-4bb7-964b-e785eb164a75 req-119cf302-3a15-42ff-94d5-f84b57197c32 service nova] [instance: 7cd673bb-3795-4c6f-a1e0-a8ea0da0a35f] Refreshing network info cache for port dc489c36-2ef9-4f47-aadb-4f6503bb9d1b {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1828.757764] env[63279]: DEBUG nova.network.neutron [None req-271b2613-9c7a-40b6-867e-c4c5c1580416 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] [instance: c05e9aaa-e251-480c-94d6-56c29bb6282d] Successfully created port: 404425c8-83a7-4c0b-b938-18e89530246f {{(pid=63279) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1828.772859] env[63279]: DEBUG oslo_concurrency.lockutils [None req-018b6b0c-2b29-423d-bb8d-c1cfe503a363 tempest-InstanceActionsNegativeTestJSON-35594143 tempest-InstanceActionsNegativeTestJSON-35594143-project-member] Acquiring lock "a8107fa5-9e8b-41dd-9679-8e106a3496a5" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1828.772859] env[63279]: DEBUG oslo_concurrency.lockutils [None req-018b6b0c-2b29-423d-bb8d-c1cfe503a363 tempest-InstanceActionsNegativeTestJSON-35594143 tempest-InstanceActionsNegativeTestJSON-35594143-project-member] Lock "a8107fa5-9e8b-41dd-9679-8e106a3496a5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1828.788992] env[63279]: DEBUG nova.network.neutron [None req-a7076d91-54cd-478d-8de7-9cdce51faa84 tempest-AttachInterfacesUnderV243Test-18748603 tempest-AttachInterfacesUnderV243Test-18748603-project-member] [instance: fcc5a636-554f-424e-a604-a8e7bd7cf574] Successfully updated port: e1063b89-be88-474a-a2ec-b61eb11cf9fe {{(pid=63279) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1828.821775] env[63279]: DEBUG nova.compute.manager [None req-bd8d2a0f-2875-4e41-ad83-ca16b7e23c72 tempest-ServerExternalEventsTest-519460063 tempest-ServerExternalEventsTest-519460063-project] [instance: 24bce28c-fc43-4f17-9800-4d980f6729bc] Received event network-changed {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 1828.821968] env[63279]: DEBUG nova.compute.manager [None req-bd8d2a0f-2875-4e41-ad83-ca16b7e23c72 tempest-ServerExternalEventsTest-519460063 tempest-ServerExternalEventsTest-519460063-project] [instance: 24bce28c-fc43-4f17-9800-4d980f6729bc] Refreshing instance network info cache due to event network-changed. 
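The "Acquiring lock / Acquired lock / Releasing lock" and "acquired by ... waited / released ... held" records throughout this section come from oslo.concurrency's lockutils, which serializes work per instance, per refresh_cache-<uuid>, and per compute_resources. A minimal sketch of both forms, with lock names copied from the log as examples:

    # The two lockutils forms behind the lock records above.
    from oslo_concurrency import lockutils

    # Context-manager form: logs "Acquiring lock / Acquired lock / Releasing
    # lock" (lockutils.py:310/313/331), e.g. the refresh_cache-<uuid> locks.
    with lockutils.lock('refresh_cache-7cd673bb-3795-4c6f-a1e0-a8ea0da0a35f'):
        pass  # refresh the instance's network info cache here

    # Decorator form: logs 'acquired by "..." :: waited' and ':: held'
    # (lockutils.py "inner"), e.g. the per-instance build locks.
    @lockutils.synchronized('a8107fa5-9e8b-41dd-9679-8e106a3496a5')
    def locked_do_build_and_run_instance():
        pass  # serialized per-instance work

    locked_do_build_and_run_instance()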
{{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11655}} [ 1828.822203] env[63279]: DEBUG oslo_concurrency.lockutils [None req-bd8d2a0f-2875-4e41-ad83-ca16b7e23c72 tempest-ServerExternalEventsTest-519460063 tempest-ServerExternalEventsTest-519460063-project] Acquiring lock "refresh_cache-24bce28c-fc43-4f17-9800-4d980f6729bc" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1828.822443] env[63279]: DEBUG oslo_concurrency.lockutils [None req-bd8d2a0f-2875-4e41-ad83-ca16b7e23c72 tempest-ServerExternalEventsTest-519460063 tempest-ServerExternalEventsTest-519460063-project] Acquired lock "refresh_cache-24bce28c-fc43-4f17-9800-4d980f6729bc" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1828.822515] env[63279]: DEBUG nova.network.neutron [None req-bd8d2a0f-2875-4e41-ad83-ca16b7e23c72 tempest-ServerExternalEventsTest-519460063 tempest-ServerExternalEventsTest-519460063-project] [instance: 24bce28c-fc43-4f17-9800-4d980f6729bc] Building network info cache for instance {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1828.864360] env[63279]: DEBUG oslo_vmware.api [None req-aeaa6e99-ad33-4acc-9a18-53a9725a9078 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Task: {'id': task-2086764, 'name': Rename_Task, 'duration_secs': 0.300539} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1828.864715] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-aeaa6e99-ad33-4acc-9a18-53a9725a9078 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] [instance: 5656c853-ac83-47be-83c4-979a9e87ab91] Powering on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1828.864995] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c144e042-74ed-4121-9d94-335608e37382 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1828.869928] env[63279]: DEBUG oslo_concurrency.lockutils [req-85d4e5bd-e2bb-465a-a568-79c1512952c6 req-fa1bf5ba-44fc-4852-8448-a74819dc7daa service nova] Releasing lock "refresh_cache-5656c853-ac83-47be-83c4-979a9e87ab91" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1828.870272] env[63279]: DEBUG nova.compute.manager [req-85d4e5bd-e2bb-465a-a568-79c1512952c6 req-fa1bf5ba-44fc-4852-8448-a74819dc7daa service nova] [instance: 4063d5e0-1144-40fa-8ed8-efda16730617] Received event network-vif-deleted-82cbc025-4eb9-496c-902d-6b8b3f82ff45 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 1828.872795] env[63279]: DEBUG oslo_vmware.api [None req-aeaa6e99-ad33-4acc-9a18-53a9725a9078 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Waiting for the task: (returnval){ [ 1828.872795] env[63279]: value = "task-2086767" [ 1828.872795] env[63279]: _type = "Task" [ 1828.872795] env[63279]: } to complete. 
{{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1828.885125] env[63279]: DEBUG oslo_vmware.api [None req-aeaa6e99-ad33-4acc-9a18-53a9725a9078 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Task: {'id': task-2086767, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1828.890104] env[63279]: DEBUG oslo_vmware.api [None req-c56e5cd3-c5d3-497a-94cf-fa4a1928fd50 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Task: {'id': task-2086762, 'name': PowerOnVM_Task, 'duration_secs': 0.618139} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1828.890432] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-c56e5cd3-c5d3-497a-94cf-fa4a1928fd50 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] [instance: 6388f912-ae70-4e8f-b8e4-ceb02e0f8a51] Powered on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1828.890576] env[63279]: INFO nova.compute.manager [None req-c56e5cd3-c5d3-497a-94cf-fa4a1928fd50 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] [instance: 6388f912-ae70-4e8f-b8e4-ceb02e0f8a51] Took 15.86 seconds to spawn the instance on the hypervisor. [ 1828.890849] env[63279]: DEBUG nova.compute.manager [None req-c56e5cd3-c5d3-497a-94cf-fa4a1928fd50 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] [instance: 6388f912-ae70-4e8f-b8e4-ceb02e0f8a51] Checking state {{(pid=63279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1828.895026] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f949400-6393-4f82-9548-d773599391e1 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1828.949410] env[63279]: DEBUG oslo_vmware.api [None req-9be70fed-855a-424c-af8e-36597da29526 tempest-ImagesOneServerTestJSON-859902932 tempest-ImagesOneServerTestJSON-859902932-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52bedc84-e467-bc9c-36da-c72ec31cb158, 'name': SearchDatastore_Task, 'duration_secs': 0.025354} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1828.951644] env[63279]: DEBUG oslo_concurrency.lockutils [None req-9be70fed-855a-424c-af8e-36597da29526 tempest-ImagesOneServerTestJSON-859902932 tempest-ImagesOneServerTestJSON-859902932-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1828.951913] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-9be70fed-855a-424c-af8e-36597da29526 tempest-ImagesOneServerTestJSON-859902932 tempest-ImagesOneServerTestJSON-859902932-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] 7cd673bb-3795-4c6f-a1e0-a8ea0da0a35f/7cd673bb-3795-4c6f-a1e0-a8ea0da0a35f.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1828.953037] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b066d57d-2666-4e5c-ae52-3ffda35a91cb {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1828.960410] env[63279]: DEBUG oslo_vmware.api [None req-9be70fed-855a-424c-af8e-36597da29526 tempest-ImagesOneServerTestJSON-859902932 tempest-ImagesOneServerTestJSON-859902932-project-member] Waiting for the task: (returnval){ [ 1828.960410] env[63279]: value = "task-2086768" [ 1828.960410] env[63279]: _type = "Task" [ 1828.960410] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1828.971571] env[63279]: DEBUG oslo_vmware.api [None req-9be70fed-855a-424c-af8e-36597da29526 tempest-ImagesOneServerTestJSON-859902932 tempest-ImagesOneServerTestJSON-859902932-project-member] Task: {'id': task-2086768, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1828.977576] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7f31040-3b29-4c0f-ac2f-dab8d58999ea {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1828.984967] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f527367f-5b0b-4629-9ec3-faa628db54af {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1829.017213] env[63279]: DEBUG nova.network.neutron [-] [instance: a301d225-684d-4f88-bc9b-7e02b8115b9d] Updating instance_info_cache with network_info: [] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1829.019507] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-268a67aa-dfc1-466f-a569-e5f671c4059a {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1829.029867] env[63279]: DEBUG oslo_vmware.api [None req-e5b6a16c-1913-4848-8b42-b00fb552edc7 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Task: {'id': task-2086765, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.114746} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1829.031976] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-e5b6a16c-1913-4848-8b42-b00fb552edc7 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] [instance: 4a9088e0-2992-4b18-8be9-6bc70633369b] Extended root virtual disk {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1829.032768] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6bc577f1-55cd-4cbd-ab5b-95f2a849bd54 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1829.036126] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c595899-c142-427a-b3f8-42ac3036073d {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1829.071288] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-e5b6a16c-1913-4848-8b42-b00fb552edc7 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] [instance: 4a9088e0-2992-4b18-8be9-6bc70633369b] Reconfiguring VM instance instance-0000000a to attach disk [datastore1] 4a9088e0-2992-4b18-8be9-6bc70633369b/4a9088e0-2992-4b18-8be9-6bc70633369b.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1829.071819] env[63279]: DEBUG nova.compute.provider_tree [None req-271b2613-9c7a-40b6-867e-c4c5c1580416 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Inventory has not changed in ProviderTree for provider: 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1829.076838] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1a3a4eed-42f8-488c-acd6-460939914db9 
{{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1829.090461] env[63279]: DEBUG oslo_vmware.api [None req-11810f0f-6d8f-490b-a1f5-ddff9fda61b5 tempest-ServerDiagnosticsV248Test-703192613 tempest-ServerDiagnosticsV248Test-703192613-project-member] Task: {'id': task-2086766, 'name': Rename_Task, 'duration_secs': 0.226037} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1829.091511] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-11810f0f-6d8f-490b-a1f5-ddff9fda61b5 tempest-ServerDiagnosticsV248Test-703192613 tempest-ServerDiagnosticsV248Test-703192613-project-member] [instance: abd63285-ee3c-4546-b86d-6d4388765d94] Powering on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1829.092238] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-583797ca-7404-4b1d-a7ba-728cc6165a75 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1829.097399] env[63279]: DEBUG oslo_vmware.api [None req-e5b6a16c-1913-4848-8b42-b00fb552edc7 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Waiting for the task: (returnval){ [ 1829.097399] env[63279]: value = "task-2086769" [ 1829.097399] env[63279]: _type = "Task" [ 1829.097399] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1829.102180] env[63279]: DEBUG oslo_vmware.api [None req-11810f0f-6d8f-490b-a1f5-ddff9fda61b5 tempest-ServerDiagnosticsV248Test-703192613 tempest-ServerDiagnosticsV248Test-703192613-project-member] Waiting for the task: (returnval){ [ 1829.102180] env[63279]: value = "task-2086770" [ 1829.102180] env[63279]: _type = "Task" [ 1829.102180] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1829.109074] env[63279]: DEBUG oslo_vmware.api [None req-e5b6a16c-1913-4848-8b42-b00fb552edc7 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Task: {'id': task-2086769, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1829.114425] env[63279]: DEBUG oslo_vmware.api [None req-11810f0f-6d8f-490b-a1f5-ddff9fda61b5 tempest-ServerDiagnosticsV248Test-703192613 tempest-ServerDiagnosticsV248Test-703192613-project-member] Task: {'id': task-2086770, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1829.234791] env[63279]: DEBUG nova.compute.manager [req-f7e5441d-5778-4617-b96b-f40fea2b3c4f req-54d6426d-b8fd-4fc5-aba9-ec8e8dda5864 service nova] [instance: ac1d0e8f-446a-4a6d-a916-08f52426396d] Received event network-vif-deleted-c449bb4b-138e-412d-b39f-b2811a4ac7df {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 1829.255995] env[63279]: DEBUG oslo_concurrency.lockutils [None req-517f7fbc-4234-4bef-826e-a666eac17f9f tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Acquiring lock "f20e8eb2-847b-4988-abf9-ed5f9f65c25c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1829.256067] env[63279]: DEBUG oslo_concurrency.lockutils [None req-517f7fbc-4234-4bef-826e-a666eac17f9f tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Lock "f20e8eb2-847b-4988-abf9-ed5f9f65c25c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1829.291812] env[63279]: DEBUG oslo_concurrency.lockutils [None req-a7076d91-54cd-478d-8de7-9cdce51faa84 tempest-AttachInterfacesUnderV243Test-18748603 tempest-AttachInterfacesUnderV243Test-18748603-project-member] Acquiring lock "refresh_cache-fcc5a636-554f-424e-a604-a8e7bd7cf574" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1829.291987] env[63279]: DEBUG oslo_concurrency.lockutils [None req-a7076d91-54cd-478d-8de7-9cdce51faa84 tempest-AttachInterfacesUnderV243Test-18748603 tempest-AttachInterfacesUnderV243Test-18748603-project-member] Acquired lock "refresh_cache-fcc5a636-554f-424e-a604-a8e7bd7cf574" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1829.292224] env[63279]: DEBUG nova.network.neutron [None req-a7076d91-54cd-478d-8de7-9cdce51faa84 tempest-AttachInterfacesUnderV243Test-18748603 tempest-AttachInterfacesUnderV243Test-18748603-project-member] [instance: fcc5a636-554f-424e-a604-a8e7bd7cf574] Building network info cache for instance {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1829.388170] env[63279]: DEBUG oslo_vmware.api [None req-aeaa6e99-ad33-4acc-9a18-53a9725a9078 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Task: {'id': task-2086767, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1829.417513] env[63279]: INFO nova.compute.manager [None req-c56e5cd3-c5d3-497a-94cf-fa4a1928fd50 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] [instance: 6388f912-ae70-4e8f-b8e4-ceb02e0f8a51] Took 33.62 seconds to build instance. [ 1829.440112] env[63279]: DEBUG nova.compute.manager [None req-271b2613-9c7a-40b6-867e-c4c5c1580416 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] [instance: c05e9aaa-e251-480c-94d6-56c29bb6282d] Start spawning the instance on the hypervisor. 
{{(pid=63279) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1829.441498] env[63279]: DEBUG nova.network.neutron [req-7e0e0fff-b30a-4bb7-964b-e785eb164a75 req-119cf302-3a15-42ff-94d5-f84b57197c32 service nova] [instance: 7cd673bb-3795-4c6f-a1e0-a8ea0da0a35f] Updated VIF entry in instance network info cache for port dc489c36-2ef9-4f47-aadb-4f6503bb9d1b. {{(pid=63279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1829.442081] env[63279]: DEBUG nova.network.neutron [req-7e0e0fff-b30a-4bb7-964b-e785eb164a75 req-119cf302-3a15-42ff-94d5-f84b57197c32 service nova] [instance: 7cd673bb-3795-4c6f-a1e0-a8ea0da0a35f] Updating instance_info_cache with network_info: [{"id": "dc489c36-2ef9-4f47-aadb-4f6503bb9d1b", "address": "fa:16:3e:37:b8:f9", "network": {"id": "6baee8f1-a2e9-4c94-b06b-d371b1fd5ecd", "bridge": "br-int", "label": "tempest-ImagesOneServerTestJSON-669727083-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "afdc0a67b163421aafe94f0392a66fa8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c0d5204b-f60e-4830-84c8-2fe246c28202", "external-id": "nsx-vlan-transportzone-104", "segmentation_id": 104, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdc489c36-2e", "ovs_interfaceid": "dc489c36-2ef9-4f47-aadb-4f6503bb9d1b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1829.470146] env[63279]: DEBUG nova.virt.hardware [None req-271b2613-9c7a-40b6-867e-c4c5c1580416 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-13T17:30:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-13T17:30:01Z,direct_url=,disk_format='vmdk',id=30887889-e45b-4f67-8b3c-16216e594a90,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a935e86eee0a4c38adfe0367d2097a61',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-13T17:30:02Z,virtual_size=,visibility=), allow threads: False {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1829.470450] env[63279]: DEBUG nova.virt.hardware [None req-271b2613-9c7a-40b6-867e-c4c5c1580416 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Flavor limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1829.470823] env[63279]: DEBUG nova.virt.hardware [None req-271b2613-9c7a-40b6-867e-c4c5c1580416 tempest-MultipleCreateTestJSON-79500308 
tempest-MultipleCreateTestJSON-79500308-project-member] Image limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1829.470823] env[63279]: DEBUG nova.virt.hardware [None req-271b2613-9c7a-40b6-867e-c4c5c1580416 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Flavor pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1829.470941] env[63279]: DEBUG nova.virt.hardware [None req-271b2613-9c7a-40b6-867e-c4c5c1580416 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Image pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1829.471295] env[63279]: DEBUG nova.virt.hardware [None req-271b2613-9c7a-40b6-867e-c4c5c1580416 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1829.471589] env[63279]: DEBUG nova.virt.hardware [None req-271b2613-9c7a-40b6-867e-c4c5c1580416 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1829.471801] env[63279]: DEBUG nova.virt.hardware [None req-271b2613-9c7a-40b6-867e-c4c5c1580416 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1829.472014] env[63279]: DEBUG nova.virt.hardware [None req-271b2613-9c7a-40b6-867e-c4c5c1580416 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Got 1 possible topologies {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1829.472222] env[63279]: DEBUG nova.virt.hardware [None req-271b2613-9c7a-40b6-867e-c4c5c1580416 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1829.472500] env[63279]: DEBUG nova.virt.hardware [None req-271b2613-9c7a-40b6-867e-c4c5c1580416 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1829.473455] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60c4aa22-4ff3-40b5-842f-889f60617e55 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1829.484058] env[63279]: DEBUG oslo_vmware.api [None req-9be70fed-855a-424c-af8e-36597da29526 tempest-ImagesOneServerTestJSON-859902932 tempest-ImagesOneServerTestJSON-859902932-project-member] Task: {'id': task-2086768, 'name': CopyVirtualDisk_Task} progress is 4%. 
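The topology records above ("Build topologies for 1 vcpu(s) 1:1:1", "Got 1 possible topologies") enumerate the sockets x cores x threads factorizations of the flavor's vCPU count that fit within the flavor/image maxima. A toy re-implementation of that enumeration, not Nova's code, for illustration:

    # Toy re-implementation of the topology search logged above: find every
    # way to split `vcpus` into sockets * cores * threads within the maxima.
    # Nova's real logic lives in nova/virt/hardware.py; this is only a sketch.
    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536,
                            max_threads=65536):
        found = []
        for sockets in range(1, min(vcpus, max_sockets) + 1):
            for cores in range(1, min(vcpus, max_cores) + 1):
                for threads in range(1, min(vcpus, max_threads) + 1):
                    if sockets * cores * threads == vcpus:
                        found.append((sockets, cores, threads))
        return found

    print(possible_topologies(1))   # [(1, 1, 1)]  -> "Got 1 possible topologies"
    print(possible_topologies(4))   # six factorizations, e.g. (1, 2, 2), (4, 1, 1)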
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1829.491213] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1eb4b4d8-f36f-41b7-b9e7-4b8cbb064831 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1829.520061] env[63279]: INFO nova.compute.manager [-] [instance: a301d225-684d-4f88-bc9b-7e02b8115b9d] Took 1.67 seconds to deallocate network for instance. [ 1829.592728] env[63279]: DEBUG nova.scheduler.client.report [None req-271b2613-9c7a-40b6-867e-c4c5c1580416 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Inventory has not changed for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1829.620860] env[63279]: DEBUG oslo_vmware.api [None req-e5b6a16c-1913-4848-8b42-b00fb552edc7 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Task: {'id': task-2086769, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1829.626511] env[63279]: DEBUG oslo_vmware.api [None req-11810f0f-6d8f-490b-a1f5-ddff9fda61b5 tempest-ServerDiagnosticsV248Test-703192613 tempest-ServerDiagnosticsV248Test-703192613-project-member] Task: {'id': task-2086770, 'name': PowerOnVM_Task} progress is 89%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1829.840828] env[63279]: DEBUG nova.network.neutron [None req-a7076d91-54cd-478d-8de7-9cdce51faa84 tempest-AttachInterfacesUnderV243Test-18748603 tempest-AttachInterfacesUnderV243Test-18748603-project-member] [instance: fcc5a636-554f-424e-a604-a8e7bd7cf574] Instance cache missing network info. 
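The inventory reported above feeds the Placement service, where schedulable capacity for each resource class is roughly (total - reserved) * allocation_ratio, with max_unit additionally capping any single allocation. A quick sketch of that arithmetic using the values from the log:

    # Capacity arithmetic behind the inventory above:
    #   capacity = (total - reserved) * allocation_ratio
    # max_unit additionally caps what a single allocation may request
    # (e.g. DISK_GB max_unit 170 limits any one instance to 170 GB).
    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
    }
    for rc, inv in inventory.items():
        capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print(rc, capacity)   # VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0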
{{(pid=63279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1829.859275] env[63279]: DEBUG nova.network.neutron [None req-bd8d2a0f-2875-4e41-ad83-ca16b7e23c72 tempest-ServerExternalEventsTest-519460063 tempest-ServerExternalEventsTest-519460063-project] [instance: 24bce28c-fc43-4f17-9800-4d980f6729bc] Updating instance_info_cache with network_info: [{"id": "c3997b27-53bf-4f2d-828a-a55447612331", "address": "fa:16:3e:86:fe:67", "network": {"id": "1e875730-8e0e-4907-a6d2-776025ed7ab9", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.17", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "a935e86eee0a4c38adfe0367d2097a61", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "32028d02-abaa-4071-bc65-1460f5c772a8", "external-id": "nsx-vlan-transportzone-558", "segmentation_id": 558, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc3997b27-53", "ovs_interfaceid": "c3997b27-53bf-4f2d-828a-a55447612331", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1829.886295] env[63279]: DEBUG oslo_vmware.api [None req-aeaa6e99-ad33-4acc-9a18-53a9725a9078 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Task: {'id': task-2086767, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1829.914027] env[63279]: DEBUG oslo_concurrency.lockutils [None req-be29f42f-ff6f-406b-a7e4-862f984891e9 tempest-ServerPasswordTestJSON-1558733645 tempest-ServerPasswordTestJSON-1558733645-project-member] Acquiring lock "21c2bba1-5482-496a-9e2a-f123a94ed48a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1829.914309] env[63279]: DEBUG oslo_concurrency.lockutils [None req-be29f42f-ff6f-406b-a7e4-862f984891e9 tempest-ServerPasswordTestJSON-1558733645 tempest-ServerPasswordTestJSON-1558733645-project-member] Lock "21c2bba1-5482-496a-9e2a-f123a94ed48a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1829.914509] env[63279]: DEBUG oslo_concurrency.lockutils [None req-be29f42f-ff6f-406b-a7e4-862f984891e9 tempest-ServerPasswordTestJSON-1558733645 tempest-ServerPasswordTestJSON-1558733645-project-member] Acquiring lock "21c2bba1-5482-496a-9e2a-f123a94ed48a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1829.914691] env[63279]: DEBUG oslo_concurrency.lockutils [None req-be29f42f-ff6f-406b-a7e4-862f984891e9 tempest-ServerPasswordTestJSON-1558733645 tempest-ServerPasswordTestJSON-1558733645-project-member] Lock "21c2bba1-5482-496a-9e2a-f123a94ed48a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1829.914855] env[63279]: DEBUG oslo_concurrency.lockutils [None req-be29f42f-ff6f-406b-a7e4-862f984891e9 tempest-ServerPasswordTestJSON-1558733645 tempest-ServerPasswordTestJSON-1558733645-project-member] Lock "21c2bba1-5482-496a-9e2a-f123a94ed48a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1829.917527] env[63279]: INFO nova.compute.manager [None req-be29f42f-ff6f-406b-a7e4-862f984891e9 tempest-ServerPasswordTestJSON-1558733645 tempest-ServerPasswordTestJSON-1558733645-project-member] [instance: 21c2bba1-5482-496a-9e2a-f123a94ed48a] Terminating instance [ 1829.922769] env[63279]: DEBUG oslo_concurrency.lockutils [None req-c56e5cd3-c5d3-497a-94cf-fa4a1928fd50 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Lock "6388f912-ae70-4e8f-b8e4-ceb02e0f8a51" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 35.132s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1829.923645] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Lock "6388f912-ae70-4e8f-b8e4-ceb02e0f8a51" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 2.886s {{(pid=63279) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1829.923970] env[63279]: INFO nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] [instance: 6388f912-ae70-4e8f-b8e4-ceb02e0f8a51] During sync_power_state the instance has a pending task (spawning). Skip. [ 1829.924181] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Lock "6388f912-ae70-4e8f-b8e4-ceb02e0f8a51" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.001s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1829.945136] env[63279]: DEBUG oslo_concurrency.lockutils [req-7e0e0fff-b30a-4bb7-964b-e785eb164a75 req-119cf302-3a15-42ff-94d5-f84b57197c32 service nova] Releasing lock "refresh_cache-7cd673bb-3795-4c6f-a1e0-a8ea0da0a35f" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1829.979307] env[63279]: DEBUG oslo_vmware.api [None req-9be70fed-855a-424c-af8e-36597da29526 tempest-ImagesOneServerTestJSON-859902932 tempest-ImagesOneServerTestJSON-859902932-project-member] Task: {'id': task-2086768, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.831095} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1829.979307] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-9be70fed-855a-424c-af8e-36597da29526 tempest-ImagesOneServerTestJSON-859902932 tempest-ImagesOneServerTestJSON-859902932-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] 7cd673bb-3795-4c6f-a1e0-a8ea0da0a35f/7cd673bb-3795-4c6f-a1e0-a8ea0da0a35f.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1829.981176] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-9be70fed-855a-424c-af8e-36597da29526 tempest-ImagesOneServerTestJSON-859902932 tempest-ImagesOneServerTestJSON-859902932-project-member] [instance: 7cd673bb-3795-4c6f-a1e0-a8ea0da0a35f] Extending root virtual disk to 1048576 {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1829.981176] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-6b0144cb-0820-4bf4-9c93-45f7a5866373 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1829.988108] env[63279]: DEBUG oslo_vmware.api [None req-9be70fed-855a-424c-af8e-36597da29526 tempest-ImagesOneServerTestJSON-859902932 tempest-ImagesOneServerTestJSON-859902932-project-member] Waiting for the task: (returnval){ [ 1829.988108] env[63279]: value = "task-2086771" [ 1829.988108] env[63279]: _type = "Task" [ 1829.988108] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1830.000902] env[63279]: DEBUG oslo_vmware.api [None req-9be70fed-855a-424c-af8e-36597da29526 tempest-ImagesOneServerTestJSON-859902932 tempest-ImagesOneServerTestJSON-859902932-project-member] Task: {'id': task-2086771, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1830.004746] env[63279]: DEBUG nova.compute.manager [req-e87a53b0-e315-4eb4-8ca6-37479fe31127 req-b6085691-f605-4a12-9708-6f3ba30c1c18 service nova] [instance: fcc5a636-554f-424e-a604-a8e7bd7cf574] Received event network-vif-plugged-e1063b89-be88-474a-a2ec-b61eb11cf9fe {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 1830.006464] env[63279]: DEBUG oslo_concurrency.lockutils [req-e87a53b0-e315-4eb4-8ca6-37479fe31127 req-b6085691-f605-4a12-9708-6f3ba30c1c18 service nova] Acquiring lock "fcc5a636-554f-424e-a604-a8e7bd7cf574-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1830.006841] env[63279]: DEBUG oslo_concurrency.lockutils [req-e87a53b0-e315-4eb4-8ca6-37479fe31127 req-b6085691-f605-4a12-9708-6f3ba30c1c18 service nova] Lock "fcc5a636-554f-424e-a604-a8e7bd7cf574-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.002s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1830.007129] env[63279]: DEBUG oslo_concurrency.lockutils [req-e87a53b0-e315-4eb4-8ca6-37479fe31127 req-b6085691-f605-4a12-9708-6f3ba30c1c18 service nova] Lock "fcc5a636-554f-424e-a604-a8e7bd7cf574-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1830.007317] env[63279]: DEBUG nova.compute.manager [req-e87a53b0-e315-4eb4-8ca6-37479fe31127 req-b6085691-f605-4a12-9708-6f3ba30c1c18 service nova] [instance: fcc5a636-554f-424e-a604-a8e7bd7cf574] No waiting events found dispatching network-vif-plugged-e1063b89-be88-474a-a2ec-b61eb11cf9fe {{(pid=63279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1830.007576] env[63279]: WARNING nova.compute.manager [req-e87a53b0-e315-4eb4-8ca6-37479fe31127 req-b6085691-f605-4a12-9708-6f3ba30c1c18 service nova] [instance: fcc5a636-554f-424e-a604-a8e7bd7cf574] Received unexpected event network-vif-plugged-e1063b89-be88-474a-a2ec-b61eb11cf9fe for instance with vm_state building and task_state spawning. [ 1830.007806] env[63279]: DEBUG nova.compute.manager [req-e87a53b0-e315-4eb4-8ca6-37479fe31127 req-b6085691-f605-4a12-9708-6f3ba30c1c18 service nova] [instance: fcc5a636-554f-424e-a604-a8e7bd7cf574] Received event network-changed-e1063b89-be88-474a-a2ec-b61eb11cf9fe {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 1830.008028] env[63279]: DEBUG nova.compute.manager [req-e87a53b0-e315-4eb4-8ca6-37479fe31127 req-b6085691-f605-4a12-9708-6f3ba30c1c18 service nova] [instance: fcc5a636-554f-424e-a604-a8e7bd7cf574] Refreshing instance network info cache due to event network-changed-e1063b89-be88-474a-a2ec-b61eb11cf9fe. 
{{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11655}} [ 1830.008174] env[63279]: DEBUG oslo_concurrency.lockutils [req-e87a53b0-e315-4eb4-8ca6-37479fe31127 req-b6085691-f605-4a12-9708-6f3ba30c1c18 service nova] Acquiring lock "refresh_cache-fcc5a636-554f-424e-a604-a8e7bd7cf574" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1830.032647] env[63279]: DEBUG oslo_concurrency.lockutils [None req-87178c67-9fe7-4e5b-83de-4d1d9bf7ef39 tempest-DeleteServersAdminTestJSON-181817188 tempest-DeleteServersAdminTestJSON-181817188-project-admin] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1830.102569] env[63279]: DEBUG oslo_concurrency.lockutils [None req-271b2613-9c7a-40b6-867e-c4c5c1580416 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.690s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1830.103083] env[63279]: DEBUG nova.compute.manager [None req-271b2613-9c7a-40b6-867e-c4c5c1580416 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] [instance: 3d4db43f-5784-46e1-9710-f6becec011e2] Start building networks asynchronously for instance. {{(pid=63279) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1830.108351] env[63279]: DEBUG oslo_concurrency.lockutils [None req-af15306e-e4bd-4235-87e6-bad065fff03b tempest-VolumesAssistedSnapshotsTest-1202308672 tempest-VolumesAssistedSnapshotsTest-1202308672-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 6.103s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1830.109993] env[63279]: INFO nova.compute.claims [None req-af15306e-e4bd-4235-87e6-bad065fff03b tempest-VolumesAssistedSnapshotsTest-1202308672 tempest-VolumesAssistedSnapshotsTest-1202308672-project-member] [instance: 5d4909ea-396c-45ba-9ff5-acb8576150b3] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1830.129781] env[63279]: DEBUG oslo_vmware.api [None req-e5b6a16c-1913-4848-8b42-b00fb552edc7 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Task: {'id': task-2086769, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1830.134035] env[63279]: DEBUG oslo_vmware.api [None req-11810f0f-6d8f-490b-a1f5-ddff9fda61b5 tempest-ServerDiagnosticsV248Test-703192613 tempest-ServerDiagnosticsV248Test-703192613-project-member] Task: {'id': task-2086770, 'name': PowerOnVM_Task, 'duration_secs': 0.676935} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1830.134993] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-11810f0f-6d8f-490b-a1f5-ddff9fda61b5 tempest-ServerDiagnosticsV248Test-703192613 tempest-ServerDiagnosticsV248Test-703192613-project-member] [instance: abd63285-ee3c-4546-b86d-6d4388765d94] Powered on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1830.135228] env[63279]: INFO nova.compute.manager [None req-11810f0f-6d8f-490b-a1f5-ddff9fda61b5 tempest-ServerDiagnosticsV248Test-703192613 tempest-ServerDiagnosticsV248Test-703192613-project-member] [instance: abd63285-ee3c-4546-b86d-6d4388765d94] Took 5.91 seconds to spawn the instance on the hypervisor. [ 1830.135541] env[63279]: DEBUG nova.compute.manager [None req-11810f0f-6d8f-490b-a1f5-ddff9fda61b5 tempest-ServerDiagnosticsV248Test-703192613 tempest-ServerDiagnosticsV248Test-703192613-project-member] [instance: abd63285-ee3c-4546-b86d-6d4388765d94] Checking state {{(pid=63279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1830.136760] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85e8ad8b-0134-4114-a792-94431bab6e21 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1830.236974] env[63279]: DEBUG nova.network.neutron [None req-a7076d91-54cd-478d-8de7-9cdce51faa84 tempest-AttachInterfacesUnderV243Test-18748603 tempest-AttachInterfacesUnderV243Test-18748603-project-member] [instance: fcc5a636-554f-424e-a604-a8e7bd7cf574] Updating instance_info_cache with network_info: [{"id": "e1063b89-be88-474a-a2ec-b61eb11cf9fe", "address": "fa:16:3e:b1:26:30", "network": {"id": "50f30894-1239-497e-9f70-afa5b0c429ea", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-85780566-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dfdf69a97cf54d5cb8c4fb1c59b6a5d0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f2e45023-22b5-458b-826e-9b7eb69ba028", "external-id": "nsx-vlan-transportzone-614", "segmentation_id": 614, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape1063b89-be", "ovs_interfaceid": "e1063b89-be88-474a-a2ec-b61eb11cf9fe", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1830.350413] env[63279]: DEBUG oslo_concurrency.lockutils [None req-21fcbb29-dd97-4731-aa59-0856e81c5314 tempest-ServerExternalEventsTest-2060906853 tempest-ServerExternalEventsTest-2060906853-project-member] Acquiring lock "24bce28c-fc43-4f17-9800-4d980f6729bc" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1830.350865] env[63279]: DEBUG oslo_concurrency.lockutils [None req-21fcbb29-dd97-4731-aa59-0856e81c5314 
tempest-ServerExternalEventsTest-2060906853 tempest-ServerExternalEventsTest-2060906853-project-member] Lock "24bce28c-fc43-4f17-9800-4d980f6729bc" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1830.351153] env[63279]: DEBUG oslo_concurrency.lockutils [None req-21fcbb29-dd97-4731-aa59-0856e81c5314 tempest-ServerExternalEventsTest-2060906853 tempest-ServerExternalEventsTest-2060906853-project-member] Acquiring lock "24bce28c-fc43-4f17-9800-4d980f6729bc-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1830.351415] env[63279]: DEBUG oslo_concurrency.lockutils [None req-21fcbb29-dd97-4731-aa59-0856e81c5314 tempest-ServerExternalEventsTest-2060906853 tempest-ServerExternalEventsTest-2060906853-project-member] Lock "24bce28c-fc43-4f17-9800-4d980f6729bc-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1830.351856] env[63279]: DEBUG oslo_concurrency.lockutils [None req-21fcbb29-dd97-4731-aa59-0856e81c5314 tempest-ServerExternalEventsTest-2060906853 tempest-ServerExternalEventsTest-2060906853-project-member] Lock "24bce28c-fc43-4f17-9800-4d980f6729bc-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1830.355476] env[63279]: INFO nova.compute.manager [None req-21fcbb29-dd97-4731-aa59-0856e81c5314 tempest-ServerExternalEventsTest-2060906853 tempest-ServerExternalEventsTest-2060906853-project-member] [instance: 24bce28c-fc43-4f17-9800-4d980f6729bc] Terminating instance [ 1830.363169] env[63279]: DEBUG oslo_concurrency.lockutils [None req-bd8d2a0f-2875-4e41-ad83-ca16b7e23c72 tempest-ServerExternalEventsTest-519460063 tempest-ServerExternalEventsTest-519460063-project] Releasing lock "refresh_cache-24bce28c-fc43-4f17-9800-4d980f6729bc" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1830.393581] env[63279]: DEBUG oslo_vmware.api [None req-aeaa6e99-ad33-4acc-9a18-53a9725a9078 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Task: {'id': task-2086767, 'name': PowerOnVM_Task, 'duration_secs': 1.103472} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1830.393581] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-aeaa6e99-ad33-4acc-9a18-53a9725a9078 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] [instance: 5656c853-ac83-47be-83c4-979a9e87ab91] Powered on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1830.393581] env[63279]: INFO nova.compute.manager [None req-aeaa6e99-ad33-4acc-9a18-53a9725a9078 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] [instance: 5656c853-ac83-47be-83c4-979a9e87ab91] Took 14.91 seconds to spawn the instance on the hypervisor. 
[ 1830.393758] env[63279]: DEBUG nova.compute.manager [None req-aeaa6e99-ad33-4acc-9a18-53a9725a9078 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] [instance: 5656c853-ac83-47be-83c4-979a9e87ab91] Checking state {{(pid=63279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1830.396178] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aeb4d3e1-187d-44cb-86ba-5da58095a223 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1830.421578] env[63279]: DEBUG nova.compute.manager [None req-be29f42f-ff6f-406b-a7e4-862f984891e9 tempest-ServerPasswordTestJSON-1558733645 tempest-ServerPasswordTestJSON-1558733645-project-member] [instance: 21c2bba1-5482-496a-9e2a-f123a94ed48a] Start destroying the instance on the hypervisor. {{(pid=63279) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1830.421845] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-be29f42f-ff6f-406b-a7e4-862f984891e9 tempest-ServerPasswordTestJSON-1558733645 tempest-ServerPasswordTestJSON-1558733645-project-member] [instance: 21c2bba1-5482-496a-9e2a-f123a94ed48a] Destroying instance {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1830.423021] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c5d17a5-f0d9-463d-9273-bc77e9f67d15 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1830.430729] env[63279]: DEBUG nova.compute.manager [None req-7fcc8714-6322-470e-8fb8-cd968acaa101 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: 41654a82-af5e-463e-80f9-86ba13a5ad2e] Starting instance... {{(pid=63279) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1830.443133] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-be29f42f-ff6f-406b-a7e4-862f984891e9 tempest-ServerPasswordTestJSON-1558733645 tempest-ServerPasswordTestJSON-1558733645-project-member] [instance: 21c2bba1-5482-496a-9e2a-f123a94ed48a] Powering off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1830.443610] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-748ead1a-74ea-4428-af61-c81e7ff98ce2 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1830.454784] env[63279]: DEBUG oslo_vmware.api [None req-be29f42f-ff6f-406b-a7e4-862f984891e9 tempest-ServerPasswordTestJSON-1558733645 tempest-ServerPasswordTestJSON-1558733645-project-member] Waiting for the task: (returnval){ [ 1830.454784] env[63279]: value = "task-2086772" [ 1830.454784] env[63279]: _type = "Task" [ 1830.454784] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1830.467786] env[63279]: DEBUG oslo_vmware.api [None req-be29f42f-ff6f-406b-a7e4-862f984891e9 tempest-ServerPasswordTestJSON-1558733645 tempest-ServerPasswordTestJSON-1558733645-project-member] Task: {'id': task-2086772, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1830.499724] env[63279]: DEBUG oslo_vmware.api [None req-9be70fed-855a-424c-af8e-36597da29526 tempest-ImagesOneServerTestJSON-859902932 tempest-ImagesOneServerTestJSON-859902932-project-member] Task: {'id': task-2086771, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.226106} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1830.500980] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-9be70fed-855a-424c-af8e-36597da29526 tempest-ImagesOneServerTestJSON-859902932 tempest-ImagesOneServerTestJSON-859902932-project-member] [instance: 7cd673bb-3795-4c6f-a1e0-a8ea0da0a35f] Extended root virtual disk {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1830.501816] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ec851f9-8ad9-41bb-a0c1-487df3554eec {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1830.530620] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-9be70fed-855a-424c-af8e-36597da29526 tempest-ImagesOneServerTestJSON-859902932 tempest-ImagesOneServerTestJSON-859902932-project-member] [instance: 7cd673bb-3795-4c6f-a1e0-a8ea0da0a35f] Reconfiguring VM instance instance-0000000b to attach disk [datastore1] 7cd673bb-3795-4c6f-a1e0-a8ea0da0a35f/7cd673bb-3795-4c6f-a1e0-a8ea0da0a35f.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1830.531369] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-84b7839a-710b-4aa5-b9cb-3bfe51118ab7 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1830.557298] env[63279]: DEBUG oslo_vmware.api [None req-9be70fed-855a-424c-af8e-36597da29526 tempest-ImagesOneServerTestJSON-859902932 tempest-ImagesOneServerTestJSON-859902932-project-member] Waiting for the task: (returnval){ [ 1830.557298] env[63279]: value = "task-2086773" [ 1830.557298] env[63279]: _type = "Task" [ 1830.557298] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1830.565028] env[63279]: DEBUG oslo_vmware.api [None req-9be70fed-855a-424c-af8e-36597da29526 tempest-ImagesOneServerTestJSON-859902932 tempest-ImagesOneServerTestJSON-859902932-project-member] Task: {'id': task-2086773, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1830.598067] env[63279]: DEBUG oslo_concurrency.lockutils [None req-d61d218b-0e55-4b2f-bcac-53de9fd09b97 tempest-TenantUsagesTestJSON-1157774739 tempest-TenantUsagesTestJSON-1157774739-project-member] Acquiring lock "8c712d0d-55c2-4a14-b759-9441594211e1" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1830.598704] env[63279]: DEBUG oslo_concurrency.lockutils [None req-d61d218b-0e55-4b2f-bcac-53de9fd09b97 tempest-TenantUsagesTestJSON-1157774739 tempest-TenantUsagesTestJSON-1157774739-project-member] Lock "8c712d0d-55c2-4a14-b759-9441594211e1" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1830.598704] env[63279]: DEBUG oslo_concurrency.lockutils [None req-d61d218b-0e55-4b2f-bcac-53de9fd09b97 tempest-TenantUsagesTestJSON-1157774739 tempest-TenantUsagesTestJSON-1157774739-project-member] Acquiring lock "8c712d0d-55c2-4a14-b759-9441594211e1-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1830.598784] env[63279]: DEBUG oslo_concurrency.lockutils [None req-d61d218b-0e55-4b2f-bcac-53de9fd09b97 tempest-TenantUsagesTestJSON-1157774739 tempest-TenantUsagesTestJSON-1157774739-project-member] Lock "8c712d0d-55c2-4a14-b759-9441594211e1-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1830.598888] env[63279]: DEBUG oslo_concurrency.lockutils [None req-d61d218b-0e55-4b2f-bcac-53de9fd09b97 tempest-TenantUsagesTestJSON-1157774739 tempest-TenantUsagesTestJSON-1157774739-project-member] Lock "8c712d0d-55c2-4a14-b759-9441594211e1-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1830.601150] env[63279]: INFO nova.compute.manager [None req-d61d218b-0e55-4b2f-bcac-53de9fd09b97 tempest-TenantUsagesTestJSON-1157774739 tempest-TenantUsagesTestJSON-1157774739-project-member] [instance: 8c712d0d-55c2-4a14-b759-9441594211e1] Terminating instance [ 1830.614507] env[63279]: DEBUG nova.compute.utils [None req-271b2613-9c7a-40b6-867e-c4c5c1580416 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Using /dev/sd instead of None {{(pid=63279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1830.617165] env[63279]: DEBUG nova.compute.manager [None req-271b2613-9c7a-40b6-867e-c4c5c1580416 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] [instance: 3d4db43f-5784-46e1-9710-f6becec011e2] Allocating IP information in the background. 
{{(pid=63279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1830.617341] env[63279]: DEBUG nova.network.neutron [None req-271b2613-9c7a-40b6-867e-c4c5c1580416 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] [instance: 3d4db43f-5784-46e1-9710-f6becec011e2] allocate_for_instance() {{(pid=63279) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1830.625730] env[63279]: DEBUG oslo_vmware.api [None req-e5b6a16c-1913-4848-8b42-b00fb552edc7 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Task: {'id': task-2086769, 'name': ReconfigVM_Task, 'duration_secs': 1.332301} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1830.626852] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-e5b6a16c-1913-4848-8b42-b00fb552edc7 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] [instance: 4a9088e0-2992-4b18-8be9-6bc70633369b] Reconfigured VM instance instance-0000000a to attach disk [datastore1] 4a9088e0-2992-4b18-8be9-6bc70633369b/4a9088e0-2992-4b18-8be9-6bc70633369b.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1830.627964] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-bbc39b56-b0c0-4200-a82d-e9e0c8640b14 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1830.637198] env[63279]: DEBUG oslo_vmware.api [None req-e5b6a16c-1913-4848-8b42-b00fb552edc7 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Waiting for the task: (returnval){ [ 1830.637198] env[63279]: value = "task-2086774" [ 1830.637198] env[63279]: _type = "Task" [ 1830.637198] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1830.648636] env[63279]: DEBUG oslo_vmware.api [None req-e5b6a16c-1913-4848-8b42-b00fb552edc7 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Task: {'id': task-2086774, 'name': Rename_Task} progress is 6%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1830.658294] env[63279]: INFO nova.compute.manager [None req-11810f0f-6d8f-490b-a1f5-ddff9fda61b5 tempest-ServerDiagnosticsV248Test-703192613 tempest-ServerDiagnosticsV248Test-703192613-project-member] [instance: abd63285-ee3c-4546-b86d-6d4388765d94] Took 17.06 seconds to build instance. 
[ 1830.742146] env[63279]: DEBUG oslo_concurrency.lockutils [None req-a7076d91-54cd-478d-8de7-9cdce51faa84 tempest-AttachInterfacesUnderV243Test-18748603 tempest-AttachInterfacesUnderV243Test-18748603-project-member] Releasing lock "refresh_cache-fcc5a636-554f-424e-a604-a8e7bd7cf574" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1830.742146] env[63279]: DEBUG nova.compute.manager [None req-a7076d91-54cd-478d-8de7-9cdce51faa84 tempest-AttachInterfacesUnderV243Test-18748603 tempest-AttachInterfacesUnderV243Test-18748603-project-member] [instance: fcc5a636-554f-424e-a604-a8e7bd7cf574] Instance network_info: |[{"id": "e1063b89-be88-474a-a2ec-b61eb11cf9fe", "address": "fa:16:3e:b1:26:30", "network": {"id": "50f30894-1239-497e-9f70-afa5b0c429ea", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-85780566-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dfdf69a97cf54d5cb8c4fb1c59b6a5d0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f2e45023-22b5-458b-826e-9b7eb69ba028", "external-id": "nsx-vlan-transportzone-614", "segmentation_id": 614, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape1063b89-be", "ovs_interfaceid": "e1063b89-be88-474a-a2ec-b61eb11cf9fe", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1830.742293] env[63279]: DEBUG oslo_concurrency.lockutils [req-e87a53b0-e315-4eb4-8ca6-37479fe31127 req-b6085691-f605-4a12-9708-6f3ba30c1c18 service nova] Acquired lock "refresh_cache-fcc5a636-554f-424e-a604-a8e7bd7cf574" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1830.742441] env[63279]: DEBUG nova.network.neutron [req-e87a53b0-e315-4eb4-8ca6-37479fe31127 req-b6085691-f605-4a12-9708-6f3ba30c1c18 service nova] [instance: fcc5a636-554f-424e-a604-a8e7bd7cf574] Refreshing network info cache for port e1063b89-be88-474a-a2ec-b61eb11cf9fe {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1830.746735] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-a7076d91-54cd-478d-8de7-9cdce51faa84 tempest-AttachInterfacesUnderV243Test-18748603 tempest-AttachInterfacesUnderV243Test-18748603-project-member] [instance: fcc5a636-554f-424e-a604-a8e7bd7cf574] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b1:26:30', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f2e45023-22b5-458b-826e-9b7eb69ba028', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e1063b89-be88-474a-a2ec-b61eb11cf9fe', 'vif_model': 'vmxnet3'}] {{(pid=63279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1830.753357] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-a7076d91-54cd-478d-8de7-9cdce51faa84 tempest-AttachInterfacesUnderV243Test-18748603 tempest-AttachInterfacesUnderV243Test-18748603-project-member] Creating folder: Project 
(dfdf69a97cf54d5cb8c4fb1c59b6a5d0). Parent ref: group-v427491. {{(pid=63279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1830.753652] env[63279]: DEBUG nova.policy [None req-271b2613-9c7a-40b6-867e-c4c5c1580416 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '37e32e1b25d1432aadda8ab95694aca0', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '70c8735ebd5740c4b8b4d0cf8635da71', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63279) authorize /opt/stack/nova/nova/policy.py:192}} [ 1830.755837] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9d3f5c36-72c2-4be8-ab01-c2a76f092a85 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1830.766239] env[63279]: INFO nova.virt.vmwareapi.vm_util [None req-a7076d91-54cd-478d-8de7-9cdce51faa84 tempest-AttachInterfacesUnderV243Test-18748603 tempest-AttachInterfacesUnderV243Test-18748603-project-member] Created folder: Project (dfdf69a97cf54d5cb8c4fb1c59b6a5d0) in parent group-v427491. [ 1830.767209] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-a7076d91-54cd-478d-8de7-9cdce51faa84 tempest-AttachInterfacesUnderV243Test-18748603 tempest-AttachInterfacesUnderV243Test-18748603-project-member] Creating folder: Instances. Parent ref: group-v427529. {{(pid=63279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1830.767209] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-85e30bf3-5916-4602-9cca-cff585fe06f4 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1830.777597] env[63279]: INFO nova.virt.vmwareapi.vm_util [None req-a7076d91-54cd-478d-8de7-9cdce51faa84 tempest-AttachInterfacesUnderV243Test-18748603 tempest-AttachInterfacesUnderV243Test-18748603-project-member] Created folder: Instances in parent group-v427529. [ 1830.778021] env[63279]: DEBUG oslo.service.loopingcall [None req-a7076d91-54cd-478d-8de7-9cdce51faa84 tempest-AttachInterfacesUnderV243Test-18748603 tempest-AttachInterfacesUnderV243Test-18748603-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1830.778155] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fcc5a636-554f-424e-a604-a8e7bd7cf574] Creating VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1830.778474] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c2de935c-7e3c-421c-a2a3-6ef7d183afbb {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1830.799428] env[63279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1830.799428] env[63279]: value = "task-2086778" [ 1830.799428] env[63279]: _type = "Task" [ 1830.799428] env[63279]: } to complete. 
{{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1830.808991] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2086778, 'name': CreateVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1830.860012] env[63279]: DEBUG nova.compute.manager [None req-21fcbb29-dd97-4731-aa59-0856e81c5314 tempest-ServerExternalEventsTest-2060906853 tempest-ServerExternalEventsTest-2060906853-project-member] [instance: 24bce28c-fc43-4f17-9800-4d980f6729bc] Start destroying the instance on the hypervisor. {{(pid=63279) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1830.860501] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-21fcbb29-dd97-4731-aa59-0856e81c5314 tempest-ServerExternalEventsTest-2060906853 tempest-ServerExternalEventsTest-2060906853-project-member] [instance: 24bce28c-fc43-4f17-9800-4d980f6729bc] Destroying instance {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1830.861611] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c657161-f075-46de-b081-0eadbbe4b1e9 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1830.870900] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-21fcbb29-dd97-4731-aa59-0856e81c5314 tempest-ServerExternalEventsTest-2060906853 tempest-ServerExternalEventsTest-2060906853-project-member] [instance: 24bce28c-fc43-4f17-9800-4d980f6729bc] Powering off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1830.871191] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-fc71e1b8-3131-4a04-9be1-83c6e91b47be {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1830.878511] env[63279]: DEBUG oslo_vmware.api [None req-21fcbb29-dd97-4731-aa59-0856e81c5314 tempest-ServerExternalEventsTest-2060906853 tempest-ServerExternalEventsTest-2060906853-project-member] Waiting for the task: (returnval){ [ 1830.878511] env[63279]: value = "task-2086779" [ 1830.878511] env[63279]: _type = "Task" [ 1830.878511] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1830.888461] env[63279]: DEBUG oslo_vmware.api [None req-21fcbb29-dd97-4731-aa59-0856e81c5314 tempest-ServerExternalEventsTest-2060906853 tempest-ServerExternalEventsTest-2060906853-project-member] Task: {'id': task-2086779, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1830.918897] env[63279]: INFO nova.compute.manager [None req-aeaa6e99-ad33-4acc-9a18-53a9725a9078 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] [instance: 5656c853-ac83-47be-83c4-979a9e87ab91] Took 34.95 seconds to build instance. [ 1830.966168] env[63279]: DEBUG oslo_vmware.api [None req-be29f42f-ff6f-406b-a7e4-862f984891e9 tempest-ServerPasswordTestJSON-1558733645 tempest-ServerPasswordTestJSON-1558733645-project-member] Task: {'id': task-2086772, 'name': PowerOffVM_Task, 'duration_secs': 0.366671} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1830.967376] env[63279]: DEBUG oslo_concurrency.lockutils [None req-7fcc8714-6322-470e-8fb8-cd968acaa101 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1830.967749] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-be29f42f-ff6f-406b-a7e4-862f984891e9 tempest-ServerPasswordTestJSON-1558733645 tempest-ServerPasswordTestJSON-1558733645-project-member] [instance: 21c2bba1-5482-496a-9e2a-f123a94ed48a] Powered off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1830.967977] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-be29f42f-ff6f-406b-a7e4-862f984891e9 tempest-ServerPasswordTestJSON-1558733645 tempest-ServerPasswordTestJSON-1558733645-project-member] [instance: 21c2bba1-5482-496a-9e2a-f123a94ed48a] Unregistering the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1830.968717] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1de368dc-6f8c-44ed-ae14-ebdca3471660 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1831.067828] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-be29f42f-ff6f-406b-a7e4-862f984891e9 tempest-ServerPasswordTestJSON-1558733645 tempest-ServerPasswordTestJSON-1558733645-project-member] [instance: 21c2bba1-5482-496a-9e2a-f123a94ed48a] Unregistered the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1831.067828] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-be29f42f-ff6f-406b-a7e4-862f984891e9 tempest-ServerPasswordTestJSON-1558733645 tempest-ServerPasswordTestJSON-1558733645-project-member] [instance: 21c2bba1-5482-496a-9e2a-f123a94ed48a] Deleting contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1831.067828] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-be29f42f-ff6f-406b-a7e4-862f984891e9 tempest-ServerPasswordTestJSON-1558733645 tempest-ServerPasswordTestJSON-1558733645-project-member] Deleting the datastore file [datastore1] 21c2bba1-5482-496a-9e2a-f123a94ed48a {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1831.067828] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6437ff51-d01d-48ce-b523-8466f03b1f0a {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1831.074250] env[63279]: DEBUG oslo_vmware.api [None req-9be70fed-855a-424c-af8e-36597da29526 tempest-ImagesOneServerTestJSON-859902932 tempest-ImagesOneServerTestJSON-859902932-project-member] Task: {'id': task-2086773, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1831.079448] env[63279]: DEBUG oslo_vmware.api [None req-be29f42f-ff6f-406b-a7e4-862f984891e9 tempest-ServerPasswordTestJSON-1558733645 tempest-ServerPasswordTestJSON-1558733645-project-member] Waiting for the task: (returnval){ [ 1831.079448] env[63279]: value = "task-2086781" [ 1831.079448] env[63279]: _type = "Task" [ 1831.079448] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1831.092893] env[63279]: DEBUG oslo_vmware.api [None req-be29f42f-ff6f-406b-a7e4-862f984891e9 tempest-ServerPasswordTestJSON-1558733645 tempest-ServerPasswordTestJSON-1558733645-project-member] Task: {'id': task-2086781, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1831.107052] env[63279]: DEBUG nova.compute.manager [None req-d61d218b-0e55-4b2f-bcac-53de9fd09b97 tempest-TenantUsagesTestJSON-1157774739 tempest-TenantUsagesTestJSON-1157774739-project-member] [instance: 8c712d0d-55c2-4a14-b759-9441594211e1] Start destroying the instance on the hypervisor. {{(pid=63279) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1831.107304] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-d61d218b-0e55-4b2f-bcac-53de9fd09b97 tempest-TenantUsagesTestJSON-1157774739 tempest-TenantUsagesTestJSON-1157774739-project-member] [instance: 8c712d0d-55c2-4a14-b759-9441594211e1] Destroying instance {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1831.108197] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec0c95b6-8c02-4e49-8828-744d7188bc1a {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1831.115388] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-d61d218b-0e55-4b2f-bcac-53de9fd09b97 tempest-TenantUsagesTestJSON-1157774739 tempest-TenantUsagesTestJSON-1157774739-project-member] [instance: 8c712d0d-55c2-4a14-b759-9441594211e1] Powering off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1831.115667] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-95b96b36-149d-444d-a074-98e5ffca0853 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1831.121826] env[63279]: DEBUG nova.compute.manager [None req-271b2613-9c7a-40b6-867e-c4c5c1580416 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] [instance: 3d4db43f-5784-46e1-9710-f6becec011e2] Start building block device mappings for instance. {{(pid=63279) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1831.126146] env[63279]: DEBUG oslo_vmware.api [None req-d61d218b-0e55-4b2f-bcac-53de9fd09b97 tempest-TenantUsagesTestJSON-1157774739 tempest-TenantUsagesTestJSON-1157774739-project-member] Waiting for the task: (returnval){ [ 1831.126146] env[63279]: value = "task-2086782" [ 1831.126146] env[63279]: _type = "Task" [ 1831.126146] env[63279]: } to complete. 
{{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1831.137995] env[63279]: DEBUG oslo_vmware.api [None req-d61d218b-0e55-4b2f-bcac-53de9fd09b97 tempest-TenantUsagesTestJSON-1157774739 tempest-TenantUsagesTestJSON-1157774739-project-member] Task: {'id': task-2086782, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1831.148230] env[63279]: DEBUG oslo_vmware.api [None req-e5b6a16c-1913-4848-8b42-b00fb552edc7 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Task: {'id': task-2086774, 'name': Rename_Task} progress is 14%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1831.149532] env[63279]: DEBUG nova.network.neutron [None req-271b2613-9c7a-40b6-867e-c4c5c1580416 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] [instance: c05e9aaa-e251-480c-94d6-56c29bb6282d] Successfully updated port: 404425c8-83a7-4c0b-b938-18e89530246f {{(pid=63279) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1831.161130] env[63279]: DEBUG oslo_concurrency.lockutils [None req-11810f0f-6d8f-490b-a1f5-ddff9fda61b5 tempest-ServerDiagnosticsV248Test-703192613 tempest-ServerDiagnosticsV248Test-703192613-project-member] Lock "abd63285-ee3c-4546-b86d-6d4388765d94" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 29.615s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1831.164060] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Lock "abd63285-ee3c-4546-b86d-6d4388765d94" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 4.124s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1831.164060] env[63279]: INFO nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] [instance: abd63285-ee3c-4546-b86d-6d4388765d94] During sync_power_state the instance has a pending task (spawning). Skip. [ 1831.164222] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Lock "abd63285-ee3c-4546-b86d-6d4388765d94" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.001s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1831.314910] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2086778, 'name': CreateVM_Task} progress is 25%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1831.392422] env[63279]: DEBUG oslo_vmware.api [None req-21fcbb29-dd97-4731-aa59-0856e81c5314 tempest-ServerExternalEventsTest-2060906853 tempest-ServerExternalEventsTest-2060906853-project-member] Task: {'id': task-2086779, 'name': PowerOffVM_Task, 'duration_secs': 0.329784} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1831.392759] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-21fcbb29-dd97-4731-aa59-0856e81c5314 tempest-ServerExternalEventsTest-2060906853 tempest-ServerExternalEventsTest-2060906853-project-member] [instance: 24bce28c-fc43-4f17-9800-4d980f6729bc] Powered off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1831.393390] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-21fcbb29-dd97-4731-aa59-0856e81c5314 tempest-ServerExternalEventsTest-2060906853 tempest-ServerExternalEventsTest-2060906853-project-member] [instance: 24bce28c-fc43-4f17-9800-4d980f6729bc] Unregistering the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1831.393810] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c7820a26-f740-4284-ad61-c330a0738824 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1831.422858] env[63279]: DEBUG oslo_concurrency.lockutils [None req-aeaa6e99-ad33-4acc-9a18-53a9725a9078 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Lock "5656c853-ac83-47be-83c4-979a9e87ab91" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 36.465s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1831.425188] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Lock "5656c853-ac83-47be-83c4-979a9e87ab91" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 4.386s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1831.425188] env[63279]: INFO nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] [instance: 5656c853-ac83-47be-83c4-979a9e87ab91] During sync_power_state the instance has a pending task (spawning). Skip. [ 1831.425188] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Lock "5656c853-ac83-47be-83c4-979a9e87ab91" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.001s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1831.441087] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1831.442488] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=63279) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11066}} [ 1831.532437] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-21fcbb29-dd97-4731-aa59-0856e81c5314 tempest-ServerExternalEventsTest-2060906853 tempest-ServerExternalEventsTest-2060906853-project-member] [instance: 24bce28c-fc43-4f17-9800-4d980f6729bc] Unregistered the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1831.532437] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-21fcbb29-dd97-4731-aa59-0856e81c5314 tempest-ServerExternalEventsTest-2060906853 tempest-ServerExternalEventsTest-2060906853-project-member] [instance: 24bce28c-fc43-4f17-9800-4d980f6729bc] Deleting contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1831.532437] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-21fcbb29-dd97-4731-aa59-0856e81c5314 tempest-ServerExternalEventsTest-2060906853 tempest-ServerExternalEventsTest-2060906853-project-member] Deleting the datastore file [datastore1] 24bce28c-fc43-4f17-9800-4d980f6729bc {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1831.532437] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3c88219b-1c27-474b-b2da-70b3ac7ad26a {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1831.542209] env[63279]: DEBUG oslo_vmware.api [None req-21fcbb29-dd97-4731-aa59-0856e81c5314 tempest-ServerExternalEventsTest-2060906853 tempest-ServerExternalEventsTest-2060906853-project-member] Waiting for the task: (returnval){ [ 1831.542209] env[63279]: value = "task-2086784" [ 1831.542209] env[63279]: _type = "Task" [ 1831.542209] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1831.555888] env[63279]: DEBUG oslo_vmware.api [None req-21fcbb29-dd97-4731-aa59-0856e81c5314 tempest-ServerExternalEventsTest-2060906853 tempest-ServerExternalEventsTest-2060906853-project-member] Task: {'id': task-2086784, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1831.571363] env[63279]: DEBUG oslo_vmware.api [None req-9be70fed-855a-424c-af8e-36597da29526 tempest-ImagesOneServerTestJSON-859902932 tempest-ImagesOneServerTestJSON-859902932-project-member] Task: {'id': task-2086773, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1831.599184] env[63279]: DEBUG oslo_vmware.api [None req-be29f42f-ff6f-406b-a7e4-862f984891e9 tempest-ServerPasswordTestJSON-1558733645 tempest-ServerPasswordTestJSON-1558733645-project-member] Task: {'id': task-2086781, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.475557} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1831.603808] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-be29f42f-ff6f-406b-a7e4-862f984891e9 tempest-ServerPasswordTestJSON-1558733645 tempest-ServerPasswordTestJSON-1558733645-project-member] Deleted the datastore file {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1831.603808] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-be29f42f-ff6f-406b-a7e4-862f984891e9 tempest-ServerPasswordTestJSON-1558733645 tempest-ServerPasswordTestJSON-1558733645-project-member] [instance: 21c2bba1-5482-496a-9e2a-f123a94ed48a] Deleted contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1831.603808] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-be29f42f-ff6f-406b-a7e4-862f984891e9 tempest-ServerPasswordTestJSON-1558733645 tempest-ServerPasswordTestJSON-1558733645-project-member] [instance: 21c2bba1-5482-496a-9e2a-f123a94ed48a] Instance destroyed {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1831.603808] env[63279]: INFO nova.compute.manager [None req-be29f42f-ff6f-406b-a7e4-862f984891e9 tempest-ServerPasswordTestJSON-1558733645 tempest-ServerPasswordTestJSON-1558733645-project-member] [instance: 21c2bba1-5482-496a-9e2a-f123a94ed48a] Took 1.18 seconds to destroy the instance on the hypervisor. [ 1831.603808] env[63279]: DEBUG oslo.service.loopingcall [None req-be29f42f-ff6f-406b-a7e4-862f984891e9 tempest-ServerPasswordTestJSON-1558733645 tempest-ServerPasswordTestJSON-1558733645-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1831.604557] env[63279]: DEBUG nova.compute.manager [-] [instance: 21c2bba1-5482-496a-9e2a-f123a94ed48a] Deallocating network for instance {{(pid=63279) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1831.604681] env[63279]: DEBUG nova.network.neutron [-] [instance: 21c2bba1-5482-496a-9e2a-f123a94ed48a] deallocate_for_instance() {{(pid=63279) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1831.641856] env[63279]: DEBUG oslo_vmware.api [None req-d61d218b-0e55-4b2f-bcac-53de9fd09b97 tempest-TenantUsagesTestJSON-1157774739 tempest-TenantUsagesTestJSON-1157774739-project-member] Task: {'id': task-2086782, 'name': PowerOffVM_Task, 'duration_secs': 0.259135} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1831.646208] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-d61d218b-0e55-4b2f-bcac-53de9fd09b97 tempest-TenantUsagesTestJSON-1157774739 tempest-TenantUsagesTestJSON-1157774739-project-member] [instance: 8c712d0d-55c2-4a14-b759-9441594211e1] Powered off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1831.646208] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-d61d218b-0e55-4b2f-bcac-53de9fd09b97 tempest-TenantUsagesTestJSON-1157774739 tempest-TenantUsagesTestJSON-1157774739-project-member] [instance: 8c712d0d-55c2-4a14-b759-9441594211e1] Unregistering the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1831.648371] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0c564247-51fc-45ed-be89-52746e7fc109 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1831.655487] env[63279]: DEBUG oslo_concurrency.lockutils [None req-271b2613-9c7a-40b6-867e-c4c5c1580416 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Acquiring lock "refresh_cache-c05e9aaa-e251-480c-94d6-56c29bb6282d" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1831.655655] env[63279]: DEBUG oslo_concurrency.lockutils [None req-271b2613-9c7a-40b6-867e-c4c5c1580416 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Acquired lock "refresh_cache-c05e9aaa-e251-480c-94d6-56c29bb6282d" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1831.656556] env[63279]: DEBUG nova.network.neutron [None req-271b2613-9c7a-40b6-867e-c4c5c1580416 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] [instance: c05e9aaa-e251-480c-94d6-56c29bb6282d] Building network info cache for instance {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1831.661836] env[63279]: DEBUG oslo_vmware.api [None req-e5b6a16c-1913-4848-8b42-b00fb552edc7 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Task: {'id': task-2086774, 'name': Rename_Task, 'duration_secs': 0.943043} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1831.661836] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-e5b6a16c-1913-4848-8b42-b00fb552edc7 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] [instance: 4a9088e0-2992-4b18-8be9-6bc70633369b] Powering on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1831.661836] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-3d4b699d-5e5c-4acc-8e29-44cf3c0efc03 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1831.664985] env[63279]: DEBUG nova.compute.manager [None req-38d597a6-cdfb-4da8-926a-02d3447e2334 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] [instance: 00fb1bc8-2f70-4fa3-a6a2-54fd38ba89c7] Starting instance... 
{{(pid=63279) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1831.672114] env[63279]: DEBUG oslo_vmware.api [None req-e5b6a16c-1913-4848-8b42-b00fb552edc7 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Waiting for the task: (returnval){ [ 1831.672114] env[63279]: value = "task-2086786" [ 1831.672114] env[63279]: _type = "Task" [ 1831.672114] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1831.684785] env[63279]: DEBUG oslo_vmware.api [None req-e5b6a16c-1913-4848-8b42-b00fb552edc7 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Task: {'id': task-2086786, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1831.709702] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-794aa5c5-2733-44bd-8e2a-1c9c43969c2d {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1831.717383] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa4b1fce-a800-46a1-9802-417f42a26e7a {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1831.755571] env[63279]: DEBUG nova.network.neutron [None req-271b2613-9c7a-40b6-867e-c4c5c1580416 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] [instance: 3d4db43f-5784-46e1-9710-f6becec011e2] Successfully created port: b2232b49-2a2b-47ea-b0a9-5ec94f197f11 {{(pid=63279) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1831.759249] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5fce114a-4d3d-4623-8dee-573ecb709391 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1831.762713] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-d61d218b-0e55-4b2f-bcac-53de9fd09b97 tempest-TenantUsagesTestJSON-1157774739 tempest-TenantUsagesTestJSON-1157774739-project-member] [instance: 8c712d0d-55c2-4a14-b759-9441594211e1] Unregistered the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1831.762713] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-d61d218b-0e55-4b2f-bcac-53de9fd09b97 tempest-TenantUsagesTestJSON-1157774739 tempest-TenantUsagesTestJSON-1157774739-project-member] [instance: 8c712d0d-55c2-4a14-b759-9441594211e1] Deleting contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1831.763030] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-d61d218b-0e55-4b2f-bcac-53de9fd09b97 tempest-TenantUsagesTestJSON-1157774739 tempest-TenantUsagesTestJSON-1157774739-project-member] Deleting the datastore file [datastore1] 8c712d0d-55c2-4a14-b759-9441594211e1 {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1831.764576] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-07f14ac5-cba6-4580-83ce-12a0277e136f {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1831.774035] env[63279]: DEBUG oslo_vmware.service [-] 
Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41052c6d-ad92-4ad5-a21b-219c0c0548f9 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1831.778318] env[63279]: DEBUG oslo_vmware.api [None req-d61d218b-0e55-4b2f-bcac-53de9fd09b97 tempest-TenantUsagesTestJSON-1157774739 tempest-TenantUsagesTestJSON-1157774739-project-member] Waiting for the task: (returnval){ [ 1831.778318] env[63279]: value = "task-2086787" [ 1831.778318] env[63279]: _type = "Task" [ 1831.778318] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1831.790873] env[63279]: DEBUG nova.compute.provider_tree [None req-af15306e-e4bd-4235-87e6-bad065fff03b tempest-VolumesAssistedSnapshotsTest-1202308672 tempest-VolumesAssistedSnapshotsTest-1202308672-project-member] Inventory has not changed in ProviderTree for provider: 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1831.798163] env[63279]: DEBUG oslo_vmware.api [None req-d61d218b-0e55-4b2f-bcac-53de9fd09b97 tempest-TenantUsagesTestJSON-1157774739 tempest-TenantUsagesTestJSON-1157774739-project-member] Task: {'id': task-2086787, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1831.811779] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2086778, 'name': CreateVM_Task, 'duration_secs': 0.723557} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1831.811954] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fcc5a636-554f-424e-a604-a8e7bd7cf574] Created VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1831.812750] env[63279]: DEBUG oslo_concurrency.lockutils [None req-a7076d91-54cd-478d-8de7-9cdce51faa84 tempest-AttachInterfacesUnderV243Test-18748603 tempest-AttachInterfacesUnderV243Test-18748603-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1831.813095] env[63279]: DEBUG oslo_concurrency.lockutils [None req-a7076d91-54cd-478d-8de7-9cdce51faa84 tempest-AttachInterfacesUnderV243Test-18748603 tempest-AttachInterfacesUnderV243Test-18748603-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1831.813576] env[63279]: DEBUG oslo_concurrency.lockutils [None req-a7076d91-54cd-478d-8de7-9cdce51faa84 tempest-AttachInterfacesUnderV243Test-18748603 tempest-AttachInterfacesUnderV243Test-18748603-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1831.813636] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cb87f9b6-b2d6-4be1-9e28-bc9557dfff8a {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1831.819813] env[63279]: DEBUG oslo_vmware.api [None req-a7076d91-54cd-478d-8de7-9cdce51faa84 
tempest-AttachInterfacesUnderV243Test-18748603 tempest-AttachInterfacesUnderV243Test-18748603-project-member] Waiting for the task: (returnval){ [ 1831.819813] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52d0f9e4-3cbc-afc9-80d1-69172b3e6dbe" [ 1831.819813] env[63279]: _type = "Task" [ 1831.819813] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1831.828598] env[63279]: DEBUG oslo_vmware.api [None req-a7076d91-54cd-478d-8de7-9cdce51faa84 tempest-AttachInterfacesUnderV243Test-18748603 tempest-AttachInterfacesUnderV243Test-18748603-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52d0f9e4-3cbc-afc9-80d1-69172b3e6dbe, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1831.926505] env[63279]: DEBUG nova.compute.manager [None req-1366226e-68a1-4af4-bafa-76a748070601 tempest-ServersAdminNegativeTestJSON-968304097 tempest-ServersAdminNegativeTestJSON-968304097-project-member] [instance: 32e84715-0345-4171-abb7-c034a501347e] Starting instance... {{(pid=63279) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1832.053864] env[63279]: DEBUG oslo_vmware.api [None req-21fcbb29-dd97-4731-aa59-0856e81c5314 tempest-ServerExternalEventsTest-2060906853 tempest-ServerExternalEventsTest-2060906853-project-member] Task: {'id': task-2086784, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1832.067280] env[63279]: DEBUG oslo_vmware.api [None req-9be70fed-855a-424c-af8e-36597da29526 tempest-ImagesOneServerTestJSON-859902932 tempest-ImagesOneServerTestJSON-859902932-project-member] Task: {'id': task-2086773, 'name': ReconfigVM_Task, 'duration_secs': 1.374442} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1832.067280] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-9be70fed-855a-424c-af8e-36597da29526 tempest-ImagesOneServerTestJSON-859902932 tempest-ImagesOneServerTestJSON-859902932-project-member] [instance: 7cd673bb-3795-4c6f-a1e0-a8ea0da0a35f] Reconfigured VM instance instance-0000000b to attach disk [datastore1] 7cd673bb-3795-4c6f-a1e0-a8ea0da0a35f/7cd673bb-3795-4c6f-a1e0-a8ea0da0a35f.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1832.067406] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-12c06247-d880-405a-8196-352195409033 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1832.074319] env[63279]: DEBUG oslo_vmware.api [None req-9be70fed-855a-424c-af8e-36597da29526 tempest-ImagesOneServerTestJSON-859902932 tempest-ImagesOneServerTestJSON-859902932-project-member] Waiting for the task: (returnval){ [ 1832.074319] env[63279]: value = "task-2086788" [ 1832.074319] env[63279]: _type = "Task" [ 1832.074319] env[63279]: } to complete. 
{{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1832.086032] env[63279]: DEBUG oslo_vmware.api [None req-9be70fed-855a-424c-af8e-36597da29526 tempest-ImagesOneServerTestJSON-859902932 tempest-ImagesOneServerTestJSON-859902932-project-member] Task: {'id': task-2086788, 'name': Rename_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1832.101814] env[63279]: DEBUG nova.network.neutron [req-e87a53b0-e315-4eb4-8ca6-37479fe31127 req-b6085691-f605-4a12-9708-6f3ba30c1c18 service nova] [instance: fcc5a636-554f-424e-a604-a8e7bd7cf574] Updated VIF entry in instance network info cache for port e1063b89-be88-474a-a2ec-b61eb11cf9fe. {{(pid=63279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1832.101814] env[63279]: DEBUG nova.network.neutron [req-e87a53b0-e315-4eb4-8ca6-37479fe31127 req-b6085691-f605-4a12-9708-6f3ba30c1c18 service nova] [instance: fcc5a636-554f-424e-a604-a8e7bd7cf574] Updating instance_info_cache with network_info: [{"id": "e1063b89-be88-474a-a2ec-b61eb11cf9fe", "address": "fa:16:3e:b1:26:30", "network": {"id": "50f30894-1239-497e-9f70-afa5b0c429ea", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-85780566-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dfdf69a97cf54d5cb8c4fb1c59b6a5d0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f2e45023-22b5-458b-826e-9b7eb69ba028", "external-id": "nsx-vlan-transportzone-614", "segmentation_id": 614, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape1063b89-be", "ovs_interfaceid": "e1063b89-be88-474a-a2ec-b61eb11cf9fe", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1832.120436] env[63279]: DEBUG nova.compute.manager [None req-c0309a7d-34b9-4510-8ce8-8d05589e9871 tempest-ServerDiagnosticsV248Test-1904406206 tempest-ServerDiagnosticsV248Test-1904406206-project-admin] [instance: abd63285-ee3c-4546-b86d-6d4388765d94] Checking state {{(pid=63279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1832.121355] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4651dc7a-7732-418d-b1a1-75a64b5fee51 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1832.128662] env[63279]: INFO nova.compute.manager [None req-c0309a7d-34b9-4510-8ce8-8d05589e9871 tempest-ServerDiagnosticsV248Test-1904406206 tempest-ServerDiagnosticsV248Test-1904406206-project-admin] [instance: abd63285-ee3c-4546-b86d-6d4388765d94] Retrieving diagnostics [ 1832.129518] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c665821-900d-4534-980e-52dfe797fabb {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
1832.159636] env[63279]: DEBUG nova.compute.manager [None req-271b2613-9c7a-40b6-867e-c4c5c1580416 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] [instance: 3d4db43f-5784-46e1-9710-f6becec011e2] Start spawning the instance on the hypervisor. {{(pid=63279) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1832.187030] env[63279]: DEBUG oslo_vmware.api [None req-e5b6a16c-1913-4848-8b42-b00fb552edc7 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Task: {'id': task-2086786, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1832.192547] env[63279]: DEBUG oslo_concurrency.lockutils [None req-38d597a6-cdfb-4da8-926a-02d3447e2334 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1832.196860] env[63279]: DEBUG nova.virt.hardware [None req-271b2613-9c7a-40b6-867e-c4c5c1580416 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-13T17:30:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-13T17:30:01Z,direct_url=,disk_format='vmdk',id=30887889-e45b-4f67-8b3c-16216e594a90,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a935e86eee0a4c38adfe0367d2097a61',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-13T17:30:02Z,virtual_size=,visibility=), allow threads: False {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1832.197262] env[63279]: DEBUG nova.virt.hardware [None req-271b2613-9c7a-40b6-867e-c4c5c1580416 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Flavor limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1832.197524] env[63279]: DEBUG nova.virt.hardware [None req-271b2613-9c7a-40b6-867e-c4c5c1580416 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Image limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1832.197810] env[63279]: DEBUG nova.virt.hardware [None req-271b2613-9c7a-40b6-867e-c4c5c1580416 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Flavor pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1832.198031] env[63279]: DEBUG nova.virt.hardware [None req-271b2613-9c7a-40b6-867e-c4c5c1580416 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Image pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1832.198208] env[63279]: DEBUG nova.virt.hardware [None req-271b2613-9c7a-40b6-867e-c4c5c1580416 
tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1832.198427] env[63279]: DEBUG nova.virt.hardware [None req-271b2613-9c7a-40b6-867e-c4c5c1580416 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1832.198595] env[63279]: DEBUG nova.virt.hardware [None req-271b2613-9c7a-40b6-867e-c4c5c1580416 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1832.198767] env[63279]: DEBUG nova.virt.hardware [None req-271b2613-9c7a-40b6-867e-c4c5c1580416 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Got 1 possible topologies {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1832.198935] env[63279]: DEBUG nova.virt.hardware [None req-271b2613-9c7a-40b6-867e-c4c5c1580416 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1832.199125] env[63279]: DEBUG nova.virt.hardware [None req-271b2613-9c7a-40b6-867e-c4c5c1580416 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1832.200292] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ba9fa45-c4cd-4e54-8fcd-1aaec88ce4d7 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1832.208237] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b199085-63e2-4506-8411-1a0dc784a7bc {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1832.264208] env[63279]: DEBUG nova.network.neutron [None req-271b2613-9c7a-40b6-867e-c4c5c1580416 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] [instance: c05e9aaa-e251-480c-94d6-56c29bb6282d] Instance cache missing network info. {{(pid=63279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1832.288411] env[63279]: DEBUG oslo_vmware.api [None req-d61d218b-0e55-4b2f-bcac-53de9fd09b97 tempest-TenantUsagesTestJSON-1157774739 tempest-TenantUsagesTestJSON-1157774739-project-member] Task: {'id': task-2086787, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.482409} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1832.288411] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-d61d218b-0e55-4b2f-bcac-53de9fd09b97 tempest-TenantUsagesTestJSON-1157774739 tempest-TenantUsagesTestJSON-1157774739-project-member] Deleted the datastore file {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1832.288411] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-d61d218b-0e55-4b2f-bcac-53de9fd09b97 tempest-TenantUsagesTestJSON-1157774739 tempest-TenantUsagesTestJSON-1157774739-project-member] [instance: 8c712d0d-55c2-4a14-b759-9441594211e1] Deleted contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1832.288606] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-d61d218b-0e55-4b2f-bcac-53de9fd09b97 tempest-TenantUsagesTestJSON-1157774739 tempest-TenantUsagesTestJSON-1157774739-project-member] [instance: 8c712d0d-55c2-4a14-b759-9441594211e1] Instance destroyed {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1832.288606] env[63279]: INFO nova.compute.manager [None req-d61d218b-0e55-4b2f-bcac-53de9fd09b97 tempest-TenantUsagesTestJSON-1157774739 tempest-TenantUsagesTestJSON-1157774739-project-member] [instance: 8c712d0d-55c2-4a14-b759-9441594211e1] Took 1.18 seconds to destroy the instance on the hypervisor. [ 1832.288837] env[63279]: DEBUG oslo.service.loopingcall [None req-d61d218b-0e55-4b2f-bcac-53de9fd09b97 tempest-TenantUsagesTestJSON-1157774739 tempest-TenantUsagesTestJSON-1157774739-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1832.289905] env[63279]: DEBUG nova.compute.manager [-] [instance: 8c712d0d-55c2-4a14-b759-9441594211e1] Deallocating network for instance {{(pid=63279) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1832.289905] env[63279]: DEBUG nova.network.neutron [-] [instance: 8c712d0d-55c2-4a14-b759-9441594211e1] deallocate_for_instance() {{(pid=63279) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1832.293773] env[63279]: DEBUG nova.scheduler.client.report [None req-af15306e-e4bd-4235-87e6-bad065fff03b tempest-VolumesAssistedSnapshotsTest-1202308672 tempest-VolumesAssistedSnapshotsTest-1202308672-project-member] Inventory has not changed for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1832.332569] env[63279]: DEBUG oslo_vmware.api [None req-a7076d91-54cd-478d-8de7-9cdce51faa84 tempest-AttachInterfacesUnderV243Test-18748603 tempest-AttachInterfacesUnderV243Test-18748603-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52d0f9e4-3cbc-afc9-80d1-69172b3e6dbe, 'name': SearchDatastore_Task, 'duration_secs': 0.036804} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1832.334662] env[63279]: DEBUG oslo_concurrency.lockutils [None req-a7076d91-54cd-478d-8de7-9cdce51faa84 tempest-AttachInterfacesUnderV243Test-18748603 tempest-AttachInterfacesUnderV243Test-18748603-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1832.334662] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-a7076d91-54cd-478d-8de7-9cdce51faa84 tempest-AttachInterfacesUnderV243Test-18748603 tempest-AttachInterfacesUnderV243Test-18748603-project-member] [instance: fcc5a636-554f-424e-a604-a8e7bd7cf574] Processing image 30887889-e45b-4f67-8b3c-16216e594a90 {{(pid=63279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1832.334662] env[63279]: DEBUG oslo_concurrency.lockutils [None req-a7076d91-54cd-478d-8de7-9cdce51faa84 tempest-AttachInterfacesUnderV243Test-18748603 tempest-AttachInterfacesUnderV243Test-18748603-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1832.334662] env[63279]: DEBUG oslo_concurrency.lockutils [None req-a7076d91-54cd-478d-8de7-9cdce51faa84 tempest-AttachInterfacesUnderV243Test-18748603 tempest-AttachInterfacesUnderV243Test-18748603-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1832.334900] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-a7076d91-54cd-478d-8de7-9cdce51faa84 tempest-AttachInterfacesUnderV243Test-18748603 tempest-AttachInterfacesUnderV243Test-18748603-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1832.334900] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8a8e21ff-499c-4f59-bf2f-2ee37c74ad43 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1832.342539] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-a7076d91-54cd-478d-8de7-9cdce51faa84 tempest-AttachInterfacesUnderV243Test-18748603 tempest-AttachInterfacesUnderV243Test-18748603-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1832.342731] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-a7076d91-54cd-478d-8de7-9cdce51faa84 tempest-AttachInterfacesUnderV243Test-18748603 tempest-AttachInterfacesUnderV243Test-18748603-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1832.343951] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8fa703cf-64c4-4fb4-9573-b3dee68b5678 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1832.349770] env[63279]: DEBUG oslo_vmware.api [None req-a7076d91-54cd-478d-8de7-9cdce51faa84 tempest-AttachInterfacesUnderV243Test-18748603 tempest-AttachInterfacesUnderV243Test-18748603-project-member] Waiting for the task: (returnval){ [ 1832.349770] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]5289deb3-5df7-ce06-d052-fb7fb21a8ea8" [ 1832.349770] env[63279]: _type = "Task" [ 1832.349770] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1832.357356] env[63279]: DEBUG oslo_vmware.api [None req-a7076d91-54cd-478d-8de7-9cdce51faa84 tempest-AttachInterfacesUnderV243Test-18748603 tempest-AttachInterfacesUnderV243Test-18748603-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]5289deb3-5df7-ce06-d052-fb7fb21a8ea8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1832.458986] env[63279]: DEBUG oslo_concurrency.lockutils [None req-1366226e-68a1-4af4-bafa-76a748070601 tempest-ServersAdminNegativeTestJSON-968304097 tempest-ServersAdminNegativeTestJSON-968304097-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1832.469599] env[63279]: DEBUG nova.network.neutron [-] [instance: 21c2bba1-5482-496a-9e2a-f123a94ed48a] Updating instance_info_cache with network_info: [] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1832.524719] env[63279]: DEBUG nova.compute.manager [req-916baaae-e0b4-4263-9438-ebce1e416c61 req-151cb0fa-da9e-4251-80e0-b1161089380e service nova] [instance: a301d225-684d-4f88-bc9b-7e02b8115b9d] Received event network-vif-deleted-05cc981a-fb6c-4723-8b86-d58715a9c5e9 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 1832.555021] env[63279]: DEBUG oslo_vmware.api [None req-21fcbb29-dd97-4731-aa59-0856e81c5314 tempest-ServerExternalEventsTest-2060906853 tempest-ServerExternalEventsTest-2060906853-project-member] Task: {'id': task-2086784, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.563752} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1832.555374] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-21fcbb29-dd97-4731-aa59-0856e81c5314 tempest-ServerExternalEventsTest-2060906853 tempest-ServerExternalEventsTest-2060906853-project-member] Deleted the datastore file {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1832.555703] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-21fcbb29-dd97-4731-aa59-0856e81c5314 tempest-ServerExternalEventsTest-2060906853 tempest-ServerExternalEventsTest-2060906853-project-member] [instance: 24bce28c-fc43-4f17-9800-4d980f6729bc] Deleted contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1832.557216] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-21fcbb29-dd97-4731-aa59-0856e81c5314 tempest-ServerExternalEventsTest-2060906853 tempest-ServerExternalEventsTest-2060906853-project-member] [instance: 24bce28c-fc43-4f17-9800-4d980f6729bc] Instance destroyed {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1832.557216] env[63279]: INFO nova.compute.manager [None req-21fcbb29-dd97-4731-aa59-0856e81c5314 tempest-ServerExternalEventsTest-2060906853 tempest-ServerExternalEventsTest-2060906853-project-member] [instance: 24bce28c-fc43-4f17-9800-4d980f6729bc] Took 1.70 seconds to destroy the instance on the hypervisor. [ 1832.557216] env[63279]: DEBUG oslo.service.loopingcall [None req-21fcbb29-dd97-4731-aa59-0856e81c5314 tempest-ServerExternalEventsTest-2060906853 tempest-ServerExternalEventsTest-2060906853-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1832.557216] env[63279]: DEBUG nova.compute.manager [-] [instance: 24bce28c-fc43-4f17-9800-4d980f6729bc] Deallocating network for instance {{(pid=63279) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1832.557216] env[63279]: DEBUG nova.network.neutron [-] [instance: 24bce28c-fc43-4f17-9800-4d980f6729bc] deallocate_for_instance() {{(pid=63279) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1832.586918] env[63279]: DEBUG oslo_vmware.api [None req-9be70fed-855a-424c-af8e-36597da29526 tempest-ImagesOneServerTestJSON-859902932 tempest-ImagesOneServerTestJSON-859902932-project-member] Task: {'id': task-2086788, 'name': Rename_Task, 'duration_secs': 0.239111} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1832.589190] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-9be70fed-855a-424c-af8e-36597da29526 tempest-ImagesOneServerTestJSON-859902932 tempest-ImagesOneServerTestJSON-859902932-project-member] [instance: 7cd673bb-3795-4c6f-a1e0-a8ea0da0a35f] Powering on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1832.591728] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e3be3d0d-6b99-4118-91ac-0fba3b3af5da {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1832.599482] env[63279]: DEBUG oslo_vmware.api [None req-9be70fed-855a-424c-af8e-36597da29526 tempest-ImagesOneServerTestJSON-859902932 tempest-ImagesOneServerTestJSON-859902932-project-member] Waiting for the task: (returnval){ [ 1832.599482] env[63279]: value = "task-2086789" [ 1832.599482] env[63279]: _type = "Task" [ 1832.599482] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1832.611152] env[63279]: DEBUG oslo_concurrency.lockutils [req-e87a53b0-e315-4eb4-8ca6-37479fe31127 req-b6085691-f605-4a12-9708-6f3ba30c1c18 service nova] Releasing lock "refresh_cache-fcc5a636-554f-424e-a604-a8e7bd7cf574" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1832.611788] env[63279]: DEBUG oslo_vmware.api [None req-9be70fed-855a-424c-af8e-36597da29526 tempest-ImagesOneServerTestJSON-859902932 tempest-ImagesOneServerTestJSON-859902932-project-member] Task: {'id': task-2086789, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1832.634208] env[63279]: DEBUG nova.network.neutron [None req-271b2613-9c7a-40b6-867e-c4c5c1580416 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] [instance: c05e9aaa-e251-480c-94d6-56c29bb6282d] Updating instance_info_cache with network_info: [{"id": "404425c8-83a7-4c0b-b938-18e89530246f", "address": "fa:16:3e:7e:2f:9d", "network": {"id": "0a342148-49f5-4993-8a5d-a1f27ddd6002", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-668871377-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "70c8735ebd5740c4b8b4d0cf8635da71", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d4345ef6-a7c8-4c1c-badf-a0d4f578b61c", "external-id": "nsx-vlan-transportzone-677", "segmentation_id": 677, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap404425c8-83", "ovs_interfaceid": "404425c8-83a7-4c0b-b938-18e89530246f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1832.693036] env[63279]: DEBUG oslo_vmware.api [None req-e5b6a16c-1913-4848-8b42-b00fb552edc7 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Task: {'id': task-2086786, 'name': PowerOnVM_Task, 'duration_secs': 0.934199} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1832.693036] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-e5b6a16c-1913-4848-8b42-b00fb552edc7 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] [instance: 4a9088e0-2992-4b18-8be9-6bc70633369b] Powered on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1832.693036] env[63279]: INFO nova.compute.manager [None req-e5b6a16c-1913-4848-8b42-b00fb552edc7 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] [instance: 4a9088e0-2992-4b18-8be9-6bc70633369b] Took 14.67 seconds to spawn the instance on the hypervisor. 
[ 1832.693036] env[63279]: DEBUG nova.compute.manager [None req-e5b6a16c-1913-4848-8b42-b00fb552edc7 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] [instance: 4a9088e0-2992-4b18-8be9-6bc70633369b] Checking state {{(pid=63279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1832.693720] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40d9fb92-ba56-46a6-bb06-e3ba4a4ffab3 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1832.799846] env[63279]: DEBUG oslo_concurrency.lockutils [None req-af15306e-e4bd-4235-87e6-bad065fff03b tempest-VolumesAssistedSnapshotsTest-1202308672 tempest-VolumesAssistedSnapshotsTest-1202308672-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.691s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1832.799946] env[63279]: DEBUG nova.compute.manager [None req-af15306e-e4bd-4235-87e6-bad065fff03b tempest-VolumesAssistedSnapshotsTest-1202308672 tempest-VolumesAssistedSnapshotsTest-1202308672-project-member] [instance: 5d4909ea-396c-45ba-9ff5-acb8576150b3] Start building networks asynchronously for instance. {{(pid=63279) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1832.805112] env[63279]: DEBUG oslo_concurrency.lockutils [None req-62274c4c-fa27-46a5-a0f4-00e5f6b30c55 tempest-ServerDiagnosticsNegativeTest-1669012955 tempest-ServerDiagnosticsNegativeTest-1669012955-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 8.753s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1832.805112] env[63279]: DEBUG nova.objects.instance [None req-62274c4c-fa27-46a5-a0f4-00e5f6b30c55 tempest-ServerDiagnosticsNegativeTest-1669012955 tempest-ServerDiagnosticsNegativeTest-1669012955-project-member] Lazy-loading 'resources' on Instance uuid 4063d5e0-1144-40fa-8ed8-efda16730617 {{(pid=63279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1832.863227] env[63279]: DEBUG oslo_vmware.api [None req-a7076d91-54cd-478d-8de7-9cdce51faa84 tempest-AttachInterfacesUnderV243Test-18748603 tempest-AttachInterfacesUnderV243Test-18748603-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]5289deb3-5df7-ce06-d052-fb7fb21a8ea8, 'name': SearchDatastore_Task, 'duration_secs': 0.028328} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1832.864060] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8ce9b6e1-9289-41fc-9868-25c79076422d {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1832.872833] env[63279]: DEBUG nova.compute.manager [req-dd048739-7c39-41fc-94c2-bf396e123bd1 req-cf3bd06f-814e-4b0a-bb32-ca8965f3478e service nova] [instance: c05e9aaa-e251-480c-94d6-56c29bb6282d] Received event network-vif-plugged-404425c8-83a7-4c0b-b938-18e89530246f {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 1832.873070] env[63279]: DEBUG oslo_concurrency.lockutils [req-dd048739-7c39-41fc-94c2-bf396e123bd1 req-cf3bd06f-814e-4b0a-bb32-ca8965f3478e service nova] Acquiring lock "c05e9aaa-e251-480c-94d6-56c29bb6282d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1832.873318] env[63279]: DEBUG oslo_concurrency.lockutils [req-dd048739-7c39-41fc-94c2-bf396e123bd1 req-cf3bd06f-814e-4b0a-bb32-ca8965f3478e service nova] Lock "c05e9aaa-e251-480c-94d6-56c29bb6282d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1832.873485] env[63279]: DEBUG oslo_concurrency.lockutils [req-dd048739-7c39-41fc-94c2-bf396e123bd1 req-cf3bd06f-814e-4b0a-bb32-ca8965f3478e service nova] Lock "c05e9aaa-e251-480c-94d6-56c29bb6282d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1832.873647] env[63279]: DEBUG nova.compute.manager [req-dd048739-7c39-41fc-94c2-bf396e123bd1 req-cf3bd06f-814e-4b0a-bb32-ca8965f3478e service nova] [instance: c05e9aaa-e251-480c-94d6-56c29bb6282d] No waiting events found dispatching network-vif-plugged-404425c8-83a7-4c0b-b938-18e89530246f {{(pid=63279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1832.873814] env[63279]: WARNING nova.compute.manager [req-dd048739-7c39-41fc-94c2-bf396e123bd1 req-cf3bd06f-814e-4b0a-bb32-ca8965f3478e service nova] [instance: c05e9aaa-e251-480c-94d6-56c29bb6282d] Received unexpected event network-vif-plugged-404425c8-83a7-4c0b-b938-18e89530246f for instance with vm_state building and task_state spawning. [ 1832.873994] env[63279]: DEBUG nova.compute.manager [req-dd048739-7c39-41fc-94c2-bf396e123bd1 req-cf3bd06f-814e-4b0a-bb32-ca8965f3478e service nova] [instance: c05e9aaa-e251-480c-94d6-56c29bb6282d] Received event network-changed-404425c8-83a7-4c0b-b938-18e89530246f {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 1832.874132] env[63279]: DEBUG nova.compute.manager [req-dd048739-7c39-41fc-94c2-bf396e123bd1 req-cf3bd06f-814e-4b0a-bb32-ca8965f3478e service nova] [instance: c05e9aaa-e251-480c-94d6-56c29bb6282d] Refreshing instance network info cache due to event network-changed-404425c8-83a7-4c0b-b938-18e89530246f. 
{{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11655}} [ 1832.874300] env[63279]: DEBUG oslo_concurrency.lockutils [req-dd048739-7c39-41fc-94c2-bf396e123bd1 req-cf3bd06f-814e-4b0a-bb32-ca8965f3478e service nova] Acquiring lock "refresh_cache-c05e9aaa-e251-480c-94d6-56c29bb6282d" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1832.880472] env[63279]: DEBUG oslo_vmware.api [None req-a7076d91-54cd-478d-8de7-9cdce51faa84 tempest-AttachInterfacesUnderV243Test-18748603 tempest-AttachInterfacesUnderV243Test-18748603-project-member] Waiting for the task: (returnval){ [ 1832.880472] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]526d3adb-18b3-127e-3746-214520723518" [ 1832.880472] env[63279]: _type = "Task" [ 1832.880472] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1832.889769] env[63279]: DEBUG oslo_vmware.api [None req-a7076d91-54cd-478d-8de7-9cdce51faa84 tempest-AttachInterfacesUnderV243Test-18748603 tempest-AttachInterfacesUnderV243Test-18748603-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]526d3adb-18b3-127e-3746-214520723518, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1832.972107] env[63279]: INFO nova.compute.manager [-] [instance: 21c2bba1-5482-496a-9e2a-f123a94ed48a] Took 1.37 seconds to deallocate network for instance. [ 1833.030095] env[63279]: DEBUG nova.network.neutron [-] [instance: 8c712d0d-55c2-4a14-b759-9441594211e1] Updating instance_info_cache with network_info: [] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1833.110405] env[63279]: DEBUG oslo_vmware.api [None req-9be70fed-855a-424c-af8e-36597da29526 tempest-ImagesOneServerTestJSON-859902932 tempest-ImagesOneServerTestJSON-859902932-project-member] Task: {'id': task-2086789, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1833.138642] env[63279]: DEBUG oslo_concurrency.lockutils [None req-271b2613-9c7a-40b6-867e-c4c5c1580416 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Releasing lock "refresh_cache-c05e9aaa-e251-480c-94d6-56c29bb6282d" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1833.138964] env[63279]: DEBUG nova.compute.manager [None req-271b2613-9c7a-40b6-867e-c4c5c1580416 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] [instance: c05e9aaa-e251-480c-94d6-56c29bb6282d] Instance network_info: |[{"id": "404425c8-83a7-4c0b-b938-18e89530246f", "address": "fa:16:3e:7e:2f:9d", "network": {"id": "0a342148-49f5-4993-8a5d-a1f27ddd6002", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-668871377-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "70c8735ebd5740c4b8b4d0cf8635da71", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d4345ef6-a7c8-4c1c-badf-a0d4f578b61c", "external-id": "nsx-vlan-transportzone-677", "segmentation_id": 677, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap404425c8-83", "ovs_interfaceid": "404425c8-83a7-4c0b-b938-18e89530246f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1833.139419] env[63279]: DEBUG oslo_concurrency.lockutils [req-dd048739-7c39-41fc-94c2-bf396e123bd1 req-cf3bd06f-814e-4b0a-bb32-ca8965f3478e service nova] Acquired lock "refresh_cache-c05e9aaa-e251-480c-94d6-56c29bb6282d" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1833.139479] env[63279]: DEBUG nova.network.neutron [req-dd048739-7c39-41fc-94c2-bf396e123bd1 req-cf3bd06f-814e-4b0a-bb32-ca8965f3478e service nova] [instance: c05e9aaa-e251-480c-94d6-56c29bb6282d] Refreshing network info cache for port 404425c8-83a7-4c0b-b938-18e89530246f {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1833.140783] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-271b2613-9c7a-40b6-867e-c4c5c1580416 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] [instance: c05e9aaa-e251-480c-94d6-56c29bb6282d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:7e:2f:9d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'd4345ef6-a7c8-4c1c-badf-a0d4f578b61c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '404425c8-83a7-4c0b-b938-18e89530246f', 'vif_model': 'vmxnet3'}] {{(pid=63279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1833.149185] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-271b2613-9c7a-40b6-867e-c4c5c1580416 tempest-MultipleCreateTestJSON-79500308 
tempest-MultipleCreateTestJSON-79500308-project-member] Creating folder: Project (70c8735ebd5740c4b8b4d0cf8635da71). Parent ref: group-v427491. {{(pid=63279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1833.150056] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c99ac5be-6dda-4432-90ee-25a14d8507c4 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1833.160823] env[63279]: INFO nova.virt.vmwareapi.vm_util [None req-271b2613-9c7a-40b6-867e-c4c5c1580416 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Created folder: Project (70c8735ebd5740c4b8b4d0cf8635da71) in parent group-v427491. [ 1833.161127] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-271b2613-9c7a-40b6-867e-c4c5c1580416 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Creating folder: Instances. Parent ref: group-v427532. {{(pid=63279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1833.161510] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-71cae35e-fd47-4823-a357-50b8e9294c6e {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1833.174070] env[63279]: INFO nova.virt.vmwareapi.vm_util [None req-271b2613-9c7a-40b6-867e-c4c5c1580416 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Created folder: Instances in parent group-v427532. [ 1833.174343] env[63279]: DEBUG oslo.service.loopingcall [None req-271b2613-9c7a-40b6-867e-c4c5c1580416 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1833.174554] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c05e9aaa-e251-480c-94d6-56c29bb6282d] Creating VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1833.177754] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f566a7fe-ae70-4e7e-a556-fd3902a06599 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1833.196961] env[63279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1833.196961] env[63279]: value = "task-2086793" [ 1833.196961] env[63279]: _type = "Task" [ 1833.196961] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1833.205510] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2086793, 'name': CreateVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1833.215500] env[63279]: INFO nova.compute.manager [None req-e5b6a16c-1913-4848-8b42-b00fb552edc7 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] [instance: 4a9088e0-2992-4b18-8be9-6bc70633369b] Took 34.66 seconds to build instance. 
[ 1833.313360] env[63279]: DEBUG nova.compute.utils [None req-af15306e-e4bd-4235-87e6-bad065fff03b tempest-VolumesAssistedSnapshotsTest-1202308672 tempest-VolumesAssistedSnapshotsTest-1202308672-project-member] Using /dev/sd instead of None {{(pid=63279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1833.315231] env[63279]: DEBUG nova.compute.manager [None req-af15306e-e4bd-4235-87e6-bad065fff03b tempest-VolumesAssistedSnapshotsTest-1202308672 tempest-VolumesAssistedSnapshotsTest-1202308672-project-member] [instance: 5d4909ea-396c-45ba-9ff5-acb8576150b3] Allocating IP information in the background. {{(pid=63279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1833.315459] env[63279]: DEBUG nova.network.neutron [None req-af15306e-e4bd-4235-87e6-bad065fff03b tempest-VolumesAssistedSnapshotsTest-1202308672 tempest-VolumesAssistedSnapshotsTest-1202308672-project-member] [instance: 5d4909ea-396c-45ba-9ff5-acb8576150b3] allocate_for_instance() {{(pid=63279) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1833.326693] env[63279]: DEBUG nova.network.neutron [-] [instance: 24bce28c-fc43-4f17-9800-4d980f6729bc] Updating instance_info_cache with network_info: [] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1833.361233] env[63279]: DEBUG nova.policy [None req-af15306e-e4bd-4235-87e6-bad065fff03b tempest-VolumesAssistedSnapshotsTest-1202308672 tempest-VolumesAssistedSnapshotsTest-1202308672-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '4f8ee0d477404371bb507a35d7f993c8', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '50803bde25ba44398da07d2a993e69f7', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63279) authorize /opt/stack/nova/nova/policy.py:192}} [ 1833.398357] env[63279]: DEBUG oslo_vmware.api [None req-a7076d91-54cd-478d-8de7-9cdce51faa84 tempest-AttachInterfacesUnderV243Test-18748603 tempest-AttachInterfacesUnderV243Test-18748603-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]526d3adb-18b3-127e-3746-214520723518, 'name': SearchDatastore_Task, 'duration_secs': 0.011719} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1833.398872] env[63279]: DEBUG oslo_concurrency.lockutils [None req-a7076d91-54cd-478d-8de7-9cdce51faa84 tempest-AttachInterfacesUnderV243Test-18748603 tempest-AttachInterfacesUnderV243Test-18748603-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1833.399102] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-a7076d91-54cd-478d-8de7-9cdce51faa84 tempest-AttachInterfacesUnderV243Test-18748603 tempest-AttachInterfacesUnderV243Test-18748603-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] fcc5a636-554f-424e-a604-a8e7bd7cf574/fcc5a636-554f-424e-a604-a8e7bd7cf574.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1833.399380] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-cc89b75c-e519-4305-a145-7a493de10dc1 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1833.407994] env[63279]: DEBUG oslo_vmware.api [None req-a7076d91-54cd-478d-8de7-9cdce51faa84 tempest-AttachInterfacesUnderV243Test-18748603 tempest-AttachInterfacesUnderV243Test-18748603-project-member] Waiting for the task: (returnval){ [ 1833.407994] env[63279]: value = "task-2086794" [ 1833.407994] env[63279]: _type = "Task" [ 1833.407994] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1833.417026] env[63279]: DEBUG oslo_vmware.api [None req-a7076d91-54cd-478d-8de7-9cdce51faa84 tempest-AttachInterfacesUnderV243Test-18748603 tempest-AttachInterfacesUnderV243Test-18748603-project-member] Task: {'id': task-2086794, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1833.481017] env[63279]: DEBUG oslo_concurrency.lockutils [None req-be29f42f-ff6f-406b-a7e4-862f984891e9 tempest-ServerPasswordTestJSON-1558733645 tempest-ServerPasswordTestJSON-1558733645-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1833.533029] env[63279]: INFO nova.compute.manager [-] [instance: 8c712d0d-55c2-4a14-b759-9441594211e1] Took 1.24 seconds to deallocate network for instance. [ 1833.621244] env[63279]: DEBUG oslo_vmware.api [None req-9be70fed-855a-424c-af8e-36597da29526 tempest-ImagesOneServerTestJSON-859902932 tempest-ImagesOneServerTestJSON-859902932-project-member] Task: {'id': task-2086789, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1833.709494] env[63279]: DEBUG nova.network.neutron [None req-af15306e-e4bd-4235-87e6-bad065fff03b tempest-VolumesAssistedSnapshotsTest-1202308672 tempest-VolumesAssistedSnapshotsTest-1202308672-project-member] [instance: 5d4909ea-396c-45ba-9ff5-acb8576150b3] Successfully created port: 887ee4a3-0844-4571-84d8-486d7c21a7d2 {{(pid=63279) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1833.720685] env[63279]: DEBUG oslo_concurrency.lockutils [None req-e5b6a16c-1913-4848-8b42-b00fb552edc7 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Lock "4a9088e0-2992-4b18-8be9-6bc70633369b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 36.181s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1833.720903] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2086793, 'name': CreateVM_Task, 'duration_secs': 0.515472} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1833.723108] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Lock "4a9088e0-2992-4b18-8be9-6bc70633369b" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 6.684s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1833.723108] env[63279]: INFO nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] [instance: 4a9088e0-2992-4b18-8be9-6bc70633369b] During sync_power_state the instance has a pending task (spawning). Skip. 
[ 1833.723108] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Lock "4a9088e0-2992-4b18-8be9-6bc70633369b" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1833.723108] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c05e9aaa-e251-480c-94d6-56c29bb6282d] Created VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1833.723955] env[63279]: DEBUG oslo_concurrency.lockutils [None req-271b2613-9c7a-40b6-867e-c4c5c1580416 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1833.724165] env[63279]: DEBUG oslo_concurrency.lockutils [None req-271b2613-9c7a-40b6-867e-c4c5c1580416 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1833.724491] env[63279]: DEBUG oslo_concurrency.lockutils [None req-271b2613-9c7a-40b6-867e-c4c5c1580416 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1833.724734] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b83d49e0-5924-4589-98a4-49db85109b38 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1833.729522] env[63279]: DEBUG oslo_vmware.api [None req-271b2613-9c7a-40b6-867e-c4c5c1580416 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Waiting for the task: (returnval){ [ 1833.729522] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]5211ca35-c3a3-c7ab-3c40-de2bf2fdfbd7" [ 1833.729522] env[63279]: _type = "Task" [ 1833.729522] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1833.745660] env[63279]: DEBUG oslo_vmware.api [None req-271b2613-9c7a-40b6-867e-c4c5c1580416 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]5211ca35-c3a3-c7ab-3c40-de2bf2fdfbd7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1833.821269] env[63279]: DEBUG nova.compute.manager [None req-af15306e-e4bd-4235-87e6-bad065fff03b tempest-VolumesAssistedSnapshotsTest-1202308672 tempest-VolumesAssistedSnapshotsTest-1202308672-project-member] [instance: 5d4909ea-396c-45ba-9ff5-acb8576150b3] Start building block device mappings for instance. 
{{(pid=63279) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1833.830432] env[63279]: INFO nova.compute.manager [-] [instance: 24bce28c-fc43-4f17-9800-4d980f6729bc] Took 1.27 seconds to deallocate network for instance. [ 1833.863661] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-155b5dbd-5b50-4f8c-94b9-79b3ba652ac2 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1833.874624] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0a84baf-8fc1-44c5-bbe4-f4d67542964e {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1833.922063] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b23e2fd-392d-478d-9f40-ac6d677ae3d6 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1833.936147] env[63279]: DEBUG oslo_vmware.api [None req-a7076d91-54cd-478d-8de7-9cdce51faa84 tempest-AttachInterfacesUnderV243Test-18748603 tempest-AttachInterfacesUnderV243Test-18748603-project-member] Task: {'id': task-2086794, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1833.938174] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-450ec1c1-601d-4b34-8a32-c5dcc16db772 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1833.966678] env[63279]: DEBUG nova.compute.provider_tree [None req-62274c4c-fa27-46a5-a0f4-00e5f6b30c55 tempest-ServerDiagnosticsNegativeTest-1669012955 tempest-ServerDiagnosticsNegativeTest-1669012955-project-member] Inventory has not changed in ProviderTree for provider: 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1834.044940] env[63279]: DEBUG oslo_concurrency.lockutils [None req-d61d218b-0e55-4b2f-bcac-53de9fd09b97 tempest-TenantUsagesTestJSON-1157774739 tempest-TenantUsagesTestJSON-1157774739-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1834.114832] env[63279]: DEBUG oslo_vmware.api [None req-9be70fed-855a-424c-af8e-36597da29526 tempest-ImagesOneServerTestJSON-859902932 tempest-ImagesOneServerTestJSON-859902932-project-member] Task: {'id': task-2086789, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1834.229630] env[63279]: DEBUG nova.compute.manager [None req-d9e47f76-071e-40ea-b4aa-ee775cfca50d tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] [instance: 5d4be656-defe-4332-b97e-e88b107ca4a1] Starting instance... 
{{(pid=63279) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1834.246408] env[63279]: DEBUG oslo_vmware.api [None req-271b2613-9c7a-40b6-867e-c4c5c1580416 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]5211ca35-c3a3-c7ab-3c40-de2bf2fdfbd7, 'name': SearchDatastore_Task, 'duration_secs': 0.017653} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1834.246470] env[63279]: DEBUG oslo_concurrency.lockutils [None req-271b2613-9c7a-40b6-867e-c4c5c1580416 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1834.246755] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-271b2613-9c7a-40b6-867e-c4c5c1580416 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] [instance: c05e9aaa-e251-480c-94d6-56c29bb6282d] Processing image 30887889-e45b-4f67-8b3c-16216e594a90 {{(pid=63279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1834.247087] env[63279]: DEBUG oslo_concurrency.lockutils [None req-271b2613-9c7a-40b6-867e-c4c5c1580416 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1834.247279] env[63279]: DEBUG oslo_concurrency.lockutils [None req-271b2613-9c7a-40b6-867e-c4c5c1580416 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1834.247505] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-271b2613-9c7a-40b6-867e-c4c5c1580416 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1834.248210] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-240cf211-d3f8-48a2-a291-b459f54e75f5 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1834.257489] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-271b2613-9c7a-40b6-867e-c4c5c1580416 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1834.257718] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-271b2613-9c7a-40b6-867e-c4c5c1580416 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1834.258529] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e74b51ba-c44d-49da-ab0b-9fa93222bc9b {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1834.264402] env[63279]: DEBUG oslo_vmware.api [None req-271b2613-9c7a-40b6-867e-c4c5c1580416 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Waiting for the task: (returnval){ [ 1834.264402] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]527752a0-4e5e-63ed-4487-56f77f3466c8" [ 1834.264402] env[63279]: _type = "Task" [ 1834.264402] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1834.273864] env[63279]: DEBUG oslo_vmware.api [None req-271b2613-9c7a-40b6-867e-c4c5c1580416 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]527752a0-4e5e-63ed-4487-56f77f3466c8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1834.340128] env[63279]: DEBUG oslo_concurrency.lockutils [None req-21fcbb29-dd97-4731-aa59-0856e81c5314 tempest-ServerExternalEventsTest-2060906853 tempest-ServerExternalEventsTest-2060906853-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1834.426844] env[63279]: DEBUG oslo_vmware.api [None req-a7076d91-54cd-478d-8de7-9cdce51faa84 tempest-AttachInterfacesUnderV243Test-18748603 tempest-AttachInterfacesUnderV243Test-18748603-project-member] Task: {'id': task-2086794, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.738117} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1834.426844] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-a7076d91-54cd-478d-8de7-9cdce51faa84 tempest-AttachInterfacesUnderV243Test-18748603 tempest-AttachInterfacesUnderV243Test-18748603-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] fcc5a636-554f-424e-a604-a8e7bd7cf574/fcc5a636-554f-424e-a604-a8e7bd7cf574.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1834.426844] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-a7076d91-54cd-478d-8de7-9cdce51faa84 tempest-AttachInterfacesUnderV243Test-18748603 tempest-AttachInterfacesUnderV243Test-18748603-project-member] [instance: fcc5a636-554f-424e-a604-a8e7bd7cf574] Extending root virtual disk to 1048576 {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1834.426844] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-3033fa68-d674-4c1b-a892-fc83a029fc6c {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1834.434212] env[63279]: DEBUG oslo_vmware.api [None req-a7076d91-54cd-478d-8de7-9cdce51faa84 tempest-AttachInterfacesUnderV243Test-18748603 tempest-AttachInterfacesUnderV243Test-18748603-project-member] Waiting for the task: (returnval){ [ 1834.434212] env[63279]: value = "task-2086795" [ 1834.434212] env[63279]: _type = "Task" [ 1834.434212] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1834.435088] env[63279]: DEBUG nova.network.neutron [req-dd048739-7c39-41fc-94c2-bf396e123bd1 req-cf3bd06f-814e-4b0a-bb32-ca8965f3478e service nova] [instance: c05e9aaa-e251-480c-94d6-56c29bb6282d] Updated VIF entry in instance network info cache for port 404425c8-83a7-4c0b-b938-18e89530246f. 
{{(pid=63279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1834.435528] env[63279]: DEBUG nova.network.neutron [req-dd048739-7c39-41fc-94c2-bf396e123bd1 req-cf3bd06f-814e-4b0a-bb32-ca8965f3478e service nova] [instance: c05e9aaa-e251-480c-94d6-56c29bb6282d] Updating instance_info_cache with network_info: [{"id": "404425c8-83a7-4c0b-b938-18e89530246f", "address": "fa:16:3e:7e:2f:9d", "network": {"id": "0a342148-49f5-4993-8a5d-a1f27ddd6002", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-668871377-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "70c8735ebd5740c4b8b4d0cf8635da71", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d4345ef6-a7c8-4c1c-badf-a0d4f578b61c", "external-id": "nsx-vlan-transportzone-677", "segmentation_id": 677, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap404425c8-83", "ovs_interfaceid": "404425c8-83a7-4c0b-b938-18e89530246f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1834.446624] env[63279]: DEBUG oslo_vmware.api [None req-a7076d91-54cd-478d-8de7-9cdce51faa84 tempest-AttachInterfacesUnderV243Test-18748603 tempest-AttachInterfacesUnderV243Test-18748603-project-member] Task: {'id': task-2086795, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1834.470452] env[63279]: DEBUG nova.scheduler.client.report [None req-62274c4c-fa27-46a5-a0f4-00e5f6b30c55 tempest-ServerDiagnosticsNegativeTest-1669012955 tempest-ServerDiagnosticsNegativeTest-1669012955-project-member] Inventory has not changed for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1834.500542] env[63279]: DEBUG nova.network.neutron [None req-271b2613-9c7a-40b6-867e-c4c5c1580416 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] [instance: 3d4db43f-5784-46e1-9710-f6becec011e2] Successfully updated port: b2232b49-2a2b-47ea-b0a9-5ec94f197f11 {{(pid=63279) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1835.307752] env[63279]: DEBUG nova.compute.manager [None req-af15306e-e4bd-4235-87e6-bad065fff03b tempest-VolumesAssistedSnapshotsTest-1202308672 tempest-VolumesAssistedSnapshotsTest-1202308672-project-member] [instance: 5d4909ea-396c-45ba-9ff5-acb8576150b3] Start spawning the instance on the hypervisor. 
{{(pid=63279) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1835.310118] env[63279]: DEBUG oslo_concurrency.lockutils [req-dd048739-7c39-41fc-94c2-bf396e123bd1 req-cf3bd06f-814e-4b0a-bb32-ca8965f3478e service nova] Releasing lock "refresh_cache-c05e9aaa-e251-480c-94d6-56c29bb6282d" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1835.311027] env[63279]: DEBUG oslo_concurrency.lockutils [None req-62274c4c-fa27-46a5-a0f4-00e5f6b30c55 tempest-ServerDiagnosticsNegativeTest-1669012955 tempest-ServerDiagnosticsNegativeTest-1669012955-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.507s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1835.314040] env[63279]: DEBUG oslo_concurrency.lockutils [None req-271b2613-9c7a-40b6-867e-c4c5c1580416 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Acquiring lock "refresh_cache-3d4db43f-5784-46e1-9710-f6becec011e2" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1835.314040] env[63279]: DEBUG oslo_concurrency.lockutils [None req-271b2613-9c7a-40b6-867e-c4c5c1580416 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Acquired lock "refresh_cache-3d4db43f-5784-46e1-9710-f6becec011e2" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1835.314040] env[63279]: DEBUG nova.network.neutron [None req-271b2613-9c7a-40b6-867e-c4c5c1580416 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] [instance: 3d4db43f-5784-46e1-9710-f6becec011e2] Building network info cache for instance {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1835.327052] env[63279]: DEBUG oslo_concurrency.lockutils [None req-99df1493-5715-4262-b84c-33faf3142bfd tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 9.842s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1835.329151] env[63279]: INFO nova.compute.claims [None req-99df1493-5715-4262-b84c-33faf3142bfd tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] [instance: 1a55008a-4d8c-403d-a8f4-966aa7346f4c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1835.342033] env[63279]: DEBUG oslo_concurrency.lockutils [None req-4432b190-b723-4c54-ac54-2b64412dd5dc tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Acquiring lock "5572bb1c-b378-4531-8459-10c2a2b7afdf" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1835.342214] env[63279]: DEBUG oslo_concurrency.lockutils [None req-4432b190-b723-4c54-ac54-2b64412dd5dc tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Lock "5572bb1c-b378-4531-8459-10c2a2b7afdf" acquired by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1835.342462] env[63279]: DEBUG oslo_vmware.api [None req-9be70fed-855a-424c-af8e-36597da29526 tempest-ImagesOneServerTestJSON-859902932 tempest-ImagesOneServerTestJSON-859902932-project-member] Task: {'id': task-2086789, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1835.342722] env[63279]: WARNING oslo_vmware.common.loopingcall [None req-9be70fed-855a-424c-af8e-36597da29526 tempest-ImagesOneServerTestJSON-859902932 tempest-ImagesOneServerTestJSON-859902932-project-member] task run outlasted interval by 0.23838099999999995 sec [ 1835.352384] env[63279]: DEBUG oslo_vmware.api [None req-271b2613-9c7a-40b6-867e-c4c5c1580416 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]527752a0-4e5e-63ed-4487-56f77f3466c8, 'name': SearchDatastore_Task, 'duration_secs': 0.015409} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1835.352871] env[63279]: DEBUG oslo_vmware.api [None req-a7076d91-54cd-478d-8de7-9cdce51faa84 tempest-AttachInterfacesUnderV243Test-18748603 tempest-AttachInterfacesUnderV243Test-18748603-project-member] Task: {'id': task-2086795, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.066274} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1835.353905] env[63279]: DEBUG oslo_concurrency.lockutils [None req-d9e47f76-071e-40ea-b4aa-ee775cfca50d tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1835.358621] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-a7076d91-54cd-478d-8de7-9cdce51faa84 tempest-AttachInterfacesUnderV243Test-18748603 tempest-AttachInterfacesUnderV243Test-18748603-project-member] [instance: fcc5a636-554f-424e-a604-a8e7bd7cf574] Extended root virtual disk {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1835.358621] env[63279]: INFO nova.scheduler.client.report [None req-62274c4c-fa27-46a5-a0f4-00e5f6b30c55 tempest-ServerDiagnosticsNegativeTest-1669012955 tempest-ServerDiagnosticsNegativeTest-1669012955-project-member] Deleted allocations for instance 4063d5e0-1144-40fa-8ed8-efda16730617 [ 1835.360500] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-42ec175e-5ecf-4f9b-8c40-c1d953c75d01 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1835.363560] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d794359-2c99-4ae0-86fd-e7e59ea3336c {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1835.371812] env[63279]: DEBUG nova.virt.hardware [None req-af15306e-e4bd-4235-87e6-bad065fff03b tempest-VolumesAssistedSnapshotsTest-1202308672 
tempest-VolumesAssistedSnapshotsTest-1202308672-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-13T17:30:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-13T17:30:01Z,direct_url=,disk_format='vmdk',id=30887889-e45b-4f67-8b3c-16216e594a90,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a935e86eee0a4c38adfe0367d2097a61',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-13T17:30:02Z,virtual_size=,visibility=), allow threads: False {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1835.372075] env[63279]: DEBUG nova.virt.hardware [None req-af15306e-e4bd-4235-87e6-bad065fff03b tempest-VolumesAssistedSnapshotsTest-1202308672 tempest-VolumesAssistedSnapshotsTest-1202308672-project-member] Flavor limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1835.372427] env[63279]: DEBUG nova.virt.hardware [None req-af15306e-e4bd-4235-87e6-bad065fff03b tempest-VolumesAssistedSnapshotsTest-1202308672 tempest-VolumesAssistedSnapshotsTest-1202308672-project-member] Image limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1835.372557] env[63279]: DEBUG nova.virt.hardware [None req-af15306e-e4bd-4235-87e6-bad065fff03b tempest-VolumesAssistedSnapshotsTest-1202308672 tempest-VolumesAssistedSnapshotsTest-1202308672-project-member] Flavor pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1835.372765] env[63279]: DEBUG nova.virt.hardware [None req-af15306e-e4bd-4235-87e6-bad065fff03b tempest-VolumesAssistedSnapshotsTest-1202308672 tempest-VolumesAssistedSnapshotsTest-1202308672-project-member] Image pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1835.372947] env[63279]: DEBUG nova.virt.hardware [None req-af15306e-e4bd-4235-87e6-bad065fff03b tempest-VolumesAssistedSnapshotsTest-1202308672 tempest-VolumesAssistedSnapshotsTest-1202308672-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1835.373272] env[63279]: DEBUG nova.virt.hardware [None req-af15306e-e4bd-4235-87e6-bad065fff03b tempest-VolumesAssistedSnapshotsTest-1202308672 tempest-VolumesAssistedSnapshotsTest-1202308672-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1835.373499] env[63279]: DEBUG nova.virt.hardware [None req-af15306e-e4bd-4235-87e6-bad065fff03b tempest-VolumesAssistedSnapshotsTest-1202308672 tempest-VolumesAssistedSnapshotsTest-1202308672-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1835.373756] env[63279]: DEBUG nova.virt.hardware [None req-af15306e-e4bd-4235-87e6-bad065fff03b tempest-VolumesAssistedSnapshotsTest-1202308672 
tempest-VolumesAssistedSnapshotsTest-1202308672-project-member] Got 1 possible topologies {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1835.373900] env[63279]: DEBUG nova.virt.hardware [None req-af15306e-e4bd-4235-87e6-bad065fff03b tempest-VolumesAssistedSnapshotsTest-1202308672 tempest-VolumesAssistedSnapshotsTest-1202308672-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1835.374128] env[63279]: DEBUG nova.virt.hardware [None req-af15306e-e4bd-4235-87e6-bad065fff03b tempest-VolumesAssistedSnapshotsTest-1202308672 tempest-VolumesAssistedSnapshotsTest-1202308672-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1835.375953] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5facf072-4519-42ce-be0e-20d2b55c6c59 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1835.383372] env[63279]: DEBUG oslo_vmware.api [None req-9be70fed-855a-424c-af8e-36597da29526 tempest-ImagesOneServerTestJSON-859902932 tempest-ImagesOneServerTestJSON-859902932-project-member] Task: {'id': task-2086789, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1835.385821] env[63279]: DEBUG oslo_vmware.api [None req-271b2613-9c7a-40b6-867e-c4c5c1580416 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Waiting for the task: (returnval){ [ 1835.385821] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52526e62-bc4a-5fc5-352a-a78e14a2d42c" [ 1835.385821] env[63279]: _type = "Task" [ 1835.385821] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1835.405607] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-a7076d91-54cd-478d-8de7-9cdce51faa84 tempest-AttachInterfacesUnderV243Test-18748603 tempest-AttachInterfacesUnderV243Test-18748603-project-member] [instance: fcc5a636-554f-424e-a604-a8e7bd7cf574] Reconfiguring VM instance instance-0000000d to attach disk [datastore1] fcc5a636-554f-424e-a604-a8e7bd7cf574/fcc5a636-554f-424e-a604-a8e7bd7cf574.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1835.409642] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1da8e5dc-a070-422e-ba04-b219300ad196 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1835.425987] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c44b993-fbf5-4bc3-a6e5-412ec293b9a5 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1835.436632] env[63279]: DEBUG oslo_vmware.api [None req-271b2613-9c7a-40b6-867e-c4c5c1580416 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52526e62-bc4a-5fc5-352a-a78e14a2d42c, 'name': SearchDatastore_Task, 'duration_secs': 0.016518} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1835.448450] env[63279]: DEBUG oslo_concurrency.lockutils [None req-271b2613-9c7a-40b6-867e-c4c5c1580416 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1835.448744] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-271b2613-9c7a-40b6-867e-c4c5c1580416 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] c05e9aaa-e251-480c-94d6-56c29bb6282d/c05e9aaa-e251-480c-94d6-56c29bb6282d.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1835.449606] env[63279]: DEBUG oslo_vmware.api [None req-a7076d91-54cd-478d-8de7-9cdce51faa84 tempest-AttachInterfacesUnderV243Test-18748603 tempest-AttachInterfacesUnderV243Test-18748603-project-member] Waiting for the task: (returnval){ [ 1835.449606] env[63279]: value = "task-2086797" [ 1835.449606] env[63279]: _type = "Task" [ 1835.449606] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1835.450250] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-207a4644-547e-4002-a289-b635b868e549 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1835.461469] env[63279]: DEBUG oslo_vmware.api [None req-a7076d91-54cd-478d-8de7-9cdce51faa84 tempest-AttachInterfacesUnderV243Test-18748603 tempest-AttachInterfacesUnderV243Test-18748603-project-member] Task: {'id': task-2086797, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1835.462895] env[63279]: DEBUG oslo_vmware.api [None req-271b2613-9c7a-40b6-867e-c4c5c1580416 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Waiting for the task: (returnval){ [ 1835.462895] env[63279]: value = "task-2086798" [ 1835.462895] env[63279]: _type = "Task" [ 1835.462895] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1835.470447] env[63279]: DEBUG oslo_vmware.api [None req-271b2613-9c7a-40b6-867e-c4c5c1580416 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Task: {'id': task-2086798, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1835.630759] env[63279]: DEBUG nova.network.neutron [None req-af15306e-e4bd-4235-87e6-bad065fff03b tempest-VolumesAssistedSnapshotsTest-1202308672 tempest-VolumesAssistedSnapshotsTest-1202308672-project-member] [instance: 5d4909ea-396c-45ba-9ff5-acb8576150b3] Successfully updated port: 887ee4a3-0844-4571-84d8-486d7c21a7d2 {{(pid=63279) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1835.638425] env[63279]: DEBUG nova.compute.manager [req-8530b16e-b0c2-4877-a88b-19afa467df2e req-ca94aba2-a30a-4f83-b4a9-1a55a2a11a61 service nova] [instance: 21c2bba1-5482-496a-9e2a-f123a94ed48a] Received event network-vif-deleted-c3f2f036-d3a2-4b32-b33a-516605f2a1b1 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 1835.638639] env[63279]: DEBUG nova.compute.manager [req-8530b16e-b0c2-4877-a88b-19afa467df2e req-ca94aba2-a30a-4f83-b4a9-1a55a2a11a61 service nova] [instance: 8c712d0d-55c2-4a14-b759-9441594211e1] Received event network-vif-deleted-a1328084-8103-45cf-a3f8-b825bebe06c0 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 1835.638852] env[63279]: DEBUG nova.compute.manager [req-8530b16e-b0c2-4877-a88b-19afa467df2e req-ca94aba2-a30a-4f83-b4a9-1a55a2a11a61 service nova] [instance: 24bce28c-fc43-4f17-9800-4d980f6729bc] Received event network-vif-deleted-c3997b27-53bf-4f2d-828a-a55447612331 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 1835.843562] env[63279]: DEBUG nova.compute.manager [None req-8d7e8546-3d55-4a5a-887c-fa75360a7a03 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] [instance: 5656c853-ac83-47be-83c4-979a9e87ab91] Stashing vm_state: active {{(pid=63279) _prep_resize /opt/stack/nova/nova/compute/manager.py:6136}} [ 1835.861847] env[63279]: DEBUG oslo_vmware.api [None req-9be70fed-855a-424c-af8e-36597da29526 tempest-ImagesOneServerTestJSON-859902932 tempest-ImagesOneServerTestJSON-859902932-project-member] Task: {'id': task-2086789, 'name': PowerOnVM_Task, 'duration_secs': 3.010345} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1835.865015] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-9be70fed-855a-424c-af8e-36597da29526 tempest-ImagesOneServerTestJSON-859902932 tempest-ImagesOneServerTestJSON-859902932-project-member] [instance: 7cd673bb-3795-4c6f-a1e0-a8ea0da0a35f] Powered on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1835.865015] env[63279]: INFO nova.compute.manager [None req-9be70fed-855a-424c-af8e-36597da29526 tempest-ImagesOneServerTestJSON-859902932 tempest-ImagesOneServerTestJSON-859902932-project-member] [instance: 7cd673bb-3795-4c6f-a1e0-a8ea0da0a35f] Took 15.28 seconds to spawn the instance on the hypervisor. 
[ 1835.865015] env[63279]: DEBUG nova.compute.manager [None req-9be70fed-855a-424c-af8e-36597da29526 tempest-ImagesOneServerTestJSON-859902932 tempest-ImagesOneServerTestJSON-859902932-project-member] [instance: 7cd673bb-3795-4c6f-a1e0-a8ea0da0a35f] Checking state {{(pid=63279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1835.865015] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0972f382-c15b-4679-af28-4b5890bbd45a {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1835.878505] env[63279]: DEBUG nova.network.neutron [None req-271b2613-9c7a-40b6-867e-c4c5c1580416 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] [instance: 3d4db43f-5784-46e1-9710-f6becec011e2] Instance cache missing network info. {{(pid=63279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1835.886348] env[63279]: DEBUG oslo_concurrency.lockutils [None req-62274c4c-fa27-46a5-a0f4-00e5f6b30c55 tempest-ServerDiagnosticsNegativeTest-1669012955 tempest-ServerDiagnosticsNegativeTest-1669012955-project-member] Lock "4063d5e0-1144-40fa-8ed8-efda16730617" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 16.393s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1835.890530] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Lock "4063d5e0-1144-40fa-8ed8-efda16730617" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 8.855s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1835.891172] env[63279]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b4860c8e-0045-4b9d-ab9a-210842f6c428 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1835.902527] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ad17be3-f2c5-49c0-b6b3-e42f308b0568 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1835.962449] env[63279]: DEBUG oslo_vmware.api [None req-a7076d91-54cd-478d-8de7-9cdce51faa84 tempest-AttachInterfacesUnderV243Test-18748603 tempest-AttachInterfacesUnderV243Test-18748603-project-member] Task: {'id': task-2086797, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1835.964822] env[63279]: DEBUG nova.compute.manager [req-f64a124f-f574-475b-a2a8-ebfa279559e4 req-819a9d1c-6b12-4899-a41a-b1a16e5f1959 service nova] [instance: 3d4db43f-5784-46e1-9710-f6becec011e2] Received event network-vif-plugged-b2232b49-2a2b-47ea-b0a9-5ec94f197f11 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 1835.965037] env[63279]: DEBUG oslo_concurrency.lockutils [req-f64a124f-f574-475b-a2a8-ebfa279559e4 req-819a9d1c-6b12-4899-a41a-b1a16e5f1959 service nova] Acquiring lock "3d4db43f-5784-46e1-9710-f6becec011e2-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1835.965256] env[63279]: DEBUG oslo_concurrency.lockutils [req-f64a124f-f574-475b-a2a8-ebfa279559e4 req-819a9d1c-6b12-4899-a41a-b1a16e5f1959 service nova] Lock "3d4db43f-5784-46e1-9710-f6becec011e2-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1835.965459] env[63279]: DEBUG oslo_concurrency.lockutils [req-f64a124f-f574-475b-a2a8-ebfa279559e4 req-819a9d1c-6b12-4899-a41a-b1a16e5f1959 service nova] Lock "3d4db43f-5784-46e1-9710-f6becec011e2-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1835.965628] env[63279]: DEBUG nova.compute.manager [req-f64a124f-f574-475b-a2a8-ebfa279559e4 req-819a9d1c-6b12-4899-a41a-b1a16e5f1959 service nova] [instance: 3d4db43f-5784-46e1-9710-f6becec011e2] No waiting events found dispatching network-vif-plugged-b2232b49-2a2b-47ea-b0a9-5ec94f197f11 {{(pid=63279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1835.965796] env[63279]: WARNING nova.compute.manager [req-f64a124f-f574-475b-a2a8-ebfa279559e4 req-819a9d1c-6b12-4899-a41a-b1a16e5f1959 service nova] [instance: 3d4db43f-5784-46e1-9710-f6becec011e2] Received unexpected event network-vif-plugged-b2232b49-2a2b-47ea-b0a9-5ec94f197f11 for instance with vm_state building and task_state spawning. [ 1835.965975] env[63279]: DEBUG nova.compute.manager [req-f64a124f-f574-475b-a2a8-ebfa279559e4 req-819a9d1c-6b12-4899-a41a-b1a16e5f1959 service nova] [instance: 3d4db43f-5784-46e1-9710-f6becec011e2] Received event network-changed-b2232b49-2a2b-47ea-b0a9-5ec94f197f11 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 1835.966146] env[63279]: DEBUG nova.compute.manager [req-f64a124f-f574-475b-a2a8-ebfa279559e4 req-819a9d1c-6b12-4899-a41a-b1a16e5f1959 service nova] [instance: 3d4db43f-5784-46e1-9710-f6becec011e2] Refreshing instance network info cache due to event network-changed-b2232b49-2a2b-47ea-b0a9-5ec94f197f11. 
{{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11655}} [ 1835.966387] env[63279]: DEBUG oslo_concurrency.lockutils [req-f64a124f-f574-475b-a2a8-ebfa279559e4 req-819a9d1c-6b12-4899-a41a-b1a16e5f1959 service nova] Acquiring lock "refresh_cache-3d4db43f-5784-46e1-9710-f6becec011e2" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1835.978900] env[63279]: DEBUG oslo_vmware.api [None req-271b2613-9c7a-40b6-867e-c4c5c1580416 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Task: {'id': task-2086798, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1836.133919] env[63279]: DEBUG oslo_concurrency.lockutils [None req-af15306e-e4bd-4235-87e6-bad065fff03b tempest-VolumesAssistedSnapshotsTest-1202308672 tempest-VolumesAssistedSnapshotsTest-1202308672-project-member] Acquiring lock "refresh_cache-5d4909ea-396c-45ba-9ff5-acb8576150b3" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1836.134107] env[63279]: DEBUG oslo_concurrency.lockutils [None req-af15306e-e4bd-4235-87e6-bad065fff03b tempest-VolumesAssistedSnapshotsTest-1202308672 tempest-VolumesAssistedSnapshotsTest-1202308672-project-member] Acquired lock "refresh_cache-5d4909ea-396c-45ba-9ff5-acb8576150b3" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1836.134258] env[63279]: DEBUG nova.network.neutron [None req-af15306e-e4bd-4235-87e6-bad065fff03b tempest-VolumesAssistedSnapshotsTest-1202308672 tempest-VolumesAssistedSnapshotsTest-1202308672-project-member] [instance: 5d4909ea-396c-45ba-9ff5-acb8576150b3] Building network info cache for instance {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1836.138059] env[63279]: DEBUG nova.network.neutron [None req-271b2613-9c7a-40b6-867e-c4c5c1580416 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] [instance: 3d4db43f-5784-46e1-9710-f6becec011e2] Updating instance_info_cache with network_info: [{"id": "b2232b49-2a2b-47ea-b0a9-5ec94f197f11", "address": "fa:16:3e:19:66:6c", "network": {"id": "0a342148-49f5-4993-8a5d-a1f27ddd6002", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-668871377-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "70c8735ebd5740c4b8b4d0cf8635da71", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d4345ef6-a7c8-4c1c-badf-a0d4f578b61c", "external-id": "nsx-vlan-transportzone-677", "segmentation_id": 677, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb2232b49-2a", "ovs_interfaceid": "b2232b49-2a2b-47ea-b0a9-5ec94f197f11", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1836.387784] env[63279]: DEBUG 
oslo_concurrency.lockutils [None req-8d7e8546-3d55-4a5a-887c-fa75360a7a03 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1836.391299] env[63279]: INFO nova.compute.manager [None req-9be70fed-855a-424c-af8e-36597da29526 tempest-ImagesOneServerTestJSON-859902932 tempest-ImagesOneServerTestJSON-859902932-project-member] [instance: 7cd673bb-3795-4c6f-a1e0-a8ea0da0a35f] Took 23.78 seconds to build instance. [ 1836.454422] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Lock "4063d5e0-1144-40fa-8ed8-efda16730617" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.562s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1836.476613] env[63279]: DEBUG oslo_vmware.api [None req-a7076d91-54cd-478d-8de7-9cdce51faa84 tempest-AttachInterfacesUnderV243Test-18748603 tempest-AttachInterfacesUnderV243Test-18748603-project-member] Task: {'id': task-2086797, 'name': ReconfigVM_Task, 'duration_secs': 0.834697} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1836.477541] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-a7076d91-54cd-478d-8de7-9cdce51faa84 tempest-AttachInterfacesUnderV243Test-18748603 tempest-AttachInterfacesUnderV243Test-18748603-project-member] [instance: fcc5a636-554f-424e-a604-a8e7bd7cf574] Reconfigured VM instance instance-0000000d to attach disk [datastore1] fcc5a636-554f-424e-a604-a8e7bd7cf574/fcc5a636-554f-424e-a604-a8e7bd7cf574.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1836.478381] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e0f220d1-0260-453d-98fe-0bfc953499de {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1836.488115] env[63279]: DEBUG oslo_vmware.api [None req-271b2613-9c7a-40b6-867e-c4c5c1580416 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Task: {'id': task-2086798, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.759519} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1836.489561] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-271b2613-9c7a-40b6-867e-c4c5c1580416 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] c05e9aaa-e251-480c-94d6-56c29bb6282d/c05e9aaa-e251-480c-94d6-56c29bb6282d.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1836.490438] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-271b2613-9c7a-40b6-867e-c4c5c1580416 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] [instance: c05e9aaa-e251-480c-94d6-56c29bb6282d] Extending root virtual disk to 1048576 {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1836.490438] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b6949121-c031-4893-b9fb-093143763ab6 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1836.495016] env[63279]: DEBUG oslo_vmware.api [None req-a7076d91-54cd-478d-8de7-9cdce51faa84 tempest-AttachInterfacesUnderV243Test-18748603 tempest-AttachInterfacesUnderV243Test-18748603-project-member] Waiting for the task: (returnval){ [ 1836.495016] env[63279]: value = "task-2086799" [ 1836.495016] env[63279]: _type = "Task" [ 1836.495016] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1836.509825] env[63279]: DEBUG oslo_vmware.api [None req-271b2613-9c7a-40b6-867e-c4c5c1580416 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Waiting for the task: (returnval){ [ 1836.509825] env[63279]: value = "task-2086800" [ 1836.509825] env[63279]: _type = "Task" [ 1836.509825] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1836.520286] env[63279]: DEBUG oslo_vmware.api [None req-a7076d91-54cd-478d-8de7-9cdce51faa84 tempest-AttachInterfacesUnderV243Test-18748603 tempest-AttachInterfacesUnderV243Test-18748603-project-member] Task: {'id': task-2086799, 'name': Rename_Task} progress is 14%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1836.526810] env[63279]: DEBUG oslo_vmware.api [None req-271b2613-9c7a-40b6-867e-c4c5c1580416 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Task: {'id': task-2086800, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1836.646906] env[63279]: DEBUG oslo_concurrency.lockutils [None req-271b2613-9c7a-40b6-867e-c4c5c1580416 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Releasing lock "refresh_cache-3d4db43f-5784-46e1-9710-f6becec011e2" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1836.647044] env[63279]: DEBUG nova.compute.manager [None req-271b2613-9c7a-40b6-867e-c4c5c1580416 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] [instance: 3d4db43f-5784-46e1-9710-f6becec011e2] Instance network_info: |[{"id": "b2232b49-2a2b-47ea-b0a9-5ec94f197f11", "address": "fa:16:3e:19:66:6c", "network": {"id": "0a342148-49f5-4993-8a5d-a1f27ddd6002", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-668871377-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "70c8735ebd5740c4b8b4d0cf8635da71", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d4345ef6-a7c8-4c1c-badf-a0d4f578b61c", "external-id": "nsx-vlan-transportzone-677", "segmentation_id": 677, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb2232b49-2a", "ovs_interfaceid": "b2232b49-2a2b-47ea-b0a9-5ec94f197f11", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1836.648795] env[63279]: DEBUG oslo_concurrency.lockutils [req-f64a124f-f574-475b-a2a8-ebfa279559e4 req-819a9d1c-6b12-4899-a41a-b1a16e5f1959 service nova] Acquired lock "refresh_cache-3d4db43f-5784-46e1-9710-f6becec011e2" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1836.648795] env[63279]: DEBUG nova.network.neutron [req-f64a124f-f574-475b-a2a8-ebfa279559e4 req-819a9d1c-6b12-4899-a41a-b1a16e5f1959 service nova] [instance: 3d4db43f-5784-46e1-9710-f6becec011e2] Refreshing network info cache for port b2232b49-2a2b-47ea-b0a9-5ec94f197f11 {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1836.649914] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-271b2613-9c7a-40b6-867e-c4c5c1580416 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] [instance: 3d4db43f-5784-46e1-9710-f6becec011e2] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:19:66:6c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'd4345ef6-a7c8-4c1c-badf-a0d4f578b61c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b2232b49-2a2b-47ea-b0a9-5ec94f197f11', 'vif_model': 'vmxnet3'}] {{(pid=63279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1836.659357] env[63279]: DEBUG oslo.service.loopingcall [None req-271b2613-9c7a-40b6-867e-c4c5c1580416 tempest-MultipleCreateTestJSON-79500308 
tempest-MultipleCreateTestJSON-79500308-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1836.660333] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3d4db43f-5784-46e1-9710-f6becec011e2] Creating VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1836.660333] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d59afd4c-7701-45c2-8237-c8d7c01bcbe2 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1836.686878] env[63279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1836.686878] env[63279]: value = "task-2086801" [ 1836.686878] env[63279]: _type = "Task" [ 1836.686878] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1836.695775] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2086801, 'name': CreateVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1836.711173] env[63279]: DEBUG nova.network.neutron [None req-af15306e-e4bd-4235-87e6-bad065fff03b tempest-VolumesAssistedSnapshotsTest-1202308672 tempest-VolumesAssistedSnapshotsTest-1202308672-project-member] [instance: 5d4909ea-396c-45ba-9ff5-acb8576150b3] Instance cache missing network info. {{(pid=63279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1836.893504] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ee851f5-10d1-4548-acb6-1dcb7700a07f {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1836.897234] env[63279]: DEBUG oslo_concurrency.lockutils [None req-9be70fed-855a-424c-af8e-36597da29526 tempest-ImagesOneServerTestJSON-859902932 tempest-ImagesOneServerTestJSON-859902932-project-member] Lock "7cd673bb-3795-4c6f-a1e0-a8ea0da0a35f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 37.092s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1836.899566] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Lock "7cd673bb-3795-4c6f-a1e0-a8ea0da0a35f" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 9.861s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1836.899764] env[63279]: INFO nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] [instance: 7cd673bb-3795-4c6f-a1e0-a8ea0da0a35f] During sync_power_state the instance has a pending task (spawning). Skip. 
[ 1836.900147] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Lock "7cd673bb-3795-4c6f-a1e0-a8ea0da0a35f" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.001s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1836.906111] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb05901f-b2ce-4fb8-bfc7-fbc5619b448f {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1836.948690] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2285f92-3769-4edc-98b8-816438072d6a {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1836.958845] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d7a3668-f653-4ccd-b834-364bb28d0ab3 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1836.980247] env[63279]: DEBUG nova.compute.provider_tree [None req-99df1493-5715-4262-b84c-33faf3142bfd tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] Inventory has not changed in ProviderTree for provider: 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1837.007935] env[63279]: DEBUG oslo_vmware.api [None req-a7076d91-54cd-478d-8de7-9cdce51faa84 tempest-AttachInterfacesUnderV243Test-18748603 tempest-AttachInterfacesUnderV243Test-18748603-project-member] Task: {'id': task-2086799, 'name': Rename_Task} progress is 14%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1837.021292] env[63279]: DEBUG oslo_vmware.api [None req-271b2613-9c7a-40b6-867e-c4c5c1580416 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Task: {'id': task-2086800, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.165591} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1837.022442] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-271b2613-9c7a-40b6-867e-c4c5c1580416 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] [instance: c05e9aaa-e251-480c-94d6-56c29bb6282d] Extended root virtual disk {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1837.022747] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1210e73-5aa4-44f3-b1f2-3893c5c44a7e {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1837.050937] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-271b2613-9c7a-40b6-867e-c4c5c1580416 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] [instance: c05e9aaa-e251-480c-94d6-56c29bb6282d] Reconfiguring VM instance instance-0000000e to attach disk [datastore1] c05e9aaa-e251-480c-94d6-56c29bb6282d/c05e9aaa-e251-480c-94d6-56c29bb6282d.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1837.050937] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-71c2a1b9-9905-456b-9225-6cb6016f01e4 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1837.069889] env[63279]: DEBUG nova.network.neutron [None req-af15306e-e4bd-4235-87e6-bad065fff03b tempest-VolumesAssistedSnapshotsTest-1202308672 tempest-VolumesAssistedSnapshotsTest-1202308672-project-member] [instance: 5d4909ea-396c-45ba-9ff5-acb8576150b3] Updating instance_info_cache with network_info: [{"id": "887ee4a3-0844-4571-84d8-486d7c21a7d2", "address": "fa:16:3e:52:14:49", "network": {"id": "4b6f5566-0923-4169-8f7f-f8afa7b7550c", "bridge": "br-int", "label": "tempest-VolumesAssistedSnapshotsTest-769172874-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "50803bde25ba44398da07d2a993e69f7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "37fb1918-d178-4e12-93e6-316381e78be4", "external-id": "nsx-vlan-transportzone-763", "segmentation_id": 763, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap887ee4a3-08", "ovs_interfaceid": "887ee4a3-0844-4571-84d8-486d7c21a7d2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1837.075263] env[63279]: DEBUG oslo_vmware.api [None req-271b2613-9c7a-40b6-867e-c4c5c1580416 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Waiting for the task: (returnval){ [ 1837.075263] env[63279]: value = "task-2086803" [ 1837.075263] env[63279]: _type = "Task" [ 1837.075263] env[63279]: } to complete. 
{{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1837.087811] env[63279]: DEBUG oslo_vmware.api [None req-271b2613-9c7a-40b6-867e-c4c5c1580416 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Task: {'id': task-2086803, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1837.167700] env[63279]: DEBUG oslo_concurrency.lockutils [None req-60555cc8-e311-4c21-b52c-fa487660eaaf tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Acquiring lock "c1ac4af5-b01e-4175-844f-7a67b2ef7526" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1837.168086] env[63279]: DEBUG oslo_concurrency.lockutils [None req-60555cc8-e311-4c21-b52c-fa487660eaaf tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Lock "c1ac4af5-b01e-4175-844f-7a67b2ef7526" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1837.202454] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2086801, 'name': CreateVM_Task} progress is 25%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1837.401401] env[63279]: DEBUG nova.compute.manager [None req-fb63d81c-1d8d-4e24-b42b-e37e673a6c54 tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] [instance: 357f08c9-4de9-4b84-8384-6bf130872f40] Starting instance... {{(pid=63279) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1837.483977] env[63279]: DEBUG nova.scheduler.client.report [None req-99df1493-5715-4262-b84c-33faf3142bfd tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] Inventory has not changed for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1837.513013] env[63279]: DEBUG oslo_vmware.api [None req-a7076d91-54cd-478d-8de7-9cdce51faa84 tempest-AttachInterfacesUnderV243Test-18748603 tempest-AttachInterfacesUnderV243Test-18748603-project-member] Task: {'id': task-2086799, 'name': Rename_Task, 'duration_secs': 0.94166} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1837.513821] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-a7076d91-54cd-478d-8de7-9cdce51faa84 tempest-AttachInterfacesUnderV243Test-18748603 tempest-AttachInterfacesUnderV243Test-18748603-project-member] [instance: fcc5a636-554f-424e-a604-a8e7bd7cf574] Powering on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1837.514114] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-edeb8fda-7441-48d7-91a3-76cd47eefe51 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1837.526281] env[63279]: DEBUG oslo_vmware.api [None req-a7076d91-54cd-478d-8de7-9cdce51faa84 tempest-AttachInterfacesUnderV243Test-18748603 tempest-AttachInterfacesUnderV243Test-18748603-project-member] Waiting for the task: (returnval){ [ 1837.526281] env[63279]: value = "task-2086804" [ 1837.526281] env[63279]: _type = "Task" [ 1837.526281] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1837.541776] env[63279]: DEBUG oslo_vmware.api [None req-a7076d91-54cd-478d-8de7-9cdce51faa84 tempest-AttachInterfacesUnderV243Test-18748603 tempest-AttachInterfacesUnderV243Test-18748603-project-member] Task: {'id': task-2086804, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1837.556687] env[63279]: DEBUG nova.network.neutron [req-f64a124f-f574-475b-a2a8-ebfa279559e4 req-819a9d1c-6b12-4899-a41a-b1a16e5f1959 service nova] [instance: 3d4db43f-5784-46e1-9710-f6becec011e2] Updated VIF entry in instance network info cache for port b2232b49-2a2b-47ea-b0a9-5ec94f197f11. 
{{(pid=63279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1837.557085] env[63279]: DEBUG nova.network.neutron [req-f64a124f-f574-475b-a2a8-ebfa279559e4 req-819a9d1c-6b12-4899-a41a-b1a16e5f1959 service nova] [instance: 3d4db43f-5784-46e1-9710-f6becec011e2] Updating instance_info_cache with network_info: [{"id": "b2232b49-2a2b-47ea-b0a9-5ec94f197f11", "address": "fa:16:3e:19:66:6c", "network": {"id": "0a342148-49f5-4993-8a5d-a1f27ddd6002", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-668871377-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "70c8735ebd5740c4b8b4d0cf8635da71", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d4345ef6-a7c8-4c1c-badf-a0d4f578b61c", "external-id": "nsx-vlan-transportzone-677", "segmentation_id": 677, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb2232b49-2a", "ovs_interfaceid": "b2232b49-2a2b-47ea-b0a9-5ec94f197f11", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1837.573230] env[63279]: DEBUG oslo_concurrency.lockutils [None req-af15306e-e4bd-4235-87e6-bad065fff03b tempest-VolumesAssistedSnapshotsTest-1202308672 tempest-VolumesAssistedSnapshotsTest-1202308672-project-member] Releasing lock "refresh_cache-5d4909ea-396c-45ba-9ff5-acb8576150b3" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1837.574484] env[63279]: DEBUG nova.compute.manager [None req-af15306e-e4bd-4235-87e6-bad065fff03b tempest-VolumesAssistedSnapshotsTest-1202308672 tempest-VolumesAssistedSnapshotsTest-1202308672-project-member] [instance: 5d4909ea-396c-45ba-9ff5-acb8576150b3] Instance network_info: |[{"id": "887ee4a3-0844-4571-84d8-486d7c21a7d2", "address": "fa:16:3e:52:14:49", "network": {"id": "4b6f5566-0923-4169-8f7f-f8afa7b7550c", "bridge": "br-int", "label": "tempest-VolumesAssistedSnapshotsTest-769172874-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "50803bde25ba44398da07d2a993e69f7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "37fb1918-d178-4e12-93e6-316381e78be4", "external-id": "nsx-vlan-transportzone-763", "segmentation_id": 763, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap887ee4a3-08", "ovs_interfaceid": "887ee4a3-0844-4571-84d8-486d7c21a7d2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63279) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:2003}} [ 1837.574655] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-af15306e-e4bd-4235-87e6-bad065fff03b tempest-VolumesAssistedSnapshotsTest-1202308672 tempest-VolumesAssistedSnapshotsTest-1202308672-project-member] [instance: 5d4909ea-396c-45ba-9ff5-acb8576150b3] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:52:14:49', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '37fb1918-d178-4e12-93e6-316381e78be4', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '887ee4a3-0844-4571-84d8-486d7c21a7d2', 'vif_model': 'vmxnet3'}] {{(pid=63279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1837.587159] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-af15306e-e4bd-4235-87e6-bad065fff03b tempest-VolumesAssistedSnapshotsTest-1202308672 tempest-VolumesAssistedSnapshotsTest-1202308672-project-member] Creating folder: Project (50803bde25ba44398da07d2a993e69f7). Parent ref: group-v427491. {{(pid=63279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1837.588173] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5c3585dd-7dd6-4ec7-a6f3-33608d6e8eb5 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1837.603102] env[63279]: DEBUG oslo_vmware.api [None req-271b2613-9c7a-40b6-867e-c4c5c1580416 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Task: {'id': task-2086803, 'name': ReconfigVM_Task, 'duration_secs': 0.474113} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1837.603592] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-271b2613-9c7a-40b6-867e-c4c5c1580416 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] [instance: c05e9aaa-e251-480c-94d6-56c29bb6282d] Reconfigured VM instance instance-0000000e to attach disk [datastore1] c05e9aaa-e251-480c-94d6-56c29bb6282d/c05e9aaa-e251-480c-94d6-56c29bb6282d.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1837.604522] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a0f68b95-aeb2-4bd0-81bf-528bf781fca3 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1837.608791] env[63279]: INFO nova.virt.vmwareapi.vm_util [None req-af15306e-e4bd-4235-87e6-bad065fff03b tempest-VolumesAssistedSnapshotsTest-1202308672 tempest-VolumesAssistedSnapshotsTest-1202308672-project-member] Created folder: Project (50803bde25ba44398da07d2a993e69f7) in parent group-v427491. [ 1837.608791] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-af15306e-e4bd-4235-87e6-bad065fff03b tempest-VolumesAssistedSnapshotsTest-1202308672 tempest-VolumesAssistedSnapshotsTest-1202308672-project-member] Creating folder: Instances. Parent ref: group-v427537. 
{{(pid=63279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1837.609379] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-65252f6f-e1fc-4346-92b2-a128fc1fb193 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1837.615377] env[63279]: DEBUG oslo_vmware.api [None req-271b2613-9c7a-40b6-867e-c4c5c1580416 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Waiting for the task: (returnval){ [ 1837.615377] env[63279]: value = "task-2086806" [ 1837.615377] env[63279]: _type = "Task" [ 1837.615377] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1837.626846] env[63279]: INFO nova.virt.vmwareapi.vm_util [None req-af15306e-e4bd-4235-87e6-bad065fff03b tempest-VolumesAssistedSnapshotsTest-1202308672 tempest-VolumesAssistedSnapshotsTest-1202308672-project-member] Created folder: Instances in parent group-v427537. [ 1837.626846] env[63279]: DEBUG oslo.service.loopingcall [None req-af15306e-e4bd-4235-87e6-bad065fff03b tempest-VolumesAssistedSnapshotsTest-1202308672 tempest-VolumesAssistedSnapshotsTest-1202308672-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1837.627662] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5d4909ea-396c-45ba-9ff5-acb8576150b3] Creating VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1837.628229] env[63279]: DEBUG oslo_vmware.api [None req-271b2613-9c7a-40b6-867e-c4c5c1580416 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Task: {'id': task-2086806, 'name': Rename_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1837.628452] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e03c2463-9e88-487c-89a4-6aa3d32a50f2 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1837.650553] env[63279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1837.650553] env[63279]: value = "task-2086808" [ 1837.650553] env[63279]: _type = "Task" [ 1837.650553] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1837.660618] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2086808, 'name': CreateVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1837.702013] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2086801, 'name': CreateVM_Task, 'duration_secs': 0.675403} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1837.702013] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3d4db43f-5784-46e1-9710-f6becec011e2] Created VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1837.702013] env[63279]: DEBUG oslo_concurrency.lockutils [None req-271b2613-9c7a-40b6-867e-c4c5c1580416 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1837.702013] env[63279]: DEBUG oslo_concurrency.lockutils [None req-271b2613-9c7a-40b6-867e-c4c5c1580416 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1837.703541] env[63279]: DEBUG oslo_concurrency.lockutils [None req-271b2613-9c7a-40b6-867e-c4c5c1580416 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1837.703541] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5cc12c62-7c37-4dea-b898-3de937cd1ae2 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1837.709988] env[63279]: DEBUG oslo_vmware.api [None req-271b2613-9c7a-40b6-867e-c4c5c1580416 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Waiting for the task: (returnval){ [ 1837.709988] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52659b8f-3b93-178d-7792-b3733cb61fb0" [ 1837.709988] env[63279]: _type = "Task" [ 1837.709988] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1837.721518] env[63279]: DEBUG oslo_vmware.api [None req-271b2613-9c7a-40b6-867e-c4c5c1580416 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52659b8f-3b93-178d-7792-b3733cb61fb0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1837.991571] env[63279]: DEBUG oslo_concurrency.lockutils [None req-99df1493-5715-4262-b84c-33faf3142bfd tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.664s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1837.994224] env[63279]: DEBUG nova.compute.manager [None req-99df1493-5715-4262-b84c-33faf3142bfd tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] [instance: 1a55008a-4d8c-403d-a8f4-966aa7346f4c] Start building networks asynchronously for instance. 
{{(pid=63279) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1838.000803] env[63279]: DEBUG oslo_concurrency.lockutils [None req-0b6dec82-a075-43f4-a12b-6738f27bbf78 tempest-ServerDiagnosticsTest-1267197074 tempest-ServerDiagnosticsTest-1267197074-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 9.828s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1838.000803] env[63279]: DEBUG nova.objects.instance [None req-0b6dec82-a075-43f4-a12b-6738f27bbf78 tempest-ServerDiagnosticsTest-1267197074 tempest-ServerDiagnosticsTest-1267197074-project-member] Lazy-loading 'resources' on Instance uuid ac1d0e8f-446a-4a6d-a916-08f52426396d {{(pid=63279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1838.043025] env[63279]: DEBUG oslo_vmware.api [None req-a7076d91-54cd-478d-8de7-9cdce51faa84 tempest-AttachInterfacesUnderV243Test-18748603 tempest-AttachInterfacesUnderV243Test-18748603-project-member] Task: {'id': task-2086804, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1838.060648] env[63279]: DEBUG oslo_concurrency.lockutils [req-f64a124f-f574-475b-a2a8-ebfa279559e4 req-819a9d1c-6b12-4899-a41a-b1a16e5f1959 service nova] Releasing lock "refresh_cache-3d4db43f-5784-46e1-9710-f6becec011e2" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1838.127646] env[63279]: DEBUG oslo_concurrency.lockutils [None req-fb63d81c-1d8d-4e24-b42b-e37e673a6c54 tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1838.132106] env[63279]: DEBUG oslo_vmware.api [None req-271b2613-9c7a-40b6-867e-c4c5c1580416 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Task: {'id': task-2086806, 'name': Rename_Task, 'duration_secs': 0.247858} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1838.132938] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-271b2613-9c7a-40b6-867e-c4c5c1580416 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] [instance: c05e9aaa-e251-480c-94d6-56c29bb6282d] Powering on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1838.132938] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-239e3069-2768-470c-b2cd-8c83d4c7634b {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1838.145860] env[63279]: DEBUG oslo_vmware.api [None req-271b2613-9c7a-40b6-867e-c4c5c1580416 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Waiting for the task: (returnval){ [ 1838.145860] env[63279]: value = "task-2086809" [ 1838.145860] env[63279]: _type = "Task" [ 1838.145860] env[63279]: } to complete. 
{{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1838.168256] env[63279]: DEBUG oslo_vmware.api [None req-271b2613-9c7a-40b6-867e-c4c5c1580416 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Task: {'id': task-2086809, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1838.171765] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2086808, 'name': CreateVM_Task} progress is 25%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1838.230296] env[63279]: DEBUG oslo_vmware.api [None req-271b2613-9c7a-40b6-867e-c4c5c1580416 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52659b8f-3b93-178d-7792-b3733cb61fb0, 'name': SearchDatastore_Task, 'duration_secs': 0.016401} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1838.230296] env[63279]: DEBUG oslo_concurrency.lockutils [None req-271b2613-9c7a-40b6-867e-c4c5c1580416 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1838.230296] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-271b2613-9c7a-40b6-867e-c4c5c1580416 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] [instance: 3d4db43f-5784-46e1-9710-f6becec011e2] Processing image 30887889-e45b-4f67-8b3c-16216e594a90 {{(pid=63279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1838.230296] env[63279]: DEBUG oslo_concurrency.lockutils [None req-271b2613-9c7a-40b6-867e-c4c5c1580416 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1838.230507] env[63279]: DEBUG oslo_concurrency.lockutils [None req-271b2613-9c7a-40b6-867e-c4c5c1580416 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1838.230507] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-271b2613-9c7a-40b6-867e-c4c5c1580416 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1838.230507] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ad00c44f-da5e-4f43-99b5-afd6f41bbd15 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1838.246725] env[63279]: DEBUG nova.compute.manager [req-246407ef-c73d-4715-9d68-7a76829dce55 
req-761ed9e1-930a-47af-ae62-4ce2442bcf7a service nova] [instance: 5d4909ea-396c-45ba-9ff5-acb8576150b3] Received event network-vif-plugged-887ee4a3-0844-4571-84d8-486d7c21a7d2 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 1838.248379] env[63279]: DEBUG oslo_concurrency.lockutils [req-246407ef-c73d-4715-9d68-7a76829dce55 req-761ed9e1-930a-47af-ae62-4ce2442bcf7a service nova] Acquiring lock "5d4909ea-396c-45ba-9ff5-acb8576150b3-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1838.248379] env[63279]: DEBUG oslo_concurrency.lockutils [req-246407ef-c73d-4715-9d68-7a76829dce55 req-761ed9e1-930a-47af-ae62-4ce2442bcf7a service nova] Lock "5d4909ea-396c-45ba-9ff5-acb8576150b3-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1838.248379] env[63279]: DEBUG oslo_concurrency.lockutils [req-246407ef-c73d-4715-9d68-7a76829dce55 req-761ed9e1-930a-47af-ae62-4ce2442bcf7a service nova] Lock "5d4909ea-396c-45ba-9ff5-acb8576150b3-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1838.249416] env[63279]: DEBUG nova.compute.manager [req-246407ef-c73d-4715-9d68-7a76829dce55 req-761ed9e1-930a-47af-ae62-4ce2442bcf7a service nova] [instance: 5d4909ea-396c-45ba-9ff5-acb8576150b3] No waiting events found dispatching network-vif-plugged-887ee4a3-0844-4571-84d8-486d7c21a7d2 {{(pid=63279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1838.249795] env[63279]: WARNING nova.compute.manager [req-246407ef-c73d-4715-9d68-7a76829dce55 req-761ed9e1-930a-47af-ae62-4ce2442bcf7a service nova] [instance: 5d4909ea-396c-45ba-9ff5-acb8576150b3] Received unexpected event network-vif-plugged-887ee4a3-0844-4571-84d8-486d7c21a7d2 for instance with vm_state building and task_state spawning. [ 1838.250147] env[63279]: DEBUG nova.compute.manager [req-246407ef-c73d-4715-9d68-7a76829dce55 req-761ed9e1-930a-47af-ae62-4ce2442bcf7a service nova] [instance: 5d4909ea-396c-45ba-9ff5-acb8576150b3] Received event network-changed-887ee4a3-0844-4571-84d8-486d7c21a7d2 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 1838.252057] env[63279]: DEBUG nova.compute.manager [req-246407ef-c73d-4715-9d68-7a76829dce55 req-761ed9e1-930a-47af-ae62-4ce2442bcf7a service nova] [instance: 5d4909ea-396c-45ba-9ff5-acb8576150b3] Refreshing instance network info cache due to event network-changed-887ee4a3-0844-4571-84d8-486d7c21a7d2. 
{{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11655}} [ 1838.252057] env[63279]: DEBUG oslo_concurrency.lockutils [req-246407ef-c73d-4715-9d68-7a76829dce55 req-761ed9e1-930a-47af-ae62-4ce2442bcf7a service nova] Acquiring lock "refresh_cache-5d4909ea-396c-45ba-9ff5-acb8576150b3" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1838.252057] env[63279]: DEBUG oslo_concurrency.lockutils [req-246407ef-c73d-4715-9d68-7a76829dce55 req-761ed9e1-930a-47af-ae62-4ce2442bcf7a service nova] Acquired lock "refresh_cache-5d4909ea-396c-45ba-9ff5-acb8576150b3" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1838.252057] env[63279]: DEBUG nova.network.neutron [req-246407ef-c73d-4715-9d68-7a76829dce55 req-761ed9e1-930a-47af-ae62-4ce2442bcf7a service nova] [instance: 5d4909ea-396c-45ba-9ff5-acb8576150b3] Refreshing network info cache for port 887ee4a3-0844-4571-84d8-486d7c21a7d2 {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1838.254847] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-271b2613-9c7a-40b6-867e-c4c5c1580416 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1838.255219] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-271b2613-9c7a-40b6-867e-c4c5c1580416 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1838.256798] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1523c202-df02-427f-a6c3-2c26305d7c09 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1838.268336] env[63279]: DEBUG oslo_vmware.api [None req-271b2613-9c7a-40b6-867e-c4c5c1580416 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Waiting for the task: (returnval){ [ 1838.268336] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52e41832-52e5-532d-aba3-1b4a0fc45a45" [ 1838.268336] env[63279]: _type = "Task" [ 1838.268336] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1838.287362] env[63279]: DEBUG oslo_vmware.api [None req-271b2613-9c7a-40b6-867e-c4c5c1580416 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52e41832-52e5-532d-aba3-1b4a0fc45a45, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1838.504991] env[63279]: DEBUG nova.compute.utils [None req-99df1493-5715-4262-b84c-33faf3142bfd tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] Using /dev/sd instead of None {{(pid=63279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1838.509447] env[63279]: DEBUG nova.compute.manager [None req-99df1493-5715-4262-b84c-33faf3142bfd tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] [instance: 1a55008a-4d8c-403d-a8f4-966aa7346f4c] Allocating IP information in the background. {{(pid=63279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1838.509447] env[63279]: DEBUG nova.network.neutron [None req-99df1493-5715-4262-b84c-33faf3142bfd tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] [instance: 1a55008a-4d8c-403d-a8f4-966aa7346f4c] allocate_for_instance() {{(pid=63279) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1838.539540] env[63279]: DEBUG oslo_vmware.api [None req-a7076d91-54cd-478d-8de7-9cdce51faa84 tempest-AttachInterfacesUnderV243Test-18748603 tempest-AttachInterfacesUnderV243Test-18748603-project-member] Task: {'id': task-2086804, 'name': PowerOnVM_Task, 'duration_secs': 0.989931} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1838.539928] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-a7076d91-54cd-478d-8de7-9cdce51faa84 tempest-AttachInterfacesUnderV243Test-18748603 tempest-AttachInterfacesUnderV243Test-18748603-project-member] [instance: fcc5a636-554f-424e-a604-a8e7bd7cf574] Powered on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1838.540160] env[63279]: INFO nova.compute.manager [None req-a7076d91-54cd-478d-8de7-9cdce51faa84 tempest-AttachInterfacesUnderV243Test-18748603 tempest-AttachInterfacesUnderV243Test-18748603-project-member] [instance: fcc5a636-554f-424e-a604-a8e7bd7cf574] Took 11.74 seconds to spawn the instance on the hypervisor. 
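
Editor's note: the oslo_concurrency.lockutils entries throughout this section record how long each named lock was waited for ("acquired ... waited Xs") and how long it was held ("released ... held Ys"), e.g. the "compute_resources" and per-instance locks above. The sketch below reproduces just that timing pattern with the standard library; it is illustrative only and is not oslo.concurrency's implementation (the registry dict and print-based logging are simplifications).

import threading
import time
from contextlib import contextmanager

_locks = {}   # simplistic per-name lock registry (illustrative; not thread-safe creation)

@contextmanager
def timed_lock(name):
    """Acquire a named lock and report waited/held durations, mirroring the log format."""
    lock = _locks.setdefault(name, threading.Lock())
    t0 = time.monotonic()
    lock.acquire()
    waited = time.monotonic() - t0
    print(f'Lock "{name}" acquired :: waited {waited:.3f}s')
    try:
        yield
    finally:
        held = time.monotonic() - t0 - waited
        lock.release()
        print(f'Lock "{name}" "released" :: held {held:.3f}s')

# Usage example:
with timed_lock("compute_resources"):
    time.sleep(0.05)   # stand-in for resource-tracker work done under the lock
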
[ 1838.540344] env[63279]: DEBUG nova.compute.manager [None req-a7076d91-54cd-478d-8de7-9cdce51faa84 tempest-AttachInterfacesUnderV243Test-18748603 tempest-AttachInterfacesUnderV243Test-18748603-project-member] [instance: fcc5a636-554f-424e-a604-a8e7bd7cf574] Checking state {{(pid=63279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1838.541459] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d1e71ab-fe9c-43b1-826b-c99cc79e529c {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1838.628779] env[63279]: DEBUG nova.policy [None req-99df1493-5715-4262-b84c-33faf3142bfd tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a0ac7413a93e48ca979af98a90b38cc3', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e931e0ddf1b840fdb8743fbdba05b28d', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63279) authorize /opt/stack/nova/nova/policy.py:192}} [ 1838.661691] env[63279]: DEBUG oslo_vmware.api [None req-271b2613-9c7a-40b6-867e-c4c5c1580416 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Task: {'id': task-2086809, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1838.668595] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2086808, 'name': CreateVM_Task} progress is 25%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1838.784058] env[63279]: DEBUG oslo_vmware.api [None req-271b2613-9c7a-40b6-867e-c4c5c1580416 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52e41832-52e5-532d-aba3-1b4a0fc45a45, 'name': SearchDatastore_Task, 'duration_secs': 0.020479} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1838.785371] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f4d3cddb-5287-4bca-b4bd-e654a50933ec {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1838.792021] env[63279]: DEBUG oslo_vmware.api [None req-271b2613-9c7a-40b6-867e-c4c5c1580416 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Waiting for the task: (returnval){ [ 1838.792021] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]527aaeb3-faee-d1fa-de96-e11391112ab1" [ 1838.792021] env[63279]: _type = "Task" [ 1838.792021] env[63279]: } to complete. 
{{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1838.800861] env[63279]: DEBUG oslo_vmware.api [None req-271b2613-9c7a-40b6-867e-c4c5c1580416 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]527aaeb3-faee-d1fa-de96-e11391112ab1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1839.010051] env[63279]: DEBUG nova.compute.manager [None req-99df1493-5715-4262-b84c-33faf3142bfd tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] [instance: 1a55008a-4d8c-403d-a8f4-966aa7346f4c] Start building block device mappings for instance. {{(pid=63279) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1839.036639] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0da4bae7-d295-427a-8f14-536a74d806d8 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1839.045585] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-092e8e6c-024c-46ca-9cb4-fa03da68d979 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1839.121024] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e52c7f08-8218-4206-8cd9-64b8d1aee65f {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1839.121024] env[63279]: INFO nova.compute.manager [None req-a7076d91-54cd-478d-8de7-9cdce51faa84 tempest-AttachInterfacesUnderV243Test-18748603 tempest-AttachInterfacesUnderV243Test-18748603-project-member] [instance: fcc5a636-554f-424e-a604-a8e7bd7cf574] Took 23.81 seconds to build instance. [ 1839.131777] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9694bf37-e25a-40d4-b629-51f00adce201 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1839.151615] env[63279]: DEBUG nova.compute.provider_tree [None req-0b6dec82-a075-43f4-a12b-6738f27bbf78 tempest-ServerDiagnosticsTest-1267197074 tempest-ServerDiagnosticsTest-1267197074-project-member] Inventory has not changed in ProviderTree for provider: 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1839.169444] env[63279]: DEBUG oslo_vmware.api [None req-271b2613-9c7a-40b6-867e-c4c5c1580416 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Task: {'id': task-2086809, 'name': PowerOnVM_Task} progress is 78%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1839.170702] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2086808, 'name': CreateVM_Task, 'duration_secs': 1.385826} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1839.170702] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5d4909ea-396c-45ba-9ff5-acb8576150b3] Created VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1839.170702] env[63279]: DEBUG oslo_concurrency.lockutils [None req-af15306e-e4bd-4235-87e6-bad065fff03b tempest-VolumesAssistedSnapshotsTest-1202308672 tempest-VolumesAssistedSnapshotsTest-1202308672-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1839.170702] env[63279]: DEBUG oslo_concurrency.lockutils [None req-af15306e-e4bd-4235-87e6-bad065fff03b tempest-VolumesAssistedSnapshotsTest-1202308672 tempest-VolumesAssistedSnapshotsTest-1202308672-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1839.170986] env[63279]: DEBUG oslo_concurrency.lockutils [None req-af15306e-e4bd-4235-87e6-bad065fff03b tempest-VolumesAssistedSnapshotsTest-1202308672 tempest-VolumesAssistedSnapshotsTest-1202308672-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1839.171737] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4f6d68a1-fafb-4852-8930-6885abc0a645 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1839.182680] env[63279]: DEBUG oslo_vmware.api [None req-af15306e-e4bd-4235-87e6-bad065fff03b tempest-VolumesAssistedSnapshotsTest-1202308672 tempest-VolumesAssistedSnapshotsTest-1202308672-project-member] Waiting for the task: (returnval){ [ 1839.182680] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]521f9b5d-fe72-f158-ad16-7bedf052dd27" [ 1839.182680] env[63279]: _type = "Task" [ 1839.182680] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1839.189930] env[63279]: DEBUG oslo_vmware.api [None req-af15306e-e4bd-4235-87e6-bad065fff03b tempest-VolumesAssistedSnapshotsTest-1202308672 tempest-VolumesAssistedSnapshotsTest-1202308672-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]521f9b5d-fe72-f158-ad16-7bedf052dd27, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1839.220099] env[63279]: DEBUG nova.network.neutron [None req-99df1493-5715-4262-b84c-33faf3142bfd tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] [instance: 1a55008a-4d8c-403d-a8f4-966aa7346f4c] Successfully created port: 02e5d9ab-c16c-4141-a131-b7e9f2a2a194 {{(pid=63279) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1839.317488] env[63279]: DEBUG oslo_vmware.api [None req-271b2613-9c7a-40b6-867e-c4c5c1580416 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]527aaeb3-faee-d1fa-de96-e11391112ab1, 'name': SearchDatastore_Task, 'duration_secs': 0.020243} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1839.317920] env[63279]: DEBUG oslo_concurrency.lockutils [None req-271b2613-9c7a-40b6-867e-c4c5c1580416 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1839.319596] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-271b2613-9c7a-40b6-867e-c4c5c1580416 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] 3d4db43f-5784-46e1-9710-f6becec011e2/3d4db43f-5784-46e1-9710-f6becec011e2.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1839.319983] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2dce0ef3-0418-492f-bac5-cab3318dde4e {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1839.335238] env[63279]: DEBUG oslo_vmware.api [None req-271b2613-9c7a-40b6-867e-c4c5c1580416 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Waiting for the task: (returnval){ [ 1839.335238] env[63279]: value = "task-2086811" [ 1839.335238] env[63279]: _type = "Task" [ 1839.335238] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1839.349739] env[63279]: DEBUG oslo_vmware.api [None req-271b2613-9c7a-40b6-867e-c4c5c1580416 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Task: {'id': task-2086811, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1839.419636] env[63279]: DEBUG nova.network.neutron [req-246407ef-c73d-4715-9d68-7a76829dce55 req-761ed9e1-930a-47af-ae62-4ce2442bcf7a service nova] [instance: 5d4909ea-396c-45ba-9ff5-acb8576150b3] Updated VIF entry in instance network info cache for port 887ee4a3-0844-4571-84d8-486d7c21a7d2. 
{{(pid=63279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1839.420838] env[63279]: DEBUG nova.network.neutron [req-246407ef-c73d-4715-9d68-7a76829dce55 req-761ed9e1-930a-47af-ae62-4ce2442bcf7a service nova] [instance: 5d4909ea-396c-45ba-9ff5-acb8576150b3] Updating instance_info_cache with network_info: [{"id": "887ee4a3-0844-4571-84d8-486d7c21a7d2", "address": "fa:16:3e:52:14:49", "network": {"id": "4b6f5566-0923-4169-8f7f-f8afa7b7550c", "bridge": "br-int", "label": "tempest-VolumesAssistedSnapshotsTest-769172874-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "50803bde25ba44398da07d2a993e69f7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "37fb1918-d178-4e12-93e6-316381e78be4", "external-id": "nsx-vlan-transportzone-763", "segmentation_id": 763, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap887ee4a3-08", "ovs_interfaceid": "887ee4a3-0844-4571-84d8-486d7c21a7d2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1839.444431] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1839.444431] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1839.623783] env[63279]: DEBUG oslo_concurrency.lockutils [None req-a7076d91-54cd-478d-8de7-9cdce51faa84 tempest-AttachInterfacesUnderV243Test-18748603 tempest-AttachInterfacesUnderV243Test-18748603-project-member] Lock "fcc5a636-554f-424e-a604-a8e7bd7cf574" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 32.296s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1839.627605] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Lock "fcc5a636-554f-424e-a604-a8e7bd7cf574" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 12.588s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1839.627731] env[63279]: INFO nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] [instance: fcc5a636-554f-424e-a604-a8e7bd7cf574] During sync_power_state the instance has a pending task (spawning). Skip. 
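
Editor's note: the "Updating instance_info_cache with network_info: [...]" entries above embed the full VIF structure for each port (port id, MAC, subnets with fixed IPs, devname). The helper below is a hypothetical convenience for summarizing such a list; it is not part of Nova, and only reads keys that actually appear in the cache entries shown above.

def summarize_network_info(network_info):
    """Return (port_id, mac, [fixed_ips], devname) for each VIF in a network_info list."""
    summary = []
    for vif in network_info:
        fixed_ips = [
            ip['address']
            for subnet in vif['network']['subnets']
            for ip in subnet['ips']
        ]
        summary.append((vif['id'], vif['address'], fixed_ips, vif.get('devname')))
    return summary

# Usage example, trimmed to the port from the cache update above:
vif = {"id": "887ee4a3-0844-4571-84d8-486d7c21a7d2",
       "address": "fa:16:3e:52:14:49",
       "devname": "tap887ee4a3-08",
       "network": {"subnets": [{"ips": [{"address": "192.168.128.10"}]}]}}
print(summarize_network_info([vif]))
# -> [('887ee4a3-0844-4571-84d8-486d7c21a7d2', 'fa:16:3e:52:14:49', ['192.168.128.10'], 'tap887ee4a3-08')]
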
[ 1839.627933] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Lock "fcc5a636-554f-424e-a604-a8e7bd7cf574" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.001s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1839.657830] env[63279]: DEBUG nova.scheduler.client.report [None req-0b6dec82-a075-43f4-a12b-6738f27bbf78 tempest-ServerDiagnosticsTest-1267197074 tempest-ServerDiagnosticsTest-1267197074-project-member] Inventory has not changed for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1839.667545] env[63279]: DEBUG oslo_vmware.api [None req-271b2613-9c7a-40b6-867e-c4c5c1580416 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Task: {'id': task-2086809, 'name': PowerOnVM_Task, 'duration_secs': 1.255847} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1839.667924] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-271b2613-9c7a-40b6-867e-c4c5c1580416 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] [instance: c05e9aaa-e251-480c-94d6-56c29bb6282d] Powered on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1839.668158] env[63279]: INFO nova.compute.manager [None req-271b2613-9c7a-40b6-867e-c4c5c1580416 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] [instance: c05e9aaa-e251-480c-94d6-56c29bb6282d] Took 10.23 seconds to spawn the instance on the hypervisor. [ 1839.668326] env[63279]: DEBUG nova.compute.manager [None req-271b2613-9c7a-40b6-867e-c4c5c1580416 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] [instance: c05e9aaa-e251-480c-94d6-56c29bb6282d] Checking state {{(pid=63279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1839.669508] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e8fc3a2-db56-4f86-8db7-51f54fe0ba51 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1839.695123] env[63279]: DEBUG oslo_vmware.api [None req-af15306e-e4bd-4235-87e6-bad065fff03b tempest-VolumesAssistedSnapshotsTest-1202308672 tempest-VolumesAssistedSnapshotsTest-1202308672-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]521f9b5d-fe72-f158-ad16-7bedf052dd27, 'name': SearchDatastore_Task, 'duration_secs': 0.016975} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1839.695123] env[63279]: DEBUG oslo_concurrency.lockutils [None req-af15306e-e4bd-4235-87e6-bad065fff03b tempest-VolumesAssistedSnapshotsTest-1202308672 tempest-VolumesAssistedSnapshotsTest-1202308672-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1839.695363] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-af15306e-e4bd-4235-87e6-bad065fff03b tempest-VolumesAssistedSnapshotsTest-1202308672 tempest-VolumesAssistedSnapshotsTest-1202308672-project-member] [instance: 5d4909ea-396c-45ba-9ff5-acb8576150b3] Processing image 30887889-e45b-4f67-8b3c-16216e594a90 {{(pid=63279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1839.695512] env[63279]: DEBUG oslo_concurrency.lockutils [None req-af15306e-e4bd-4235-87e6-bad065fff03b tempest-VolumesAssistedSnapshotsTest-1202308672 tempest-VolumesAssistedSnapshotsTest-1202308672-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1839.695650] env[63279]: DEBUG oslo_concurrency.lockutils [None req-af15306e-e4bd-4235-87e6-bad065fff03b tempest-VolumesAssistedSnapshotsTest-1202308672 tempest-VolumesAssistedSnapshotsTest-1202308672-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1839.695826] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-af15306e-e4bd-4235-87e6-bad065fff03b tempest-VolumesAssistedSnapshotsTest-1202308672 tempest-VolumesAssistedSnapshotsTest-1202308672-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1839.696096] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-12bf3999-7b32-4eb3-bd7b-300eb37b6054 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1839.708170] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-af15306e-e4bd-4235-87e6-bad065fff03b tempest-VolumesAssistedSnapshotsTest-1202308672 tempest-VolumesAssistedSnapshotsTest-1202308672-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1839.712694] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-af15306e-e4bd-4235-87e6-bad065fff03b tempest-VolumesAssistedSnapshotsTest-1202308672 tempest-VolumesAssistedSnapshotsTest-1202308672-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1839.712694] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d4273170-ea10-4fe2-9c43-65e4ca2da262 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1839.717838] env[63279]: DEBUG oslo_vmware.api [None req-af15306e-e4bd-4235-87e6-bad065fff03b tempest-VolumesAssistedSnapshotsTest-1202308672 tempest-VolumesAssistedSnapshotsTest-1202308672-project-member] Waiting for the task: (returnval){ [ 1839.717838] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52ad6f10-4330-245a-b73d-2b4cfe399792" [ 1839.717838] env[63279]: _type = "Task" [ 1839.717838] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1839.726567] env[63279]: DEBUG oslo_vmware.api [None req-af15306e-e4bd-4235-87e6-bad065fff03b tempest-VolumesAssistedSnapshotsTest-1202308672 tempest-VolumesAssistedSnapshotsTest-1202308672-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52ad6f10-4330-245a-b73d-2b4cfe399792, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1839.849907] env[63279]: DEBUG oslo_vmware.api [None req-271b2613-9c7a-40b6-867e-c4c5c1580416 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Task: {'id': task-2086811, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1839.924738] env[63279]: DEBUG oslo_concurrency.lockutils [req-246407ef-c73d-4715-9d68-7a76829dce55 req-761ed9e1-930a-47af-ae62-4ce2442bcf7a service nova] Releasing lock "refresh_cache-5d4909ea-396c-45ba-9ff5-acb8576150b3" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1840.025767] env[63279]: DEBUG nova.compute.manager [None req-99df1493-5715-4262-b84c-33faf3142bfd tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] [instance: 1a55008a-4d8c-403d-a8f4-966aa7346f4c] Start spawning the instance on the hypervisor. 
{{(pid=63279) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1840.065225] env[63279]: DEBUG nova.virt.hardware [None req-99df1493-5715-4262-b84c-33faf3142bfd tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-13T17:51:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='1077870210',id=20,is_public=True,memory_mb=192,name='tempest-flavor_with_ephemeral_0-1937849382',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-13T17:30:01Z,direct_url=,disk_format='vmdk',id=30887889-e45b-4f67-8b3c-16216e594a90,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a935e86eee0a4c38adfe0367d2097a61',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-13T17:30:02Z,virtual_size=,visibility=), allow threads: False {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1840.065543] env[63279]: DEBUG nova.virt.hardware [None req-99df1493-5715-4262-b84c-33faf3142bfd tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] Flavor limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1840.065753] env[63279]: DEBUG nova.virt.hardware [None req-99df1493-5715-4262-b84c-33faf3142bfd tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] Image limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1840.065951] env[63279]: DEBUG nova.virt.hardware [None req-99df1493-5715-4262-b84c-33faf3142bfd tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] Flavor pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1840.066360] env[63279]: DEBUG nova.virt.hardware [None req-99df1493-5715-4262-b84c-33faf3142bfd tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] Image pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1840.066596] env[63279]: DEBUG nova.virt.hardware [None req-99df1493-5715-4262-b84c-33faf3142bfd tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1840.066873] env[63279]: DEBUG nova.virt.hardware [None req-99df1493-5715-4262-b84c-33faf3142bfd tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1840.067239] env[63279]: DEBUG nova.virt.hardware [None req-99df1493-5715-4262-b84c-33faf3142bfd tempest-ServersWithSpecificFlavorTestJSON-134149151 
tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1840.067239] env[63279]: DEBUG nova.virt.hardware [None req-99df1493-5715-4262-b84c-33faf3142bfd tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] Got 1 possible topologies {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1840.067398] env[63279]: DEBUG nova.virt.hardware [None req-99df1493-5715-4262-b84c-33faf3142bfd tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1840.067590] env[63279]: DEBUG nova.virt.hardware [None req-99df1493-5715-4262-b84c-33faf3142bfd tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1840.068730] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c4456b6-dd8f-4ee4-b454-1a4a6351b8ff {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1840.079059] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7fefc109-54c5-46b1-b45a-52d8189e463f {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1840.131555] env[63279]: DEBUG nova.compute.manager [None req-1c67a5c6-4ab3-4770-b276-ecfec0564f8f tempest-ServersTestManualDisk-725019117 tempest-ServersTestManualDisk-725019117-project-member] [instance: ecedded1-7169-49a4-8a9e-2fe4086db986] Starting instance... 
{{(pid=63279) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1840.170898] env[63279]: DEBUG oslo_concurrency.lockutils [None req-0b6dec82-a075-43f4-a12b-6738f27bbf78 tempest-ServerDiagnosticsTest-1267197074 tempest-ServerDiagnosticsTest-1267197074-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.169s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1840.173537] env[63279]: DEBUG oslo_concurrency.lockutils [None req-87178c67-9fe7-4e5b-83de-4d1d9bf7ef39 tempest-DeleteServersAdminTestJSON-181817188 tempest-DeleteServersAdminTestJSON-181817188-project-admin] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 10.140s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1840.173699] env[63279]: DEBUG nova.objects.instance [None req-87178c67-9fe7-4e5b-83de-4d1d9bf7ef39 tempest-DeleteServersAdminTestJSON-181817188 tempest-DeleteServersAdminTestJSON-181817188-project-admin] Lazy-loading 'resources' on Instance uuid a301d225-684d-4f88-bc9b-7e02b8115b9d {{(pid=63279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1840.200928] env[63279]: INFO nova.compute.manager [None req-271b2613-9c7a-40b6-867e-c4c5c1580416 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] [instance: c05e9aaa-e251-480c-94d6-56c29bb6282d] Took 20.63 seconds to build instance. [ 1840.206231] env[63279]: INFO nova.scheduler.client.report [None req-0b6dec82-a075-43f4-a12b-6738f27bbf78 tempest-ServerDiagnosticsTest-1267197074 tempest-ServerDiagnosticsTest-1267197074-project-member] Deleted allocations for instance ac1d0e8f-446a-4a6d-a916-08f52426396d [ 1840.229764] env[63279]: DEBUG oslo_vmware.api [None req-af15306e-e4bd-4235-87e6-bad065fff03b tempest-VolumesAssistedSnapshotsTest-1202308672 tempest-VolumesAssistedSnapshotsTest-1202308672-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52ad6f10-4330-245a-b73d-2b4cfe399792, 'name': SearchDatastore_Task, 'duration_secs': 0.013852} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1840.230712] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-71a7e889-7e28-4714-af52-4e72091f282c {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1840.238576] env[63279]: DEBUG oslo_vmware.api [None req-af15306e-e4bd-4235-87e6-bad065fff03b tempest-VolumesAssistedSnapshotsTest-1202308672 tempest-VolumesAssistedSnapshotsTest-1202308672-project-member] Waiting for the task: (returnval){ [ 1840.238576] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52b6763a-0d2c-6bc9-61fd-d551e629299d" [ 1840.238576] env[63279]: _type = "Task" [ 1840.238576] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1840.249162] env[63279]: DEBUG oslo_vmware.api [None req-af15306e-e4bd-4235-87e6-bad065fff03b tempest-VolumesAssistedSnapshotsTest-1202308672 tempest-VolumesAssistedSnapshotsTest-1202308672-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52b6763a-0d2c-6bc9-61fd-d551e629299d, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1840.312289] env[63279]: DEBUG nova.compute.manager [None req-5a57f975-1224-4ba7-b7d5-e0259018db0c tempest-ImagesOneServerTestJSON-859902932 tempest-ImagesOneServerTestJSON-859902932-project-member] [instance: 7cd673bb-3795-4c6f-a1e0-a8ea0da0a35f] Checking state {{(pid=63279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1840.312289] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c3fef53-3ce1-4d36-9c11-62af93ed5b67 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1840.348155] env[63279]: DEBUG oslo_vmware.api [None req-271b2613-9c7a-40b6-867e-c4c5c1580416 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Task: {'id': task-2086811, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.840306} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1840.348155] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-271b2613-9c7a-40b6-867e-c4c5c1580416 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] 3d4db43f-5784-46e1-9710-f6becec011e2/3d4db43f-5784-46e1-9710-f6becec011e2.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1840.348155] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-271b2613-9c7a-40b6-867e-c4c5c1580416 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] [instance: 3d4db43f-5784-46e1-9710-f6becec011e2] Extending root virtual disk to 1048576 {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1840.348155] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d0988adf-dceb-4e3e-ab40-691abab852b4 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1840.357135] env[63279]: DEBUG oslo_vmware.api [None req-271b2613-9c7a-40b6-867e-c4c5c1580416 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Waiting for the task: (returnval){ [ 1840.357135] env[63279]: value = "task-2086812" [ 1840.357135] env[63279]: _type = "Task" [ 1840.357135] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1840.367961] env[63279]: DEBUG oslo_vmware.api [None req-271b2613-9c7a-40b6-867e-c4c5c1580416 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Task: {'id': task-2086812, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1840.442176] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1840.442438] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Starting heal instance info cache {{(pid=63279) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10447}} [ 1840.442561] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Rebuilding the list of instances to heal {{(pid=63279) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10451}} [ 1840.481133] env[63279]: DEBUG oslo_concurrency.lockutils [None req-8d990ec3-fde0-427d-b132-b45041bfcd25 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] Acquiring lock "5bb445d3-1b12-4a1b-ad2a-cbc929b13aee" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1840.481684] env[63279]: DEBUG oslo_concurrency.lockutils [None req-8d990ec3-fde0-427d-b132-b45041bfcd25 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] Lock "5bb445d3-1b12-4a1b-ad2a-cbc929b13aee" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1840.659470] env[63279]: DEBUG oslo_concurrency.lockutils [None req-1c67a5c6-4ab3-4770-b276-ecfec0564f8f tempest-ServersTestManualDisk-725019117 tempest-ServersTestManualDisk-725019117-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1840.710266] env[63279]: DEBUG oslo_concurrency.lockutils [None req-271b2613-9c7a-40b6-867e-c4c5c1580416 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Lock "c05e9aaa-e251-480c-94d6-56c29bb6282d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 31.230s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1840.711961] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Lock "c05e9aaa-e251-480c-94d6-56c29bb6282d" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 13.673s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1840.715881] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bef74ac5-cecb-41a1-b12c-99572489071b {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1840.719786] env[63279]: DEBUG oslo_concurrency.lockutils [None req-0b6dec82-a075-43f4-a12b-6738f27bbf78 tempest-ServerDiagnosticsTest-1267197074 
tempest-ServerDiagnosticsTest-1267197074-project-member] Lock "ac1d0e8f-446a-4a6d-a916-08f52426396d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 16.285s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1840.722296] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Lock "ac1d0e8f-446a-4a6d-a916-08f52426396d" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 13.687s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1840.722423] env[63279]: INFO nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] [instance: ac1d0e8f-446a-4a6d-a916-08f52426396d] During sync_power_state the instance has a pending task (deleting). Skip. [ 1840.722608] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Lock "ac1d0e8f-446a-4a6d-a916-08f52426396d" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.001s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1840.750468] env[63279]: DEBUG oslo_vmware.api [None req-af15306e-e4bd-4235-87e6-bad065fff03b tempest-VolumesAssistedSnapshotsTest-1202308672 tempest-VolumesAssistedSnapshotsTest-1202308672-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52b6763a-0d2c-6bc9-61fd-d551e629299d, 'name': SearchDatastore_Task, 'duration_secs': 0.012367} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1840.755308] env[63279]: DEBUG oslo_concurrency.lockutils [None req-af15306e-e4bd-4235-87e6-bad065fff03b tempest-VolumesAssistedSnapshotsTest-1202308672 tempest-VolumesAssistedSnapshotsTest-1202308672-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1840.755308] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-af15306e-e4bd-4235-87e6-bad065fff03b tempest-VolumesAssistedSnapshotsTest-1202308672 tempest-VolumesAssistedSnapshotsTest-1202308672-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] 5d4909ea-396c-45ba-9ff5-acb8576150b3/5d4909ea-396c-45ba-9ff5-acb8576150b3.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1840.755308] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5cf192e8-87d8-4e7a-9849-151775dbe37c {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1840.763203] env[63279]: DEBUG oslo_vmware.api [None req-af15306e-e4bd-4235-87e6-bad065fff03b tempest-VolumesAssistedSnapshotsTest-1202308672 tempest-VolumesAssistedSnapshotsTest-1202308672-project-member] Waiting for the task: (returnval){ [ 1840.763203] env[63279]: value = "task-2086813" [ 1840.763203] env[63279]: _type = "Task" [ 1840.763203] env[63279]: } to complete. 
{{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1840.772636] env[63279]: DEBUG oslo_vmware.api [None req-af15306e-e4bd-4235-87e6-bad065fff03b tempest-VolumesAssistedSnapshotsTest-1202308672 tempest-VolumesAssistedSnapshotsTest-1202308672-project-member] Task: {'id': task-2086813, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1840.830039] env[63279]: INFO nova.compute.manager [None req-5a57f975-1224-4ba7-b7d5-e0259018db0c tempest-ImagesOneServerTestJSON-859902932 tempest-ImagesOneServerTestJSON-859902932-project-member] [instance: 7cd673bb-3795-4c6f-a1e0-a8ea0da0a35f] instance snapshotting [ 1840.831076] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-513292ae-dec4-46ac-bc54-403ad5bf744f {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1840.853611] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82a0200c-c83e-4a95-a39f-e96bfbfb4d13 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1840.874393] env[63279]: DEBUG oslo_vmware.api [None req-271b2613-9c7a-40b6-867e-c4c5c1580416 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Task: {'id': task-2086812, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.072288} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1840.876767] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-271b2613-9c7a-40b6-867e-c4c5c1580416 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] [instance: 3d4db43f-5784-46e1-9710-f6becec011e2] Extended root virtual disk {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1840.876767] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aed2ee6c-33df-4cf1-8204-8825ec49b682 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1840.901749] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-271b2613-9c7a-40b6-867e-c4c5c1580416 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] [instance: 3d4db43f-5784-46e1-9710-f6becec011e2] Reconfiguring VM instance instance-0000000f to attach disk [datastore1] 3d4db43f-5784-46e1-9710-f6becec011e2/3d4db43f-5784-46e1-9710-f6becec011e2.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1840.905008] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-dd14c85f-88e1-4ab6-8be8-dc935e8ec98c {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1840.928318] env[63279]: DEBUG oslo_vmware.api [None req-271b2613-9c7a-40b6-867e-c4c5c1580416 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Waiting for the task: (returnval){ [ 1840.928318] env[63279]: value = "task-2086814" [ 1840.928318] env[63279]: _type = "Task" [ 1840.928318] env[63279]: } to complete. 
{{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1840.940700] env[63279]: DEBUG oslo_vmware.api [None req-271b2613-9c7a-40b6-867e-c4c5c1580416 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Task: {'id': task-2086814, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1840.949772] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] [instance: 3d4db43f-5784-46e1-9710-f6becec011e2] Skipping network cache update for instance because it is Building. {{(pid=63279) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10460}} [ 1840.950108] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] [instance: 5d4909ea-396c-45ba-9ff5-acb8576150b3] Skipping network cache update for instance because it is Building. {{(pid=63279) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10460}} [ 1840.950385] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] [instance: 1a55008a-4d8c-403d-a8f4-966aa7346f4c] Skipping network cache update for instance because it is Building. {{(pid=63279) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10460}} [ 1841.006049] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Acquiring lock "refresh_cache-0224e4ea-c13c-4abd-9626-6038c0bbe4e9" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1841.006049] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Acquired lock "refresh_cache-0224e4ea-c13c-4abd-9626-6038c0bbe4e9" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1841.006049] env[63279]: DEBUG nova.network.neutron [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] [instance: 0224e4ea-c13c-4abd-9626-6038c0bbe4e9] Forcefully refreshing network info cache for instance {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}} [ 1841.006049] env[63279]: DEBUG nova.objects.instance [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Lazy-loading 'info_cache' on Instance uuid 0224e4ea-c13c-4abd-9626-6038c0bbe4e9 {{(pid=63279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1841.216807] env[63279]: DEBUG nova.compute.manager [None req-4eb26c5f-1d6c-44a0-9365-feefd9e1d5a1 tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] [instance: ff2f355a-9687-4491-b243-6133e4b7b866] Starting instance... 
{{(pid=63279) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1841.239056] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Lock "c05e9aaa-e251-480c-94d6-56c29bb6282d" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.527s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1841.287530] env[63279]: DEBUG oslo_vmware.api [None req-af15306e-e4bd-4235-87e6-bad065fff03b tempest-VolumesAssistedSnapshotsTest-1202308672 tempest-VolumesAssistedSnapshotsTest-1202308672-project-member] Task: {'id': task-2086813, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1841.380306] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-5a57f975-1224-4ba7-b7d5-e0259018db0c tempest-ImagesOneServerTestJSON-859902932 tempest-ImagesOneServerTestJSON-859902932-project-member] [instance: 7cd673bb-3795-4c6f-a1e0-a8ea0da0a35f] Creating Snapshot of the VM instance {{(pid=63279) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1841.380306] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-a1fb6e92-b1dd-4b95-8a93-b1d76a677b63 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1841.392113] env[63279]: DEBUG oslo_vmware.api [None req-5a57f975-1224-4ba7-b7d5-e0259018db0c tempest-ImagesOneServerTestJSON-859902932 tempest-ImagesOneServerTestJSON-859902932-project-member] Waiting for the task: (returnval){ [ 1841.392113] env[63279]: value = "task-2086816" [ 1841.392113] env[63279]: _type = "Task" [ 1841.392113] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1841.401660] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a59fc81-3366-45e2-b9fe-8d7d041952c7 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1841.415024] env[63279]: DEBUG oslo_vmware.api [None req-5a57f975-1224-4ba7-b7d5-e0259018db0c tempest-ImagesOneServerTestJSON-859902932 tempest-ImagesOneServerTestJSON-859902932-project-member] Task: {'id': task-2086816, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1841.421676] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13456d1a-0685-4725-940b-4f60d944d50a {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1841.464954] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9818c0e6-eab5-4abf-9c98-d5d11532cfe3 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1841.478722] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9b2dc0a-5df9-4665-bdef-f14de0902be9 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1841.483025] env[63279]: DEBUG oslo_vmware.api [None req-271b2613-9c7a-40b6-867e-c4c5c1580416 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Task: {'id': task-2086814, 'name': ReconfigVM_Task, 'duration_secs': 0.330355} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1841.483346] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-271b2613-9c7a-40b6-867e-c4c5c1580416 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] [instance: 3d4db43f-5784-46e1-9710-f6becec011e2] Reconfigured VM instance instance-0000000f to attach disk [datastore1] 3d4db43f-5784-46e1-9710-f6becec011e2/3d4db43f-5784-46e1-9710-f6becec011e2.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1841.484470] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-5ddfc953-2e15-4c01-b565-9f6f0d6a8026 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1841.495908] env[63279]: DEBUG nova.compute.provider_tree [None req-87178c67-9fe7-4e5b-83de-4d1d9bf7ef39 tempest-DeleteServersAdminTestJSON-181817188 tempest-DeleteServersAdminTestJSON-181817188-project-admin] Inventory has not changed in ProviderTree for provider: 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1841.499534] env[63279]: DEBUG oslo_vmware.api [None req-271b2613-9c7a-40b6-867e-c4c5c1580416 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Waiting for the task: (returnval){ [ 1841.499534] env[63279]: value = "task-2086817" [ 1841.499534] env[63279]: _type = "Task" [ 1841.499534] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1841.512577] env[63279]: DEBUG oslo_vmware.api [None req-271b2613-9c7a-40b6-867e-c4c5c1580416 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Task: {'id': task-2086817, 'name': Rename_Task} progress is 6%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1841.614397] env[63279]: DEBUG nova.network.neutron [None req-99df1493-5715-4262-b84c-33faf3142bfd tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] [instance: 1a55008a-4d8c-403d-a8f4-966aa7346f4c] Successfully updated port: 02e5d9ab-c16c-4141-a131-b7e9f2a2a194 {{(pid=63279) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1841.711973] env[63279]: DEBUG oslo_concurrency.lockutils [None req-96273fea-e321-4566-9745-1792b9a7cc32 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] Acquiring lock "acf95fad-316c-4605-9008-24d4d7c05892" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1841.712270] env[63279]: DEBUG oslo_concurrency.lockutils [None req-96273fea-e321-4566-9745-1792b9a7cc32 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] Lock "acf95fad-316c-4605-9008-24d4d7c05892" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1841.748082] env[63279]: DEBUG oslo_concurrency.lockutils [None req-4eb26c5f-1d6c-44a0-9365-feefd9e1d5a1 tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1841.776650] env[63279]: DEBUG oslo_vmware.api [None req-af15306e-e4bd-4235-87e6-bad065fff03b tempest-VolumesAssistedSnapshotsTest-1202308672 tempest-VolumesAssistedSnapshotsTest-1202308672-project-member] Task: {'id': task-2086813, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.698253} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1841.776926] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-af15306e-e4bd-4235-87e6-bad065fff03b tempest-VolumesAssistedSnapshotsTest-1202308672 tempest-VolumesAssistedSnapshotsTest-1202308672-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] 5d4909ea-396c-45ba-9ff5-acb8576150b3/5d4909ea-396c-45ba-9ff5-acb8576150b3.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1841.777200] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-af15306e-e4bd-4235-87e6-bad065fff03b tempest-VolumesAssistedSnapshotsTest-1202308672 tempest-VolumesAssistedSnapshotsTest-1202308672-project-member] [instance: 5d4909ea-396c-45ba-9ff5-acb8576150b3] Extending root virtual disk to 1048576 {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1841.777424] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-29f2560b-2687-49a7-a8e6-97c397bc17f3 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1841.786723] env[63279]: DEBUG oslo_vmware.api [None req-af15306e-e4bd-4235-87e6-bad065fff03b tempest-VolumesAssistedSnapshotsTest-1202308672 tempest-VolumesAssistedSnapshotsTest-1202308672-project-member] Waiting for the task: (returnval){ [ 1841.786723] env[63279]: value = "task-2086818" [ 1841.786723] env[63279]: _type = "Task" [ 1841.786723] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1841.796840] env[63279]: DEBUG oslo_vmware.api [None req-af15306e-e4bd-4235-87e6-bad065fff03b tempest-VolumesAssistedSnapshotsTest-1202308672 tempest-VolumesAssistedSnapshotsTest-1202308672-project-member] Task: {'id': task-2086818, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1841.902883] env[63279]: DEBUG oslo_vmware.api [None req-5a57f975-1224-4ba7-b7d5-e0259018db0c tempest-ImagesOneServerTestJSON-859902932 tempest-ImagesOneServerTestJSON-859902932-project-member] Task: {'id': task-2086816, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1842.001042] env[63279]: DEBUG nova.scheduler.client.report [None req-87178c67-9fe7-4e5b-83de-4d1d9bf7ef39 tempest-DeleteServersAdminTestJSON-181817188 tempest-DeleteServersAdminTestJSON-181817188-project-admin] Inventory has not changed for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1842.017274] env[63279]: DEBUG oslo_vmware.api [None req-271b2613-9c7a-40b6-867e-c4c5c1580416 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Task: {'id': task-2086817, 'name': Rename_Task, 'duration_secs': 0.190781} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1842.017274] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-271b2613-9c7a-40b6-867e-c4c5c1580416 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] [instance: 3d4db43f-5784-46e1-9710-f6becec011e2] Powering on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1842.019223] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-016ecfe1-3b18-4cd1-9a83-d203e8ef9402 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1842.029505] env[63279]: DEBUG oslo_vmware.api [None req-271b2613-9c7a-40b6-867e-c4c5c1580416 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Waiting for the task: (returnval){ [ 1842.029505] env[63279]: value = "task-2086819" [ 1842.029505] env[63279]: _type = "Task" [ 1842.029505] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1842.042105] env[63279]: DEBUG oslo_vmware.api [None req-271b2613-9c7a-40b6-867e-c4c5c1580416 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Task: {'id': task-2086819, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1842.116758] env[63279]: DEBUG oslo_concurrency.lockutils [None req-99df1493-5715-4262-b84c-33faf3142bfd tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] Acquiring lock "refresh_cache-1a55008a-4d8c-403d-a8f4-966aa7346f4c" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1842.116819] env[63279]: DEBUG oslo_concurrency.lockutils [None req-99df1493-5715-4262-b84c-33faf3142bfd tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] Acquired lock "refresh_cache-1a55008a-4d8c-403d-a8f4-966aa7346f4c" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1842.117018] env[63279]: DEBUG nova.network.neutron [None req-99df1493-5715-4262-b84c-33faf3142bfd tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] [instance: 1a55008a-4d8c-403d-a8f4-966aa7346f4c] Building network info cache for instance {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1842.180307] env[63279]: DEBUG nova.compute.manager [req-a544f704-32c4-49f2-a663-3970b860ceaf req-9d3fcd19-6416-45d3-9afb-f7b878162a25 service nova] [instance: 1a55008a-4d8c-403d-a8f4-966aa7346f4c] Received event network-vif-plugged-02e5d9ab-c16c-4141-a131-b7e9f2a2a194 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 1842.180557] env[63279]: DEBUG oslo_concurrency.lockutils [req-a544f704-32c4-49f2-a663-3970b860ceaf req-9d3fcd19-6416-45d3-9afb-f7b878162a25 service nova] Acquiring lock "1a55008a-4d8c-403d-a8f4-966aa7346f4c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1842.180839] env[63279]: DEBUG oslo_concurrency.lockutils [req-a544f704-32c4-49f2-a663-3970b860ceaf req-9d3fcd19-6416-45d3-9afb-f7b878162a25 service nova] Lock "1a55008a-4d8c-403d-a8f4-966aa7346f4c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1842.181268] env[63279]: DEBUG oslo_concurrency.lockutils [req-a544f704-32c4-49f2-a663-3970b860ceaf req-9d3fcd19-6416-45d3-9afb-f7b878162a25 service nova] Lock "1a55008a-4d8c-403d-a8f4-966aa7346f4c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1842.181553] env[63279]: DEBUG nova.compute.manager [req-a544f704-32c4-49f2-a663-3970b860ceaf req-9d3fcd19-6416-45d3-9afb-f7b878162a25 service nova] [instance: 1a55008a-4d8c-403d-a8f4-966aa7346f4c] No waiting events found dispatching network-vif-plugged-02e5d9ab-c16c-4141-a131-b7e9f2a2a194 {{(pid=63279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1842.181799] env[63279]: WARNING nova.compute.manager [req-a544f704-32c4-49f2-a663-3970b860ceaf req-9d3fcd19-6416-45d3-9afb-f7b878162a25 service nova] [instance: 1a55008a-4d8c-403d-a8f4-966aa7346f4c] Received unexpected event network-vif-plugged-02e5d9ab-c16c-4141-a131-b7e9f2a2a194 for instance with vm_state 
building and task_state spawning. [ 1842.299839] env[63279]: DEBUG oslo_vmware.api [None req-af15306e-e4bd-4235-87e6-bad065fff03b tempest-VolumesAssistedSnapshotsTest-1202308672 tempest-VolumesAssistedSnapshotsTest-1202308672-project-member] Task: {'id': task-2086818, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.192687} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1842.300270] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-af15306e-e4bd-4235-87e6-bad065fff03b tempest-VolumesAssistedSnapshotsTest-1202308672 tempest-VolumesAssistedSnapshotsTest-1202308672-project-member] [instance: 5d4909ea-396c-45ba-9ff5-acb8576150b3] Extended root virtual disk {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1842.301375] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8e29ee5-f87e-48d0-84b4-ce91db3f4a66 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1842.337948] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-af15306e-e4bd-4235-87e6-bad065fff03b tempest-VolumesAssistedSnapshotsTest-1202308672 tempest-VolumesAssistedSnapshotsTest-1202308672-project-member] [instance: 5d4909ea-396c-45ba-9ff5-acb8576150b3] Reconfiguring VM instance instance-00000010 to attach disk [datastore1] 5d4909ea-396c-45ba-9ff5-acb8576150b3/5d4909ea-396c-45ba-9ff5-acb8576150b3.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1842.337948] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-747f647c-1dc3-4730-95bb-9192b3fab28e {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1842.360387] env[63279]: DEBUG oslo_vmware.api [None req-af15306e-e4bd-4235-87e6-bad065fff03b tempest-VolumesAssistedSnapshotsTest-1202308672 tempest-VolumesAssistedSnapshotsTest-1202308672-project-member] Waiting for the task: (returnval){ [ 1842.360387] env[63279]: value = "task-2086820" [ 1842.360387] env[63279]: _type = "Task" [ 1842.360387] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1842.371330] env[63279]: DEBUG oslo_vmware.api [None req-af15306e-e4bd-4235-87e6-bad065fff03b tempest-VolumesAssistedSnapshotsTest-1202308672 tempest-VolumesAssistedSnapshotsTest-1202308672-project-member] Task: {'id': task-2086820, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1842.412062] env[63279]: DEBUG oslo_vmware.api [None req-5a57f975-1224-4ba7-b7d5-e0259018db0c tempest-ImagesOneServerTestJSON-859902932 tempest-ImagesOneServerTestJSON-859902932-project-member] Task: {'id': task-2086816, 'name': CreateSnapshot_Task} progress is 100%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1842.513854] env[63279]: DEBUG oslo_concurrency.lockutils [None req-87178c67-9fe7-4e5b-83de-4d1d9bf7ef39 tempest-DeleteServersAdminTestJSON-181817188 tempest-DeleteServersAdminTestJSON-181817188-project-admin] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.339s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1842.519258] env[63279]: DEBUG oslo_concurrency.lockutils [None req-7fcc8714-6322-470e-8fb8-cd968acaa101 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 11.549s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1842.519742] env[63279]: INFO nova.compute.claims [None req-7fcc8714-6322-470e-8fb8-cd968acaa101 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: 41654a82-af5e-463e-80f9-86ba13a5ad2e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1842.543034] env[63279]: DEBUG oslo_vmware.api [None req-271b2613-9c7a-40b6-867e-c4c5c1580416 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Task: {'id': task-2086819, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1842.544177] env[63279]: INFO nova.scheduler.client.report [None req-87178c67-9fe7-4e5b-83de-4d1d9bf7ef39 tempest-DeleteServersAdminTestJSON-181817188 tempest-DeleteServersAdminTestJSON-181817188-project-admin] Deleted allocations for instance a301d225-684d-4f88-bc9b-7e02b8115b9d [ 1842.792332] env[63279]: DEBUG nova.network.neutron [None req-99df1493-5715-4262-b84c-33faf3142bfd tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] [instance: 1a55008a-4d8c-403d-a8f4-966aa7346f4c] Instance cache missing network info. {{(pid=63279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1842.874649] env[63279]: DEBUG oslo_vmware.api [None req-af15306e-e4bd-4235-87e6-bad065fff03b tempest-VolumesAssistedSnapshotsTest-1202308672 tempest-VolumesAssistedSnapshotsTest-1202308672-project-member] Task: {'id': task-2086820, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1842.908732] env[63279]: DEBUG oslo_vmware.api [None req-5a57f975-1224-4ba7-b7d5-e0259018db0c tempest-ImagesOneServerTestJSON-859902932 tempest-ImagesOneServerTestJSON-859902932-project-member] Task: {'id': task-2086816, 'name': CreateSnapshot_Task, 'duration_secs': 1.08531} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1842.909240] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-5a57f975-1224-4ba7-b7d5-e0259018db0c tempest-ImagesOneServerTestJSON-859902932 tempest-ImagesOneServerTestJSON-859902932-project-member] [instance: 7cd673bb-3795-4c6f-a1e0-a8ea0da0a35f] Created Snapshot of the VM instance {{(pid=63279) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1842.910329] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f908bad3-9a6e-4026-ab3b-54416c1379cd {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1843.046791] env[63279]: DEBUG oslo_vmware.api [None req-271b2613-9c7a-40b6-867e-c4c5c1580416 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Task: {'id': task-2086819, 'name': PowerOnVM_Task} progress is 94%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1843.057253] env[63279]: DEBUG oslo_concurrency.lockutils [None req-87178c67-9fe7-4e5b-83de-4d1d9bf7ef39 tempest-DeleteServersAdminTestJSON-181817188 tempest-DeleteServersAdminTestJSON-181817188-project-admin] Lock "a301d225-684d-4f88-bc9b-7e02b8115b9d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 16.904s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1843.057571] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Lock "a301d225-684d-4f88-bc9b-7e02b8115b9d" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 16.021s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1843.057571] env[63279]: INFO nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] [instance: a301d225-684d-4f88-bc9b-7e02b8115b9d] During sync_power_state the instance has a pending task (deleting). Skip. 
[ 1843.057745] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Lock "a301d225-684d-4f88-bc9b-7e02b8115b9d" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1843.202634] env[63279]: DEBUG nova.network.neutron [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] [instance: 0224e4ea-c13c-4abd-9626-6038c0bbe4e9] Updating instance_info_cache with network_info: [{"id": "ff3873e4-61b9-4b2d-80eb-2ace560fe858", "address": "fa:16:3e:4f:c4:9e", "network": {"id": "1e875730-8e0e-4907-a6d2-776025ed7ab9", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.82", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "a935e86eee0a4c38adfe0367d2097a61", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "32028d02-abaa-4071-bc65-1460f5c772a8", "external-id": "nsx-vlan-transportzone-558", "segmentation_id": 558, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapff3873e4-61", "ovs_interfaceid": "ff3873e4-61b9-4b2d-80eb-2ace560fe858", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1843.334823] env[63279]: DEBUG nova.network.neutron [None req-99df1493-5715-4262-b84c-33faf3142bfd tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] [instance: 1a55008a-4d8c-403d-a8f4-966aa7346f4c] Updating instance_info_cache with network_info: [{"id": "02e5d9ab-c16c-4141-a131-b7e9f2a2a194", "address": "fa:16:3e:24:da:b3", "network": {"id": "26ed2848-6f14-4264-af0d-f08d62ab4413", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-659194361-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e931e0ddf1b840fdb8743fbdba05b28d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ccf76700-491b-4462-ab19-e6d3a9ff87ac", "external-id": "nsx-vlan-transportzone-956", "segmentation_id": 956, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap02e5d9ab-c1", "ovs_interfaceid": "02e5d9ab-c16c-4141-a131-b7e9f2a2a194", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1843.372984] env[63279]: DEBUG oslo_vmware.api [None req-af15306e-e4bd-4235-87e6-bad065fff03b 
tempest-VolumesAssistedSnapshotsTest-1202308672 tempest-VolumesAssistedSnapshotsTest-1202308672-project-member] Task: {'id': task-2086820, 'name': ReconfigVM_Task, 'duration_secs': 0.62773} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1843.374524] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-af15306e-e4bd-4235-87e6-bad065fff03b tempest-VolumesAssistedSnapshotsTest-1202308672 tempest-VolumesAssistedSnapshotsTest-1202308672-project-member] [instance: 5d4909ea-396c-45ba-9ff5-acb8576150b3] Reconfigured VM instance instance-00000010 to attach disk [datastore1] 5d4909ea-396c-45ba-9ff5-acb8576150b3/5d4909ea-396c-45ba-9ff5-acb8576150b3.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1843.378048] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-afe266e3-a3ee-40e2-9adf-2d4ac76027de {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1843.391918] env[63279]: DEBUG oslo_vmware.api [None req-af15306e-e4bd-4235-87e6-bad065fff03b tempest-VolumesAssistedSnapshotsTest-1202308672 tempest-VolumesAssistedSnapshotsTest-1202308672-project-member] Waiting for the task: (returnval){ [ 1843.391918] env[63279]: value = "task-2086822" [ 1843.391918] env[63279]: _type = "Task" [ 1843.391918] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1843.408564] env[63279]: DEBUG oslo_vmware.api [None req-af15306e-e4bd-4235-87e6-bad065fff03b tempest-VolumesAssistedSnapshotsTest-1202308672 tempest-VolumesAssistedSnapshotsTest-1202308672-project-member] Task: {'id': task-2086822, 'name': Rename_Task} progress is 6%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1843.436750] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-5a57f975-1224-4ba7-b7d5-e0259018db0c tempest-ImagesOneServerTestJSON-859902932 tempest-ImagesOneServerTestJSON-859902932-project-member] [instance: 7cd673bb-3795-4c6f-a1e0-a8ea0da0a35f] Creating linked-clone VM from snapshot {{(pid=63279) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1843.436750] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-8fc10abf-aa27-4516-b3d8-956749fdebf7 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1843.450459] env[63279]: DEBUG oslo_vmware.api [None req-5a57f975-1224-4ba7-b7d5-e0259018db0c tempest-ImagesOneServerTestJSON-859902932 tempest-ImagesOneServerTestJSON-859902932-project-member] Waiting for the task: (returnval){ [ 1843.450459] env[63279]: value = "task-2086823" [ 1843.450459] env[63279]: _type = "Task" [ 1843.450459] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1843.461194] env[63279]: DEBUG oslo_vmware.api [None req-5a57f975-1224-4ba7-b7d5-e0259018db0c tempest-ImagesOneServerTestJSON-859902932 tempest-ImagesOneServerTestJSON-859902932-project-member] Task: {'id': task-2086823, 'name': CloneVM_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1843.550316] env[63279]: DEBUG oslo_vmware.api [None req-271b2613-9c7a-40b6-867e-c4c5c1580416 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Task: {'id': task-2086819, 'name': PowerOnVM_Task, 'duration_secs': 1.103059} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1843.550728] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-271b2613-9c7a-40b6-867e-c4c5c1580416 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] [instance: 3d4db43f-5784-46e1-9710-f6becec011e2] Powered on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1843.550946] env[63279]: INFO nova.compute.manager [None req-271b2613-9c7a-40b6-867e-c4c5c1580416 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] [instance: 3d4db43f-5784-46e1-9710-f6becec011e2] Took 11.39 seconds to spawn the instance on the hypervisor. [ 1843.551134] env[63279]: DEBUG nova.compute.manager [None req-271b2613-9c7a-40b6-867e-c4c5c1580416 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] [instance: 3d4db43f-5784-46e1-9710-f6becec011e2] Checking state {{(pid=63279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1843.552053] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-338eca62-495e-4126-9e45-8a29fede5cbc {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1843.708921] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Releasing lock "refresh_cache-0224e4ea-c13c-4abd-9626-6038c0bbe4e9" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1843.709151] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] [instance: 0224e4ea-c13c-4abd-9626-6038c0bbe4e9] Updated the network info_cache for instance {{(pid=63279) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10518}} [ 1843.709415] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1843.712252] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager.update_available_resource {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1843.790035] env[63279]: DEBUG nova.compute.manager [None req-222265c7-ffcf-4315-8a3d-57e508bb68e5 tempest-ServerDiagnosticsV248Test-1904406206 tempest-ServerDiagnosticsV248Test-1904406206-project-admin] [instance: abd63285-ee3c-4546-b86d-6d4388765d94] Checking state {{(pid=63279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1843.791218] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff54c6d1-cae7-4c68-b9e3-74a0a1428150 {{(pid=63279) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1843.799151] env[63279]: INFO nova.compute.manager [None req-222265c7-ffcf-4315-8a3d-57e508bb68e5 tempest-ServerDiagnosticsV248Test-1904406206 tempest-ServerDiagnosticsV248Test-1904406206-project-admin] [instance: abd63285-ee3c-4546-b86d-6d4388765d94] Retrieving diagnostics [ 1843.802409] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f68d6121-dc11-4deb-bd11-4810b04c95d1 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1843.839168] env[63279]: DEBUG oslo_concurrency.lockutils [None req-99df1493-5715-4262-b84c-33faf3142bfd tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] Releasing lock "refresh_cache-1a55008a-4d8c-403d-a8f4-966aa7346f4c" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1843.839168] env[63279]: DEBUG nova.compute.manager [None req-99df1493-5715-4262-b84c-33faf3142bfd tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] [instance: 1a55008a-4d8c-403d-a8f4-966aa7346f4c] Instance network_info: |[{"id": "02e5d9ab-c16c-4141-a131-b7e9f2a2a194", "address": "fa:16:3e:24:da:b3", "network": {"id": "26ed2848-6f14-4264-af0d-f08d62ab4413", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-659194361-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e931e0ddf1b840fdb8743fbdba05b28d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ccf76700-491b-4462-ab19-e6d3a9ff87ac", "external-id": "nsx-vlan-transportzone-956", "segmentation_id": 956, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap02e5d9ab-c1", "ovs_interfaceid": "02e5d9ab-c16c-4141-a131-b7e9f2a2a194", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1843.839708] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-99df1493-5715-4262-b84c-33faf3142bfd tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] [instance: 1a55008a-4d8c-403d-a8f4-966aa7346f4c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:24:da:b3', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ccf76700-491b-4462-ab19-e6d3a9ff87ac', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '02e5d9ab-c16c-4141-a131-b7e9f2a2a194', 'vif_model': 'vmxnet3'}] {{(pid=63279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1843.846840] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-99df1493-5715-4262-b84c-33faf3142bfd tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] Creating folder: Project 
(e931e0ddf1b840fdb8743fbdba05b28d). Parent ref: group-v427491. {{(pid=63279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1843.847225] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5e42d94c-3ae6-4bfc-8149-da2b00a45a24 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1843.860641] env[63279]: INFO nova.virt.vmwareapi.vm_util [None req-99df1493-5715-4262-b84c-33faf3142bfd tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] Created folder: Project (e931e0ddf1b840fdb8743fbdba05b28d) in parent group-v427491. [ 1843.860843] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-99df1493-5715-4262-b84c-33faf3142bfd tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] Creating folder: Instances. Parent ref: group-v427542. {{(pid=63279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1843.861106] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-2f53facd-da15-4bb9-a996-26d24eb6d27e {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1843.875554] env[63279]: INFO nova.virt.vmwareapi.vm_util [None req-99df1493-5715-4262-b84c-33faf3142bfd tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] Created folder: Instances in parent group-v427542. [ 1843.875554] env[63279]: DEBUG oslo.service.loopingcall [None req-99df1493-5715-4262-b84c-33faf3142bfd tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1843.875554] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1a55008a-4d8c-403d-a8f4-966aa7346f4c] Creating VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1843.875837] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-00bae01c-082b-4c4a-9316-146ab0a6e1bf {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1843.905438] env[63279]: DEBUG oslo_vmware.api [None req-af15306e-e4bd-4235-87e6-bad065fff03b tempest-VolumesAssistedSnapshotsTest-1202308672 tempest-VolumesAssistedSnapshotsTest-1202308672-project-member] Task: {'id': task-2086822, 'name': Rename_Task, 'duration_secs': 0.299752} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1843.908352] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-af15306e-e4bd-4235-87e6-bad065fff03b tempest-VolumesAssistedSnapshotsTest-1202308672 tempest-VolumesAssistedSnapshotsTest-1202308672-project-member] [instance: 5d4909ea-396c-45ba-9ff5-acb8576150b3] Powering on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1843.908352] env[63279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1843.908352] env[63279]: value = "task-2086826" [ 1843.908352] env[63279]: _type = "Task" [ 1843.908352] env[63279]: } to complete. 
{{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1843.908352] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e246c7c3-f25e-4fb2-a4a2-9e4bdf31a3b5 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1843.923086] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2086826, 'name': CreateVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1843.923545] env[63279]: DEBUG oslo_vmware.api [None req-af15306e-e4bd-4235-87e6-bad065fff03b tempest-VolumesAssistedSnapshotsTest-1202308672 tempest-VolumesAssistedSnapshotsTest-1202308672-project-member] Waiting for the task: (returnval){ [ 1843.923545] env[63279]: value = "task-2086827" [ 1843.923545] env[63279]: _type = "Task" [ 1843.923545] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1843.932657] env[63279]: DEBUG oslo_vmware.api [None req-af15306e-e4bd-4235-87e6-bad065fff03b tempest-VolumesAssistedSnapshotsTest-1202308672 tempest-VolumesAssistedSnapshotsTest-1202308672-project-member] Task: {'id': task-2086827, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1843.967792] env[63279]: DEBUG oslo_vmware.api [None req-5a57f975-1224-4ba7-b7d5-e0259018db0c tempest-ImagesOneServerTestJSON-859902932 tempest-ImagesOneServerTestJSON-859902932-project-member] Task: {'id': task-2086823, 'name': CloneVM_Task} progress is 94%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1844.079572] env[63279]: INFO nova.compute.manager [None req-271b2613-9c7a-40b6-867e-c4c5c1580416 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] [instance: 3d4db43f-5784-46e1-9710-f6becec011e2] Took 22.33 seconds to build instance. 
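Note on the entries above: every vSphere operation in this trace (ReconfigVM_Task, Rename_Task, CloneVM_Task, CreateVM_Task, PowerOnVM_Task) follows the same shape — the driver invokes the task, logs "Waiting for the task ... to complete", emits intermediate "progress is N%" lines, and finally reports completion with a duration_secs. The snippet below is a minimal, stdlib-only sketch of that poll-until-done behaviour for readers following the log; it is an illustration only, not the oslo_vmware implementation, and get_task_info, poll_interval and timeout are hypothetical names introduced here.

    import time

    def wait_for_task(get_task_info, poll_interval=0.5, timeout=300.0):
        # get_task_info() is assumed to return a dict such as
        # {'state': 'running' | 'success' | 'error', 'progress': 42, 'error': '...'}
        start = time.monotonic()
        deadline = start + timeout
        while time.monotonic() < deadline:
            info = get_task_info()
            if info['state'] == 'success':
                return time.monotonic() - start   # analogous to duration_secs in the log
            if info['state'] == 'error':
                raise RuntimeError(info.get('error', 'task failed'))
            print("progress is %d%%" % info.get('progress', 0))
            time.sleep(poll_interval)
        raise TimeoutError('task did not complete within %.0fs' % timeout)

    # Example: a fake task that succeeds on the third poll.
    states = iter([{'state': 'running', 'progress': 0},
                   {'state': 'running', 'progress': 90},
                   {'state': 'success', 'progress': 100}])
    print('duration_secs: %.3f' % wait_for_task(lambda: next(states), poll_interval=0.01))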
[ 1844.184412] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93e8699e-b192-489f-b244-c17f73c37d76 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1844.193240] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-710f7721-bd13-4db3-93d3-9a9ed9bfc6e7 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1844.226552] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1844.227711] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af29ac52-cbc7-4162-8dc8-fa22224154c0 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1844.239967] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb37e31d-4c47-4b6a-8d66-818c2f11d613 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1844.256367] env[63279]: DEBUG nova.compute.provider_tree [None req-7fcc8714-6322-470e-8fb8-cd968acaa101 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Inventory has not changed in ProviderTree for provider: 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1844.422188] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2086826, 'name': CreateVM_Task} progress is 99%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1844.436806] env[63279]: DEBUG oslo_vmware.api [None req-af15306e-e4bd-4235-87e6-bad065fff03b tempest-VolumesAssistedSnapshotsTest-1202308672 tempest-VolumesAssistedSnapshotsTest-1202308672-project-member] Task: {'id': task-2086827, 'name': PowerOnVM_Task} progress is 90%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1844.464718] env[63279]: DEBUG oslo_vmware.api [None req-5a57f975-1224-4ba7-b7d5-e0259018db0c tempest-ImagesOneServerTestJSON-859902932 tempest-ImagesOneServerTestJSON-859902932-project-member] Task: {'id': task-2086823, 'name': CloneVM_Task} progress is 94%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1844.582366] env[63279]: DEBUG oslo_concurrency.lockutils [None req-271b2613-9c7a-40b6-867e-c4c5c1580416 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Lock "3d4db43f-5784-46e1-9710-f6becec011e2" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 35.060s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1844.759836] env[63279]: DEBUG nova.scheduler.client.report [None req-7fcc8714-6322-470e-8fb8-cd968acaa101 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Inventory has not changed for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1844.834273] env[63279]: DEBUG oslo_concurrency.lockutils [None req-1085f46f-382d-442a-8127-97a2236452f2 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Acquiring lock "977fa519-2db3-4ee5-981d-c46820a8c72e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1844.834944] env[63279]: DEBUG oslo_concurrency.lockutils [None req-1085f46f-382d-442a-8127-97a2236452f2 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Lock "977fa519-2db3-4ee5-981d-c46820a8c72e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1844.921482] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2086826, 'name': CreateVM_Task, 'duration_secs': 0.557645} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1844.921656] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1a55008a-4d8c-403d-a8f4-966aa7346f4c] Created VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1844.922412] env[63279]: DEBUG oslo_concurrency.lockutils [None req-99df1493-5715-4262-b84c-33faf3142bfd tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1844.922575] env[63279]: DEBUG oslo_concurrency.lockutils [None req-99df1493-5715-4262-b84c-33faf3142bfd tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1844.922886] env[63279]: DEBUG oslo_concurrency.lockutils [None req-99df1493-5715-4262-b84c-33faf3142bfd tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1844.923157] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-66e697ad-bcd4-41ab-8377-c43c3998f705 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1844.931781] env[63279]: DEBUG oslo_vmware.api [None req-99df1493-5715-4262-b84c-33faf3142bfd tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] Waiting for the task: (returnval){ [ 1844.931781] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52e70d1d-df68-368d-f517-2447462d93b5" [ 1844.931781] env[63279]: _type = "Task" [ 1844.931781] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1844.934830] env[63279]: DEBUG oslo_vmware.api [None req-af15306e-e4bd-4235-87e6-bad065fff03b tempest-VolumesAssistedSnapshotsTest-1202308672 tempest-VolumesAssistedSnapshotsTest-1202308672-project-member] Task: {'id': task-2086827, 'name': PowerOnVM_Task, 'duration_secs': 0.571624} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1844.940199] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-af15306e-e4bd-4235-87e6-bad065fff03b tempest-VolumesAssistedSnapshotsTest-1202308672 tempest-VolumesAssistedSnapshotsTest-1202308672-project-member] [instance: 5d4909ea-396c-45ba-9ff5-acb8576150b3] Powered on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1844.940199] env[63279]: INFO nova.compute.manager [None req-af15306e-e4bd-4235-87e6-bad065fff03b tempest-VolumesAssistedSnapshotsTest-1202308672 tempest-VolumesAssistedSnapshotsTest-1202308672-project-member] [instance: 5d4909ea-396c-45ba-9ff5-acb8576150b3] Took 9.63 seconds to spawn the instance on the hypervisor. 
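Note on the inventory reported a few entries above for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137: each resource class carries total, reserved and allocation_ratio. The sketch below shows how such a record translates into schedulable capacity, assuming the usual placement-style convention of (total - reserved) * allocation_ratio rounded down; the function name and rounding are illustrative and not taken from the log.

    # Sketch only: derive usable capacity from an inventory record, assuming
    # the placement-style formula (total - reserved) * allocation_ratio.
    # The figures mirror the inventory logged above.
    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
    }

    def usable_capacity(inv):
        return {rc: int((v['total'] - v['reserved']) * v['allocation_ratio'])
                for rc, v in inv.items()}

    print(usable_capacity(inventory))
    # -> {'VCPU': 192, 'MEMORY_MB': 196078, 'DISK_GB': 400}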
[ 1844.940199] env[63279]: DEBUG nova.compute.manager [None req-af15306e-e4bd-4235-87e6-bad065fff03b tempest-VolumesAssistedSnapshotsTest-1202308672 tempest-VolumesAssistedSnapshotsTest-1202308672-project-member] [instance: 5d4909ea-396c-45ba-9ff5-acb8576150b3] Checking state {{(pid=63279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1844.940991] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47a7b6c9-bdd1-44d4-84c0-d2145a934806 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1844.955332] env[63279]: DEBUG oslo_vmware.api [None req-99df1493-5715-4262-b84c-33faf3142bfd tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52e70d1d-df68-368d-f517-2447462d93b5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1844.969419] env[63279]: DEBUG oslo_vmware.api [None req-5a57f975-1224-4ba7-b7d5-e0259018db0c tempest-ImagesOneServerTestJSON-859902932 tempest-ImagesOneServerTestJSON-859902932-project-member] Task: {'id': task-2086823, 'name': CloneVM_Task, 'duration_secs': 1.384505} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1844.969725] env[63279]: INFO nova.virt.vmwareapi.vmops [None req-5a57f975-1224-4ba7-b7d5-e0259018db0c tempest-ImagesOneServerTestJSON-859902932 tempest-ImagesOneServerTestJSON-859902932-project-member] [instance: 7cd673bb-3795-4c6f-a1e0-a8ea0da0a35f] Created linked-clone VM from snapshot [ 1844.970510] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f892fae-b45a-4829-9911-c9dc5df0b18f {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1844.978620] env[63279]: DEBUG nova.virt.vmwareapi.images [None req-5a57f975-1224-4ba7-b7d5-e0259018db0c tempest-ImagesOneServerTestJSON-859902932 tempest-ImagesOneServerTestJSON-859902932-project-member] [instance: 7cd673bb-3795-4c6f-a1e0-a8ea0da0a35f] Uploading image 69bd6019-d068-4152-b519-73e464e4d6d1 {{(pid=63279) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1845.021761] env[63279]: DEBUG oslo_vmware.rw_handles [None req-5a57f975-1224-4ba7-b7d5-e0259018db0c tempest-ImagesOneServerTestJSON-859902932 tempest-ImagesOneServerTestJSON-859902932-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1845.021761] env[63279]: value = "vm-427541" [ 1845.021761] env[63279]: _type = "VirtualMachine" [ 1845.021761] env[63279]: }. 
{{(pid=63279) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1845.022949] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-8b96fe33-0917-441c-84a1-22407b98052e {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1845.033353] env[63279]: DEBUG oslo_vmware.rw_handles [None req-5a57f975-1224-4ba7-b7d5-e0259018db0c tempest-ImagesOneServerTestJSON-859902932 tempest-ImagesOneServerTestJSON-859902932-project-member] Lease: (returnval){ [ 1845.033353] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52472db0-cdf6-41f8-8686-ae291f4cd29f" [ 1845.033353] env[63279]: _type = "HttpNfcLease" [ 1845.033353] env[63279]: } obtained for exporting VM: (result){ [ 1845.033353] env[63279]: value = "vm-427541" [ 1845.033353] env[63279]: _type = "VirtualMachine" [ 1845.033353] env[63279]: }. {{(pid=63279) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1845.033757] env[63279]: DEBUG oslo_vmware.api [None req-5a57f975-1224-4ba7-b7d5-e0259018db0c tempest-ImagesOneServerTestJSON-859902932 tempest-ImagesOneServerTestJSON-859902932-project-member] Waiting for the lease: (returnval){ [ 1845.033757] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52472db0-cdf6-41f8-8686-ae291f4cd29f" [ 1845.033757] env[63279]: _type = "HttpNfcLease" [ 1845.033757] env[63279]: } to be ready. {{(pid=63279) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1845.050156] env[63279]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1845.050156] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52472db0-cdf6-41f8-8686-ae291f4cd29f" [ 1845.050156] env[63279]: _type = "HttpNfcLease" [ 1845.050156] env[63279]: } is initializing. {{(pid=63279) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1845.090038] env[63279]: DEBUG nova.compute.manager [None req-3e128472-436c-4de9-a273-dfc54e42830f tempest-ServersV294TestFqdnHostnames-110312040 tempest-ServersV294TestFqdnHostnames-110312040-project-member] [instance: 32dbef6d-d314-4fa6-972a-e7b1f22eb11d] Starting instance... {{(pid=63279) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1845.211260] env[63279]: DEBUG nova.compute.manager [req-fcf964fc-baac-4a01-a230-44e59e9f1f3a req-2daa7649-ffcd-47a2-abdb-e2baa2b26955 service nova] [instance: fcc5a636-554f-424e-a604-a8e7bd7cf574] Received event network-changed-e1063b89-be88-474a-a2ec-b61eb11cf9fe {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 1845.211260] env[63279]: DEBUG nova.compute.manager [req-fcf964fc-baac-4a01-a230-44e59e9f1f3a req-2daa7649-ffcd-47a2-abdb-e2baa2b26955 service nova] [instance: fcc5a636-554f-424e-a604-a8e7bd7cf574] Refreshing instance network info cache due to event network-changed-e1063b89-be88-474a-a2ec-b61eb11cf9fe. 
{{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11655}} [ 1845.211260] env[63279]: DEBUG oslo_concurrency.lockutils [req-fcf964fc-baac-4a01-a230-44e59e9f1f3a req-2daa7649-ffcd-47a2-abdb-e2baa2b26955 service nova] Acquiring lock "refresh_cache-fcc5a636-554f-424e-a604-a8e7bd7cf574" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1845.211260] env[63279]: DEBUG oslo_concurrency.lockutils [req-fcf964fc-baac-4a01-a230-44e59e9f1f3a req-2daa7649-ffcd-47a2-abdb-e2baa2b26955 service nova] Acquired lock "refresh_cache-fcc5a636-554f-424e-a604-a8e7bd7cf574" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1845.211260] env[63279]: DEBUG nova.network.neutron [req-fcf964fc-baac-4a01-a230-44e59e9f1f3a req-2daa7649-ffcd-47a2-abdb-e2baa2b26955 service nova] [instance: fcc5a636-554f-424e-a604-a8e7bd7cf574] Refreshing network info cache for port e1063b89-be88-474a-a2ec-b61eb11cf9fe {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1845.271575] env[63279]: DEBUG oslo_concurrency.lockutils [None req-7fcc8714-6322-470e-8fb8-cd968acaa101 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.755s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1845.272752] env[63279]: DEBUG nova.compute.manager [None req-7fcc8714-6322-470e-8fb8-cd968acaa101 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: 41654a82-af5e-463e-80f9-86ba13a5ad2e] Start building networks asynchronously for instance. {{(pid=63279) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1845.274875] env[63279]: DEBUG oslo_concurrency.lockutils [None req-38d597a6-cdfb-4da8-926a-02d3447e2334 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 13.082s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1845.276379] env[63279]: INFO nova.compute.claims [None req-38d597a6-cdfb-4da8-926a-02d3447e2334 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] [instance: 00fb1bc8-2f70-4fa3-a6a2-54fd38ba89c7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1845.446815] env[63279]: DEBUG oslo_vmware.api [None req-99df1493-5715-4262-b84c-33faf3142bfd tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52e70d1d-df68-368d-f517-2447462d93b5, 'name': SearchDatastore_Task, 'duration_secs': 0.038716} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1845.446815] env[63279]: DEBUG oslo_concurrency.lockutils [None req-99df1493-5715-4262-b84c-33faf3142bfd tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1845.447018] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-99df1493-5715-4262-b84c-33faf3142bfd tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] [instance: 1a55008a-4d8c-403d-a8f4-966aa7346f4c] Processing image 30887889-e45b-4f67-8b3c-16216e594a90 {{(pid=63279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1845.447186] env[63279]: DEBUG oslo_concurrency.lockutils [None req-99df1493-5715-4262-b84c-33faf3142bfd tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1845.447624] env[63279]: DEBUG oslo_concurrency.lockutils [None req-99df1493-5715-4262-b84c-33faf3142bfd tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1845.447624] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-99df1493-5715-4262-b84c-33faf3142bfd tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1845.447861] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-bc0ca44a-e268-4f43-8725-164ba96e6b3d {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1845.458934] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-99df1493-5715-4262-b84c-33faf3142bfd tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1845.459260] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-99df1493-5715-4262-b84c-33faf3142bfd tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1845.460352] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c91834f8-a97b-49c7-a57e-9994a4cf87af {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1845.470234] env[63279]: DEBUG oslo_vmware.api [None req-99df1493-5715-4262-b84c-33faf3142bfd tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] Waiting for the task: (returnval){ [ 1845.470234] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52db1722-05d4-9dde-7de9-e1d2fcda9171" [ 1845.470234] env[63279]: _type = "Task" [ 1845.470234] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1845.471982] env[63279]: INFO nova.compute.manager [None req-af15306e-e4bd-4235-87e6-bad065fff03b tempest-VolumesAssistedSnapshotsTest-1202308672 tempest-VolumesAssistedSnapshotsTest-1202308672-project-member] [instance: 5d4909ea-396c-45ba-9ff5-acb8576150b3] Took 21.48 seconds to build instance. [ 1845.486278] env[63279]: DEBUG oslo_vmware.api [None req-99df1493-5715-4262-b84c-33faf3142bfd tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52db1722-05d4-9dde-7de9-e1d2fcda9171, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1845.505824] env[63279]: DEBUG nova.compute.manager [req-0b7bcddd-288a-42ba-852b-621a521c5eb3 req-06e0c108-39c6-4f1b-b7ad-a549b2f5acee service nova] [instance: 1a55008a-4d8c-403d-a8f4-966aa7346f4c] Received event network-changed-02e5d9ab-c16c-4141-a131-b7e9f2a2a194 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 1845.505966] env[63279]: DEBUG nova.compute.manager [req-0b7bcddd-288a-42ba-852b-621a521c5eb3 req-06e0c108-39c6-4f1b-b7ad-a549b2f5acee service nova] [instance: 1a55008a-4d8c-403d-a8f4-966aa7346f4c] Refreshing instance network info cache due to event network-changed-02e5d9ab-c16c-4141-a131-b7e9f2a2a194. 
{{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11655}} [ 1845.506213] env[63279]: DEBUG oslo_concurrency.lockutils [req-0b7bcddd-288a-42ba-852b-621a521c5eb3 req-06e0c108-39c6-4f1b-b7ad-a549b2f5acee service nova] Acquiring lock "refresh_cache-1a55008a-4d8c-403d-a8f4-966aa7346f4c" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1845.506366] env[63279]: DEBUG oslo_concurrency.lockutils [req-0b7bcddd-288a-42ba-852b-621a521c5eb3 req-06e0c108-39c6-4f1b-b7ad-a549b2f5acee service nova] Acquired lock "refresh_cache-1a55008a-4d8c-403d-a8f4-966aa7346f4c" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1845.506642] env[63279]: DEBUG nova.network.neutron [req-0b7bcddd-288a-42ba-852b-621a521c5eb3 req-06e0c108-39c6-4f1b-b7ad-a549b2f5acee service nova] [instance: 1a55008a-4d8c-403d-a8f4-966aa7346f4c] Refreshing network info cache for port 02e5d9ab-c16c-4141-a131-b7e9f2a2a194 {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1845.542874] env[63279]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1845.542874] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52472db0-cdf6-41f8-8686-ae291f4cd29f" [ 1845.542874] env[63279]: _type = "HttpNfcLease" [ 1845.542874] env[63279]: } is ready. {{(pid=63279) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1845.543229] env[63279]: DEBUG oslo_vmware.rw_handles [None req-5a57f975-1224-4ba7-b7d5-e0259018db0c tempest-ImagesOneServerTestJSON-859902932 tempest-ImagesOneServerTestJSON-859902932-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1845.543229] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52472db0-cdf6-41f8-8686-ae291f4cd29f" [ 1845.543229] env[63279]: _type = "HttpNfcLease" [ 1845.543229] env[63279]: }. {{(pid=63279) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1845.544147] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dbad057f-b676-4acb-b8d0-180b9fbc83a2 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1845.553307] env[63279]: DEBUG oslo_vmware.rw_handles [None req-5a57f975-1224-4ba7-b7d5-e0259018db0c tempest-ImagesOneServerTestJSON-859902932 tempest-ImagesOneServerTestJSON-859902932-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52210752-7b70-d9e6-2ba9-cffb41e649d6/disk-0.vmdk from lease info. {{(pid=63279) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1845.553493] env[63279]: DEBUG oslo_vmware.rw_handles [None req-5a57f975-1224-4ba7-b7d5-e0259018db0c tempest-ImagesOneServerTestJSON-859902932 tempest-ImagesOneServerTestJSON-859902932-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52210752-7b70-d9e6-2ba9-cffb41e649d6/disk-0.vmdk for reading. 
{{(pid=63279) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1845.660696] env[63279]: DEBUG oslo_concurrency.lockutils [None req-3e128472-436c-4de9-a273-dfc54e42830f tempest-ServersV294TestFqdnHostnames-110312040 tempest-ServersV294TestFqdnHostnames-110312040-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1845.667322] env[63279]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-7661b0a1-b67a-4909-bb89-a8295900c3b9 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1845.782827] env[63279]: DEBUG nova.compute.utils [None req-7fcc8714-6322-470e-8fb8-cd968acaa101 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Using /dev/sd instead of None {{(pid=63279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1845.787511] env[63279]: DEBUG nova.compute.manager [None req-7fcc8714-6322-470e-8fb8-cd968acaa101 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: 41654a82-af5e-463e-80f9-86ba13a5ad2e] Allocating IP information in the background. {{(pid=63279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1845.787511] env[63279]: DEBUG nova.network.neutron [None req-7fcc8714-6322-470e-8fb8-cd968acaa101 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: 41654a82-af5e-463e-80f9-86ba13a5ad2e] allocate_for_instance() {{(pid=63279) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1845.846267] env[63279]: DEBUG nova.policy [None req-7fcc8714-6322-470e-8fb8-cd968acaa101 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ab1648b445ab458d92404e3a5ddb8619', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7c98de1240f14b058e58f6a707096ef1', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63279) authorize /opt/stack/nova/nova/policy.py:192}} [ 1845.979391] env[63279]: DEBUG oslo_concurrency.lockutils [None req-af15306e-e4bd-4235-87e6-bad065fff03b tempest-VolumesAssistedSnapshotsTest-1202308672 tempest-VolumesAssistedSnapshotsTest-1202308672-project-member] Lock "5d4909ea-396c-45ba-9ff5-acb8576150b3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 35.826s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1845.990779] env[63279]: DEBUG oslo_vmware.api [None req-99df1493-5715-4262-b84c-33faf3142bfd tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52db1722-05d4-9dde-7de9-e1d2fcda9171, 'name': SearchDatastore_Task, 'duration_secs': 0.016673} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1845.991758] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-79358092-9f4c-4687-a405-84dae0b9ca1e {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1846.003505] env[63279]: DEBUG oslo_vmware.api [None req-99df1493-5715-4262-b84c-33faf3142bfd tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] Waiting for the task: (returnval){ [ 1846.003505] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52e415b1-4e1a-dce6-21ed-eddbf6721737" [ 1846.003505] env[63279]: _type = "Task" [ 1846.003505] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1846.016986] env[63279]: DEBUG oslo_vmware.api [None req-99df1493-5715-4262-b84c-33faf3142bfd tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52e415b1-4e1a-dce6-21ed-eddbf6721737, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1846.128674] env[63279]: DEBUG oslo_concurrency.lockutils [None req-2dcf1f85-69d5-43a3-b223-97481fe4bb2a tempest-ServerDiagnosticsV248Test-703192613 tempest-ServerDiagnosticsV248Test-703192613-project-member] Acquiring lock "abd63285-ee3c-4546-b86d-6d4388765d94" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1846.128674] env[63279]: DEBUG oslo_concurrency.lockutils [None req-2dcf1f85-69d5-43a3-b223-97481fe4bb2a tempest-ServerDiagnosticsV248Test-703192613 tempest-ServerDiagnosticsV248Test-703192613-project-member] Lock "abd63285-ee3c-4546-b86d-6d4388765d94" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1846.128907] env[63279]: DEBUG oslo_concurrency.lockutils [None req-2dcf1f85-69d5-43a3-b223-97481fe4bb2a tempest-ServerDiagnosticsV248Test-703192613 tempest-ServerDiagnosticsV248Test-703192613-project-member] Acquiring lock "abd63285-ee3c-4546-b86d-6d4388765d94-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1846.129066] env[63279]: DEBUG oslo_concurrency.lockutils [None req-2dcf1f85-69d5-43a3-b223-97481fe4bb2a tempest-ServerDiagnosticsV248Test-703192613 tempest-ServerDiagnosticsV248Test-703192613-project-member] Lock "abd63285-ee3c-4546-b86d-6d4388765d94-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1846.129165] env[63279]: DEBUG oslo_concurrency.lockutils [None req-2dcf1f85-69d5-43a3-b223-97481fe4bb2a tempest-ServerDiagnosticsV248Test-703192613 tempest-ServerDiagnosticsV248Test-703192613-project-member] Lock "abd63285-ee3c-4546-b86d-6d4388765d94-events" "released" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1846.131434] env[63279]: INFO nova.compute.manager [None req-2dcf1f85-69d5-43a3-b223-97481fe4bb2a tempest-ServerDiagnosticsV248Test-703192613 tempest-ServerDiagnosticsV248Test-703192613-project-member] [instance: abd63285-ee3c-4546-b86d-6d4388765d94] Terminating instance [ 1846.287526] env[63279]: DEBUG nova.compute.manager [None req-7fcc8714-6322-470e-8fb8-cd968acaa101 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: 41654a82-af5e-463e-80f9-86ba13a5ad2e] Start building block device mappings for instance. {{(pid=63279) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1846.375620] env[63279]: DEBUG nova.network.neutron [None req-7fcc8714-6322-470e-8fb8-cd968acaa101 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: 41654a82-af5e-463e-80f9-86ba13a5ad2e] Successfully created port: 7d76225b-f1ff-4793-9c29-9a2cd30c6b43 {{(pid=63279) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1846.482323] env[63279]: DEBUG nova.compute.manager [None req-90efc3b8-a656-4eb0-8ac7-af93c9b6bce9 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] [instance: 2f5e22f6-ba70-4848-965b-eb1553115323] Starting instance... {{(pid=63279) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1846.520249] env[63279]: DEBUG oslo_vmware.api [None req-99df1493-5715-4262-b84c-33faf3142bfd tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52e415b1-4e1a-dce6-21ed-eddbf6721737, 'name': SearchDatastore_Task, 'duration_secs': 0.014841} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1846.520249] env[63279]: DEBUG oslo_concurrency.lockutils [None req-99df1493-5715-4262-b84c-33faf3142bfd tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1846.520249] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-99df1493-5715-4262-b84c-33faf3142bfd tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] 1a55008a-4d8c-403d-a8f4-966aa7346f4c/1a55008a-4d8c-403d-a8f4-966aa7346f4c.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1846.520828] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c2ef26e8-c464-4771-9d22-9b91be21111d {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1846.535110] env[63279]: DEBUG oslo_vmware.api [None req-99df1493-5715-4262-b84c-33faf3142bfd tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] Waiting for the task: (returnval){ [ 1846.535110] env[63279]: value = "task-2086829" [ 1846.535110] env[63279]: _type = "Task" [ 1846.535110] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1846.544989] env[63279]: DEBUG oslo_vmware.api [None req-99df1493-5715-4262-b84c-33faf3142bfd tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] Task: {'id': task-2086829, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1846.563153] env[63279]: DEBUG nova.network.neutron [req-fcf964fc-baac-4a01-a230-44e59e9f1f3a req-2daa7649-ffcd-47a2-abdb-e2baa2b26955 service nova] [instance: fcc5a636-554f-424e-a604-a8e7bd7cf574] Updated VIF entry in instance network info cache for port e1063b89-be88-474a-a2ec-b61eb11cf9fe. 
{{(pid=63279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1846.563695] env[63279]: DEBUG nova.network.neutron [req-fcf964fc-baac-4a01-a230-44e59e9f1f3a req-2daa7649-ffcd-47a2-abdb-e2baa2b26955 service nova] [instance: fcc5a636-554f-424e-a604-a8e7bd7cf574] Updating instance_info_cache with network_info: [{"id": "e1063b89-be88-474a-a2ec-b61eb11cf9fe", "address": "fa:16:3e:b1:26:30", "network": {"id": "50f30894-1239-497e-9f70-afa5b0c429ea", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-85780566-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.252", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dfdf69a97cf54d5cb8c4fb1c59b6a5d0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f2e45023-22b5-458b-826e-9b7eb69ba028", "external-id": "nsx-vlan-transportzone-614", "segmentation_id": 614, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape1063b89-be", "ovs_interfaceid": "e1063b89-be88-474a-a2ec-b61eb11cf9fe", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1846.635630] env[63279]: DEBUG oslo_concurrency.lockutils [None req-2dcf1f85-69d5-43a3-b223-97481fe4bb2a tempest-ServerDiagnosticsV248Test-703192613 tempest-ServerDiagnosticsV248Test-703192613-project-member] Acquiring lock "refresh_cache-abd63285-ee3c-4546-b86d-6d4388765d94" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1846.636720] env[63279]: DEBUG oslo_concurrency.lockutils [None req-2dcf1f85-69d5-43a3-b223-97481fe4bb2a tempest-ServerDiagnosticsV248Test-703192613 tempest-ServerDiagnosticsV248Test-703192613-project-member] Acquired lock "refresh_cache-abd63285-ee3c-4546-b86d-6d4388765d94" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1846.637745] env[63279]: DEBUG nova.network.neutron [None req-2dcf1f85-69d5-43a3-b223-97481fe4bb2a tempest-ServerDiagnosticsV248Test-703192613 tempest-ServerDiagnosticsV248Test-703192613-project-member] [instance: abd63285-ee3c-4546-b86d-6d4388765d94] Building network info cache for instance {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1846.672108] env[63279]: DEBUG nova.network.neutron [req-0b7bcddd-288a-42ba-852b-621a521c5eb3 req-06e0c108-39c6-4f1b-b7ad-a549b2f5acee service nova] [instance: 1a55008a-4d8c-403d-a8f4-966aa7346f4c] Updated VIF entry in instance network info cache for port 02e5d9ab-c16c-4141-a131-b7e9f2a2a194. 
{{(pid=63279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1846.672794] env[63279]: DEBUG nova.network.neutron [req-0b7bcddd-288a-42ba-852b-621a521c5eb3 req-06e0c108-39c6-4f1b-b7ad-a549b2f5acee service nova] [instance: 1a55008a-4d8c-403d-a8f4-966aa7346f4c] Updating instance_info_cache with network_info: [{"id": "02e5d9ab-c16c-4141-a131-b7e9f2a2a194", "address": "fa:16:3e:24:da:b3", "network": {"id": "26ed2848-6f14-4264-af0d-f08d62ab4413", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-659194361-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e931e0ddf1b840fdb8743fbdba05b28d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ccf76700-491b-4462-ab19-e6d3a9ff87ac", "external-id": "nsx-vlan-transportzone-956", "segmentation_id": 956, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap02e5d9ab-c1", "ovs_interfaceid": "02e5d9ab-c16c-4141-a131-b7e9f2a2a194", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1846.963292] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f40a7f4a-ff70-413c-9cfa-418c8940a0a8 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1846.981883] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79e94dcd-23ac-45e8-a37b-d53fa23521ac {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1847.022018] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b82c53a-253f-4f6b-9933-64a9385ec947 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1847.034366] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5470731-1531-45b2-8e8d-75cd4e3d1859 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1847.055248] env[63279]: DEBUG nova.compute.provider_tree [None req-38d597a6-cdfb-4da8-926a-02d3447e2334 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Inventory has not changed in ProviderTree for provider: 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1847.062514] env[63279]: DEBUG oslo_concurrency.lockutils [None req-90efc3b8-a656-4eb0-8ac7-af93c9b6bce9 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1847.064338] 
env[63279]: DEBUG oslo_vmware.api [None req-99df1493-5715-4262-b84c-33faf3142bfd tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] Task: {'id': task-2086829, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1847.069478] env[63279]: DEBUG oslo_concurrency.lockutils [req-fcf964fc-baac-4a01-a230-44e59e9f1f3a req-2daa7649-ffcd-47a2-abdb-e2baa2b26955 service nova] Releasing lock "refresh_cache-fcc5a636-554f-424e-a604-a8e7bd7cf574" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1847.069876] env[63279]: DEBUG oslo_concurrency.lockutils [None req-e6033c0c-bf3a-4f54-ac44-9ee1b876d8b8 tempest-InstanceActionsTestJSON-1583999850 tempest-InstanceActionsTestJSON-1583999850-project-member] Acquiring lock "de543869-8ab1-40ed-8f6d-dc506c257843" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1847.070134] env[63279]: DEBUG oslo_concurrency.lockutils [None req-e6033c0c-bf3a-4f54-ac44-9ee1b876d8b8 tempest-InstanceActionsTestJSON-1583999850 tempest-InstanceActionsTestJSON-1583999850-project-member] Lock "de543869-8ab1-40ed-8f6d-dc506c257843" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1847.175754] env[63279]: DEBUG nova.network.neutron [None req-2dcf1f85-69d5-43a3-b223-97481fe4bb2a tempest-ServerDiagnosticsV248Test-703192613 tempest-ServerDiagnosticsV248Test-703192613-project-member] [instance: abd63285-ee3c-4546-b86d-6d4388765d94] Instance cache missing network info. {{(pid=63279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1847.178258] env[63279]: DEBUG oslo_concurrency.lockutils [req-0b7bcddd-288a-42ba-852b-621a521c5eb3 req-06e0c108-39c6-4f1b-b7ad-a549b2f5acee service nova] Releasing lock "refresh_cache-1a55008a-4d8c-403d-a8f4-966aa7346f4c" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1847.263579] env[63279]: DEBUG nova.network.neutron [None req-2dcf1f85-69d5-43a3-b223-97481fe4bb2a tempest-ServerDiagnosticsV248Test-703192613 tempest-ServerDiagnosticsV248Test-703192613-project-member] [instance: abd63285-ee3c-4546-b86d-6d4388765d94] Updating instance_info_cache with network_info: [] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1847.301499] env[63279]: DEBUG nova.compute.manager [None req-7fcc8714-6322-470e-8fb8-cd968acaa101 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: 41654a82-af5e-463e-80f9-86ba13a5ad2e] Start spawning the instance on the hypervisor. 
{{(pid=63279) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1847.337167] env[63279]: DEBUG nova.virt.hardware [None req-7fcc8714-6322-470e-8fb8-cd968acaa101 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-13T17:30:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-13T17:30:01Z,direct_url=,disk_format='vmdk',id=30887889-e45b-4f67-8b3c-16216e594a90,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a935e86eee0a4c38adfe0367d2097a61',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-13T17:30:02Z,virtual_size=,visibility=), allow threads: False {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1847.338043] env[63279]: DEBUG nova.virt.hardware [None req-7fcc8714-6322-470e-8fb8-cd968acaa101 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Flavor limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1847.338190] env[63279]: DEBUG nova.virt.hardware [None req-7fcc8714-6322-470e-8fb8-cd968acaa101 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Image limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1847.338441] env[63279]: DEBUG nova.virt.hardware [None req-7fcc8714-6322-470e-8fb8-cd968acaa101 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Flavor pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1847.338632] env[63279]: DEBUG nova.virt.hardware [None req-7fcc8714-6322-470e-8fb8-cd968acaa101 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Image pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1847.338819] env[63279]: DEBUG nova.virt.hardware [None req-7fcc8714-6322-470e-8fb8-cd968acaa101 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1847.339082] env[63279]: DEBUG nova.virt.hardware [None req-7fcc8714-6322-470e-8fb8-cd968acaa101 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1847.339452] env[63279]: DEBUG nova.virt.hardware [None req-7fcc8714-6322-470e-8fb8-cd968acaa101 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1847.339452] env[63279]: DEBUG 
nova.virt.hardware [None req-7fcc8714-6322-470e-8fb8-cd968acaa101 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Got 1 possible topologies {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1847.339694] env[63279]: DEBUG nova.virt.hardware [None req-7fcc8714-6322-470e-8fb8-cd968acaa101 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1847.339884] env[63279]: DEBUG nova.virt.hardware [None req-7fcc8714-6322-470e-8fb8-cd968acaa101 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1847.341026] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0ad6233-e46c-40b7-b446-cb5d8e80082d {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1847.351657] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e9cc8ab-c186-4df3-ab45-2929df245249 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1847.555043] env[63279]: DEBUG oslo_vmware.api [None req-99df1493-5715-4262-b84c-33faf3142bfd tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] Task: {'id': task-2086829, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.656129} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1847.556542] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-99df1493-5715-4262-b84c-33faf3142bfd tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] 1a55008a-4d8c-403d-a8f4-966aa7346f4c/1a55008a-4d8c-403d-a8f4-966aa7346f4c.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1847.556940] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-99df1493-5715-4262-b84c-33faf3142bfd tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] [instance: 1a55008a-4d8c-403d-a8f4-966aa7346f4c] Extending root virtual disk to 1048576 {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1847.557215] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-64116f4f-8139-47cd-8f8d-a1c472547f97 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1847.566951] env[63279]: DEBUG oslo_vmware.api [None req-99df1493-5715-4262-b84c-33faf3142bfd tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] Waiting for the task: (returnval){ [ 1847.566951] env[63279]: value = "task-2086830" [ 1847.566951] env[63279]: _type = "Task" [ 1847.566951] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1847.572650] env[63279]: DEBUG nova.scheduler.client.report [None req-38d597a6-cdfb-4da8-926a-02d3447e2334 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Inventory has not changed for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1847.583638] env[63279]: DEBUG oslo_vmware.api [None req-99df1493-5715-4262-b84c-33faf3142bfd tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] Task: {'id': task-2086830, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1847.633382] env[63279]: DEBUG oslo_concurrency.lockutils [None req-0a937d49-2bb5-4c2d-b540-fcaea2c0fe97 tempest-DeleteServersAdminTestJSON-778262859 tempest-DeleteServersAdminTestJSON-778262859-project-member] Acquiring lock "81103d53-99fe-4d1a-816f-7685c59c80ee" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1847.633738] env[63279]: DEBUG oslo_concurrency.lockutils [None req-0a937d49-2bb5-4c2d-b540-fcaea2c0fe97 tempest-DeleteServersAdminTestJSON-778262859 tempest-DeleteServersAdminTestJSON-778262859-project-member] Lock "81103d53-99fe-4d1a-816f-7685c59c80ee" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1847.767598] env[63279]: DEBUG oslo_concurrency.lockutils [None req-2dcf1f85-69d5-43a3-b223-97481fe4bb2a tempest-ServerDiagnosticsV248Test-703192613 tempest-ServerDiagnosticsV248Test-703192613-project-member] Releasing lock "refresh_cache-abd63285-ee3c-4546-b86d-6d4388765d94" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1847.767598] env[63279]: DEBUG nova.compute.manager [None req-2dcf1f85-69d5-43a3-b223-97481fe4bb2a tempest-ServerDiagnosticsV248Test-703192613 tempest-ServerDiagnosticsV248Test-703192613-project-member] [instance: abd63285-ee3c-4546-b86d-6d4388765d94] Start destroying the instance on the hypervisor. {{(pid=63279) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1847.767598] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-2dcf1f85-69d5-43a3-b223-97481fe4bb2a tempest-ServerDiagnosticsV248Test-703192613 tempest-ServerDiagnosticsV248Test-703192613-project-member] [instance: abd63285-ee3c-4546-b86d-6d4388765d94] Destroying instance {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1847.774838] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1bf6373b-688c-46cd-a746-f32367e260de {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1847.779306] env[63279]: DEBUG oslo_concurrency.lockutils [None req-bd33fd20-d1db-4c92-86ab-9afd74102e3e tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Acquiring lock "c05e9aaa-e251-480c-94d6-56c29bb6282d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1847.779685] env[63279]: DEBUG oslo_concurrency.lockutils [None req-bd33fd20-d1db-4c92-86ab-9afd74102e3e tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Lock "c05e9aaa-e251-480c-94d6-56c29bb6282d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1847.780144] env[63279]: DEBUG oslo_concurrency.lockutils [None req-bd33fd20-d1db-4c92-86ab-9afd74102e3e tempest-MultipleCreateTestJSON-79500308 
tempest-MultipleCreateTestJSON-79500308-project-member] Acquiring lock "c05e9aaa-e251-480c-94d6-56c29bb6282d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1847.780505] env[63279]: DEBUG oslo_concurrency.lockutils [None req-bd33fd20-d1db-4c92-86ab-9afd74102e3e tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Lock "c05e9aaa-e251-480c-94d6-56c29bb6282d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1847.780892] env[63279]: DEBUG oslo_concurrency.lockutils [None req-bd33fd20-d1db-4c92-86ab-9afd74102e3e tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Lock "c05e9aaa-e251-480c-94d6-56c29bb6282d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1847.786598] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-2dcf1f85-69d5-43a3-b223-97481fe4bb2a tempest-ServerDiagnosticsV248Test-703192613 tempest-ServerDiagnosticsV248Test-703192613-project-member] [instance: abd63285-ee3c-4546-b86d-6d4388765d94] Powering off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1847.787295] env[63279]: INFO nova.compute.manager [None req-bd33fd20-d1db-4c92-86ab-9afd74102e3e tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] [instance: c05e9aaa-e251-480c-94d6-56c29bb6282d] Terminating instance [ 1847.789075] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5c14053d-52ea-4810-987e-2d4f07a7ccbd {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1847.799198] env[63279]: DEBUG oslo_vmware.api [None req-2dcf1f85-69d5-43a3-b223-97481fe4bb2a tempest-ServerDiagnosticsV248Test-703192613 tempest-ServerDiagnosticsV248Test-703192613-project-member] Waiting for the task: (returnval){ [ 1847.799198] env[63279]: value = "task-2086831" [ 1847.799198] env[63279]: _type = "Task" [ 1847.799198] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1847.810342] env[63279]: DEBUG oslo_vmware.api [None req-2dcf1f85-69d5-43a3-b223-97481fe4bb2a tempest-ServerDiagnosticsV248Test-703192613 tempest-ServerDiagnosticsV248Test-703192613-project-member] Task: {'id': task-2086831, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1848.087313] env[63279]: DEBUG oslo_concurrency.lockutils [None req-38d597a6-cdfb-4da8-926a-02d3447e2334 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.806s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1848.087313] env[63279]: DEBUG nova.compute.manager [None req-38d597a6-cdfb-4da8-926a-02d3447e2334 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] [instance: 00fb1bc8-2f70-4fa3-a6a2-54fd38ba89c7] Start building networks asynchronously for instance. {{(pid=63279) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1848.095825] env[63279]: DEBUG oslo_vmware.api [None req-99df1493-5715-4262-b84c-33faf3142bfd tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] Task: {'id': task-2086830, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.139893} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1848.096175] env[63279]: DEBUG oslo_concurrency.lockutils [None req-1366226e-68a1-4af4-bafa-76a748070601 tempest-ServersAdminNegativeTestJSON-968304097 tempest-ServersAdminNegativeTestJSON-968304097-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.637s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1848.098154] env[63279]: INFO nova.compute.claims [None req-1366226e-68a1-4af4-bafa-76a748070601 tempest-ServersAdminNegativeTestJSON-968304097 tempest-ServersAdminNegativeTestJSON-968304097-project-member] [instance: 32e84715-0345-4171-abb7-c034a501347e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1848.101084] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-99df1493-5715-4262-b84c-33faf3142bfd tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] [instance: 1a55008a-4d8c-403d-a8f4-966aa7346f4c] Extended root virtual disk {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1848.102559] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-685bdd26-60d2-4121-a864-c5d590c9dfc6 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1848.134205] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-99df1493-5715-4262-b84c-33faf3142bfd tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] [instance: 1a55008a-4d8c-403d-a8f4-966aa7346f4c] Reconfiguring VM instance instance-00000011 to attach disk [datastore1] 1a55008a-4d8c-403d-a8f4-966aa7346f4c/1a55008a-4d8c-403d-a8f4-966aa7346f4c.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1848.135160] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9aa5c179-65de-4708-87c4-27b3e87eded2 {{(pid=63279) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1848.158379] env[63279]: DEBUG oslo_vmware.api [None req-99df1493-5715-4262-b84c-33faf3142bfd tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] Waiting for the task: (returnval){ [ 1848.158379] env[63279]: value = "task-2086832" [ 1848.158379] env[63279]: _type = "Task" [ 1848.158379] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1848.168603] env[63279]: DEBUG oslo_vmware.api [None req-99df1493-5715-4262-b84c-33faf3142bfd tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] Task: {'id': task-2086832, 'name': ReconfigVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1848.295962] env[63279]: DEBUG nova.compute.manager [None req-bd33fd20-d1db-4c92-86ab-9afd74102e3e tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] [instance: c05e9aaa-e251-480c-94d6-56c29bb6282d] Start destroying the instance on the hypervisor. {{(pid=63279) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1848.295962] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-bd33fd20-d1db-4c92-86ab-9afd74102e3e tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] [instance: c05e9aaa-e251-480c-94d6-56c29bb6282d] Destroying instance {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1848.296192] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6efecad6-c93f-406c-92e9-1d72ff9558bc {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1848.315350] env[63279]: DEBUG oslo_vmware.api [None req-2dcf1f85-69d5-43a3-b223-97481fe4bb2a tempest-ServerDiagnosticsV248Test-703192613 tempest-ServerDiagnosticsV248Test-703192613-project-member] Task: {'id': task-2086831, 'name': PowerOffVM_Task, 'duration_secs': 0.31931} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1848.317952] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-2dcf1f85-69d5-43a3-b223-97481fe4bb2a tempest-ServerDiagnosticsV248Test-703192613 tempest-ServerDiagnosticsV248Test-703192613-project-member] [instance: abd63285-ee3c-4546-b86d-6d4388765d94] Powered off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1848.318180] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-2dcf1f85-69d5-43a3-b223-97481fe4bb2a tempest-ServerDiagnosticsV248Test-703192613 tempest-ServerDiagnosticsV248Test-703192613-project-member] [instance: abd63285-ee3c-4546-b86d-6d4388765d94] Unregistering the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1848.318478] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-bd33fd20-d1db-4c92-86ab-9afd74102e3e tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] [instance: c05e9aaa-e251-480c-94d6-56c29bb6282d] Powering off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1848.318740] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-194de16d-28ef-45be-a775-22f111b8a871 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1848.321767] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2f7dc902-3eae-4143-b30c-debcbb02c0c0 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1848.334944] env[63279]: DEBUG oslo_vmware.api [None req-bd33fd20-d1db-4c92-86ab-9afd74102e3e tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Waiting for the task: (returnval){ [ 1848.334944] env[63279]: value = "task-2086833" [ 1848.334944] env[63279]: _type = "Task" [ 1848.334944] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1848.346813] env[63279]: DEBUG oslo_vmware.api [None req-bd33fd20-d1db-4c92-86ab-9afd74102e3e tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Task: {'id': task-2086833, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1848.361220] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-2dcf1f85-69d5-43a3-b223-97481fe4bb2a tempest-ServerDiagnosticsV248Test-703192613 tempest-ServerDiagnosticsV248Test-703192613-project-member] [instance: abd63285-ee3c-4546-b86d-6d4388765d94] Unregistered the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1848.361220] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-2dcf1f85-69d5-43a3-b223-97481fe4bb2a tempest-ServerDiagnosticsV248Test-703192613 tempest-ServerDiagnosticsV248Test-703192613-project-member] [instance: abd63285-ee3c-4546-b86d-6d4388765d94] Deleting contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1848.361220] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-2dcf1f85-69d5-43a3-b223-97481fe4bb2a tempest-ServerDiagnosticsV248Test-703192613 tempest-ServerDiagnosticsV248Test-703192613-project-member] Deleting the datastore file [datastore1] abd63285-ee3c-4546-b86d-6d4388765d94 {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1848.361220] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-df6dc186-53db-4527-8da4-036226e7b992 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1848.372518] env[63279]: DEBUG oslo_vmware.api [None req-2dcf1f85-69d5-43a3-b223-97481fe4bb2a tempest-ServerDiagnosticsV248Test-703192613 tempest-ServerDiagnosticsV248Test-703192613-project-member] Waiting for the task: (returnval){ [ 1848.372518] env[63279]: value = "task-2086835" [ 1848.372518] env[63279]: _type = "Task" [ 1848.372518] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1848.386073] env[63279]: DEBUG oslo_vmware.api [None req-2dcf1f85-69d5-43a3-b223-97481fe4bb2a tempest-ServerDiagnosticsV248Test-703192613 tempest-ServerDiagnosticsV248Test-703192613-project-member] Task: {'id': task-2086835, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1848.448776] env[63279]: DEBUG oslo_concurrency.lockutils [None req-42b39ba1-997a-4f62-aa19-97a6fd24e99a tempest-ServerActionsV293TestJSON-360186716 tempest-ServerActionsV293TestJSON-360186716-project-member] Acquiring lock "f375b54b-f9de-4529-b752-52c240aed532" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1848.449260] env[63279]: DEBUG oslo_concurrency.lockutils [None req-42b39ba1-997a-4f62-aa19-97a6fd24e99a tempest-ServerActionsV293TestJSON-360186716 tempest-ServerActionsV293TestJSON-360186716-project-member] Lock "f375b54b-f9de-4529-b752-52c240aed532" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1848.597453] env[63279]: DEBUG nova.compute.utils [None req-38d597a6-cdfb-4da8-926a-02d3447e2334 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Using /dev/sd instead of None {{(pid=63279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1848.599118] env[63279]: DEBUG nova.compute.manager [None req-38d597a6-cdfb-4da8-926a-02d3447e2334 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] [instance: 00fb1bc8-2f70-4fa3-a6a2-54fd38ba89c7] Allocating IP information in the background. {{(pid=63279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1848.599215] env[63279]: DEBUG nova.network.neutron [None req-38d597a6-cdfb-4da8-926a-02d3447e2334 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] [instance: 00fb1bc8-2f70-4fa3-a6a2-54fd38ba89c7] allocate_for_instance() {{(pid=63279) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1848.671736] env[63279]: DEBUG oslo_vmware.api [None req-99df1493-5715-4262-b84c-33faf3142bfd tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] Task: {'id': task-2086832, 'name': ReconfigVM_Task, 'duration_secs': 0.455808} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1848.671736] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-99df1493-5715-4262-b84c-33faf3142bfd tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] [instance: 1a55008a-4d8c-403d-a8f4-966aa7346f4c] Reconfigured VM instance instance-00000011 to attach disk [datastore1] 1a55008a-4d8c-403d-a8f4-966aa7346f4c/1a55008a-4d8c-403d-a8f4-966aa7346f4c.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1848.671736] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b8dc69cc-749e-4eeb-96b0-37d09f025935 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1848.680349] env[63279]: DEBUG oslo_vmware.api [None req-99df1493-5715-4262-b84c-33faf3142bfd tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] Waiting for the task: (returnval){ [ 1848.680349] env[63279]: value = "task-2086836" [ 1848.680349] env[63279]: _type = "Task" [ 1848.680349] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1848.694599] env[63279]: DEBUG oslo_vmware.api [None req-99df1493-5715-4262-b84c-33faf3142bfd tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] Task: {'id': task-2086836, 'name': Rename_Task} progress is 5%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1848.743877] env[63279]: DEBUG nova.policy [None req-38d597a6-cdfb-4da8-926a-02d3447e2334 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'bfdf4a1d3ff3404fbc0bb9d3cc75a6dd', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '651b8183237a4e6dbef36aa2fb419f1b', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63279) authorize /opt/stack/nova/nova/policy.py:192}} [ 1848.848514] env[63279]: DEBUG oslo_vmware.api [None req-bd33fd20-d1db-4c92-86ab-9afd74102e3e tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Task: {'id': task-2086833, 'name': PowerOffVM_Task, 'duration_secs': 0.222195} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1848.848958] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-bd33fd20-d1db-4c92-86ab-9afd74102e3e tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] [instance: c05e9aaa-e251-480c-94d6-56c29bb6282d] Powered off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1848.848958] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-bd33fd20-d1db-4c92-86ab-9afd74102e3e tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] [instance: c05e9aaa-e251-480c-94d6-56c29bb6282d] Unregistering the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1848.849238] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-3073ce22-c545-4171-ac0b-28769d9c8dfd {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1848.886224] env[63279]: DEBUG oslo_vmware.api [None req-2dcf1f85-69d5-43a3-b223-97481fe4bb2a tempest-ServerDiagnosticsV248Test-703192613 tempest-ServerDiagnosticsV248Test-703192613-project-member] Task: {'id': task-2086835, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.16838} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1848.886569] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-2dcf1f85-69d5-43a3-b223-97481fe4bb2a tempest-ServerDiagnosticsV248Test-703192613 tempest-ServerDiagnosticsV248Test-703192613-project-member] Deleted the datastore file {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1848.886780] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-2dcf1f85-69d5-43a3-b223-97481fe4bb2a tempest-ServerDiagnosticsV248Test-703192613 tempest-ServerDiagnosticsV248Test-703192613-project-member] [instance: abd63285-ee3c-4546-b86d-6d4388765d94] Deleted contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1848.886967] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-2dcf1f85-69d5-43a3-b223-97481fe4bb2a tempest-ServerDiagnosticsV248Test-703192613 tempest-ServerDiagnosticsV248Test-703192613-project-member] [instance: abd63285-ee3c-4546-b86d-6d4388765d94] Instance destroyed {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1848.887163] env[63279]: INFO nova.compute.manager [None req-2dcf1f85-69d5-43a3-b223-97481fe4bb2a tempest-ServerDiagnosticsV248Test-703192613 tempest-ServerDiagnosticsV248Test-703192613-project-member] [instance: abd63285-ee3c-4546-b86d-6d4388765d94] Took 1.12 seconds to destroy the instance on the hypervisor. [ 1848.887412] env[63279]: DEBUG oslo.service.loopingcall [None req-2dcf1f85-69d5-43a3-b223-97481fe4bb2a tempest-ServerDiagnosticsV248Test-703192613 tempest-ServerDiagnosticsV248Test-703192613-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1848.887676] env[63279]: DEBUG nova.compute.manager [-] [instance: abd63285-ee3c-4546-b86d-6d4388765d94] Deallocating network for instance {{(pid=63279) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1848.887818] env[63279]: DEBUG nova.network.neutron [-] [instance: abd63285-ee3c-4546-b86d-6d4388765d94] deallocate_for_instance() {{(pid=63279) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1848.930869] env[63279]: DEBUG nova.network.neutron [-] [instance: abd63285-ee3c-4546-b86d-6d4388765d94] Instance cache missing network info. {{(pid=63279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1849.004483] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-bd33fd20-d1db-4c92-86ab-9afd74102e3e tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] [instance: c05e9aaa-e251-480c-94d6-56c29bb6282d] Unregistered the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1849.004730] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-bd33fd20-d1db-4c92-86ab-9afd74102e3e tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] [instance: c05e9aaa-e251-480c-94d6-56c29bb6282d] Deleting contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1849.004933] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-bd33fd20-d1db-4c92-86ab-9afd74102e3e tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Deleting the datastore file [datastore1] c05e9aaa-e251-480c-94d6-56c29bb6282d {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1849.005228] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f03649cd-8f41-4333-a2ee-c7fb4b48d74f {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1849.014320] env[63279]: DEBUG oslo_vmware.api [None req-bd33fd20-d1db-4c92-86ab-9afd74102e3e tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Waiting for the task: (returnval){ [ 1849.014320] env[63279]: value = "task-2086838" [ 1849.014320] env[63279]: _type = "Task" [ 1849.014320] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1849.024512] env[63279]: DEBUG oslo_vmware.api [None req-bd33fd20-d1db-4c92-86ab-9afd74102e3e tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Task: {'id': task-2086838, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1849.106282] env[63279]: DEBUG nova.compute.manager [None req-38d597a6-cdfb-4da8-926a-02d3447e2334 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] [instance: 00fb1bc8-2f70-4fa3-a6a2-54fd38ba89c7] Start building block device mappings for instance. 
{{(pid=63279) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1849.198711] env[63279]: DEBUG oslo_vmware.api [None req-99df1493-5715-4262-b84c-33faf3142bfd tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] Task: {'id': task-2086836, 'name': Rename_Task, 'duration_secs': 0.241577} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1849.200646] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-99df1493-5715-4262-b84c-33faf3142bfd tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] [instance: 1a55008a-4d8c-403d-a8f4-966aa7346f4c] Powering on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1849.200956] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d6a90fa2-1a4d-4e46-865d-1de1bdcfb9dc {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1849.212760] env[63279]: DEBUG oslo_vmware.api [None req-99df1493-5715-4262-b84c-33faf3142bfd tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] Waiting for the task: (returnval){ [ 1849.212760] env[63279]: value = "task-2086839" [ 1849.212760] env[63279]: _type = "Task" [ 1849.212760] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1849.225299] env[63279]: DEBUG oslo_concurrency.lockutils [None req-57ab2a25-ce86-4ce7-837f-a1aef411ae6f tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Acquiring lock "3d4db43f-5784-46e1-9710-f6becec011e2" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1849.225579] env[63279]: DEBUG oslo_concurrency.lockutils [None req-57ab2a25-ce86-4ce7-837f-a1aef411ae6f tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Lock "3d4db43f-5784-46e1-9710-f6becec011e2" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1849.225913] env[63279]: DEBUG oslo_concurrency.lockutils [None req-57ab2a25-ce86-4ce7-837f-a1aef411ae6f tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Acquiring lock "3d4db43f-5784-46e1-9710-f6becec011e2-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1849.226027] env[63279]: DEBUG oslo_concurrency.lockutils [None req-57ab2a25-ce86-4ce7-837f-a1aef411ae6f tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Lock "3d4db43f-5784-46e1-9710-f6becec011e2-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1849.226572] env[63279]: DEBUG oslo_concurrency.lockutils [None 
req-57ab2a25-ce86-4ce7-837f-a1aef411ae6f tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Lock "3d4db43f-5784-46e1-9710-f6becec011e2-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1849.227792] env[63279]: DEBUG oslo_vmware.api [None req-99df1493-5715-4262-b84c-33faf3142bfd tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] Task: {'id': task-2086839, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1849.228503] env[63279]: INFO nova.compute.manager [None req-57ab2a25-ce86-4ce7-837f-a1aef411ae6f tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] [instance: 3d4db43f-5784-46e1-9710-f6becec011e2] Terminating instance [ 1849.298091] env[63279]: DEBUG nova.network.neutron [None req-7fcc8714-6322-470e-8fb8-cd968acaa101 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: 41654a82-af5e-463e-80f9-86ba13a5ad2e] Successfully updated port: 7d76225b-f1ff-4793-9c29-9a2cd30c6b43 {{(pid=63279) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1849.436583] env[63279]: DEBUG nova.network.neutron [-] [instance: abd63285-ee3c-4546-b86d-6d4388765d94] Updating instance_info_cache with network_info: [] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1849.521817] env[63279]: DEBUG nova.network.neutron [None req-38d597a6-cdfb-4da8-926a-02d3447e2334 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] [instance: 00fb1bc8-2f70-4fa3-a6a2-54fd38ba89c7] Successfully created port: c1420e26-8471-4d0e-868c-9ff9ead67869 {{(pid=63279) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1849.533856] env[63279]: DEBUG oslo_vmware.api [None req-bd33fd20-d1db-4c92-86ab-9afd74102e3e tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Task: {'id': task-2086838, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.270699} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1849.537310] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-bd33fd20-d1db-4c92-86ab-9afd74102e3e tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Deleted the datastore file {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1849.537310] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-bd33fd20-d1db-4c92-86ab-9afd74102e3e tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] [instance: c05e9aaa-e251-480c-94d6-56c29bb6282d] Deleted contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1849.537310] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-bd33fd20-d1db-4c92-86ab-9afd74102e3e tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] [instance: c05e9aaa-e251-480c-94d6-56c29bb6282d] Instance destroyed {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1849.537310] env[63279]: INFO nova.compute.manager [None req-bd33fd20-d1db-4c92-86ab-9afd74102e3e tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] [instance: c05e9aaa-e251-480c-94d6-56c29bb6282d] Took 1.24 seconds to destroy the instance on the hypervisor. [ 1849.537310] env[63279]: DEBUG oslo.service.loopingcall [None req-bd33fd20-d1db-4c92-86ab-9afd74102e3e tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1849.537482] env[63279]: DEBUG nova.compute.manager [-] [instance: c05e9aaa-e251-480c-94d6-56c29bb6282d] Deallocating network for instance {{(pid=63279) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1849.537482] env[63279]: DEBUG nova.network.neutron [-] [instance: c05e9aaa-e251-480c-94d6-56c29bb6282d] deallocate_for_instance() {{(pid=63279) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1849.726801] env[63279]: DEBUG oslo_vmware.api [None req-99df1493-5715-4262-b84c-33faf3142bfd tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] Task: {'id': task-2086839, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1849.727809] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76792fcd-c019-454f-b944-9d03be7cfe76 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1849.738667] env[63279]: DEBUG nova.compute.manager [None req-57ab2a25-ce86-4ce7-837f-a1aef411ae6f tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] [instance: 3d4db43f-5784-46e1-9710-f6becec011e2] Start destroying the instance on the hypervisor. 
{{(pid=63279) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1849.738667] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-57ab2a25-ce86-4ce7-837f-a1aef411ae6f tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] [instance: 3d4db43f-5784-46e1-9710-f6becec011e2] Destroying instance {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1849.738667] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4865ef1f-723b-4ea0-80e8-184a1fd312f6 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1849.742140] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc63e08d-58d8-464e-bb4d-5857484211a8 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1849.751227] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-57ab2a25-ce86-4ce7-837f-a1aef411ae6f tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] [instance: 3d4db43f-5784-46e1-9710-f6becec011e2] Powering off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1849.776736] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-745ab744-4e66-4f0b-8000-edea9f29df63 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1849.779820] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee6c033f-a6dc-4629-a1ed-16f75288fd29 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1849.789837] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32cff277-873f-4086-8a17-09fe576264b4 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1849.795728] env[63279]: DEBUG oslo_vmware.api [None req-57ab2a25-ce86-4ce7-837f-a1aef411ae6f tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Waiting for the task: (returnval){ [ 1849.795728] env[63279]: value = "task-2086840" [ 1849.795728] env[63279]: _type = "Task" [ 1849.795728] env[63279]: } to complete. 
{{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1849.811184] env[63279]: DEBUG oslo_concurrency.lockutils [None req-7fcc8714-6322-470e-8fb8-cd968acaa101 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Acquiring lock "refresh_cache-41654a82-af5e-463e-80f9-86ba13a5ad2e" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1849.811431] env[63279]: DEBUG oslo_concurrency.lockutils [None req-7fcc8714-6322-470e-8fb8-cd968acaa101 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Acquired lock "refresh_cache-41654a82-af5e-463e-80f9-86ba13a5ad2e" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1849.811611] env[63279]: DEBUG nova.network.neutron [None req-7fcc8714-6322-470e-8fb8-cd968acaa101 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: 41654a82-af5e-463e-80f9-86ba13a5ad2e] Building network info cache for instance {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1849.813286] env[63279]: DEBUG nova.compute.provider_tree [None req-1366226e-68a1-4af4-bafa-76a748070601 tempest-ServersAdminNegativeTestJSON-968304097 tempest-ServersAdminNegativeTestJSON-968304097-project-member] Inventory has not changed in ProviderTree for provider: 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1849.822905] env[63279]: DEBUG oslo_vmware.api [None req-57ab2a25-ce86-4ce7-837f-a1aef411ae6f tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Task: {'id': task-2086840, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1849.829407] env[63279]: DEBUG nova.compute.manager [req-cad1d01d-cb30-45b5-a5b3-726227ec5e06 req-d5e5eef6-ebac-490c-ad7c-9607954b47a8 service nova] [instance: 41654a82-af5e-463e-80f9-86ba13a5ad2e] Received event network-vif-plugged-7d76225b-f1ff-4793-9c29-9a2cd30c6b43 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 1849.829644] env[63279]: DEBUG oslo_concurrency.lockutils [req-cad1d01d-cb30-45b5-a5b3-726227ec5e06 req-d5e5eef6-ebac-490c-ad7c-9607954b47a8 service nova] Acquiring lock "41654a82-af5e-463e-80f9-86ba13a5ad2e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1849.829897] env[63279]: DEBUG oslo_concurrency.lockutils [req-cad1d01d-cb30-45b5-a5b3-726227ec5e06 req-d5e5eef6-ebac-490c-ad7c-9607954b47a8 service nova] Lock "41654a82-af5e-463e-80f9-86ba13a5ad2e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1849.830108] env[63279]: DEBUG oslo_concurrency.lockutils [req-cad1d01d-cb30-45b5-a5b3-726227ec5e06 req-d5e5eef6-ebac-490c-ad7c-9607954b47a8 service nova] Lock "41654a82-af5e-463e-80f9-86ba13a5ad2e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1849.830298] env[63279]: DEBUG nova.compute.manager [req-cad1d01d-cb30-45b5-a5b3-726227ec5e06 req-d5e5eef6-ebac-490c-ad7c-9607954b47a8 service nova] [instance: 41654a82-af5e-463e-80f9-86ba13a5ad2e] No waiting events found dispatching network-vif-plugged-7d76225b-f1ff-4793-9c29-9a2cd30c6b43 {{(pid=63279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1849.830470] env[63279]: WARNING nova.compute.manager [req-cad1d01d-cb30-45b5-a5b3-726227ec5e06 req-d5e5eef6-ebac-490c-ad7c-9607954b47a8 service nova] [instance: 41654a82-af5e-463e-80f9-86ba13a5ad2e] Received unexpected event network-vif-plugged-7d76225b-f1ff-4793-9c29-9a2cd30c6b43 for instance with vm_state building and task_state spawning. [ 1849.940337] env[63279]: INFO nova.compute.manager [-] [instance: abd63285-ee3c-4546-b86d-6d4388765d94] Took 1.05 seconds to deallocate network for instance. [ 1850.119216] env[63279]: DEBUG nova.compute.manager [None req-38d597a6-cdfb-4da8-926a-02d3447e2334 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] [instance: 00fb1bc8-2f70-4fa3-a6a2-54fd38ba89c7] Start spawning the instance on the hypervisor. 
{{(pid=63279) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1850.147851] env[63279]: DEBUG nova.virt.hardware [None req-38d597a6-cdfb-4da8-926a-02d3447e2334 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-13T17:30:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-13T17:30:01Z,direct_url=,disk_format='vmdk',id=30887889-e45b-4f67-8b3c-16216e594a90,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a935e86eee0a4c38adfe0367d2097a61',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-13T17:30:02Z,virtual_size=,visibility=), allow threads: False {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1850.148392] env[63279]: DEBUG nova.virt.hardware [None req-38d597a6-cdfb-4da8-926a-02d3447e2334 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Flavor limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1850.148909] env[63279]: DEBUG nova.virt.hardware [None req-38d597a6-cdfb-4da8-926a-02d3447e2334 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Image limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1850.149274] env[63279]: DEBUG nova.virt.hardware [None req-38d597a6-cdfb-4da8-926a-02d3447e2334 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Flavor pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1850.149578] env[63279]: DEBUG nova.virt.hardware [None req-38d597a6-cdfb-4da8-926a-02d3447e2334 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Image pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1850.149871] env[63279]: DEBUG nova.virt.hardware [None req-38d597a6-cdfb-4da8-926a-02d3447e2334 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1850.150395] env[63279]: DEBUG nova.virt.hardware [None req-38d597a6-cdfb-4da8-926a-02d3447e2334 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1850.151101] env[63279]: DEBUG nova.virt.hardware [None req-38d597a6-cdfb-4da8-926a-02d3447e2334 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Build topologies for 1 vcpu(s) 1:1:1 
{{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1850.151234] env[63279]: DEBUG nova.virt.hardware [None req-38d597a6-cdfb-4da8-926a-02d3447e2334 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Got 1 possible topologies {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1850.152414] env[63279]: DEBUG nova.virt.hardware [None req-38d597a6-cdfb-4da8-926a-02d3447e2334 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1850.152414] env[63279]: DEBUG nova.virt.hardware [None req-38d597a6-cdfb-4da8-926a-02d3447e2334 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1850.153873] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd3e92ed-af3d-4c38-aa78-ddf4682c8afd {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1850.163511] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-810e5530-f8be-4a14-9d4d-b4e1a4f780cd {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1850.224999] env[63279]: DEBUG oslo_vmware.api [None req-99df1493-5715-4262-b84c-33faf3142bfd tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] Task: {'id': task-2086839, 'name': PowerOnVM_Task, 'duration_secs': 0.888135} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1850.225366] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-99df1493-5715-4262-b84c-33faf3142bfd tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] [instance: 1a55008a-4d8c-403d-a8f4-966aa7346f4c] Powered on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1850.225626] env[63279]: INFO nova.compute.manager [None req-99df1493-5715-4262-b84c-33faf3142bfd tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] [instance: 1a55008a-4d8c-403d-a8f4-966aa7346f4c] Took 10.20 seconds to spawn the instance on the hypervisor. 
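The recurring "Acquiring lock ... by ...", "acquired ... waited 0.000s" and ""released" ... held N.NNNs" entries in this log are emitted by oslo.concurrency's lockutils wrapper around Nova's critical sections (instance event popping, the resource tracker's "compute_resources" lock, the image-cache locks, and so on). A minimal Python sketch of that pattern follows; it is not code from the Nova tree, the lock names are merely copied from entries above, and the function bodies are hypothetical placeholders.

from oslo_concurrency import lockutils

# Serialize all callers on a named internal semaphore. lockutils logs the
# "Acquiring lock", "acquired :: waited Ns" and "released :: held Ns"
# DEBUG messages seen throughout this log when the wrapped function is
# entered and left.
@lockutils.synchronized('41654a82-af5e-463e-80f9-86ba13a5ad2e-events')
def _pop_event_sketch():
    # Only one greenthread holding this lock name runs here at a time.
    return None

def _update_usage_sketch():
    # The same primitive is available inline as a context manager, e.g.
    # around the "compute_resources" critical sections above.
    with lockutils.lock('compute_resources'):
        pass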
[ 1850.225872] env[63279]: DEBUG nova.compute.manager [None req-99df1493-5715-4262-b84c-33faf3142bfd tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] [instance: 1a55008a-4d8c-403d-a8f4-966aa7346f4c] Checking state {{(pid=63279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1850.227281] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d200e14-2f86-4342-918f-9eadeff257b0 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1850.309950] env[63279]: DEBUG oslo_vmware.api [None req-57ab2a25-ce86-4ce7-837f-a1aef411ae6f tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Task: {'id': task-2086840, 'name': PowerOffVM_Task, 'duration_secs': 0.217224} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1850.310351] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-57ab2a25-ce86-4ce7-837f-a1aef411ae6f tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] [instance: 3d4db43f-5784-46e1-9710-f6becec011e2] Powered off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1850.310457] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-57ab2a25-ce86-4ce7-837f-a1aef411ae6f tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] [instance: 3d4db43f-5784-46e1-9710-f6becec011e2] Unregistering the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1850.310694] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4c02146f-c66b-4360-8bb1-b26e5af0a7d0 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1850.322199] env[63279]: DEBUG nova.scheduler.client.report [None req-1366226e-68a1-4af4-bafa-76a748070601 tempest-ServersAdminNegativeTestJSON-968304097 tempest-ServersAdminNegativeTestJSON-968304097-project-member] Inventory has not changed for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1850.361141] env[63279]: DEBUG nova.network.neutron [None req-7fcc8714-6322-470e-8fb8-cd968acaa101 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: 41654a82-af5e-463e-80f9-86ba13a5ad2e] Instance cache missing network info. 
{{(pid=63279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1850.446871] env[63279]: DEBUG oslo_concurrency.lockutils [None req-2dcf1f85-69d5-43a3-b223-97481fe4bb2a tempest-ServerDiagnosticsV248Test-703192613 tempest-ServerDiagnosticsV248Test-703192613-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1850.553931] env[63279]: DEBUG nova.network.neutron [-] [instance: c05e9aaa-e251-480c-94d6-56c29bb6282d] Updating instance_info_cache with network_info: [] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1850.566583] env[63279]: DEBUG nova.network.neutron [None req-7fcc8714-6322-470e-8fb8-cd968acaa101 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: 41654a82-af5e-463e-80f9-86ba13a5ad2e] Updating instance_info_cache with network_info: [{"id": "7d76225b-f1ff-4793-9c29-9a2cd30c6b43", "address": "fa:16:3e:e0:73:75", "network": {"id": "d673eabe-0173-4c69-9163-88f4d673ca51", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1798523777-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7c98de1240f14b058e58f6a707096ef1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a555680e-4721-4509-97e4-ced9dc17c13e", "external-id": "nsx-vlan-transportzone-4", "segmentation_id": 4, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7d76225b-f1", "ovs_interfaceid": "7d76225b-f1ff-4793-9c29-9a2cd30c6b43", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1850.749698] env[63279]: INFO nova.compute.manager [None req-99df1493-5715-4262-b84c-33faf3142bfd tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] [instance: 1a55008a-4d8c-403d-a8f4-966aa7346f4c] Took 25.30 seconds to build instance. [ 1850.836017] env[63279]: DEBUG oslo_concurrency.lockutils [None req-1366226e-68a1-4af4-bafa-76a748070601 tempest-ServersAdminNegativeTestJSON-968304097 tempest-ServersAdminNegativeTestJSON-968304097-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.737s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1850.836017] env[63279]: DEBUG nova.compute.manager [None req-1366226e-68a1-4af4-bafa-76a748070601 tempest-ServersAdminNegativeTestJSON-968304097 tempest-ServersAdminNegativeTestJSON-968304097-project-member] [instance: 32e84715-0345-4171-abb7-c034a501347e] Start building networks asynchronously for instance. 
{{(pid=63279) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1850.837911] env[63279]: DEBUG oslo_concurrency.lockutils [None req-be29f42f-ff6f-406b-a7e4-862f984891e9 tempest-ServerPasswordTestJSON-1558733645 tempest-ServerPasswordTestJSON-1558733645-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 17.357s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1850.838437] env[63279]: DEBUG nova.objects.instance [None req-be29f42f-ff6f-406b-a7e4-862f984891e9 tempest-ServerPasswordTestJSON-1558733645 tempest-ServerPasswordTestJSON-1558733645-project-member] Lazy-loading 'resources' on Instance uuid 21c2bba1-5482-496a-9e2a-f123a94ed48a {{(pid=63279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1851.061665] env[63279]: INFO nova.compute.manager [-] [instance: c05e9aaa-e251-480c-94d6-56c29bb6282d] Took 1.53 seconds to deallocate network for instance. [ 1851.069937] env[63279]: DEBUG oslo_concurrency.lockutils [None req-7fcc8714-6322-470e-8fb8-cd968acaa101 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Releasing lock "refresh_cache-41654a82-af5e-463e-80f9-86ba13a5ad2e" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1851.070709] env[63279]: DEBUG nova.compute.manager [None req-7fcc8714-6322-470e-8fb8-cd968acaa101 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: 41654a82-af5e-463e-80f9-86ba13a5ad2e] Instance network_info: |[{"id": "7d76225b-f1ff-4793-9c29-9a2cd30c6b43", "address": "fa:16:3e:e0:73:75", "network": {"id": "d673eabe-0173-4c69-9163-88f4d673ca51", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1798523777-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7c98de1240f14b058e58f6a707096ef1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a555680e-4721-4509-97e4-ced9dc17c13e", "external-id": "nsx-vlan-transportzone-4", "segmentation_id": 4, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7d76225b-f1", "ovs_interfaceid": "7d76225b-f1ff-4793-9c29-9a2cd30c6b43", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1851.071187] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-7fcc8714-6322-470e-8fb8-cd968acaa101 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: 41654a82-af5e-463e-80f9-86ba13a5ad2e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e0:73:75', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a555680e-4721-4509-97e4-ced9dc17c13e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '7d76225b-f1ff-4793-9c29-9a2cd30c6b43', 'vif_model': 'vmxnet3'}] {{(pid=63279) 
build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1851.081033] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-7fcc8714-6322-470e-8fb8-cd968acaa101 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Creating folder: Project (7c98de1240f14b058e58f6a707096ef1). Parent ref: group-v427491. {{(pid=63279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1851.081435] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9e0836ce-43cd-455e-b68f-2fe35aa5a5e4 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1851.095794] env[63279]: INFO nova.virt.vmwareapi.vm_util [None req-7fcc8714-6322-470e-8fb8-cd968acaa101 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Created folder: Project (7c98de1240f14b058e58f6a707096ef1) in parent group-v427491. [ 1851.095867] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-7fcc8714-6322-470e-8fb8-cd968acaa101 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Creating folder: Instances. Parent ref: group-v427545. {{(pid=63279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1851.096139] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ebbbac70-d57a-47c4-b8cc-bf138febdfab {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1851.108843] env[63279]: INFO nova.virt.vmwareapi.vm_util [None req-7fcc8714-6322-470e-8fb8-cd968acaa101 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Created folder: Instances in parent group-v427545. [ 1851.108843] env[63279]: DEBUG oslo.service.loopingcall [None req-7fcc8714-6322-470e-8fb8-cd968acaa101 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1851.109827] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 41654a82-af5e-463e-80f9-86ba13a5ad2e] Creating VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1851.109960] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-fce8a0d8-e18e-405d-b171-97cfa9463e2d {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1851.139494] env[63279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1851.139494] env[63279]: value = "task-2086844" [ 1851.139494] env[63279]: _type = "Task" [ 1851.139494] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1851.155430] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2086844, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1851.254799] env[63279]: DEBUG oslo_concurrency.lockutils [None req-99df1493-5715-4262-b84c-33faf3142bfd tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] Lock "1a55008a-4d8c-403d-a8f4-966aa7346f4c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 38.107s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1851.265708] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-57ab2a25-ce86-4ce7-837f-a1aef411ae6f tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] [instance: 3d4db43f-5784-46e1-9710-f6becec011e2] Unregistered the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1851.266592] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-57ab2a25-ce86-4ce7-837f-a1aef411ae6f tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] [instance: 3d4db43f-5784-46e1-9710-f6becec011e2] Deleting contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1851.267175] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-57ab2a25-ce86-4ce7-837f-a1aef411ae6f tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Deleting the datastore file [datastore1] 3d4db43f-5784-46e1-9710-f6becec011e2 {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1851.268214] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-813637ac-9ef3-44fb-962e-1a7734b1b6ef {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1851.279301] env[63279]: DEBUG oslo_vmware.api [None req-57ab2a25-ce86-4ce7-837f-a1aef411ae6f tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Waiting for the task: (returnval){ [ 1851.279301] env[63279]: value = "task-2086845" [ 1851.279301] env[63279]: _type = "Task" [ 1851.279301] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1851.289979] env[63279]: DEBUG oslo_vmware.api [None req-57ab2a25-ce86-4ce7-837f-a1aef411ae6f tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Task: {'id': task-2086845, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1851.342023] env[63279]: DEBUG nova.compute.utils [None req-1366226e-68a1-4af4-bafa-76a748070601 tempest-ServersAdminNegativeTestJSON-968304097 tempest-ServersAdminNegativeTestJSON-968304097-project-member] Using /dev/sd instead of None {{(pid=63279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1851.342023] env[63279]: DEBUG nova.compute.manager [None req-1366226e-68a1-4af4-bafa-76a748070601 tempest-ServersAdminNegativeTestJSON-968304097 tempest-ServersAdminNegativeTestJSON-968304097-project-member] [instance: 32e84715-0345-4171-abb7-c034a501347e] Allocating IP information in the background. 
{{(pid=63279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1851.342740] env[63279]: DEBUG nova.network.neutron [None req-1366226e-68a1-4af4-bafa-76a748070601 tempest-ServersAdminNegativeTestJSON-968304097 tempest-ServersAdminNegativeTestJSON-968304097-project-member] [instance: 32e84715-0345-4171-abb7-c034a501347e] allocate_for_instance() {{(pid=63279) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1851.488081] env[63279]: DEBUG nova.policy [None req-1366226e-68a1-4af4-bafa-76a748070601 tempest-ServersAdminNegativeTestJSON-968304097 tempest-ServersAdminNegativeTestJSON-968304097-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '86b42263f9044626b1b9fdef5c745a5d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '12ff4ffe2e0748409eae11d95e843dd8', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63279) authorize /opt/stack/nova/nova/policy.py:192}} [ 1851.571973] env[63279]: DEBUG oslo_concurrency.lockutils [None req-bd33fd20-d1db-4c92-86ab-9afd74102e3e tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1851.652928] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2086844, 'name': CreateVM_Task} progress is 99%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1851.755549] env[63279]: DEBUG nova.compute.manager [None req-018b6b0c-2b29-423d-bb8d-c1cfe503a363 tempest-InstanceActionsNegativeTestJSON-35594143 tempest-InstanceActionsNegativeTestJSON-35594143-project-member] [instance: a8107fa5-9e8b-41dd-9679-8e106a3496a5] Starting instance... {{(pid=63279) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1851.793862] env[63279]: DEBUG oslo_vmware.api [None req-57ab2a25-ce86-4ce7-837f-a1aef411ae6f tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Task: {'id': task-2086845, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.321361} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1851.793862] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-57ab2a25-ce86-4ce7-837f-a1aef411ae6f tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Deleted the datastore file {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1851.793862] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-57ab2a25-ce86-4ce7-837f-a1aef411ae6f tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] [instance: 3d4db43f-5784-46e1-9710-f6becec011e2] Deleted contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1851.793862] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-57ab2a25-ce86-4ce7-837f-a1aef411ae6f tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] [instance: 3d4db43f-5784-46e1-9710-f6becec011e2] Instance destroyed {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1851.794953] env[63279]: INFO nova.compute.manager [None req-57ab2a25-ce86-4ce7-837f-a1aef411ae6f tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] [instance: 3d4db43f-5784-46e1-9710-f6becec011e2] Took 2.06 seconds to destroy the instance on the hypervisor. [ 1851.794953] env[63279]: DEBUG oslo.service.loopingcall [None req-57ab2a25-ce86-4ce7-837f-a1aef411ae6f tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1851.794953] env[63279]: DEBUG nova.compute.manager [-] [instance: 3d4db43f-5784-46e1-9710-f6becec011e2] Deallocating network for instance {{(pid=63279) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1851.794953] env[63279]: DEBUG nova.network.neutron [-] [instance: 3d4db43f-5784-46e1-9710-f6becec011e2] deallocate_for_instance() {{(pid=63279) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1851.851085] env[63279]: DEBUG nova.compute.manager [None req-1366226e-68a1-4af4-bafa-76a748070601 tempest-ServersAdminNegativeTestJSON-968304097 tempest-ServersAdminNegativeTestJSON-968304097-project-member] [instance: 32e84715-0345-4171-abb7-c034a501347e] Start building block device mappings for instance. 
{{(pid=63279) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1852.015309] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1af8a4d0-15e3-4b9b-a8e5-c096c6f73970 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1852.025719] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b94e60bd-823f-4b58-ae00-a882ebb0cd35 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1852.060227] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52d29b23-c795-472c-9001-3eebe741a163 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1852.069411] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-671759f7-a076-4a10-bb2d-0d9db977ee8d {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1852.090521] env[63279]: DEBUG nova.compute.provider_tree [None req-be29f42f-ff6f-406b-a7e4-862f984891e9 tempest-ServerPasswordTestJSON-1558733645 tempest-ServerPasswordTestJSON-1558733645-project-member] Inventory has not changed in ProviderTree for provider: 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1852.153328] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2086844, 'name': CreateVM_Task, 'duration_secs': 0.5485} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1852.153328] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 41654a82-af5e-463e-80f9-86ba13a5ad2e] Created VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1852.153972] env[63279]: DEBUG oslo_concurrency.lockutils [None req-7fcc8714-6322-470e-8fb8-cd968acaa101 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1852.154178] env[63279]: DEBUG oslo_concurrency.lockutils [None req-7fcc8714-6322-470e-8fb8-cd968acaa101 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1852.154622] env[63279]: DEBUG oslo_concurrency.lockutils [None req-7fcc8714-6322-470e-8fb8-cd968acaa101 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1852.154781] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4a0703e1-bd0e-4e28-a928-b94f89098d6a {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1852.160912] 
env[63279]: DEBUG oslo_vmware.api [None req-7fcc8714-6322-470e-8fb8-cd968acaa101 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Waiting for the task: (returnval){ [ 1852.160912] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52762a6d-d71b-df67-a9c0-11fea3faff84" [ 1852.160912] env[63279]: _type = "Task" [ 1852.160912] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1852.171023] env[63279]: DEBUG oslo_vmware.api [None req-7fcc8714-6322-470e-8fb8-cd968acaa101 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52762a6d-d71b-df67-a9c0-11fea3faff84, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1852.283892] env[63279]: DEBUG oslo_concurrency.lockutils [None req-018b6b0c-2b29-423d-bb8d-c1cfe503a363 tempest-InstanceActionsNegativeTestJSON-35594143 tempest-InstanceActionsNegativeTestJSON-35594143-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1852.446234] env[63279]: DEBUG nova.network.neutron [None req-1366226e-68a1-4af4-bafa-76a748070601 tempest-ServersAdminNegativeTestJSON-968304097 tempest-ServersAdminNegativeTestJSON-968304097-project-member] [instance: 32e84715-0345-4171-abb7-c034a501347e] Successfully created port: 9f0ded4b-2531-4688-a7fd-e27055112a5c {{(pid=63279) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1852.594751] env[63279]: DEBUG nova.scheduler.client.report [None req-be29f42f-ff6f-406b-a7e4-862f984891e9 tempest-ServerPasswordTestJSON-1558733645 tempest-ServerPasswordTestJSON-1558733645-project-member] Inventory has not changed for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1852.677309] env[63279]: DEBUG oslo_vmware.api [None req-7fcc8714-6322-470e-8fb8-cd968acaa101 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52762a6d-d71b-df67-a9c0-11fea3faff84, 'name': SearchDatastore_Task, 'duration_secs': 0.021446} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1852.677839] env[63279]: DEBUG oslo_concurrency.lockutils [None req-7fcc8714-6322-470e-8fb8-cd968acaa101 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1852.678083] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-7fcc8714-6322-470e-8fb8-cd968acaa101 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: 41654a82-af5e-463e-80f9-86ba13a5ad2e] Processing image 30887889-e45b-4f67-8b3c-16216e594a90 {{(pid=63279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1852.678359] env[63279]: DEBUG oslo_concurrency.lockutils [None req-7fcc8714-6322-470e-8fb8-cd968acaa101 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1852.678653] env[63279]: DEBUG oslo_concurrency.lockutils [None req-7fcc8714-6322-470e-8fb8-cd968acaa101 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1852.678856] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-7fcc8714-6322-470e-8fb8-cd968acaa101 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1852.679154] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-df45d4b4-548b-439a-9b3b-7bc51b41e9c4 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1852.690359] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-7fcc8714-6322-470e-8fb8-cd968acaa101 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1852.690618] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-7fcc8714-6322-470e-8fb8-cd968acaa101 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1852.691316] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f8f362e2-d523-4d1b-a8f4-9c60e3c52bf6 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1852.698615] env[63279]: DEBUG oslo_vmware.api [None req-7fcc8714-6322-470e-8fb8-cd968acaa101 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Waiting for the task: (returnval){ [ 1852.698615] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]5214562e-e1af-b6a6-b1fe-544caa110f70" [ 1852.698615] env[63279]: _type = "Task" [ 1852.698615] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1852.711687] env[63279]: DEBUG oslo_vmware.api [None req-7fcc8714-6322-470e-8fb8-cd968acaa101 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]5214562e-e1af-b6a6-b1fe-544caa110f70, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1852.866497] env[63279]: DEBUG nova.compute.manager [None req-1366226e-68a1-4af4-bafa-76a748070601 tempest-ServersAdminNegativeTestJSON-968304097 tempest-ServersAdminNegativeTestJSON-968304097-project-member] [instance: 32e84715-0345-4171-abb7-c034a501347e] Start spawning the instance on the hypervisor. {{(pid=63279) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1852.904523] env[63279]: DEBUG nova.virt.hardware [None req-1366226e-68a1-4af4-bafa-76a748070601 tempest-ServersAdminNegativeTestJSON-968304097 tempest-ServersAdminNegativeTestJSON-968304097-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-13T17:30:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-13T17:30:01Z,direct_url=,disk_format='vmdk',id=30887889-e45b-4f67-8b3c-16216e594a90,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a935e86eee0a4c38adfe0367d2097a61',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-13T17:30:02Z,virtual_size=,visibility=), allow threads: False {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1852.904861] env[63279]: DEBUG nova.virt.hardware [None req-1366226e-68a1-4af4-bafa-76a748070601 tempest-ServersAdminNegativeTestJSON-968304097 tempest-ServersAdminNegativeTestJSON-968304097-project-member] Flavor limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1852.905184] env[63279]: DEBUG nova.virt.hardware [None req-1366226e-68a1-4af4-bafa-76a748070601 tempest-ServersAdminNegativeTestJSON-968304097 tempest-ServersAdminNegativeTestJSON-968304097-project-member] Image limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1852.905463] env[63279]: DEBUG nova.virt.hardware [None 
req-1366226e-68a1-4af4-bafa-76a748070601 tempest-ServersAdminNegativeTestJSON-968304097 tempest-ServersAdminNegativeTestJSON-968304097-project-member] Flavor pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1852.905584] env[63279]: DEBUG nova.virt.hardware [None req-1366226e-68a1-4af4-bafa-76a748070601 tempest-ServersAdminNegativeTestJSON-968304097 tempest-ServersAdminNegativeTestJSON-968304097-project-member] Image pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1852.905815] env[63279]: DEBUG nova.virt.hardware [None req-1366226e-68a1-4af4-bafa-76a748070601 tempest-ServersAdminNegativeTestJSON-968304097 tempest-ServersAdminNegativeTestJSON-968304097-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1852.906111] env[63279]: DEBUG nova.virt.hardware [None req-1366226e-68a1-4af4-bafa-76a748070601 tempest-ServersAdminNegativeTestJSON-968304097 tempest-ServersAdminNegativeTestJSON-968304097-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1852.906551] env[63279]: DEBUG nova.virt.hardware [None req-1366226e-68a1-4af4-bafa-76a748070601 tempest-ServersAdminNegativeTestJSON-968304097 tempest-ServersAdminNegativeTestJSON-968304097-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1852.906959] env[63279]: DEBUG nova.virt.hardware [None req-1366226e-68a1-4af4-bafa-76a748070601 tempest-ServersAdminNegativeTestJSON-968304097 tempest-ServersAdminNegativeTestJSON-968304097-project-member] Got 1 possible topologies {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1852.907182] env[63279]: DEBUG nova.virt.hardware [None req-1366226e-68a1-4af4-bafa-76a748070601 tempest-ServersAdminNegativeTestJSON-968304097 tempest-ServersAdminNegativeTestJSON-968304097-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1852.907469] env[63279]: DEBUG nova.virt.hardware [None req-1366226e-68a1-4af4-bafa-76a748070601 tempest-ServersAdminNegativeTestJSON-968304097 tempest-ServersAdminNegativeTestJSON-968304097-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1852.908565] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66be2bad-8953-4a65-86da-db432355837b {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1852.921755] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17cd460a-49d5-4780-8093-3b2c731b749e {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1852.947323] env[63279]: DEBUG nova.network.neutron [None req-38d597a6-cdfb-4da8-926a-02d3447e2334 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] [instance: 
00fb1bc8-2f70-4fa3-a6a2-54fd38ba89c7] Successfully updated port: c1420e26-8471-4d0e-868c-9ff9ead67869 {{(pid=63279) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1853.063955] env[63279]: DEBUG nova.compute.manager [req-c763ffbf-d9b5-40e7-90d1-f930761cca96 req-3e5434bd-b0c4-44a4-9eb4-4258466e12a6 service nova] [instance: 5d4909ea-396c-45ba-9ff5-acb8576150b3] Received event network-changed-887ee4a3-0844-4571-84d8-486d7c21a7d2 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 1853.065618] env[63279]: DEBUG nova.compute.manager [req-c763ffbf-d9b5-40e7-90d1-f930761cca96 req-3e5434bd-b0c4-44a4-9eb4-4258466e12a6 service nova] [instance: 5d4909ea-396c-45ba-9ff5-acb8576150b3] Refreshing instance network info cache due to event network-changed-887ee4a3-0844-4571-84d8-486d7c21a7d2. {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11655}} [ 1853.065988] env[63279]: DEBUG oslo_concurrency.lockutils [req-c763ffbf-d9b5-40e7-90d1-f930761cca96 req-3e5434bd-b0c4-44a4-9eb4-4258466e12a6 service nova] Acquiring lock "refresh_cache-5d4909ea-396c-45ba-9ff5-acb8576150b3" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1853.066098] env[63279]: DEBUG oslo_concurrency.lockutils [req-c763ffbf-d9b5-40e7-90d1-f930761cca96 req-3e5434bd-b0c4-44a4-9eb4-4258466e12a6 service nova] Acquired lock "refresh_cache-5d4909ea-396c-45ba-9ff5-acb8576150b3" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1853.066290] env[63279]: DEBUG nova.network.neutron [req-c763ffbf-d9b5-40e7-90d1-f930761cca96 req-3e5434bd-b0c4-44a4-9eb4-4258466e12a6 service nova] [instance: 5d4909ea-396c-45ba-9ff5-acb8576150b3] Refreshing network info cache for port 887ee4a3-0844-4571-84d8-486d7c21a7d2 {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1853.103631] env[63279]: DEBUG oslo_concurrency.lockutils [None req-be29f42f-ff6f-406b-a7e4-862f984891e9 tempest-ServerPasswordTestJSON-1558733645 tempest-ServerPasswordTestJSON-1558733645-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.266s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1853.107956] env[63279]: DEBUG oslo_concurrency.lockutils [None req-d61d218b-0e55-4b2f-bcac-53de9fd09b97 tempest-TenantUsagesTestJSON-1157774739 tempest-TenantUsagesTestJSON-1157774739-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 19.063s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1853.108349] env[63279]: DEBUG nova.objects.instance [None req-d61d218b-0e55-4b2f-bcac-53de9fd09b97 tempest-TenantUsagesTestJSON-1157774739 tempest-TenantUsagesTestJSON-1157774739-project-member] Lazy-loading 'resources' on Instance uuid 8c712d0d-55c2-4a14-b759-9441594211e1 {{(pid=63279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1853.137968] env[63279]: INFO nova.scheduler.client.report [None req-be29f42f-ff6f-406b-a7e4-862f984891e9 tempest-ServerPasswordTestJSON-1558733645 tempest-ServerPasswordTestJSON-1558733645-project-member] Deleted allocations for instance 21c2bba1-5482-496a-9e2a-f123a94ed48a [ 1853.214733] env[63279]: DEBUG oslo_vmware.api [None req-7fcc8714-6322-470e-8fb8-cd968acaa101 
tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]5214562e-e1af-b6a6-b1fe-544caa110f70, 'name': SearchDatastore_Task, 'duration_secs': 0.012535} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1853.214733] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-eea6d53b-7c89-46d6-835f-6daa5f97044f {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1853.219957] env[63279]: DEBUG nova.compute.manager [req-373c5da0-d0df-48a7-a808-07dc6ffb7524 req-b12b6f2d-31db-4791-bdd3-d790e5909d4c service nova] [instance: 00fb1bc8-2f70-4fa3-a6a2-54fd38ba89c7] Received event network-vif-plugged-c1420e26-8471-4d0e-868c-9ff9ead67869 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 1853.220495] env[63279]: DEBUG oslo_concurrency.lockutils [req-373c5da0-d0df-48a7-a808-07dc6ffb7524 req-b12b6f2d-31db-4791-bdd3-d790e5909d4c service nova] Acquiring lock "00fb1bc8-2f70-4fa3-a6a2-54fd38ba89c7-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1853.220721] env[63279]: DEBUG oslo_concurrency.lockutils [req-373c5da0-d0df-48a7-a808-07dc6ffb7524 req-b12b6f2d-31db-4791-bdd3-d790e5909d4c service nova] Lock "00fb1bc8-2f70-4fa3-a6a2-54fd38ba89c7-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1853.220890] env[63279]: DEBUG oslo_concurrency.lockutils [req-373c5da0-d0df-48a7-a808-07dc6ffb7524 req-b12b6f2d-31db-4791-bdd3-d790e5909d4c service nova] Lock "00fb1bc8-2f70-4fa3-a6a2-54fd38ba89c7-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1853.223022] env[63279]: DEBUG nova.compute.manager [req-373c5da0-d0df-48a7-a808-07dc6ffb7524 req-b12b6f2d-31db-4791-bdd3-d790e5909d4c service nova] [instance: 00fb1bc8-2f70-4fa3-a6a2-54fd38ba89c7] No waiting events found dispatching network-vif-plugged-c1420e26-8471-4d0e-868c-9ff9ead67869 {{(pid=63279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1853.223022] env[63279]: WARNING nova.compute.manager [req-373c5da0-d0df-48a7-a808-07dc6ffb7524 req-b12b6f2d-31db-4791-bdd3-d790e5909d4c service nova] [instance: 00fb1bc8-2f70-4fa3-a6a2-54fd38ba89c7] Received unexpected event network-vif-plugged-c1420e26-8471-4d0e-868c-9ff9ead67869 for instance with vm_state building and task_state spawning. [ 1853.228508] env[63279]: DEBUG oslo_vmware.api [None req-7fcc8714-6322-470e-8fb8-cd968acaa101 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Waiting for the task: (returnval){ [ 1853.228508] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]5297f251-6438-5cb0-ab4a-433f612501ea" [ 1853.228508] env[63279]: _type = "Task" [ 1853.228508] env[63279]: } to complete. 
{{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1853.240042] env[63279]: DEBUG oslo_vmware.api [None req-7fcc8714-6322-470e-8fb8-cd968acaa101 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]5297f251-6438-5cb0-ab4a-433f612501ea, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1853.451366] env[63279]: DEBUG oslo_concurrency.lockutils [None req-38d597a6-cdfb-4da8-926a-02d3447e2334 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Acquiring lock "refresh_cache-00fb1bc8-2f70-4fa3-a6a2-54fd38ba89c7" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1853.451512] env[63279]: DEBUG oslo_concurrency.lockutils [None req-38d597a6-cdfb-4da8-926a-02d3447e2334 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Acquired lock "refresh_cache-00fb1bc8-2f70-4fa3-a6a2-54fd38ba89c7" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1853.451664] env[63279]: DEBUG nova.network.neutron [None req-38d597a6-cdfb-4da8-926a-02d3447e2334 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] [instance: 00fb1bc8-2f70-4fa3-a6a2-54fd38ba89c7] Building network info cache for instance {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1853.462496] env[63279]: DEBUG nova.network.neutron [-] [instance: 3d4db43f-5784-46e1-9710-f6becec011e2] Updating instance_info_cache with network_info: [] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1853.649662] env[63279]: DEBUG oslo_concurrency.lockutils [None req-be29f42f-ff6f-406b-a7e4-862f984891e9 tempest-ServerPasswordTestJSON-1558733645 tempest-ServerPasswordTestJSON-1558733645-project-member] Lock "21c2bba1-5482-496a-9e2a-f123a94ed48a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 23.735s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1853.749557] env[63279]: DEBUG oslo_vmware.api [None req-7fcc8714-6322-470e-8fb8-cd968acaa101 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]5297f251-6438-5cb0-ab4a-433f612501ea, 'name': SearchDatastore_Task, 'duration_secs': 0.014578} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1853.750025] env[63279]: DEBUG oslo_concurrency.lockutils [None req-7fcc8714-6322-470e-8fb8-cd968acaa101 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1853.750391] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-7fcc8714-6322-470e-8fb8-cd968acaa101 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] 41654a82-af5e-463e-80f9-86ba13a5ad2e/41654a82-af5e-463e-80f9-86ba13a5ad2e.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1853.750856] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ebc32185-4b0c-474a-a849-c1d90566b210 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1853.760398] env[63279]: DEBUG oslo_vmware.api [None req-7fcc8714-6322-470e-8fb8-cd968acaa101 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Waiting for the task: (returnval){ [ 1853.760398] env[63279]: value = "task-2086846" [ 1853.760398] env[63279]: _type = "Task" [ 1853.760398] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1853.775187] env[63279]: DEBUG oslo_vmware.api [None req-7fcc8714-6322-470e-8fb8-cd968acaa101 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Task: {'id': task-2086846, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1853.965892] env[63279]: INFO nova.compute.manager [-] [instance: 3d4db43f-5784-46e1-9710-f6becec011e2] Took 2.17 seconds to deallocate network for instance. [ 1854.061166] env[63279]: DEBUG nova.network.neutron [None req-38d597a6-cdfb-4da8-926a-02d3447e2334 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] [instance: 00fb1bc8-2f70-4fa3-a6a2-54fd38ba89c7] Instance cache missing network info. 
{{(pid=63279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1854.174219] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b185256e-7346-4bff-bd1c-0935be7ce67c {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1854.184449] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e30f614f-9b92-45e4-af21-c94267a50fc1 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1854.224497] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d32d7e56-1cfc-4b9a-b321-c3f261b47a8c {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1854.233729] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cafc9763-2dee-47f4-8174-502ae8bacc55 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1854.252525] env[63279]: DEBUG nova.compute.provider_tree [None req-d61d218b-0e55-4b2f-bcac-53de9fd09b97 tempest-TenantUsagesTestJSON-1157774739 tempest-TenantUsagesTestJSON-1157774739-project-member] Inventory has not changed in ProviderTree for provider: 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1854.278840] env[63279]: DEBUG oslo_vmware.api [None req-7fcc8714-6322-470e-8fb8-cd968acaa101 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Task: {'id': task-2086846, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1854.453707] env[63279]: DEBUG nova.network.neutron [req-c763ffbf-d9b5-40e7-90d1-f930761cca96 req-3e5434bd-b0c4-44a4-9eb4-4258466e12a6 service nova] [instance: 5d4909ea-396c-45ba-9ff5-acb8576150b3] Updated VIF entry in instance network info cache for port 887ee4a3-0844-4571-84d8-486d7c21a7d2. 
{{(pid=63279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1854.454509] env[63279]: DEBUG nova.network.neutron [req-c763ffbf-d9b5-40e7-90d1-f930761cca96 req-3e5434bd-b0c4-44a4-9eb4-4258466e12a6 service nova] [instance: 5d4909ea-396c-45ba-9ff5-acb8576150b3] Updating instance_info_cache with network_info: [{"id": "887ee4a3-0844-4571-84d8-486d7c21a7d2", "address": "fa:16:3e:52:14:49", "network": {"id": "4b6f5566-0923-4169-8f7f-f8afa7b7550c", "bridge": "br-int", "label": "tempest-VolumesAssistedSnapshotsTest-769172874-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.241", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "50803bde25ba44398da07d2a993e69f7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "37fb1918-d178-4e12-93e6-316381e78be4", "external-id": "nsx-vlan-transportzone-763", "segmentation_id": 763, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap887ee4a3-08", "ovs_interfaceid": "887ee4a3-0844-4571-84d8-486d7c21a7d2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1854.477655] env[63279]: DEBUG oslo_concurrency.lockutils [None req-57ab2a25-ce86-4ce7-837f-a1aef411ae6f tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1854.760675] env[63279]: DEBUG nova.scheduler.client.report [None req-d61d218b-0e55-4b2f-bcac-53de9fd09b97 tempest-TenantUsagesTestJSON-1157774739 tempest-TenantUsagesTestJSON-1157774739-project-member] Inventory has not changed for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1854.780348] env[63279]: DEBUG oslo_vmware.api [None req-7fcc8714-6322-470e-8fb8-cd968acaa101 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Task: {'id': task-2086846, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.831698} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1854.781555] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-7fcc8714-6322-470e-8fb8-cd968acaa101 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] 41654a82-af5e-463e-80f9-86ba13a5ad2e/41654a82-af5e-463e-80f9-86ba13a5ad2e.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1854.781888] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-7fcc8714-6322-470e-8fb8-cd968acaa101 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: 41654a82-af5e-463e-80f9-86ba13a5ad2e] Extending root virtual disk to 1048576 {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1854.783754] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-5726ce8d-f980-4b8f-b0ba-17d34dfd35f4 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1854.793426] env[63279]: DEBUG oslo_vmware.api [None req-7fcc8714-6322-470e-8fb8-cd968acaa101 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Waiting for the task: (returnval){ [ 1854.793426] env[63279]: value = "task-2086847" [ 1854.793426] env[63279]: _type = "Task" [ 1854.793426] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1854.805340] env[63279]: DEBUG oslo_vmware.api [None req-7fcc8714-6322-470e-8fb8-cd968acaa101 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Task: {'id': task-2086847, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1854.838969] env[63279]: DEBUG nova.network.neutron [None req-38d597a6-cdfb-4da8-926a-02d3447e2334 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] [instance: 00fb1bc8-2f70-4fa3-a6a2-54fd38ba89c7] Updating instance_info_cache with network_info: [{"id": "c1420e26-8471-4d0e-868c-9ff9ead67869", "address": "fa:16:3e:64:d0:23", "network": {"id": "4f155366-ade7-4d4b-8fad-a2b55798980f", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-669492310-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "651b8183237a4e6dbef36aa2fb419f1b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "50cf0a70-948d-4611-af05-94c1483064ed", "external-id": "nsx-vlan-transportzone-536", "segmentation_id": 536, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc1420e26-84", "ovs_interfaceid": "c1420e26-8471-4d0e-868c-9ff9ead67869", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1854.960444] env[63279]: DEBUG oslo_concurrency.lockutils [req-c763ffbf-d9b5-40e7-90d1-f930761cca96 req-3e5434bd-b0c4-44a4-9eb4-4258466e12a6 service nova] Releasing lock "refresh_cache-5d4909ea-396c-45ba-9ff5-acb8576150b3" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1854.960725] env[63279]: DEBUG nova.compute.manager [req-c763ffbf-d9b5-40e7-90d1-f930761cca96 req-3e5434bd-b0c4-44a4-9eb4-4258466e12a6 service nova] [instance: 41654a82-af5e-463e-80f9-86ba13a5ad2e] Received event network-changed-7d76225b-f1ff-4793-9c29-9a2cd30c6b43 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 1854.960792] env[63279]: DEBUG nova.compute.manager [req-c763ffbf-d9b5-40e7-90d1-f930761cca96 req-3e5434bd-b0c4-44a4-9eb4-4258466e12a6 service nova] [instance: 41654a82-af5e-463e-80f9-86ba13a5ad2e] Refreshing instance network info cache due to event network-changed-7d76225b-f1ff-4793-9c29-9a2cd30c6b43. 
{{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11655}} [ 1854.960995] env[63279]: DEBUG oslo_concurrency.lockutils [req-c763ffbf-d9b5-40e7-90d1-f930761cca96 req-3e5434bd-b0c4-44a4-9eb4-4258466e12a6 service nova] Acquiring lock "refresh_cache-41654a82-af5e-463e-80f9-86ba13a5ad2e" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1854.961300] env[63279]: DEBUG oslo_concurrency.lockutils [req-c763ffbf-d9b5-40e7-90d1-f930761cca96 req-3e5434bd-b0c4-44a4-9eb4-4258466e12a6 service nova] Acquired lock "refresh_cache-41654a82-af5e-463e-80f9-86ba13a5ad2e" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1854.961351] env[63279]: DEBUG nova.network.neutron [req-c763ffbf-d9b5-40e7-90d1-f930761cca96 req-3e5434bd-b0c4-44a4-9eb4-4258466e12a6 service nova] [instance: 41654a82-af5e-463e-80f9-86ba13a5ad2e] Refreshing network info cache for port 7d76225b-f1ff-4793-9c29-9a2cd30c6b43 {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1855.051237] env[63279]: DEBUG oslo_concurrency.lockutils [None req-4a8933e7-208a-443d-9249-2440f3ab60b6 tempest-ServerMetadataNegativeTestJSON-1632547526 tempest-ServerMetadataNegativeTestJSON-1632547526-project-member] Acquiring lock "e04f06de-da6a-4562-a50a-ff16bf3a006e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1855.052011] env[63279]: DEBUG oslo_concurrency.lockutils [None req-4a8933e7-208a-443d-9249-2440f3ab60b6 tempest-ServerMetadataNegativeTestJSON-1632547526 tempest-ServerMetadataNegativeTestJSON-1632547526-project-member] Lock "e04f06de-da6a-4562-a50a-ff16bf3a006e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1855.076554] env[63279]: DEBUG nova.network.neutron [None req-1366226e-68a1-4af4-bafa-76a748070601 tempest-ServersAdminNegativeTestJSON-968304097 tempest-ServersAdminNegativeTestJSON-968304097-project-member] [instance: 32e84715-0345-4171-abb7-c034a501347e] Successfully updated port: 9f0ded4b-2531-4688-a7fd-e27055112a5c {{(pid=63279) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1855.275047] env[63279]: DEBUG oslo_concurrency.lockutils [None req-d61d218b-0e55-4b2f-bcac-53de9fd09b97 tempest-TenantUsagesTestJSON-1157774739 tempest-TenantUsagesTestJSON-1157774739-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.167s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1855.280607] env[63279]: DEBUG oslo_concurrency.lockutils [None req-21fcbb29-dd97-4731-aa59-0856e81c5314 tempest-ServerExternalEventsTest-2060906853 tempest-ServerExternalEventsTest-2060906853-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 20.938s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1855.280607] env[63279]: DEBUG nova.objects.instance [None req-21fcbb29-dd97-4731-aa59-0856e81c5314 tempest-ServerExternalEventsTest-2060906853 tempest-ServerExternalEventsTest-2060906853-project-member] 
Lazy-loading 'resources' on Instance uuid 24bce28c-fc43-4f17-9800-4d980f6729bc {{(pid=63279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1855.309124] env[63279]: DEBUG oslo_vmware.api [None req-7fcc8714-6322-470e-8fb8-cd968acaa101 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Task: {'id': task-2086847, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.082605} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1855.309935] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-7fcc8714-6322-470e-8fb8-cd968acaa101 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: 41654a82-af5e-463e-80f9-86ba13a5ad2e] Extended root virtual disk {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1855.310331] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da97651c-198d-4bde-859b-397f27d508f2 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1855.342717] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-7fcc8714-6322-470e-8fb8-cd968acaa101 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: 41654a82-af5e-463e-80f9-86ba13a5ad2e] Reconfiguring VM instance instance-00000012 to attach disk [datastore1] 41654a82-af5e-463e-80f9-86ba13a5ad2e/41654a82-af5e-463e-80f9-86ba13a5ad2e.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1855.344265] env[63279]: INFO nova.scheduler.client.report [None req-d61d218b-0e55-4b2f-bcac-53de9fd09b97 tempest-TenantUsagesTestJSON-1157774739 tempest-TenantUsagesTestJSON-1157774739-project-member] Deleted allocations for instance 8c712d0d-55c2-4a14-b759-9441594211e1 [ 1855.346016] env[63279]: DEBUG oslo_concurrency.lockutils [None req-38d597a6-cdfb-4da8-926a-02d3447e2334 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Releasing lock "refresh_cache-00fb1bc8-2f70-4fa3-a6a2-54fd38ba89c7" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1855.346443] env[63279]: DEBUG nova.compute.manager [None req-38d597a6-cdfb-4da8-926a-02d3447e2334 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] [instance: 00fb1bc8-2f70-4fa3-a6a2-54fd38ba89c7] Instance network_info: |[{"id": "c1420e26-8471-4d0e-868c-9ff9ead67869", "address": "fa:16:3e:64:d0:23", "network": {"id": "4f155366-ade7-4d4b-8fad-a2b55798980f", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-669492310-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "651b8183237a4e6dbef36aa2fb419f1b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "50cf0a70-948d-4611-af05-94c1483064ed", "external-id": 
"nsx-vlan-transportzone-536", "segmentation_id": 536, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc1420e26-84", "ovs_interfaceid": "c1420e26-8471-4d0e-868c-9ff9ead67869", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1855.346870] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-029dabe7-0112-4c76-a4a6-979f3727f5bd {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1855.370790] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-38d597a6-cdfb-4da8-926a-02d3447e2334 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] [instance: 00fb1bc8-2f70-4fa3-a6a2-54fd38ba89c7] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:64:d0:23', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '50cf0a70-948d-4611-af05-94c1483064ed', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c1420e26-8471-4d0e-868c-9ff9ead67869', 'vif_model': 'vmxnet3'}] {{(pid=63279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1855.379989] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-38d597a6-cdfb-4da8-926a-02d3447e2334 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Creating folder: Project (651b8183237a4e6dbef36aa2fb419f1b). Parent ref: group-v427491. {{(pid=63279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1855.382332] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-16d43b7f-5afc-4d1e-921e-70cfbdd15cb7 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1855.392762] env[63279]: DEBUG oslo_vmware.api [None req-7fcc8714-6322-470e-8fb8-cd968acaa101 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Waiting for the task: (returnval){ [ 1855.392762] env[63279]: value = "task-2086849" [ 1855.392762] env[63279]: _type = "Task" [ 1855.392762] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1855.402827] env[63279]: INFO nova.virt.vmwareapi.vm_util [None req-38d597a6-cdfb-4da8-926a-02d3447e2334 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Created folder: Project (651b8183237a4e6dbef36aa2fb419f1b) in parent group-v427491. [ 1855.402827] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-38d597a6-cdfb-4da8-926a-02d3447e2334 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Creating folder: Instances. Parent ref: group-v427548. 
{{(pid=63279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1855.405923] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-1cdbd51d-0f37-47b3-a6b5-abd68cd7c9b3 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1855.407397] env[63279]: DEBUG oslo_vmware.api [None req-7fcc8714-6322-470e-8fb8-cd968acaa101 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Task: {'id': task-2086849, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1855.419304] env[63279]: INFO nova.virt.vmwareapi.vm_util [None req-38d597a6-cdfb-4da8-926a-02d3447e2334 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Created folder: Instances in parent group-v427548. [ 1855.419565] env[63279]: DEBUG oslo.service.loopingcall [None req-38d597a6-cdfb-4da8-926a-02d3447e2334 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1855.419778] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 00fb1bc8-2f70-4fa3-a6a2-54fd38ba89c7] Creating VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1855.419996] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6effc7d5-bbab-4973-ad5b-7d5c32ae78f3 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1855.447025] env[63279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1855.447025] env[63279]: value = "task-2086851" [ 1855.447025] env[63279]: _type = "Task" [ 1855.447025] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1855.465637] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2086851, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1855.582141] env[63279]: DEBUG oslo_concurrency.lockutils [None req-1366226e-68a1-4af4-bafa-76a748070601 tempest-ServersAdminNegativeTestJSON-968304097 tempest-ServersAdminNegativeTestJSON-968304097-project-member] Acquiring lock "refresh_cache-32e84715-0345-4171-abb7-c034a501347e" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1855.582141] env[63279]: DEBUG oslo_concurrency.lockutils [None req-1366226e-68a1-4af4-bafa-76a748070601 tempest-ServersAdminNegativeTestJSON-968304097 tempest-ServersAdminNegativeTestJSON-968304097-project-member] Acquired lock "refresh_cache-32e84715-0345-4171-abb7-c034a501347e" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1855.582141] env[63279]: DEBUG nova.network.neutron [None req-1366226e-68a1-4af4-bafa-76a748070601 tempest-ServersAdminNegativeTestJSON-968304097 tempest-ServersAdminNegativeTestJSON-968304097-project-member] [instance: 32e84715-0345-4171-abb7-c034a501347e] Building network info cache for instance {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1855.884602] env[63279]: DEBUG oslo_concurrency.lockutils [None req-d61d218b-0e55-4b2f-bcac-53de9fd09b97 tempest-TenantUsagesTestJSON-1157774739 tempest-TenantUsagesTestJSON-1157774739-project-member] Lock "8c712d0d-55c2-4a14-b759-9441594211e1" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 25.286s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1855.905068] env[63279]: DEBUG oslo_vmware.api [None req-7fcc8714-6322-470e-8fb8-cd968acaa101 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Task: {'id': task-2086849, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1855.955141] env[63279]: DEBUG nova.network.neutron [req-c763ffbf-d9b5-40e7-90d1-f930761cca96 req-3e5434bd-b0c4-44a4-9eb4-4258466e12a6 service nova] [instance: 41654a82-af5e-463e-80f9-86ba13a5ad2e] Updated VIF entry in instance network info cache for port 7d76225b-f1ff-4793-9c29-9a2cd30c6b43. 
{{(pid=63279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1855.955519] env[63279]: DEBUG nova.network.neutron [req-c763ffbf-d9b5-40e7-90d1-f930761cca96 req-3e5434bd-b0c4-44a4-9eb4-4258466e12a6 service nova] [instance: 41654a82-af5e-463e-80f9-86ba13a5ad2e] Updating instance_info_cache with network_info: [{"id": "7d76225b-f1ff-4793-9c29-9a2cd30c6b43", "address": "fa:16:3e:e0:73:75", "network": {"id": "d673eabe-0173-4c69-9163-88f4d673ca51", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1798523777-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7c98de1240f14b058e58f6a707096ef1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a555680e-4721-4509-97e4-ced9dc17c13e", "external-id": "nsx-vlan-transportzone-4", "segmentation_id": 4, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7d76225b-f1", "ovs_interfaceid": "7d76225b-f1ff-4793-9c29-9a2cd30c6b43", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1855.967792] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2086851, 'name': CreateVM_Task, 'duration_secs': 0.402978} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1855.967973] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 00fb1bc8-2f70-4fa3-a6a2-54fd38ba89c7] Created VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1855.968663] env[63279]: DEBUG oslo_concurrency.lockutils [None req-38d597a6-cdfb-4da8-926a-02d3447e2334 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1855.968828] env[63279]: DEBUG oslo_concurrency.lockutils [None req-38d597a6-cdfb-4da8-926a-02d3447e2334 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1855.969168] env[63279]: DEBUG oslo_concurrency.lockutils [None req-38d597a6-cdfb-4da8-926a-02d3447e2334 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1855.969427] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0ff455eb-1742-420c-83da-cee86e4a1f53 {{(pid=63279) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1855.976776] env[63279]: DEBUG oslo_vmware.api [None req-38d597a6-cdfb-4da8-926a-02d3447e2334 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Waiting for the task: (returnval){ [ 1855.976776] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52fe3729-10ac-1170-767f-5dea8cd13a7a" [ 1855.976776] env[63279]: _type = "Task" [ 1855.976776] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1855.991854] env[63279]: DEBUG oslo_vmware.api [None req-38d597a6-cdfb-4da8-926a-02d3447e2334 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52fe3729-10ac-1170-767f-5dea8cd13a7a, 'name': SearchDatastore_Task, 'duration_secs': 0.011776} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1855.992183] env[63279]: DEBUG oslo_concurrency.lockutils [None req-38d597a6-cdfb-4da8-926a-02d3447e2334 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1855.992418] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-38d597a6-cdfb-4da8-926a-02d3447e2334 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] [instance: 00fb1bc8-2f70-4fa3-a6a2-54fd38ba89c7] Processing image 30887889-e45b-4f67-8b3c-16216e594a90 {{(pid=63279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1855.992679] env[63279]: DEBUG oslo_concurrency.lockutils [None req-38d597a6-cdfb-4da8-926a-02d3447e2334 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1855.992830] env[63279]: DEBUG oslo_concurrency.lockutils [None req-38d597a6-cdfb-4da8-926a-02d3447e2334 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1855.993040] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-38d597a6-cdfb-4da8-926a-02d3447e2334 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1855.993311] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4005837b-f9d3-4050-bfa1-a502f73a57fb {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1856.010549] env[63279]: DEBUG 
nova.virt.vmwareapi.ds_util [None req-38d597a6-cdfb-4da8-926a-02d3447e2334 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1856.010778] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-38d597a6-cdfb-4da8-926a-02d3447e2334 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1856.011682] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-89e59466-2f64-468f-927b-537e955c7523 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1856.022271] env[63279]: DEBUG oslo_vmware.api [None req-38d597a6-cdfb-4da8-926a-02d3447e2334 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Waiting for the task: (returnval){ [ 1856.022271] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]5233c59e-3f48-db26-ea24-05bee669e697" [ 1856.022271] env[63279]: _type = "Task" [ 1856.022271] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1856.033420] env[63279]: DEBUG oslo_vmware.api [None req-38d597a6-cdfb-4da8-926a-02d3447e2334 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]5233c59e-3f48-db26-ea24-05bee669e697, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1856.045438] env[63279]: DEBUG oslo_concurrency.lockutils [None req-1e3daa27-53b9-4834-adeb-4cde61720229 tempest-ServersNegativeTestMultiTenantJSON-98060902 tempest-ServersNegativeTestMultiTenantJSON-98060902-project-member] Acquiring lock "cf1b70af-335d-404b-bb4f-fe082dd6f450" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1856.045695] env[63279]: DEBUG oslo_concurrency.lockutils [None req-1e3daa27-53b9-4834-adeb-4cde61720229 tempest-ServersNegativeTestMultiTenantJSON-98060902 tempest-ServersNegativeTestMultiTenantJSON-98060902-project-member] Lock "cf1b70af-335d-404b-bb4f-fe082dd6f450" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1856.143298] env[63279]: DEBUG nova.compute.manager [req-803d5559-7161-4bb3-bf4d-2b76823d54d7 req-6d932776-6720-4d7b-b7fd-21a74ab40274 service nova] [instance: 1a55008a-4d8c-403d-a8f4-966aa7346f4c] Received event network-changed-02e5d9ab-c16c-4141-a131-b7e9f2a2a194 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 1856.143523] env[63279]: DEBUG nova.compute.manager [req-803d5559-7161-4bb3-bf4d-2b76823d54d7 req-6d932776-6720-4d7b-b7fd-21a74ab40274 service nova] [instance: 1a55008a-4d8c-403d-a8f4-966aa7346f4c] Refreshing instance network info cache due to event network-changed-02e5d9ab-c16c-4141-a131-b7e9f2a2a194. {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11655}} [ 1856.143792] env[63279]: DEBUG oslo_concurrency.lockutils [req-803d5559-7161-4bb3-bf4d-2b76823d54d7 req-6d932776-6720-4d7b-b7fd-21a74ab40274 service nova] Acquiring lock "refresh_cache-1a55008a-4d8c-403d-a8f4-966aa7346f4c" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1856.143979] env[63279]: DEBUG oslo_concurrency.lockutils [req-803d5559-7161-4bb3-bf4d-2b76823d54d7 req-6d932776-6720-4d7b-b7fd-21a74ab40274 service nova] Acquired lock "refresh_cache-1a55008a-4d8c-403d-a8f4-966aa7346f4c" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1856.145312] env[63279]: DEBUG nova.network.neutron [req-803d5559-7161-4bb3-bf4d-2b76823d54d7 req-6d932776-6720-4d7b-b7fd-21a74ab40274 service nova] [instance: 1a55008a-4d8c-403d-a8f4-966aa7346f4c] Refreshing network info cache for port 02e5d9ab-c16c-4141-a131-b7e9f2a2a194 {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1856.154671] env[63279]: DEBUG nova.network.neutron [None req-1366226e-68a1-4af4-bafa-76a748070601 tempest-ServersAdminNegativeTestJSON-968304097 tempest-ServersAdminNegativeTestJSON-968304097-project-member] [instance: 32e84715-0345-4171-abb7-c034a501347e] Instance cache missing network info. 
{{(pid=63279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1856.173426] env[63279]: DEBUG nova.compute.manager [req-51269bca-7747-4fd2-8aad-c2813ae5114a req-18f0a064-c1b8-429a-ace4-12caa0a0635e service nova] [instance: 00fb1bc8-2f70-4fa3-a6a2-54fd38ba89c7] Received event network-changed-c1420e26-8471-4d0e-868c-9ff9ead67869 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 1856.173685] env[63279]: DEBUG nova.compute.manager [req-51269bca-7747-4fd2-8aad-c2813ae5114a req-18f0a064-c1b8-429a-ace4-12caa0a0635e service nova] [instance: 00fb1bc8-2f70-4fa3-a6a2-54fd38ba89c7] Refreshing instance network info cache due to event network-changed-c1420e26-8471-4d0e-868c-9ff9ead67869. {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11655}} [ 1856.174186] env[63279]: DEBUG oslo_concurrency.lockutils [req-51269bca-7747-4fd2-8aad-c2813ae5114a req-18f0a064-c1b8-429a-ace4-12caa0a0635e service nova] Acquiring lock "refresh_cache-00fb1bc8-2f70-4fa3-a6a2-54fd38ba89c7" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1856.174378] env[63279]: DEBUG oslo_concurrency.lockutils [req-51269bca-7747-4fd2-8aad-c2813ae5114a req-18f0a064-c1b8-429a-ace4-12caa0a0635e service nova] Acquired lock "refresh_cache-00fb1bc8-2f70-4fa3-a6a2-54fd38ba89c7" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1856.174583] env[63279]: DEBUG nova.network.neutron [req-51269bca-7747-4fd2-8aad-c2813ae5114a req-18f0a064-c1b8-429a-ace4-12caa0a0635e service nova] [instance: 00fb1bc8-2f70-4fa3-a6a2-54fd38ba89c7] Refreshing network info cache for port c1420e26-8471-4d0e-868c-9ff9ead67869 {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1856.407272] env[63279]: DEBUG oslo_vmware.api [None req-7fcc8714-6322-470e-8fb8-cd968acaa101 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Task: {'id': task-2086849, 'name': ReconfigVM_Task, 'duration_secs': 0.722073} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1856.407634] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-7fcc8714-6322-470e-8fb8-cd968acaa101 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: 41654a82-af5e-463e-80f9-86ba13a5ad2e] Reconfigured VM instance instance-00000012 to attach disk [datastore1] 41654a82-af5e-463e-80f9-86ba13a5ad2e/41654a82-af5e-463e-80f9-86ba13a5ad2e.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1856.410097] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b55f7dcf-a992-429d-b7f4-d1ee877359e3 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1856.417596] env[63279]: DEBUG oslo_vmware.api [None req-7fcc8714-6322-470e-8fb8-cd968acaa101 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Waiting for the task: (returnval){ [ 1856.417596] env[63279]: value = "task-2086852" [ 1856.417596] env[63279]: _type = "Task" [ 1856.417596] env[63279]: } to complete. 
{{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1856.435421] env[63279]: DEBUG oslo_vmware.api [None req-7fcc8714-6322-470e-8fb8-cd968acaa101 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Task: {'id': task-2086852, 'name': Rename_Task} progress is 5%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1856.459150] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec62753b-8c84-4ac6-ba2f-bd142bc32e90 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1856.463437] env[63279]: DEBUG oslo_concurrency.lockutils [req-c763ffbf-d9b5-40e7-90d1-f930761cca96 req-3e5434bd-b0c4-44a4-9eb4-4258466e12a6 service nova] Releasing lock "refresh_cache-41654a82-af5e-463e-80f9-86ba13a5ad2e" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1856.464123] env[63279]: DEBUG nova.compute.manager [req-c763ffbf-d9b5-40e7-90d1-f930761cca96 req-3e5434bd-b0c4-44a4-9eb4-4258466e12a6 service nova] [instance: c05e9aaa-e251-480c-94d6-56c29bb6282d] Received event network-vif-deleted-404425c8-83a7-4c0b-b938-18e89530246f {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 1856.470724] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62d422ce-442e-470d-ae1b-0430a0e8f48f {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1856.511685] env[63279]: DEBUG nova.network.neutron [None req-1366226e-68a1-4af4-bafa-76a748070601 tempest-ServersAdminNegativeTestJSON-968304097 tempest-ServersAdminNegativeTestJSON-968304097-project-member] [instance: 32e84715-0345-4171-abb7-c034a501347e] Updating instance_info_cache with network_info: [{"id": "9f0ded4b-2531-4688-a7fd-e27055112a5c", "address": "fa:16:3e:44:ca:62", "network": {"id": "1e875730-8e0e-4907-a6d2-776025ed7ab9", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.70", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "a935e86eee0a4c38adfe0367d2097a61", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "32028d02-abaa-4071-bc65-1460f5c772a8", "external-id": "nsx-vlan-transportzone-558", "segmentation_id": 558, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9f0ded4b-25", "ovs_interfaceid": "9f0ded4b-2531-4688-a7fd-e27055112a5c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1856.513439] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ede221f-5672-4d19-9b39-ca01de3429b0 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1856.523037] env[63279]: DEBUG 
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00293ce2-4067-48d3-8e9d-50a3d0cbb2ce {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1856.542176] env[63279]: DEBUG oslo_vmware.api [None req-38d597a6-cdfb-4da8-926a-02d3447e2334 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]5233c59e-3f48-db26-ea24-05bee669e697, 'name': SearchDatastore_Task, 'duration_secs': 0.013638} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1856.553129] env[63279]: DEBUG nova.compute.provider_tree [None req-21fcbb29-dd97-4731-aa59-0856e81c5314 tempest-ServerExternalEventsTest-2060906853 tempest-ServerExternalEventsTest-2060906853-project-member] Inventory has not changed in ProviderTree for provider: 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1856.554682] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8572a827-a9ca-4a59-996e-be864fe8e625 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1856.563274] env[63279]: DEBUG oslo_vmware.api [None req-38d597a6-cdfb-4da8-926a-02d3447e2334 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Waiting for the task: (returnval){ [ 1856.563274] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52efb8e6-171d-1530-ff3f-4e293f6d7f0d" [ 1856.563274] env[63279]: _type = "Task" [ 1856.563274] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1856.574933] env[63279]: DEBUG oslo_vmware.api [None req-38d597a6-cdfb-4da8-926a-02d3447e2334 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52efb8e6-171d-1530-ff3f-4e293f6d7f0d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1856.937852] env[63279]: DEBUG oslo_vmware.api [None req-7fcc8714-6322-470e-8fb8-cd968acaa101 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Task: {'id': task-2086852, 'name': Rename_Task, 'duration_secs': 0.211942} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1856.938724] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-7fcc8714-6322-470e-8fb8-cd968acaa101 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: 41654a82-af5e-463e-80f9-86ba13a5ad2e] Powering on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1856.943019] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c2dbe453-735a-4021-9dc8-0bdcfb8d5a35 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1856.947883] env[63279]: DEBUG oslo_vmware.api [None req-7fcc8714-6322-470e-8fb8-cd968acaa101 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Waiting for the task: (returnval){ [ 1856.947883] env[63279]: value = "task-2086853" [ 1856.947883] env[63279]: _type = "Task" [ 1856.947883] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1856.957552] env[63279]: DEBUG oslo_vmware.api [None req-7fcc8714-6322-470e-8fb8-cd968acaa101 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Task: {'id': task-2086853, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1857.017634] env[63279]: DEBUG oslo_concurrency.lockutils [None req-1366226e-68a1-4af4-bafa-76a748070601 tempest-ServersAdminNegativeTestJSON-968304097 tempest-ServersAdminNegativeTestJSON-968304097-project-member] Releasing lock "refresh_cache-32e84715-0345-4171-abb7-c034a501347e" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1857.017974] env[63279]: DEBUG nova.compute.manager [None req-1366226e-68a1-4af4-bafa-76a748070601 tempest-ServersAdminNegativeTestJSON-968304097 tempest-ServersAdminNegativeTestJSON-968304097-project-member] [instance: 32e84715-0345-4171-abb7-c034a501347e] Instance network_info: |[{"id": "9f0ded4b-2531-4688-a7fd-e27055112a5c", "address": "fa:16:3e:44:ca:62", "network": {"id": "1e875730-8e0e-4907-a6d2-776025ed7ab9", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.70", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "a935e86eee0a4c38adfe0367d2097a61", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "32028d02-abaa-4071-bc65-1460f5c772a8", "external-id": "nsx-vlan-transportzone-558", "segmentation_id": 558, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9f0ded4b-25", "ovs_interfaceid": "9f0ded4b-2531-4688-a7fd-e27055112a5c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1857.018402] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None 
req-1366226e-68a1-4af4-bafa-76a748070601 tempest-ServersAdminNegativeTestJSON-968304097 tempest-ServersAdminNegativeTestJSON-968304097-project-member] [instance: 32e84715-0345-4171-abb7-c034a501347e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:44:ca:62', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '32028d02-abaa-4071-bc65-1460f5c772a8', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '9f0ded4b-2531-4688-a7fd-e27055112a5c', 'vif_model': 'vmxnet3'}] {{(pid=63279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1857.034412] env[63279]: DEBUG oslo.service.loopingcall [None req-1366226e-68a1-4af4-bafa-76a748070601 tempest-ServersAdminNegativeTestJSON-968304097 tempest-ServersAdminNegativeTestJSON-968304097-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1857.034412] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 32e84715-0345-4171-abb7-c034a501347e] Creating VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1857.034875] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2d2fb93f-9e84-4397-a5b1-f81bfb21173f {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1857.059522] env[63279]: DEBUG nova.scheduler.client.report [None req-21fcbb29-dd97-4731-aa59-0856e81c5314 tempest-ServerExternalEventsTest-2060906853 tempest-ServerExternalEventsTest-2060906853-project-member] Inventory has not changed for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1857.066937] env[63279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1857.066937] env[63279]: value = "task-2086854" [ 1857.066937] env[63279]: _type = "Task" [ 1857.066937] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1857.083040] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2086854, 'name': CreateVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1857.093805] env[63279]: DEBUG oslo_vmware.api [None req-38d597a6-cdfb-4da8-926a-02d3447e2334 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52efb8e6-171d-1530-ff3f-4e293f6d7f0d, 'name': SearchDatastore_Task, 'duration_secs': 0.013665} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1857.094359] env[63279]: DEBUG oslo_concurrency.lockutils [None req-38d597a6-cdfb-4da8-926a-02d3447e2334 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1857.094630] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-38d597a6-cdfb-4da8-926a-02d3447e2334 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] 00fb1bc8-2f70-4fa3-a6a2-54fd38ba89c7/00fb1bc8-2f70-4fa3-a6a2-54fd38ba89c7.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1857.099537] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-afebf19c-0322-4628-8db2-cc154c6e3ba3 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1857.109848] env[63279]: DEBUG oslo_vmware.api [None req-38d597a6-cdfb-4da8-926a-02d3447e2334 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Waiting for the task: (returnval){ [ 1857.109848] env[63279]: value = "task-2086855" [ 1857.109848] env[63279]: _type = "Task" [ 1857.109848] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1857.120343] env[63279]: DEBUG oslo_vmware.api [None req-38d597a6-cdfb-4da8-926a-02d3447e2334 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Task: {'id': task-2086855, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1857.233268] env[63279]: DEBUG nova.network.neutron [req-803d5559-7161-4bb3-bf4d-2b76823d54d7 req-6d932776-6720-4d7b-b7fd-21a74ab40274 service nova] [instance: 1a55008a-4d8c-403d-a8f4-966aa7346f4c] Updated VIF entry in instance network info cache for port 02e5d9ab-c16c-4141-a131-b7e9f2a2a194. 
{{(pid=63279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1857.233268] env[63279]: DEBUG nova.network.neutron [req-803d5559-7161-4bb3-bf4d-2b76823d54d7 req-6d932776-6720-4d7b-b7fd-21a74ab40274 service nova] [instance: 1a55008a-4d8c-403d-a8f4-966aa7346f4c] Updating instance_info_cache with network_info: [{"id": "02e5d9ab-c16c-4141-a131-b7e9f2a2a194", "address": "fa:16:3e:24:da:b3", "network": {"id": "26ed2848-6f14-4264-af0d-f08d62ab4413", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-659194361-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.139", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e931e0ddf1b840fdb8743fbdba05b28d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ccf76700-491b-4462-ab19-e6d3a9ff87ac", "external-id": "nsx-vlan-transportzone-956", "segmentation_id": 956, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap02e5d9ab-c1", "ovs_interfaceid": "02e5d9ab-c16c-4141-a131-b7e9f2a2a194", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1857.301582] env[63279]: DEBUG nova.network.neutron [req-51269bca-7747-4fd2-8aad-c2813ae5114a req-18f0a064-c1b8-429a-ace4-12caa0a0635e service nova] [instance: 00fb1bc8-2f70-4fa3-a6a2-54fd38ba89c7] Updated VIF entry in instance network info cache for port c1420e26-8471-4d0e-868c-9ff9ead67869. 
{{(pid=63279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1857.302044] env[63279]: DEBUG nova.network.neutron [req-51269bca-7747-4fd2-8aad-c2813ae5114a req-18f0a064-c1b8-429a-ace4-12caa0a0635e service nova] [instance: 00fb1bc8-2f70-4fa3-a6a2-54fd38ba89c7] Updating instance_info_cache with network_info: [{"id": "c1420e26-8471-4d0e-868c-9ff9ead67869", "address": "fa:16:3e:64:d0:23", "network": {"id": "4f155366-ade7-4d4b-8fad-a2b55798980f", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-669492310-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "651b8183237a4e6dbef36aa2fb419f1b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "50cf0a70-948d-4611-af05-94c1483064ed", "external-id": "nsx-vlan-transportzone-536", "segmentation_id": 536, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc1420e26-84", "ovs_interfaceid": "c1420e26-8471-4d0e-868c-9ff9ead67869", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1857.460704] env[63279]: DEBUG oslo_vmware.api [None req-7fcc8714-6322-470e-8fb8-cd968acaa101 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Task: {'id': task-2086853, 'name': PowerOnVM_Task} progress is 88%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1857.567119] env[63279]: DEBUG oslo_concurrency.lockutils [None req-21fcbb29-dd97-4731-aa59-0856e81c5314 tempest-ServerExternalEventsTest-2060906853 tempest-ServerExternalEventsTest-2060906853-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.287s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1857.568711] env[63279]: DEBUG oslo_concurrency.lockutils [None req-d9e47f76-071e-40ea-b4aa-ee775cfca50d tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 22.214s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1857.570798] env[63279]: INFO nova.compute.claims [None req-d9e47f76-071e-40ea-b4aa-ee775cfca50d tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] [instance: 5d4be656-defe-4332-b97e-e88b107ca4a1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1857.591117] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2086854, 'name': CreateVM_Task} progress is 99%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1857.622578] env[63279]: INFO nova.scheduler.client.report [None req-21fcbb29-dd97-4731-aa59-0856e81c5314 tempest-ServerExternalEventsTest-2060906853 tempest-ServerExternalEventsTest-2060906853-project-member] Deleted allocations for instance 24bce28c-fc43-4f17-9800-4d980f6729bc [ 1857.633022] env[63279]: DEBUG oslo_vmware.api [None req-38d597a6-cdfb-4da8-926a-02d3447e2334 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Task: {'id': task-2086855, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1857.735870] env[63279]: DEBUG oslo_concurrency.lockutils [req-803d5559-7161-4bb3-bf4d-2b76823d54d7 req-6d932776-6720-4d7b-b7fd-21a74ab40274 service nova] Releasing lock "refresh_cache-1a55008a-4d8c-403d-a8f4-966aa7346f4c" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1857.735870] env[63279]: DEBUG nova.compute.manager [req-803d5559-7161-4bb3-bf4d-2b76823d54d7 req-6d932776-6720-4d7b-b7fd-21a74ab40274 service nova] [instance: 32e84715-0345-4171-abb7-c034a501347e] Received event network-vif-plugged-9f0ded4b-2531-4688-a7fd-e27055112a5c {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 1857.736067] env[63279]: DEBUG oslo_concurrency.lockutils [req-803d5559-7161-4bb3-bf4d-2b76823d54d7 req-6d932776-6720-4d7b-b7fd-21a74ab40274 service nova] Acquiring lock "32e84715-0345-4171-abb7-c034a501347e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1857.736193] env[63279]: DEBUG oslo_concurrency.lockutils [req-803d5559-7161-4bb3-bf4d-2b76823d54d7 req-6d932776-6720-4d7b-b7fd-21a74ab40274 service nova] Lock "32e84715-0345-4171-abb7-c034a501347e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1857.736372] env[63279]: DEBUG oslo_concurrency.lockutils [req-803d5559-7161-4bb3-bf4d-2b76823d54d7 req-6d932776-6720-4d7b-b7fd-21a74ab40274 service nova] Lock "32e84715-0345-4171-abb7-c034a501347e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1857.736518] env[63279]: DEBUG nova.compute.manager [req-803d5559-7161-4bb3-bf4d-2b76823d54d7 req-6d932776-6720-4d7b-b7fd-21a74ab40274 service nova] [instance: 32e84715-0345-4171-abb7-c034a501347e] No waiting events found dispatching network-vif-plugged-9f0ded4b-2531-4688-a7fd-e27055112a5c {{(pid=63279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1857.736683] env[63279]: WARNING nova.compute.manager [req-803d5559-7161-4bb3-bf4d-2b76823d54d7 req-6d932776-6720-4d7b-b7fd-21a74ab40274 service nova] [instance: 32e84715-0345-4171-abb7-c034a501347e] Received unexpected event network-vif-plugged-9f0ded4b-2531-4688-a7fd-e27055112a5c for instance with vm_state building and task_state spawning. 
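The network-changed handling around this point keeps rewriting the serialized network_info blobs shown in the update_instance_cache_with_nw_info entries above; those blobs are plain JSON. As a rough stdlib-only sketch (not Nova code; the helper name and the trimmed sample below are invented for illustration, with field names copied from the log), this is how one such blob can be reduced to the port/MAC/IP view that the cache-refresh entries are maintaining:

import json

# Trimmed sample in the same shape as the network_info entries logged by
# update_instance_cache_with_nw_info (field names taken from the log above;
# the values are illustrative only).
SAMPLE = '''
[{"id": "887ee4a3-0844-4571-84d8-486d7c21a7d2",
  "address": "fa:16:3e:52:14:49",
  "devname": "tap887ee4a3-08",
  "active": true,
  "network": {"label": "tempest-VolumesAssistedSnapshotsTest-769172874-network",
              "subnets": [{"cidr": "192.168.128.0/28",
                           "ips": [{"address": "192.168.128.10",
                                    "type": "fixed",
                                    "floating_ips": [{"address": "10.180.180.241",
                                                      "type": "floating"}]}]}]}}]
'''

def summarize_network_info(raw):
    """Return one summary dict per VIF in a serialized network_info list."""
    vifs = json.loads(raw)
    summary = []
    for vif in vifs:
        fixed, floating = [], []
        for subnet in vif.get("network", {}).get("subnets", []):
            for ip in subnet.get("ips", []):
                # Each fixed IP may carry zero or more associated floating IPs.
                fixed.append(ip["address"])
                floating.extend(f["address"] for f in ip.get("floating_ips", []))
        summary.append({
            "port_id": vif["id"],
            "mac": vif["address"],
            "devname": vif.get("devname"),
            "active": vif.get("active", False),
            "fixed_ips": fixed,
            "floating_ips": floating,
        })
    return summary

if __name__ == "__main__":
    for entry in summarize_network_info(SAMPLE):
        print(entry)

The helper only reads the structure; in the log it is Nova itself that rebuilds and stores this blob after each network-changed event, as the surrounding entries show.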
[ 1857.736878] env[63279]: DEBUG nova.compute.manager [req-803d5559-7161-4bb3-bf4d-2b76823d54d7 req-6d932776-6720-4d7b-b7fd-21a74ab40274 service nova] [instance: 32e84715-0345-4171-abb7-c034a501347e] Received event network-changed-9f0ded4b-2531-4688-a7fd-e27055112a5c {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 1857.737075] env[63279]: DEBUG nova.compute.manager [req-803d5559-7161-4bb3-bf4d-2b76823d54d7 req-6d932776-6720-4d7b-b7fd-21a74ab40274 service nova] [instance: 32e84715-0345-4171-abb7-c034a501347e] Refreshing instance network info cache due to event network-changed-9f0ded4b-2531-4688-a7fd-e27055112a5c. {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11655}} [ 1857.737284] env[63279]: DEBUG oslo_concurrency.lockutils [req-803d5559-7161-4bb3-bf4d-2b76823d54d7 req-6d932776-6720-4d7b-b7fd-21a74ab40274 service nova] Acquiring lock "refresh_cache-32e84715-0345-4171-abb7-c034a501347e" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1857.737427] env[63279]: DEBUG oslo_concurrency.lockutils [req-803d5559-7161-4bb3-bf4d-2b76823d54d7 req-6d932776-6720-4d7b-b7fd-21a74ab40274 service nova] Acquired lock "refresh_cache-32e84715-0345-4171-abb7-c034a501347e" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1857.737598] env[63279]: DEBUG nova.network.neutron [req-803d5559-7161-4bb3-bf4d-2b76823d54d7 req-6d932776-6720-4d7b-b7fd-21a74ab40274 service nova] [instance: 32e84715-0345-4171-abb7-c034a501347e] Refreshing network info cache for port 9f0ded4b-2531-4688-a7fd-e27055112a5c {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1857.795803] env[63279]: DEBUG oslo_vmware.rw_handles [None req-5a57f975-1224-4ba7-b7d5-e0259018db0c tempest-ImagesOneServerTestJSON-859902932 tempest-ImagesOneServerTestJSON-859902932-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52210752-7b70-d9e6-2ba9-cffb41e649d6/disk-0.vmdk. {{(pid=63279) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1857.797073] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-123eacac-419c-425b-9964-b823d7513467 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1857.805566] env[63279]: DEBUG oslo_concurrency.lockutils [req-51269bca-7747-4fd2-8aad-c2813ae5114a req-18f0a064-c1b8-429a-ace4-12caa0a0635e service nova] Releasing lock "refresh_cache-00fb1bc8-2f70-4fa3-a6a2-54fd38ba89c7" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1857.809039] env[63279]: DEBUG nova.compute.manager [req-51269bca-7747-4fd2-8aad-c2813ae5114a req-18f0a064-c1b8-429a-ace4-12caa0a0635e service nova] [instance: 3d4db43f-5784-46e1-9710-f6becec011e2] Received event network-vif-deleted-b2232b49-2a2b-47ea-b0a9-5ec94f197f11 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 1857.809940] env[63279]: DEBUG oslo_vmware.rw_handles [None req-5a57f975-1224-4ba7-b7d5-e0259018db0c tempest-ImagesOneServerTestJSON-859902932 tempest-ImagesOneServerTestJSON-859902932-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52210752-7b70-d9e6-2ba9-cffb41e649d6/disk-0.vmdk is in state: ready. 
{{(pid=63279) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1857.809940] env[63279]: ERROR oslo_vmware.rw_handles [None req-5a57f975-1224-4ba7-b7d5-e0259018db0c tempest-ImagesOneServerTestJSON-859902932 tempest-ImagesOneServerTestJSON-859902932-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52210752-7b70-d9e6-2ba9-cffb41e649d6/disk-0.vmdk due to incomplete transfer. [ 1857.810238] env[63279]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-d0cacdfc-713e-4097-b912-3a2246abfbc6 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1857.839622] env[63279]: DEBUG oslo_vmware.rw_handles [None req-5a57f975-1224-4ba7-b7d5-e0259018db0c tempest-ImagesOneServerTestJSON-859902932 tempest-ImagesOneServerTestJSON-859902932-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52210752-7b70-d9e6-2ba9-cffb41e649d6/disk-0.vmdk. {{(pid=63279) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1857.839859] env[63279]: DEBUG nova.virt.vmwareapi.images [None req-5a57f975-1224-4ba7-b7d5-e0259018db0c tempest-ImagesOneServerTestJSON-859902932 tempest-ImagesOneServerTestJSON-859902932-project-member] [instance: 7cd673bb-3795-4c6f-a1e0-a8ea0da0a35f] Uploaded image 69bd6019-d068-4152-b519-73e464e4d6d1 to the Glance image server {{(pid=63279) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1857.843030] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-5a57f975-1224-4ba7-b7d5-e0259018db0c tempest-ImagesOneServerTestJSON-859902932 tempest-ImagesOneServerTestJSON-859902932-project-member] [instance: 7cd673bb-3795-4c6f-a1e0-a8ea0da0a35f] Destroying the VM {{(pid=63279) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1857.843030] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-6ab01fe2-e374-433c-b725-c5c942deb69c {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1857.852177] env[63279]: DEBUG oslo_vmware.api [None req-5a57f975-1224-4ba7-b7d5-e0259018db0c tempest-ImagesOneServerTestJSON-859902932 tempest-ImagesOneServerTestJSON-859902932-project-member] Waiting for the task: (returnval){ [ 1857.852177] env[63279]: value = "task-2086856" [ 1857.852177] env[63279]: _type = "Task" [ 1857.852177] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1857.862874] env[63279]: DEBUG oslo_vmware.api [None req-5a57f975-1224-4ba7-b7d5-e0259018db0c tempest-ImagesOneServerTestJSON-859902932 tempest-ImagesOneServerTestJSON-859902932-project-member] Task: {'id': task-2086856, 'name': Destroy_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1857.964925] env[63279]: DEBUG oslo_vmware.api [None req-7fcc8714-6322-470e-8fb8-cd968acaa101 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Task: {'id': task-2086853, 'name': PowerOnVM_Task, 'duration_secs': 0.908103} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1857.966020] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-7fcc8714-6322-470e-8fb8-cd968acaa101 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: 41654a82-af5e-463e-80f9-86ba13a5ad2e] Powered on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1857.968911] env[63279]: INFO nova.compute.manager [None req-7fcc8714-6322-470e-8fb8-cd968acaa101 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: 41654a82-af5e-463e-80f9-86ba13a5ad2e] Took 10.66 seconds to spawn the instance on the hypervisor. [ 1857.968911] env[63279]: DEBUG nova.compute.manager [None req-7fcc8714-6322-470e-8fb8-cd968acaa101 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: 41654a82-af5e-463e-80f9-86ba13a5ad2e] Checking state {{(pid=63279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1857.968911] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9cf125f-363a-4c97-b004-6da4ab398554 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1858.102227] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2086854, 'name': CreateVM_Task, 'duration_secs': 0.624597} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1858.102227] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 32e84715-0345-4171-abb7-c034a501347e] Created VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1858.102227] env[63279]: DEBUG oslo_concurrency.lockutils [None req-1366226e-68a1-4af4-bafa-76a748070601 tempest-ServersAdminNegativeTestJSON-968304097 tempest-ServersAdminNegativeTestJSON-968304097-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1858.102227] env[63279]: DEBUG oslo_concurrency.lockutils [None req-1366226e-68a1-4af4-bafa-76a748070601 tempest-ServersAdminNegativeTestJSON-968304097 tempest-ServersAdminNegativeTestJSON-968304097-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1858.102227] env[63279]: DEBUG oslo_concurrency.lockutils [None req-1366226e-68a1-4af4-bafa-76a748070601 tempest-ServersAdminNegativeTestJSON-968304097 tempest-ServersAdminNegativeTestJSON-968304097-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1858.103430] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-87a8667c-06bd-4431-833e-d9c1a10927c4 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1858.111137] env[63279]: DEBUG oslo_vmware.api [None req-1366226e-68a1-4af4-bafa-76a748070601 tempest-ServersAdminNegativeTestJSON-968304097 
tempest-ServersAdminNegativeTestJSON-968304097-project-member] Waiting for the task: (returnval){ [ 1858.111137] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52ee45c5-2e67-e106-77fe-9d1d52052623" [ 1858.111137] env[63279]: _type = "Task" [ 1858.111137] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1858.129526] env[63279]: DEBUG oslo_vmware.api [None req-1366226e-68a1-4af4-bafa-76a748070601 tempest-ServersAdminNegativeTestJSON-968304097 tempest-ServersAdminNegativeTestJSON-968304097-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52ee45c5-2e67-e106-77fe-9d1d52052623, 'name': SearchDatastore_Task, 'duration_secs': 0.012331} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1858.130152] env[63279]: DEBUG oslo_vmware.api [None req-38d597a6-cdfb-4da8-926a-02d3447e2334 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Task: {'id': task-2086855, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.598755} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1858.130152] env[63279]: DEBUG oslo_concurrency.lockutils [None req-1366226e-68a1-4af4-bafa-76a748070601 tempest-ServersAdminNegativeTestJSON-968304097 tempest-ServersAdminNegativeTestJSON-968304097-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1858.130490] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-1366226e-68a1-4af4-bafa-76a748070601 tempest-ServersAdminNegativeTestJSON-968304097 tempest-ServersAdminNegativeTestJSON-968304097-project-member] [instance: 32e84715-0345-4171-abb7-c034a501347e] Processing image 30887889-e45b-4f67-8b3c-16216e594a90 {{(pid=63279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1858.130646] env[63279]: DEBUG oslo_concurrency.lockutils [None req-1366226e-68a1-4af4-bafa-76a748070601 tempest-ServersAdminNegativeTestJSON-968304097 tempest-ServersAdminNegativeTestJSON-968304097-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1858.130777] env[63279]: DEBUG oslo_concurrency.lockutils [None req-1366226e-68a1-4af4-bafa-76a748070601 tempest-ServersAdminNegativeTestJSON-968304097 tempest-ServersAdminNegativeTestJSON-968304097-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1858.134022] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-1366226e-68a1-4af4-bafa-76a748070601 tempest-ServersAdminNegativeTestJSON-968304097 tempest-ServersAdminNegativeTestJSON-968304097-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1858.134022] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-38d597a6-cdfb-4da8-926a-02d3447e2334 
tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] 00fb1bc8-2f70-4fa3-a6a2-54fd38ba89c7/00fb1bc8-2f70-4fa3-a6a2-54fd38ba89c7.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1858.134022] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-38d597a6-cdfb-4da8-926a-02d3447e2334 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] [instance: 00fb1bc8-2f70-4fa3-a6a2-54fd38ba89c7] Extending root virtual disk to 1048576 {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1858.134289] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0655ab24-9fac-49e7-b994-489be7269450 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1858.137471] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-65e6c0b3-ad14-4c91-a52b-495dd928a53e {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1858.140413] env[63279]: DEBUG oslo_concurrency.lockutils [None req-21fcbb29-dd97-4731-aa59-0856e81c5314 tempest-ServerExternalEventsTest-2060906853 tempest-ServerExternalEventsTest-2060906853-project-member] Lock "24bce28c-fc43-4f17-9800-4d980f6729bc" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 27.790s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1858.150961] env[63279]: DEBUG oslo_vmware.api [None req-38d597a6-cdfb-4da8-926a-02d3447e2334 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Waiting for the task: (returnval){ [ 1858.150961] env[63279]: value = "task-2086857" [ 1858.150961] env[63279]: _type = "Task" [ 1858.150961] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1858.156181] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-1366226e-68a1-4af4-bafa-76a748070601 tempest-ServersAdminNegativeTestJSON-968304097 tempest-ServersAdminNegativeTestJSON-968304097-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1858.159019] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-1366226e-68a1-4af4-bafa-76a748070601 tempest-ServersAdminNegativeTestJSON-968304097 tempest-ServersAdminNegativeTestJSON-968304097-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1858.160548] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1bd7425d-706d-4e81-bb78-c0b2f9fdb65e {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1858.171300] env[63279]: DEBUG oslo_vmware.api [None req-1366226e-68a1-4af4-bafa-76a748070601 tempest-ServersAdminNegativeTestJSON-968304097 tempest-ServersAdminNegativeTestJSON-968304097-project-member] Waiting for the task: (returnval){ [ 1858.171300] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52781193-016d-6c89-eda6-7b6738938275" [ 1858.171300] env[63279]: _type = "Task" [ 1858.171300] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1858.171546] env[63279]: DEBUG oslo_vmware.api [None req-38d597a6-cdfb-4da8-926a-02d3447e2334 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Task: {'id': task-2086857, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1858.186366] env[63279]: DEBUG oslo_vmware.api [None req-1366226e-68a1-4af4-bafa-76a748070601 tempest-ServersAdminNegativeTestJSON-968304097 tempest-ServersAdminNegativeTestJSON-968304097-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52781193-016d-6c89-eda6-7b6738938275, 'name': SearchDatastore_Task, 'duration_secs': 0.011516} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1858.187246] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-20cc83ca-ce28-4d1a-9f86-af6ecea577b7 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1858.196062] env[63279]: DEBUG oslo_vmware.api [None req-1366226e-68a1-4af4-bafa-76a748070601 tempest-ServersAdminNegativeTestJSON-968304097 tempest-ServersAdminNegativeTestJSON-968304097-project-member] Waiting for the task: (returnval){ [ 1858.196062] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]5224bf8a-3d4b-976d-7162-b0b45eebebdd" [ 1858.196062] env[63279]: _type = "Task" [ 1858.196062] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1858.209593] env[63279]: DEBUG oslo_vmware.api [None req-1366226e-68a1-4af4-bafa-76a748070601 tempest-ServersAdminNegativeTestJSON-968304097 tempest-ServersAdminNegativeTestJSON-968304097-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]5224bf8a-3d4b-976d-7162-b0b45eebebdd, 'name': SearchDatastore_Task, 'duration_secs': 0.010588} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1858.209897] env[63279]: DEBUG oslo_concurrency.lockutils [None req-1366226e-68a1-4af4-bafa-76a748070601 tempest-ServersAdminNegativeTestJSON-968304097 tempest-ServersAdminNegativeTestJSON-968304097-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1858.210170] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-1366226e-68a1-4af4-bafa-76a748070601 tempest-ServersAdminNegativeTestJSON-968304097 tempest-ServersAdminNegativeTestJSON-968304097-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] 32e84715-0345-4171-abb7-c034a501347e/32e84715-0345-4171-abb7-c034a501347e.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1858.210448] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a2c03c71-c57f-452c-99a9-012173ac31a8 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1858.220300] env[63279]: DEBUG oslo_vmware.api [None req-1366226e-68a1-4af4-bafa-76a748070601 tempest-ServersAdminNegativeTestJSON-968304097 tempest-ServersAdminNegativeTestJSON-968304097-project-member] Waiting for the task: (returnval){ [ 1858.220300] env[63279]: value = "task-2086858" [ 1858.220300] env[63279]: _type = "Task" [ 1858.220300] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1858.232724] env[63279]: DEBUG oslo_vmware.api [None req-1366226e-68a1-4af4-bafa-76a748070601 tempest-ServersAdminNegativeTestJSON-968304097 tempest-ServersAdminNegativeTestJSON-968304097-project-member] Task: {'id': task-2086858, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1858.370102] env[63279]: DEBUG oslo_vmware.api [None req-5a57f975-1224-4ba7-b7d5-e0259018db0c tempest-ImagesOneServerTestJSON-859902932 tempest-ImagesOneServerTestJSON-859902932-project-member] Task: {'id': task-2086856, 'name': Destroy_Task} progress is 33%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1858.498799] env[63279]: INFO nova.compute.manager [None req-7fcc8714-6322-470e-8fb8-cd968acaa101 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: 41654a82-af5e-463e-80f9-86ba13a5ad2e] Took 27.55 seconds to build instance. [ 1858.593636] env[63279]: DEBUG nova.network.neutron [req-803d5559-7161-4bb3-bf4d-2b76823d54d7 req-6d932776-6720-4d7b-b7fd-21a74ab40274 service nova] [instance: 32e84715-0345-4171-abb7-c034a501347e] Updated VIF entry in instance network info cache for port 9f0ded4b-2531-4688-a7fd-e27055112a5c. 
{{(pid=63279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1858.594065] env[63279]: DEBUG nova.network.neutron [req-803d5559-7161-4bb3-bf4d-2b76823d54d7 req-6d932776-6720-4d7b-b7fd-21a74ab40274 service nova] [instance: 32e84715-0345-4171-abb7-c034a501347e] Updating instance_info_cache with network_info: [{"id": "9f0ded4b-2531-4688-a7fd-e27055112a5c", "address": "fa:16:3e:44:ca:62", "network": {"id": "1e875730-8e0e-4907-a6d2-776025ed7ab9", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.70", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "a935e86eee0a4c38adfe0367d2097a61", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "32028d02-abaa-4071-bc65-1460f5c772a8", "external-id": "nsx-vlan-transportzone-558", "segmentation_id": 558, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9f0ded4b-25", "ovs_interfaceid": "9f0ded4b-2531-4688-a7fd-e27055112a5c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1858.673451] env[63279]: DEBUG oslo_vmware.api [None req-38d597a6-cdfb-4da8-926a-02d3447e2334 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Task: {'id': task-2086857, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.076525} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1858.673848] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-38d597a6-cdfb-4da8-926a-02d3447e2334 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] [instance: 00fb1bc8-2f70-4fa3-a6a2-54fd38ba89c7] Extended root virtual disk {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1858.674647] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca7ddfbe-6516-4517-97d0-50870294d274 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1858.703224] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-38d597a6-cdfb-4da8-926a-02d3447e2334 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] [instance: 00fb1bc8-2f70-4fa3-a6a2-54fd38ba89c7] Reconfiguring VM instance instance-00000013 to attach disk [datastore1] 00fb1bc8-2f70-4fa3-a6a2-54fd38ba89c7/00fb1bc8-2f70-4fa3-a6a2-54fd38ba89c7.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1858.706641] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-27d5a85b-0503-4c47-9db8-f6f9822f8e37 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1858.734385] env[63279]: DEBUG oslo_vmware.api [None req-1366226e-68a1-4af4-bafa-76a748070601 tempest-ServersAdminNegativeTestJSON-968304097 tempest-ServersAdminNegativeTestJSON-968304097-project-member] Task: {'id': task-2086858, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1858.738023] env[63279]: DEBUG oslo_vmware.api [None req-38d597a6-cdfb-4da8-926a-02d3447e2334 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Waiting for the task: (returnval){ [ 1858.738023] env[63279]: value = "task-2086859" [ 1858.738023] env[63279]: _type = "Task" [ 1858.738023] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1858.747692] env[63279]: DEBUG oslo_vmware.api [None req-38d597a6-cdfb-4da8-926a-02d3447e2334 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Task: {'id': task-2086859, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1858.870840] env[63279]: DEBUG oslo_vmware.api [None req-5a57f975-1224-4ba7-b7d5-e0259018db0c tempest-ImagesOneServerTestJSON-859902932 tempest-ImagesOneServerTestJSON-859902932-project-member] Task: {'id': task-2086856, 'name': Destroy_Task, 'duration_secs': 0.842387} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1858.873683] env[63279]: INFO nova.virt.vmwareapi.vm_util [None req-5a57f975-1224-4ba7-b7d5-e0259018db0c tempest-ImagesOneServerTestJSON-859902932 tempest-ImagesOneServerTestJSON-859902932-project-member] [instance: 7cd673bb-3795-4c6f-a1e0-a8ea0da0a35f] Destroyed the VM [ 1858.876083] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-5a57f975-1224-4ba7-b7d5-e0259018db0c tempest-ImagesOneServerTestJSON-859902932 tempest-ImagesOneServerTestJSON-859902932-project-member] [instance: 7cd673bb-3795-4c6f-a1e0-a8ea0da0a35f] Deleting Snapshot of the VM instance {{(pid=63279) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1858.879191] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-21e24231-2a64-4849-935e-f68683c9b42d {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1858.888952] env[63279]: DEBUG oslo_vmware.api [None req-5a57f975-1224-4ba7-b7d5-e0259018db0c tempest-ImagesOneServerTestJSON-859902932 tempest-ImagesOneServerTestJSON-859902932-project-member] Waiting for the task: (returnval){ [ 1858.888952] env[63279]: value = "task-2086860" [ 1858.888952] env[63279]: _type = "Task" [ 1858.888952] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1858.915237] env[63279]: DEBUG oslo_vmware.api [None req-5a57f975-1224-4ba7-b7d5-e0259018db0c tempest-ImagesOneServerTestJSON-859902932 tempest-ImagesOneServerTestJSON-859902932-project-member] Task: {'id': task-2086860, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1858.999153] env[63279]: DEBUG oslo_concurrency.lockutils [None req-7fcc8714-6322-470e-8fb8-cd968acaa101 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Lock "41654a82-af5e-463e-80f9-86ba13a5ad2e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 42.126s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1859.096768] env[63279]: DEBUG oslo_concurrency.lockutils [req-803d5559-7161-4bb3-bf4d-2b76823d54d7 req-6d932776-6720-4d7b-b7fd-21a74ab40274 service nova] Releasing lock "refresh_cache-32e84715-0345-4171-abb7-c034a501347e" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1859.243220] env[63279]: DEBUG oslo_vmware.api [None req-1366226e-68a1-4af4-bafa-76a748070601 tempest-ServersAdminNegativeTestJSON-968304097 tempest-ServersAdminNegativeTestJSON-968304097-project-member] Task: {'id': task-2086858, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.696552} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1859.249398] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-1366226e-68a1-4af4-bafa-76a748070601 tempest-ServersAdminNegativeTestJSON-968304097 tempest-ServersAdminNegativeTestJSON-968304097-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] 32e84715-0345-4171-abb7-c034a501347e/32e84715-0345-4171-abb7-c034a501347e.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1859.250113] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-1366226e-68a1-4af4-bafa-76a748070601 tempest-ServersAdminNegativeTestJSON-968304097 tempest-ServersAdminNegativeTestJSON-968304097-project-member] [instance: 32e84715-0345-4171-abb7-c034a501347e] Extending root virtual disk to 1048576 {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1859.253060] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-4f9a0c52-85c3-4132-bc1e-bdbb5d6add60 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1859.261948] env[63279]: DEBUG oslo_vmware.api [None req-38d597a6-cdfb-4da8-926a-02d3447e2334 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Task: {'id': task-2086859, 'name': ReconfigVM_Task, 'duration_secs': 0.444301} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1859.264889] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-38d597a6-cdfb-4da8-926a-02d3447e2334 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] [instance: 00fb1bc8-2f70-4fa3-a6a2-54fd38ba89c7] Reconfigured VM instance instance-00000013 to attach disk [datastore1] 00fb1bc8-2f70-4fa3-a6a2-54fd38ba89c7/00fb1bc8-2f70-4fa3-a6a2-54fd38ba89c7.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1859.265214] env[63279]: DEBUG oslo_vmware.api [None req-1366226e-68a1-4af4-bafa-76a748070601 tempest-ServersAdminNegativeTestJSON-968304097 tempest-ServersAdminNegativeTestJSON-968304097-project-member] Waiting for the task: (returnval){ [ 1859.265214] env[63279]: value = "task-2086861" [ 1859.265214] env[63279]: _type = "Task" [ 1859.265214] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1859.265417] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f1779f91-cd9c-4488-80eb-081bb4bd7e68 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1859.283429] env[63279]: DEBUG oslo_vmware.api [None req-1366226e-68a1-4af4-bafa-76a748070601 tempest-ServersAdminNegativeTestJSON-968304097 tempest-ServersAdminNegativeTestJSON-968304097-project-member] Task: {'id': task-2086861, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1859.291965] env[63279]: DEBUG oslo_vmware.api [None req-38d597a6-cdfb-4da8-926a-02d3447e2334 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Waiting for the task: (returnval){ [ 1859.291965] env[63279]: value = "task-2086862" [ 1859.291965] env[63279]: _type = "Task" [ 1859.291965] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1859.292775] env[63279]: DEBUG oslo_concurrency.lockutils [None req-a527ddef-d877-4b24-a0b1-c60958f9b4c7 tempest-ServerAddressesNegativeTestJSON-377386487 tempest-ServerAddressesNegativeTestJSON-377386487-project-member] Acquiring lock "c287072d-0ce9-4075-8895-0f64326ac303" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1859.292775] env[63279]: DEBUG oslo_concurrency.lockutils [None req-a527ddef-d877-4b24-a0b1-c60958f9b4c7 tempest-ServerAddressesNegativeTestJSON-377386487 tempest-ServerAddressesNegativeTestJSON-377386487-project-member] Lock "c287072d-0ce9-4075-8895-0f64326ac303" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1859.302926] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9338367-4ea1-4d7c-bbc9-2971d71ed2b5 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1859.311910] env[63279]: DEBUG oslo_vmware.api [None req-38d597a6-cdfb-4da8-926a-02d3447e2334 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Task: {'id': task-2086862, 'name': Rename_Task} progress is 14%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1859.315727] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-454ff246-902c-4db3-b40e-3f30a81852ab {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1859.360205] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4763fa7a-de11-4dc9-91aa-43428ff8ba45 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1859.369690] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4edc1934-58b9-405d-8248-107e74abc174 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1859.390477] env[63279]: DEBUG nova.compute.provider_tree [None req-d9e47f76-071e-40ea-b4aa-ee775cfca50d tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Inventory has not changed in ProviderTree for provider: 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1859.403467] env[63279]: DEBUG oslo_vmware.api [None req-5a57f975-1224-4ba7-b7d5-e0259018db0c tempest-ImagesOneServerTestJSON-859902932 tempest-ImagesOneServerTestJSON-859902932-project-member] Task: {'id': task-2086860, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1859.502612] env[63279]: DEBUG nova.compute.manager [None req-517f7fbc-4234-4bef-826e-a666eac17f9f tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] [instance: f20e8eb2-847b-4988-abf9-ed5f9f65c25c] Starting instance... {{(pid=63279) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1859.778310] env[63279]: DEBUG oslo_vmware.api [None req-1366226e-68a1-4af4-bafa-76a748070601 tempest-ServersAdminNegativeTestJSON-968304097 tempest-ServersAdminNegativeTestJSON-968304097-project-member] Task: {'id': task-2086861, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.121168} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1859.778310] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-1366226e-68a1-4af4-bafa-76a748070601 tempest-ServersAdminNegativeTestJSON-968304097 tempest-ServersAdminNegativeTestJSON-968304097-project-member] [instance: 32e84715-0345-4171-abb7-c034a501347e] Extended root virtual disk {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1859.778815] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-535c6154-856e-4200-b55e-a454199a243b {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1859.804307] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-1366226e-68a1-4af4-bafa-76a748070601 tempest-ServersAdminNegativeTestJSON-968304097 tempest-ServersAdminNegativeTestJSON-968304097-project-member] [instance: 32e84715-0345-4171-abb7-c034a501347e] Reconfiguring VM instance instance-00000014 to attach disk [datastore1] 32e84715-0345-4171-abb7-c034a501347e/32e84715-0345-4171-abb7-c034a501347e.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1859.811110] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3303e25e-6f72-403b-a457-04b3b915e724 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1859.838929] env[63279]: DEBUG oslo_vmware.api [None req-38d597a6-cdfb-4da8-926a-02d3447e2334 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Task: {'id': task-2086862, 'name': Rename_Task, 'duration_secs': 0.173724} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1859.841317] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-38d597a6-cdfb-4da8-926a-02d3447e2334 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] [instance: 00fb1bc8-2f70-4fa3-a6a2-54fd38ba89c7] Powering on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1859.841317] env[63279]: DEBUG oslo_vmware.api [None req-1366226e-68a1-4af4-bafa-76a748070601 tempest-ServersAdminNegativeTestJSON-968304097 tempest-ServersAdminNegativeTestJSON-968304097-project-member] Waiting for the task: (returnval){ [ 1859.841317] env[63279]: value = "task-2086863" [ 1859.841317] env[63279]: _type = "Task" [ 1859.841317] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1859.841317] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-549b965b-be72-49b5-a40f-18b1dc20295f {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1859.853957] env[63279]: DEBUG oslo_vmware.api [None req-1366226e-68a1-4af4-bafa-76a748070601 tempest-ServersAdminNegativeTestJSON-968304097 tempest-ServersAdminNegativeTestJSON-968304097-project-member] Task: {'id': task-2086863, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1859.854932] env[63279]: DEBUG oslo_vmware.api [None req-38d597a6-cdfb-4da8-926a-02d3447e2334 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Waiting for the task: (returnval){ [ 1859.854932] env[63279]: value = "task-2086864" [ 1859.854932] env[63279]: _type = "Task" [ 1859.854932] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1859.894274] env[63279]: DEBUG nova.scheduler.client.report [None req-d9e47f76-071e-40ea-b4aa-ee775cfca50d tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Inventory has not changed for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1859.910409] env[63279]: DEBUG oslo_vmware.api [None req-5a57f975-1224-4ba7-b7d5-e0259018db0c tempest-ImagesOneServerTestJSON-859902932 tempest-ImagesOneServerTestJSON-859902932-project-member] Task: {'id': task-2086860, 'name': RemoveSnapshot_Task} progress is 80%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1860.031955] env[63279]: DEBUG oslo_concurrency.lockutils [None req-517f7fbc-4234-4bef-826e-a666eac17f9f tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1860.365815] env[63279]: DEBUG oslo_vmware.api [None req-1366226e-68a1-4af4-bafa-76a748070601 tempest-ServersAdminNegativeTestJSON-968304097 tempest-ServersAdminNegativeTestJSON-968304097-project-member] Task: {'id': task-2086863, 'name': ReconfigVM_Task, 'duration_secs': 0.41753} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1860.366658] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-1366226e-68a1-4af4-bafa-76a748070601 tempest-ServersAdminNegativeTestJSON-968304097 tempest-ServersAdminNegativeTestJSON-968304097-project-member] [instance: 32e84715-0345-4171-abb7-c034a501347e] Reconfigured VM instance instance-00000014 to attach disk [datastore1] 32e84715-0345-4171-abb7-c034a501347e/32e84715-0345-4171-abb7-c034a501347e.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1860.367217] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b283256b-fe33-48ef-a91c-6bf4551f9236 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1860.381139] env[63279]: DEBUG oslo_vmware.api [None req-38d597a6-cdfb-4da8-926a-02d3447e2334 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Task: {'id': task-2086864, 'name': PowerOnVM_Task} progress is 94%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1860.393749] env[63279]: DEBUG oslo_vmware.api [None req-1366226e-68a1-4af4-bafa-76a748070601 tempest-ServersAdminNegativeTestJSON-968304097 tempest-ServersAdminNegativeTestJSON-968304097-project-member] Waiting for the task: (returnval){ [ 1860.393749] env[63279]: value = "task-2086865" [ 1860.393749] env[63279]: _type = "Task" [ 1860.393749] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1860.408221] env[63279]: DEBUG oslo_concurrency.lockutils [None req-d9e47f76-071e-40ea-b4aa-ee775cfca50d tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.838s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1860.408859] env[63279]: DEBUG nova.compute.manager [None req-d9e47f76-071e-40ea-b4aa-ee775cfca50d tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] [instance: 5d4be656-defe-4332-b97e-e88b107ca4a1] Start building networks asynchronously for instance. {{(pid=63279) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1860.417146] env[63279]: DEBUG oslo_vmware.api [None req-1366226e-68a1-4af4-bafa-76a748070601 tempest-ServersAdminNegativeTestJSON-968304097 tempest-ServersAdminNegativeTestJSON-968304097-project-member] Task: {'id': task-2086865, 'name': Rename_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1860.417146] env[63279]: DEBUG oslo_concurrency.lockutils [None req-8d7e8546-3d55-4a5a-887c-fa75360a7a03 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 24.026s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1860.423872] env[63279]: DEBUG oslo_vmware.api [None req-5a57f975-1224-4ba7-b7d5-e0259018db0c tempest-ImagesOneServerTestJSON-859902932 tempest-ImagesOneServerTestJSON-859902932-project-member] Task: {'id': task-2086860, 'name': RemoveSnapshot_Task} progress is 100%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1860.869188] env[63279]: DEBUG oslo_vmware.api [None req-38d597a6-cdfb-4da8-926a-02d3447e2334 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Task: {'id': task-2086864, 'name': PowerOnVM_Task, 'duration_secs': 0.573443} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1860.869455] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-38d597a6-cdfb-4da8-926a-02d3447e2334 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] [instance: 00fb1bc8-2f70-4fa3-a6a2-54fd38ba89c7] Powered on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1860.869486] env[63279]: INFO nova.compute.manager [None req-38d597a6-cdfb-4da8-926a-02d3447e2334 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] [instance: 00fb1bc8-2f70-4fa3-a6a2-54fd38ba89c7] Took 10.75 seconds to spawn the instance on the hypervisor. [ 1860.869823] env[63279]: DEBUG nova.compute.manager [None req-38d597a6-cdfb-4da8-926a-02d3447e2334 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] [instance: 00fb1bc8-2f70-4fa3-a6a2-54fd38ba89c7] Checking state {{(pid=63279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1860.870852] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d17753f-a502-44ff-b046-1e730f941ed3 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1860.905775] env[63279]: DEBUG oslo_vmware.api [None req-1366226e-68a1-4af4-bafa-76a748070601 tempest-ServersAdminNegativeTestJSON-968304097 tempest-ServersAdminNegativeTestJSON-968304097-project-member] Task: {'id': task-2086865, 'name': Rename_Task, 'duration_secs': 0.211134} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1860.906493] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-1366226e-68a1-4af4-bafa-76a748070601 tempest-ServersAdminNegativeTestJSON-968304097 tempest-ServersAdminNegativeTestJSON-968304097-project-member] [instance: 32e84715-0345-4171-abb7-c034a501347e] Powering on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1860.906775] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2da373c8-caac-42a5-85af-d98ffe4b6aa3 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1860.911964] env[63279]: DEBUG oslo_vmware.api [None req-5a57f975-1224-4ba7-b7d5-e0259018db0c tempest-ImagesOneServerTestJSON-859902932 tempest-ImagesOneServerTestJSON-859902932-project-member] Task: {'id': task-2086860, 'name': RemoveSnapshot_Task, 'duration_secs': 1.540188} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1860.913703] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-5a57f975-1224-4ba7-b7d5-e0259018db0c tempest-ImagesOneServerTestJSON-859902932 tempest-ImagesOneServerTestJSON-859902932-project-member] [instance: 7cd673bb-3795-4c6f-a1e0-a8ea0da0a35f] Deleted Snapshot of the VM instance {{(pid=63279) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1860.914130] env[63279]: INFO nova.compute.manager [None req-5a57f975-1224-4ba7-b7d5-e0259018db0c tempest-ImagesOneServerTestJSON-859902932 tempest-ImagesOneServerTestJSON-859902932-project-member] [instance: 7cd673bb-3795-4c6f-a1e0-a8ea0da0a35f] Took 20.08 seconds to snapshot the instance on the hypervisor. [ 1860.919879] env[63279]: DEBUG nova.compute.utils [None req-d9e47f76-071e-40ea-b4aa-ee775cfca50d tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Using /dev/sd instead of None {{(pid=63279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1860.921239] env[63279]: INFO nova.compute.claims [None req-8d7e8546-3d55-4a5a-887c-fa75360a7a03 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] [instance: 5656c853-ac83-47be-83c4-979a9e87ab91] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1860.926813] env[63279]: DEBUG nova.compute.manager [None req-d9e47f76-071e-40ea-b4aa-ee775cfca50d tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] [instance: 5d4be656-defe-4332-b97e-e88b107ca4a1] Allocating IP information in the background. 
{{(pid=63279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1860.926997] env[63279]: DEBUG nova.network.neutron [None req-d9e47f76-071e-40ea-b4aa-ee775cfca50d tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] [instance: 5d4be656-defe-4332-b97e-e88b107ca4a1] allocate_for_instance() {{(pid=63279) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1860.929048] env[63279]: DEBUG oslo_vmware.api [None req-1366226e-68a1-4af4-bafa-76a748070601 tempest-ServersAdminNegativeTestJSON-968304097 tempest-ServersAdminNegativeTestJSON-968304097-project-member] Waiting for the task: (returnval){ [ 1860.929048] env[63279]: value = "task-2086866" [ 1860.929048] env[63279]: _type = "Task" [ 1860.929048] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1860.941098] env[63279]: DEBUG oslo_vmware.api [None req-1366226e-68a1-4af4-bafa-76a748070601 tempest-ServersAdminNegativeTestJSON-968304097 tempest-ServersAdminNegativeTestJSON-968304097-project-member] Task: {'id': task-2086866, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1861.058363] env[63279]: DEBUG nova.policy [None req-d9e47f76-071e-40ea-b4aa-ee775cfca50d tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '92ab38bbf74d4cccb2637786a651ed5d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3ea345c4bc9b4781acb6f774ac88c690', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63279) authorize /opt/stack/nova/nova/policy.py:192}} [ 1861.087912] env[63279]: DEBUG oslo_concurrency.lockutils [None req-f3600cbc-9fef-45fa-b2de-6e5d0ca54cdb tempest-ServerRescueTestJSONUnderV235-1989684258 tempest-ServerRescueTestJSONUnderV235-1989684258-project-member] Acquiring lock "044335c7-ce3b-4b4a-b1dc-8b9acec538b4" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1861.088787] env[63279]: DEBUG oslo_concurrency.lockutils [None req-f3600cbc-9fef-45fa-b2de-6e5d0ca54cdb tempest-ServerRescueTestJSONUnderV235-1989684258 tempest-ServerRescueTestJSONUnderV235-1989684258-project-member] Lock "044335c7-ce3b-4b4a-b1dc-8b9acec538b4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1861.187054] env[63279]: INFO nova.compute.manager [None req-280cd432-3b9b-4c5d-8833-1157015e8549 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: 41654a82-af5e-463e-80f9-86ba13a5ad2e] Rebuilding instance [ 1861.276147] env[63279]: DEBUG nova.compute.manager [None req-280cd432-3b9b-4c5d-8833-1157015e8549 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: 41654a82-af5e-463e-80f9-86ba13a5ad2e] 
Checking state {{(pid=63279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1861.276664] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb7d0ce1-2224-413e-a95d-9fa60cfec4ae {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1861.399521] env[63279]: INFO nova.compute.manager [None req-38d597a6-cdfb-4da8-926a-02d3447e2334 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] [instance: 00fb1bc8-2f70-4fa3-a6a2-54fd38ba89c7] Took 29.23 seconds to build instance. [ 1861.427256] env[63279]: DEBUG nova.compute.manager [None req-d9e47f76-071e-40ea-b4aa-ee775cfca50d tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] [instance: 5d4be656-defe-4332-b97e-e88b107ca4a1] Start building block device mappings for instance. {{(pid=63279) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1861.435702] env[63279]: INFO nova.compute.resource_tracker [None req-8d7e8546-3d55-4a5a-887c-fa75360a7a03 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] [instance: 5656c853-ac83-47be-83c4-979a9e87ab91] Updating resource usage from migration 9d057922-d0fc-4486-a0ee-a4b999090b3b [ 1861.462574] env[63279]: DEBUG oslo_vmware.api [None req-1366226e-68a1-4af4-bafa-76a748070601 tempest-ServersAdminNegativeTestJSON-968304097 tempest-ServersAdminNegativeTestJSON-968304097-project-member] Task: {'id': task-2086866, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1861.684505] env[63279]: DEBUG nova.network.neutron [None req-d9e47f76-071e-40ea-b4aa-ee775cfca50d tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] [instance: 5d4be656-defe-4332-b97e-e88b107ca4a1] Successfully created port: ec7f4196-e7c7-425d-aa69-d8a766edef7f {{(pid=63279) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1861.907748] env[63279]: DEBUG oslo_concurrency.lockutils [None req-38d597a6-cdfb-4da8-926a-02d3447e2334 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Lock "00fb1bc8-2f70-4fa3-a6a2-54fd38ba89c7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 41.149s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1861.957105] env[63279]: DEBUG oslo_vmware.api [None req-1366226e-68a1-4af4-bafa-76a748070601 tempest-ServersAdminNegativeTestJSON-968304097 tempest-ServersAdminNegativeTestJSON-968304097-project-member] Task: {'id': task-2086866, 'name': PowerOnVM_Task, 'duration_secs': 1.021148} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1861.957105] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-1366226e-68a1-4af4-bafa-76a748070601 tempest-ServersAdminNegativeTestJSON-968304097 tempest-ServersAdminNegativeTestJSON-968304097-project-member] [instance: 32e84715-0345-4171-abb7-c034a501347e] Powered on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1861.957937] env[63279]: INFO nova.compute.manager [None req-1366226e-68a1-4af4-bafa-76a748070601 tempest-ServersAdminNegativeTestJSON-968304097 tempest-ServersAdminNegativeTestJSON-968304097-project-member] [instance: 32e84715-0345-4171-abb7-c034a501347e] Took 9.09 seconds to spawn the instance on the hypervisor. [ 1861.957937] env[63279]: DEBUG nova.compute.manager [None req-1366226e-68a1-4af4-bafa-76a748070601 tempest-ServersAdminNegativeTestJSON-968304097 tempest-ServersAdminNegativeTestJSON-968304097-project-member] [instance: 32e84715-0345-4171-abb7-c034a501347e] Checking state {{(pid=63279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1861.958194] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b4a6a1f-7c71-4952-9d9d-03bd5d3aa5bf {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1862.084871] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68394fd3-6879-4268-9ca4-756cb139ebd9 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1862.096072] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29bfa75a-647c-4822-980c-c210cafb51d6 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1862.138768] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf157a40-5c18-4525-953e-0524e9503e00 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1862.147443] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-407cacf4-4cb1-48e9-ae00-cc58f22ae5ea {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1862.165814] env[63279]: DEBUG nova.compute.provider_tree [None req-8d7e8546-3d55-4a5a-887c-fa75360a7a03 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Inventory has not changed in ProviderTree for provider: 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1862.294045] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-280cd432-3b9b-4c5d-8833-1157015e8549 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: 41654a82-af5e-463e-80f9-86ba13a5ad2e] Powering off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1862.294357] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-898330e1-4abb-4e91-8edf-09c131ab7680 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1862.305585] env[63279]: DEBUG 
oslo_vmware.api [None req-280cd432-3b9b-4c5d-8833-1157015e8549 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Waiting for the task: (returnval){ [ 1862.305585] env[63279]: value = "task-2086867" [ 1862.305585] env[63279]: _type = "Task" [ 1862.305585] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1862.322893] env[63279]: DEBUG oslo_vmware.api [None req-280cd432-3b9b-4c5d-8833-1157015e8549 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Task: {'id': task-2086867, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1862.413930] env[63279]: DEBUG nova.compute.manager [None req-4432b190-b723-4c54-ac54-2b64412dd5dc tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] [instance: 5572bb1c-b378-4531-8459-10c2a2b7afdf] Starting instance... {{(pid=63279) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1862.439437] env[63279]: DEBUG nova.compute.manager [None req-d9e47f76-071e-40ea-b4aa-ee775cfca50d tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] [instance: 5d4be656-defe-4332-b97e-e88b107ca4a1] Start spawning the instance on the hypervisor. {{(pid=63279) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1862.475013] env[63279]: DEBUG nova.virt.hardware [None req-d9e47f76-071e-40ea-b4aa-ee775cfca50d tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-13T17:30:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-13T17:30:01Z,direct_url=,disk_format='vmdk',id=30887889-e45b-4f67-8b3c-16216e594a90,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a935e86eee0a4c38adfe0367d2097a61',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-13T17:30:02Z,virtual_size=,visibility=), allow threads: False {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1862.475800] env[63279]: DEBUG nova.virt.hardware [None req-d9e47f76-071e-40ea-b4aa-ee775cfca50d tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Flavor limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1862.475800] env[63279]: DEBUG nova.virt.hardware [None req-d9e47f76-071e-40ea-b4aa-ee775cfca50d tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Image limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1862.476036] env[63279]: DEBUG nova.virt.hardware [None req-d9e47f76-071e-40ea-b4aa-ee775cfca50d tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Flavor pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:392}} [ 1862.476349] env[63279]: DEBUG nova.virt.hardware [None req-d9e47f76-071e-40ea-b4aa-ee775cfca50d tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Image pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1862.476586] env[63279]: DEBUG nova.virt.hardware [None req-d9e47f76-071e-40ea-b4aa-ee775cfca50d tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1862.476865] env[63279]: DEBUG nova.virt.hardware [None req-d9e47f76-071e-40ea-b4aa-ee775cfca50d tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1862.477486] env[63279]: DEBUG nova.virt.hardware [None req-d9e47f76-071e-40ea-b4aa-ee775cfca50d tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1862.477486] env[63279]: DEBUG nova.virt.hardware [None req-d9e47f76-071e-40ea-b4aa-ee775cfca50d tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Got 1 possible topologies {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1862.477486] env[63279]: DEBUG nova.virt.hardware [None req-d9e47f76-071e-40ea-b4aa-ee775cfca50d tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1862.477758] env[63279]: DEBUG nova.virt.hardware [None req-d9e47f76-071e-40ea-b4aa-ee775cfca50d tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1862.479649] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a78b25f-fe5b-4b22-abcc-0974fac1b885 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1862.498183] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-372371a0-0178-47d0-9e10-46ec0f391c89 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1862.504410] env[63279]: INFO nova.compute.manager [None req-1366226e-68a1-4af4-bafa-76a748070601 tempest-ServersAdminNegativeTestJSON-968304097 tempest-ServersAdminNegativeTestJSON-968304097-project-member] [instance: 32e84715-0345-4171-abb7-c034a501347e] Took 30.07 seconds to build instance. 
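
Editor's note: the nova.virt.hardware entries above trace the CPU topology negotiation for the m1.nano flavor: with no flavor or image preference ("Flavor pref 0:0:0", "Image pref 0:0:0") and the default limits of 65536 sockets/cores/threads, the only way to split 1 vCPU is cores=1, sockets=1, threads=1, which is therefore both the only possible and the sorted desired topology. The snippet below is a minimal, self-contained Python sketch of that selection step, written purely for illustration; the function names are hypothetical and real Nova applies further constraints (image properties, NUMA, thread policies) before sorting.

# Simplified sketch of the topology selection visible in the log above.
# This is NOT nova.virt.hardware itself; pick_cpu_topology and
# possible_topologies are hypothetical helpers for illustration only.
from collections import namedtuple

VirtCPUTopology = namedtuple("VirtCPUTopology", "sockets cores threads")

def possible_topologies(vcpus, limits):
    """Yield every (sockets, cores, threads) split of `vcpus` within limits."""
    for s in range(1, min(vcpus, limits.sockets) + 1):
        for c in range(1, min(vcpus, limits.cores) + 1):
            for t in range(1, min(vcpus, limits.threads) + 1):
                if s * c * t == vcpus:
                    yield VirtCPUTopology(s, c, t)

def pick_cpu_topology(vcpus, preferred=None, limits=None):
    """Return candidate topologies, preferred shape first."""
    limits = limits or VirtCPUTopology(65536, 65536, 65536)
    topologies = list(possible_topologies(vcpus, limits))
    if preferred:
        # A zero preference (the log's "0:0:0") expresses no opinion for
        # that dimension; otherwise candidates matching it sort first.
        topologies.sort(key=lambda t: (
            preferred.sockets and t.sockets != preferred.sockets,
            preferred.cores and t.cores != preferred.cores,
            preferred.threads and t.threads != preferred.threads,
        ))
    return topologies

# For the 1-vCPU m1.nano flavor the only candidate is 1:1:1.
print(pick_cpu_topology(1))

Running this for vcpus=1 prints a single VirtCPUTopology(sockets=1, cores=1, threads=1), matching the "Possible topologies" and "Sorted desired topologies" lines above.
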
[ 1862.668846] env[63279]: DEBUG nova.scheduler.client.report [None req-8d7e8546-3d55-4a5a-887c-fa75360a7a03 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Inventory has not changed for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1862.817930] env[63279]: DEBUG oslo_vmware.api [None req-280cd432-3b9b-4c5d-8833-1157015e8549 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Task: {'id': task-2086867, 'name': PowerOffVM_Task, 'duration_secs': 0.220941} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1862.818425] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-280cd432-3b9b-4c5d-8833-1157015e8549 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: 41654a82-af5e-463e-80f9-86ba13a5ad2e] Powered off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1862.818677] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-280cd432-3b9b-4c5d-8833-1157015e8549 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: 41654a82-af5e-463e-80f9-86ba13a5ad2e] Destroying instance {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1862.819467] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3412ed5b-9675-4aa8-aa27-877302041612 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1862.827667] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-280cd432-3b9b-4c5d-8833-1157015e8549 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: 41654a82-af5e-463e-80f9-86ba13a5ad2e] Unregistering the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1862.827932] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-da59f8fb-c507-49e1-86a5-fe5d1abe93d4 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1862.921616] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-280cd432-3b9b-4c5d-8833-1157015e8549 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: 41654a82-af5e-463e-80f9-86ba13a5ad2e] Unregistered the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1862.921939] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-280cd432-3b9b-4c5d-8833-1157015e8549 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: 41654a82-af5e-463e-80f9-86ba13a5ad2e] Deleting contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1862.922092] env[63279]: DEBUG 
nova.virt.vmwareapi.ds_util [None req-280cd432-3b9b-4c5d-8833-1157015e8549 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Deleting the datastore file [datastore1] 41654a82-af5e-463e-80f9-86ba13a5ad2e {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1862.922390] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-439a3bcb-d1fb-450b-8192-421127571baf {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1862.936020] env[63279]: DEBUG oslo_vmware.api [None req-280cd432-3b9b-4c5d-8833-1157015e8549 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Waiting for the task: (returnval){ [ 1862.936020] env[63279]: value = "task-2086869" [ 1862.936020] env[63279]: _type = "Task" [ 1862.936020] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1862.942764] env[63279]: DEBUG oslo_vmware.api [None req-280cd432-3b9b-4c5d-8833-1157015e8549 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Task: {'id': task-2086869, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1862.946452] env[63279]: DEBUG oslo_concurrency.lockutils [None req-4432b190-b723-4c54-ac54-2b64412dd5dc tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1863.007589] env[63279]: DEBUG oslo_concurrency.lockutils [None req-1366226e-68a1-4af4-bafa-76a748070601 tempest-ServersAdminNegativeTestJSON-968304097 tempest-ServersAdminNegativeTestJSON-968304097-project-member] Lock "32e84715-0345-4171-abb7-c034a501347e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 40.237s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1863.176740] env[63279]: DEBUG oslo_concurrency.lockutils [None req-8d7e8546-3d55-4a5a-887c-fa75360a7a03 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.763s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1863.176968] env[63279]: INFO nova.compute.manager [None req-8d7e8546-3d55-4a5a-887c-fa75360a7a03 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] [instance: 5656c853-ac83-47be-83c4-979a9e87ab91] Migrating [ 1863.177229] env[63279]: DEBUG oslo_concurrency.lockutils [None req-8d7e8546-3d55-4a5a-887c-fa75360a7a03 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Acquiring lock "compute-rpcapi-router" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1863.177381] env[63279]: DEBUG oslo_concurrency.lockutils [None req-8d7e8546-3d55-4a5a-887c-fa75360a7a03 tempest-MigrationsAdminTest-1378842037 
tempest-MigrationsAdminTest-1378842037-project-member] Acquired lock "compute-rpcapi-router" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1863.178694] env[63279]: DEBUG oslo_concurrency.lockutils [None req-fb63d81c-1d8d-4e24-b42b-e37e673a6c54 tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 25.051s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1863.181098] env[63279]: INFO nova.compute.claims [None req-fb63d81c-1d8d-4e24-b42b-e37e673a6c54 tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] [instance: 357f08c9-4de9-4b84-8384-6bf130872f40] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1863.184976] env[63279]: INFO nova.compute.rpcapi [None req-8d7e8546-3d55-4a5a-887c-fa75360a7a03 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Automatically selected compute RPC version 6.4 from minimum service version 68 [ 1863.185539] env[63279]: DEBUG oslo_concurrency.lockutils [None req-8d7e8546-3d55-4a5a-887c-fa75360a7a03 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Releasing lock "compute-rpcapi-router" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1863.449879] env[63279]: DEBUG oslo_vmware.api [None req-280cd432-3b9b-4c5d-8833-1157015e8549 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Task: {'id': task-2086869, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.18526} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1863.450196] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-280cd432-3b9b-4c5d-8833-1157015e8549 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Deleted the datastore file {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1863.450482] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-280cd432-3b9b-4c5d-8833-1157015e8549 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: 41654a82-af5e-463e-80f9-86ba13a5ad2e] Deleted contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1863.452018] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-280cd432-3b9b-4c5d-8833-1157015e8549 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: 41654a82-af5e-463e-80f9-86ba13a5ad2e] Instance destroyed {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1863.510725] env[63279]: DEBUG nova.compute.manager [None req-60555cc8-e311-4c21-b52c-fa487660eaaf tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] [instance: c1ac4af5-b01e-4175-844f-7a67b2ef7526] Starting instance... 
{{(pid=63279) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1863.705422] env[63279]: DEBUG oslo_concurrency.lockutils [None req-8d7e8546-3d55-4a5a-887c-fa75360a7a03 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Acquiring lock "refresh_cache-5656c853-ac83-47be-83c4-979a9e87ab91" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1863.705606] env[63279]: DEBUG oslo_concurrency.lockutils [None req-8d7e8546-3d55-4a5a-887c-fa75360a7a03 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Acquired lock "refresh_cache-5656c853-ac83-47be-83c4-979a9e87ab91" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1863.706581] env[63279]: DEBUG nova.network.neutron [None req-8d7e8546-3d55-4a5a-887c-fa75360a7a03 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] [instance: 5656c853-ac83-47be-83c4-979a9e87ab91] Building network info cache for instance {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1863.768840] env[63279]: DEBUG oslo_concurrency.lockutils [None req-9711dcd5-32f1-47ed-9935-39b9be9d1117 tempest-ServerTagsTestJSON-1345092865 tempest-ServerTagsTestJSON-1345092865-project-member] Acquiring lock "df410051-d551-4a90-81f7-5630f5521a10" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1863.769239] env[63279]: DEBUG oslo_concurrency.lockutils [None req-9711dcd5-32f1-47ed-9935-39b9be9d1117 tempest-ServerTagsTestJSON-1345092865 tempest-ServerTagsTestJSON-1345092865-project-member] Lock "df410051-d551-4a90-81f7-5630f5521a10" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1864.052963] env[63279]: DEBUG oslo_concurrency.lockutils [None req-60555cc8-e311-4c21-b52c-fa487660eaaf tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1864.409280] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b31f7e06-12a4-4396-a0c9-2a7687ef7413 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1864.418305] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-e9f39d3a-f8ae-4803-b846-690e45084c00 tempest-ServersAdminNegativeTestJSON-1085002939 tempest-ServersAdminNegativeTestJSON-1085002939-project-admin] [instance: 32e84715-0345-4171-abb7-c034a501347e] Suspending the VM {{(pid=63279) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1162}} [ 1864.421175] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.SuspendVM_Task with opID=oslo.vmware-47f75df8-bd51-4b60-bc1e-13c98dceafe4 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1864.430661] env[63279]: DEBUG oslo_vmware.api [None req-e9f39d3a-f8ae-4803-b846-690e45084c00 
tempest-ServersAdminNegativeTestJSON-1085002939 tempest-ServersAdminNegativeTestJSON-1085002939-project-admin] Waiting for the task: (returnval){ [ 1864.430661] env[63279]: value = "task-2086870" [ 1864.430661] env[63279]: _type = "Task" [ 1864.430661] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1864.446211] env[63279]: DEBUG oslo_vmware.api [None req-e9f39d3a-f8ae-4803-b846-690e45084c00 tempest-ServersAdminNegativeTestJSON-1085002939 tempest-ServersAdminNegativeTestJSON-1085002939-project-admin] Task: {'id': task-2086870, 'name': SuspendVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1864.494850] env[63279]: DEBUG nova.virt.hardware [None req-280cd432-3b9b-4c5d-8833-1157015e8549 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-13T17:30:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-13T17:30:01Z,direct_url=,disk_format='vmdk',id=30887889-e45b-4f67-8b3c-16216e594a90,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a935e86eee0a4c38adfe0367d2097a61',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-13T17:30:02Z,virtual_size=,visibility=), allow threads: False {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1864.495555] env[63279]: DEBUG nova.virt.hardware [None req-280cd432-3b9b-4c5d-8833-1157015e8549 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Flavor limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1864.495555] env[63279]: DEBUG nova.virt.hardware [None req-280cd432-3b9b-4c5d-8833-1157015e8549 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Image limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1864.495720] env[63279]: DEBUG nova.virt.hardware [None req-280cd432-3b9b-4c5d-8833-1157015e8549 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Flavor pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1864.495967] env[63279]: DEBUG nova.virt.hardware [None req-280cd432-3b9b-4c5d-8833-1157015e8549 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Image pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1864.496375] env[63279]: DEBUG nova.virt.hardware [None req-280cd432-3b9b-4c5d-8833-1157015e8549 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1864.496943] env[63279]: DEBUG nova.virt.hardware [None req-280cd432-3b9b-4c5d-8833-1157015e8549 
tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1864.496943] env[63279]: DEBUG nova.virt.hardware [None req-280cd432-3b9b-4c5d-8833-1157015e8549 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1864.496943] env[63279]: DEBUG nova.virt.hardware [None req-280cd432-3b9b-4c5d-8833-1157015e8549 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Got 1 possible topologies {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1864.497145] env[63279]: DEBUG nova.virt.hardware [None req-280cd432-3b9b-4c5d-8833-1157015e8549 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1864.497499] env[63279]: DEBUG nova.virt.hardware [None req-280cd432-3b9b-4c5d-8833-1157015e8549 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1864.498447] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-567459b5-e6b1-4ba7-aaf2-c8ad0b7dc9ec {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1864.509988] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ecad0c57-8887-4b55-9881-b3a432610640 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1864.530942] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-280cd432-3b9b-4c5d-8833-1157015e8549 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: 41654a82-af5e-463e-80f9-86ba13a5ad2e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e0:73:75', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a555680e-4721-4509-97e4-ced9dc17c13e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '7d76225b-f1ff-4793-9c29-9a2cd30c6b43', 'vif_model': 'vmxnet3'}] {{(pid=63279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1864.539319] env[63279]: DEBUG oslo.service.loopingcall [None req-280cd432-3b9b-4c5d-8833-1157015e8549 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1864.545130] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 41654a82-af5e-463e-80f9-86ba13a5ad2e] Creating VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1864.545130] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1005d1c9-7fd6-442a-b731-0ad8c01095c3 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1864.569821] env[63279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1864.569821] env[63279]: value = "task-2086871" [ 1864.569821] env[63279]: _type = "Task" [ 1864.569821] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1864.580412] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2086871, 'name': CreateVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1864.601452] env[63279]: DEBUG nova.network.neutron [None req-d9e47f76-071e-40ea-b4aa-ee775cfca50d tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] [instance: 5d4be656-defe-4332-b97e-e88b107ca4a1] Successfully updated port: ec7f4196-e7c7-425d-aa69-d8a766edef7f {{(pid=63279) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1864.696230] env[63279]: DEBUG nova.network.neutron [None req-8d7e8546-3d55-4a5a-887c-fa75360a7a03 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] [instance: 5656c853-ac83-47be-83c4-979a9e87ab91] Updating instance_info_cache with network_info: [{"id": "2cc4a33a-bd88-4aec-a588-7c821bebf971", "address": "fa:16:3e:de:77:2e", "network": {"id": "1e875730-8e0e-4907-a6d2-776025ed7ab9", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "a935e86eee0a4c38adfe0367d2097a61", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "32028d02-abaa-4071-bc65-1460f5c772a8", "external-id": "nsx-vlan-transportzone-558", "segmentation_id": 558, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2cc4a33a-bd", "ovs_interfaceid": "2cc4a33a-bd88-4aec-a588-7c821bebf971", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1864.803839] env[63279]: DEBUG nova.compute.manager [req-cfcf0596-111e-48d9-9f05-49e5b1b32ede req-7957f03e-567f-485b-9797-83b665ab7c3e service nova] [instance: 5d4be656-defe-4332-b97e-e88b107ca4a1] Received event network-vif-plugged-ec7f4196-e7c7-425d-aa69-d8a766edef7f {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 1864.803839] env[63279]: DEBUG oslo_concurrency.lockutils [req-cfcf0596-111e-48d9-9f05-49e5b1b32ede req-7957f03e-567f-485b-9797-83b665ab7c3e service 
nova] Acquiring lock "5d4be656-defe-4332-b97e-e88b107ca4a1-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1864.807323] env[63279]: DEBUG oslo_concurrency.lockutils [req-cfcf0596-111e-48d9-9f05-49e5b1b32ede req-7957f03e-567f-485b-9797-83b665ab7c3e service nova] Lock "5d4be656-defe-4332-b97e-e88b107ca4a1-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.003s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1864.807323] env[63279]: DEBUG oslo_concurrency.lockutils [req-cfcf0596-111e-48d9-9f05-49e5b1b32ede req-7957f03e-567f-485b-9797-83b665ab7c3e service nova] Lock "5d4be656-defe-4332-b97e-e88b107ca4a1-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1864.807323] env[63279]: DEBUG nova.compute.manager [req-cfcf0596-111e-48d9-9f05-49e5b1b32ede req-7957f03e-567f-485b-9797-83b665ab7c3e service nova] [instance: 5d4be656-defe-4332-b97e-e88b107ca4a1] No waiting events found dispatching network-vif-plugged-ec7f4196-e7c7-425d-aa69-d8a766edef7f {{(pid=63279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1864.807323] env[63279]: WARNING nova.compute.manager [req-cfcf0596-111e-48d9-9f05-49e5b1b32ede req-7957f03e-567f-485b-9797-83b665ab7c3e service nova] [instance: 5d4be656-defe-4332-b97e-e88b107ca4a1] Received unexpected event network-vif-plugged-ec7f4196-e7c7-425d-aa69-d8a766edef7f for instance with vm_state building and task_state spawning. [ 1864.909153] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-590aaf42-e9d0-4d6c-a144-14db85197673 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1864.918949] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9eee5179-7e52-4822-9c39-7c4e033b24cb {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1864.963970] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-997ecd46-4c70-4454-9e55-c4fd91326780 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1864.975157] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e228a5c1-e1b4-42f8-83d1-7b85a3be24b2 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1864.979458] env[63279]: DEBUG oslo_vmware.api [None req-e9f39d3a-f8ae-4803-b846-690e45084c00 tempest-ServersAdminNegativeTestJSON-1085002939 tempest-ServersAdminNegativeTestJSON-1085002939-project-admin] Task: {'id': task-2086870, 'name': SuspendVM_Task} progress is 62%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1864.991253] env[63279]: DEBUG nova.compute.provider_tree [None req-fb63d81c-1d8d-4e24-b42b-e37e673a6c54 tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] Inventory has not changed in ProviderTree for provider: 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1865.089274] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2086871, 'name': CreateVM_Task} progress is 25%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1865.120108] env[63279]: DEBUG oslo_concurrency.lockutils [None req-d9e47f76-071e-40ea-b4aa-ee775cfca50d tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Acquiring lock "refresh_cache-5d4be656-defe-4332-b97e-e88b107ca4a1" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1865.120108] env[63279]: DEBUG oslo_concurrency.lockutils [None req-d9e47f76-071e-40ea-b4aa-ee775cfca50d tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Acquired lock "refresh_cache-5d4be656-defe-4332-b97e-e88b107ca4a1" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1865.120108] env[63279]: DEBUG nova.network.neutron [None req-d9e47f76-071e-40ea-b4aa-ee775cfca50d tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] [instance: 5d4be656-defe-4332-b97e-e88b107ca4a1] Building network info cache for instance {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1865.202172] env[63279]: DEBUG oslo_concurrency.lockutils [None req-8d7e8546-3d55-4a5a-887c-fa75360a7a03 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Releasing lock "refresh_cache-5656c853-ac83-47be-83c4-979a9e87ab91" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1865.471995] env[63279]: DEBUG oslo_vmware.api [None req-e9f39d3a-f8ae-4803-b846-690e45084c00 tempest-ServersAdminNegativeTestJSON-1085002939 tempest-ServersAdminNegativeTestJSON-1085002939-project-admin] Task: {'id': task-2086870, 'name': SuspendVM_Task, 'duration_secs': 0.814146} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1865.472295] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-e9f39d3a-f8ae-4803-b846-690e45084c00 tempest-ServersAdminNegativeTestJSON-1085002939 tempest-ServersAdminNegativeTestJSON-1085002939-project-admin] [instance: 32e84715-0345-4171-abb7-c034a501347e] Suspended the VM {{(pid=63279) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1166}} [ 1865.472476] env[63279]: DEBUG nova.compute.manager [None req-e9f39d3a-f8ae-4803-b846-690e45084c00 tempest-ServersAdminNegativeTestJSON-1085002939 tempest-ServersAdminNegativeTestJSON-1085002939-project-admin] [instance: 32e84715-0345-4171-abb7-c034a501347e] Checking state {{(pid=63279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1865.473296] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-836bc5f6-5476-49dd-86eb-840320f9fca8 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1865.497416] env[63279]: DEBUG nova.scheduler.client.report [None req-fb63d81c-1d8d-4e24-b42b-e37e673a6c54 tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] Inventory has not changed for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1865.584278] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2086871, 'name': CreateVM_Task, 'duration_secs': 0.642344} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1865.585445] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 41654a82-af5e-463e-80f9-86ba13a5ad2e] Created VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1865.585445] env[63279]: DEBUG oslo_concurrency.lockutils [None req-280cd432-3b9b-4c5d-8833-1157015e8549 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1865.585445] env[63279]: DEBUG oslo_concurrency.lockutils [None req-280cd432-3b9b-4c5d-8833-1157015e8549 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1865.585986] env[63279]: DEBUG oslo_concurrency.lockutils [None req-280cd432-3b9b-4c5d-8833-1157015e8549 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1865.586287] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b400d0c9-b089-4235-86b2-bf21360d5880 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1865.593355] env[63279]: DEBUG oslo_vmware.api [None req-280cd432-3b9b-4c5d-8833-1157015e8549 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Waiting for the task: (returnval){ [ 1865.593355] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52ef2c63-e6b3-0e88-e49a-92bfa3fea3dd" [ 1865.593355] env[63279]: _type = "Task" [ 1865.593355] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1865.604340] env[63279]: DEBUG oslo_vmware.api [None req-280cd432-3b9b-4c5d-8833-1157015e8549 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52ef2c63-e6b3-0e88-e49a-92bfa3fea3dd, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1865.686555] env[63279]: DEBUG nova.network.neutron [None req-d9e47f76-071e-40ea-b4aa-ee775cfca50d tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] [instance: 5d4be656-defe-4332-b97e-e88b107ca4a1] Instance cache missing network info. 
{{(pid=63279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1865.944702] env[63279]: DEBUG nova.network.neutron [None req-d9e47f76-071e-40ea-b4aa-ee775cfca50d tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] [instance: 5d4be656-defe-4332-b97e-e88b107ca4a1] Updating instance_info_cache with network_info: [{"id": "ec7f4196-e7c7-425d-aa69-d8a766edef7f", "address": "fa:16:3e:d8:61:e1", "network": {"id": "6b1ead98-699d-4ad0-bb54-2a4fa09faf6d", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-861811231-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3ea345c4bc9b4781acb6f774ac88c690", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7ab8d568-adb0-4f3b-b6cc-68413e6546ae", "external-id": "nsx-vlan-transportzone-86", "segmentation_id": 86, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapec7f4196-e7", "ovs_interfaceid": "ec7f4196-e7c7-425d-aa69-d8a766edef7f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1866.009598] env[63279]: DEBUG oslo_concurrency.lockutils [None req-fb63d81c-1d8d-4e24-b42b-e37e673a6c54 tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.831s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1866.010283] env[63279]: DEBUG nova.compute.manager [None req-fb63d81c-1d8d-4e24-b42b-e37e673a6c54 tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] [instance: 357f08c9-4de9-4b84-8384-6bf130872f40] Start building networks asynchronously for instance. 
{{(pid=63279) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1866.013331] env[63279]: DEBUG oslo_concurrency.lockutils [None req-1c67a5c6-4ab3-4770-b276-ecfec0564f8f tempest-ServersTestManualDisk-725019117 tempest-ServersTestManualDisk-725019117-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 25.354s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1866.015180] env[63279]: INFO nova.compute.claims [None req-1c67a5c6-4ab3-4770-b276-ecfec0564f8f tempest-ServersTestManualDisk-725019117 tempest-ServersTestManualDisk-725019117-project-member] [instance: ecedded1-7169-49a4-8a9e-2fe4086db986] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1866.109212] env[63279]: DEBUG oslo_vmware.api [None req-280cd432-3b9b-4c5d-8833-1157015e8549 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52ef2c63-e6b3-0e88-e49a-92bfa3fea3dd, 'name': SearchDatastore_Task, 'duration_secs': 0.021833} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1866.109552] env[63279]: DEBUG oslo_concurrency.lockutils [None req-280cd432-3b9b-4c5d-8833-1157015e8549 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1866.110012] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-280cd432-3b9b-4c5d-8833-1157015e8549 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: 41654a82-af5e-463e-80f9-86ba13a5ad2e] Processing image 30887889-e45b-4f67-8b3c-16216e594a90 {{(pid=63279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1866.110088] env[63279]: DEBUG oslo_concurrency.lockutils [None req-280cd432-3b9b-4c5d-8833-1157015e8549 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1866.110698] env[63279]: DEBUG oslo_concurrency.lockutils [None req-280cd432-3b9b-4c5d-8833-1157015e8549 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1866.110698] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-280cd432-3b9b-4c5d-8833-1157015e8549 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1866.110698] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-51d96bd9-ba2d-4cef-b99e-e2cd1615fef7 {{(pid=63279) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1866.124190] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-280cd432-3b9b-4c5d-8833-1157015e8549 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1866.124370] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-280cd432-3b9b-4c5d-8833-1157015e8549 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1866.125141] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dac40a84-1017-4cfa-b8d7-f29659b2a067 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1866.137088] env[63279]: DEBUG oslo_concurrency.lockutils [None req-3fabae08-e926-4119-bd9e-b65ca32a47fe tempest-FloatingIPsAssociationNegativeTestJSON-1305161251 tempest-FloatingIPsAssociationNegativeTestJSON-1305161251-project-member] Acquiring lock "3f888d81-e73f-4486-bb64-849c873449bf" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1866.137343] env[63279]: DEBUG oslo_concurrency.lockutils [None req-3fabae08-e926-4119-bd9e-b65ca32a47fe tempest-FloatingIPsAssociationNegativeTestJSON-1305161251 tempest-FloatingIPsAssociationNegativeTestJSON-1305161251-project-member] Lock "3f888d81-e73f-4486-bb64-849c873449bf" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1866.141172] env[63279]: DEBUG oslo_vmware.api [None req-280cd432-3b9b-4c5d-8833-1157015e8549 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Waiting for the task: (returnval){ [ 1866.141172] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52921420-527e-778c-7263-43cb50c62523" [ 1866.141172] env[63279]: _type = "Task" [ 1866.141172] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1866.154186] env[63279]: DEBUG oslo_vmware.api [None req-280cd432-3b9b-4c5d-8833-1157015e8549 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52921420-527e-778c-7263-43cb50c62523, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1866.451288] env[63279]: DEBUG oslo_concurrency.lockutils [None req-d9e47f76-071e-40ea-b4aa-ee775cfca50d tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Releasing lock "refresh_cache-5d4be656-defe-4332-b97e-e88b107ca4a1" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1866.451833] env[63279]: DEBUG nova.compute.manager [None req-d9e47f76-071e-40ea-b4aa-ee775cfca50d tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] [instance: 5d4be656-defe-4332-b97e-e88b107ca4a1] Instance network_info: |[{"id": "ec7f4196-e7c7-425d-aa69-d8a766edef7f", "address": "fa:16:3e:d8:61:e1", "network": {"id": "6b1ead98-699d-4ad0-bb54-2a4fa09faf6d", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-861811231-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3ea345c4bc9b4781acb6f774ac88c690", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7ab8d568-adb0-4f3b-b6cc-68413e6546ae", "external-id": "nsx-vlan-transportzone-86", "segmentation_id": 86, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapec7f4196-e7", "ovs_interfaceid": "ec7f4196-e7c7-425d-aa69-d8a766edef7f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1866.452422] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-d9e47f76-071e-40ea-b4aa-ee775cfca50d tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] [instance: 5d4be656-defe-4332-b97e-e88b107ca4a1] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d8:61:e1', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '7ab8d568-adb0-4f3b-b6cc-68413e6546ae', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ec7f4196-e7c7-425d-aa69-d8a766edef7f', 'vif_model': 'vmxnet3'}] {{(pid=63279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1866.461555] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-d9e47f76-071e-40ea-b4aa-ee775cfca50d tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Creating folder: Project (3ea345c4bc9b4781acb6f774ac88c690). Parent ref: group-v427491. 
{{(pid=63279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1866.462035] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-65adf0e1-3dff-4e0a-b1bd-58ee380b7ea5 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1866.476157] env[63279]: INFO nova.virt.vmwareapi.vm_util [None req-d9e47f76-071e-40ea-b4aa-ee775cfca50d tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Created folder: Project (3ea345c4bc9b4781acb6f774ac88c690) in parent group-v427491. [ 1866.476687] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-d9e47f76-071e-40ea-b4aa-ee775cfca50d tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Creating folder: Instances. Parent ref: group-v427553. {{(pid=63279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1866.477176] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-dbe4193d-cb31-4230-87e6-52a1688fa140 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1866.491120] env[63279]: INFO nova.virt.vmwareapi.vm_util [None req-d9e47f76-071e-40ea-b4aa-ee775cfca50d tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Created folder: Instances in parent group-v427553. [ 1866.491120] env[63279]: DEBUG oslo.service.loopingcall [None req-d9e47f76-071e-40ea-b4aa-ee775cfca50d tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1866.491120] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5d4be656-defe-4332-b97e-e88b107ca4a1] Creating VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1866.491120] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3a1d3728-7270-4708-a7cb-d9a1f70195a8 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1866.513427] env[63279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1866.513427] env[63279]: value = "task-2086874" [ 1866.513427] env[63279]: _type = "Task" [ 1866.513427] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1866.519856] env[63279]: DEBUG nova.compute.utils [None req-fb63d81c-1d8d-4e24-b42b-e37e673a6c54 tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] Using /dev/sd instead of None {{(pid=63279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1866.530598] env[63279]: DEBUG nova.compute.manager [None req-fb63d81c-1d8d-4e24-b42b-e37e673a6c54 tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] [instance: 357f08c9-4de9-4b84-8384-6bf130872f40] Allocating IP information in the background. 
{{(pid=63279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1866.530853] env[63279]: DEBUG nova.network.neutron [None req-fb63d81c-1d8d-4e24-b42b-e37e673a6c54 tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] [instance: 357f08c9-4de9-4b84-8384-6bf130872f40] allocate_for_instance() {{(pid=63279) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1866.534170] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2086874, 'name': CreateVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1866.647645] env[63279]: DEBUG nova.policy [None req-fb63d81c-1d8d-4e24-b42b-e37e673a6c54 tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'dfe7035705504776b55a88c30e61a010', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ba92e8896d7a4605bec96ce7ee7d4a4d', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63279) authorize /opt/stack/nova/nova/policy.py:192}} [ 1866.662098] env[63279]: DEBUG oslo_vmware.api [None req-280cd432-3b9b-4c5d-8833-1157015e8549 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52921420-527e-778c-7263-43cb50c62523, 'name': SearchDatastore_Task, 'duration_secs': 0.024646} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1866.662098] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6152ff43-8503-4982-9fe8-994f93cfcf7e {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1866.666766] env[63279]: DEBUG oslo_vmware.api [None req-280cd432-3b9b-4c5d-8833-1157015e8549 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Waiting for the task: (returnval){ [ 1866.666766] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]5257f390-a3d0-410f-0a96-2c1f061319d9" [ 1866.666766] env[63279]: _type = "Task" [ 1866.666766] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1866.684896] env[63279]: DEBUG oslo_vmware.api [None req-280cd432-3b9b-4c5d-8833-1157015e8549 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]5257f390-a3d0-410f-0a96-2c1f061319d9, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1866.694776] env[63279]: DEBUG oslo_concurrency.lockutils [None req-db66a17d-ea76-4394-a0de-e69c5b562193 tempest-AttachInterfacesV270Test-1602135614 tempest-AttachInterfacesV270Test-1602135614-project-member] Acquiring lock "ba2d6111-d93d-4216-b641-864b542ea253" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1866.695050] env[63279]: DEBUG oslo_concurrency.lockutils [None req-db66a17d-ea76-4394-a0de-e69c5b562193 tempest-AttachInterfacesV270Test-1602135614 tempest-AttachInterfacesV270Test-1602135614-project-member] Lock "ba2d6111-d93d-4216-b641-864b542ea253" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1866.725414] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46e88005-e74f-4a50-b53a-8b5f6101a846 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1866.753802] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-8d7e8546-3d55-4a5a-887c-fa75360a7a03 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] [instance: 5656c853-ac83-47be-83c4-979a9e87ab91] Updating instance '5656c853-ac83-47be-83c4-979a9e87ab91' progress to 0 {{(pid=63279) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1867.030683] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2086874, 'name': CreateVM_Task, 'duration_secs': 0.477064} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1867.031304] env[63279]: DEBUG nova.compute.manager [None req-fb63d81c-1d8d-4e24-b42b-e37e673a6c54 tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] [instance: 357f08c9-4de9-4b84-8384-6bf130872f40] Start building block device mappings for instance. 
{{(pid=63279) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1867.040019] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5d4be656-defe-4332-b97e-e88b107ca4a1] Created VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1867.040019] env[63279]: DEBUG oslo_concurrency.lockutils [None req-d9e47f76-071e-40ea-b4aa-ee775cfca50d tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1867.040019] env[63279]: DEBUG oslo_concurrency.lockutils [None req-d9e47f76-071e-40ea-b4aa-ee775cfca50d tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1867.040019] env[63279]: DEBUG oslo_concurrency.lockutils [None req-d9e47f76-071e-40ea-b4aa-ee775cfca50d tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1867.040019] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7d7f4a56-3365-49cd-bd9b-65e969658c9d {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1867.046857] env[63279]: DEBUG oslo_vmware.api [None req-d9e47f76-071e-40ea-b4aa-ee775cfca50d tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Waiting for the task: (returnval){ [ 1867.046857] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52edaaf9-d759-db91-171d-640cdbd44887" [ 1867.046857] env[63279]: _type = "Task" [ 1867.046857] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1867.062445] env[63279]: DEBUG oslo_vmware.api [None req-d9e47f76-071e-40ea-b4aa-ee775cfca50d tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52edaaf9-d759-db91-171d-640cdbd44887, 'name': SearchDatastore_Task, 'duration_secs': 0.013478} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1867.062809] env[63279]: DEBUG oslo_concurrency.lockutils [None req-d9e47f76-071e-40ea-b4aa-ee775cfca50d tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1867.063434] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-d9e47f76-071e-40ea-b4aa-ee775cfca50d tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] [instance: 5d4be656-defe-4332-b97e-e88b107ca4a1] Processing image 30887889-e45b-4f67-8b3c-16216e594a90 {{(pid=63279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1867.063480] env[63279]: DEBUG oslo_concurrency.lockutils [None req-d9e47f76-071e-40ea-b4aa-ee775cfca50d tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1867.179440] env[63279]: DEBUG oslo_vmware.api [None req-280cd432-3b9b-4c5d-8833-1157015e8549 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]5257f390-a3d0-410f-0a96-2c1f061319d9, 'name': SearchDatastore_Task, 'duration_secs': 0.012729} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1867.182511] env[63279]: DEBUG oslo_concurrency.lockutils [None req-280cd432-3b9b-4c5d-8833-1157015e8549 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1867.182860] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-280cd432-3b9b-4c5d-8833-1157015e8549 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] 41654a82-af5e-463e-80f9-86ba13a5ad2e/41654a82-af5e-463e-80f9-86ba13a5ad2e.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1867.183422] env[63279]: DEBUG oslo_concurrency.lockutils [None req-d9e47f76-071e-40ea-b4aa-ee775cfca50d tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1867.183638] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-d9e47f76-071e-40ea-b4aa-ee775cfca50d tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Creating directory with path [datastore1] 
devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1867.183903] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2dd59bcf-c9ba-47b0-92a0-532590ba8f77 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1867.185972] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-32c291cd-d2b3-4a83-a0e4-50eca5e862ca {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1867.193889] env[63279]: DEBUG oslo_vmware.api [None req-280cd432-3b9b-4c5d-8833-1157015e8549 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Waiting for the task: (returnval){ [ 1867.193889] env[63279]: value = "task-2086875" [ 1867.193889] env[63279]: _type = "Task" [ 1867.193889] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1867.199226] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-d9e47f76-071e-40ea-b4aa-ee775cfca50d tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1867.199471] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-d9e47f76-071e-40ea-b4aa-ee775cfca50d tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1867.204249] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5a6dd3b6-5d42-4ef9-b3b3-39e0c7a38d90 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1867.210030] env[63279]: DEBUG oslo_vmware.api [None req-280cd432-3b9b-4c5d-8833-1157015e8549 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Task: {'id': task-2086875, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1867.214408] env[63279]: DEBUG oslo_vmware.api [None req-d9e47f76-071e-40ea-b4aa-ee775cfca50d tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Waiting for the task: (returnval){ [ 1867.214408] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52a900e2-b659-fdf5-f902-71b9c7d1127a" [ 1867.214408] env[63279]: _type = "Task" [ 1867.214408] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1867.227550] env[63279]: DEBUG oslo_vmware.api [None req-d9e47f76-071e-40ea-b4aa-ee775cfca50d tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52a900e2-b659-fdf5-f902-71b9c7d1127a, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1867.259560] env[63279]: DEBUG nova.compute.manager [None req-ae5168ae-dd58-4ce3-bb92-e8f68886bb1b tempest-ImagesOneServerTestJSON-859902932 tempest-ImagesOneServerTestJSON-859902932-project-member] [instance: 7cd673bb-3795-4c6f-a1e0-a8ea0da0a35f] Checking state {{(pid=63279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1867.261406] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-8d7e8546-3d55-4a5a-887c-fa75360a7a03 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] [instance: 5656c853-ac83-47be-83c4-979a9e87ab91] Powering off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1867.268145] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-831d429f-1c2d-4927-93be-844e983f823d {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1867.270985] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-23aa8f39-fc86-4dfb-a3f8-bfffb9a5d094 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1867.285502] env[63279]: DEBUG oslo_vmware.api [None req-8d7e8546-3d55-4a5a-887c-fa75360a7a03 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Waiting for the task: (returnval){ [ 1867.285502] env[63279]: value = "task-2086876" [ 1867.285502] env[63279]: _type = "Task" [ 1867.285502] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1867.303586] env[63279]: DEBUG oslo_vmware.api [None req-8d7e8546-3d55-4a5a-887c-fa75360a7a03 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Task: {'id': task-2086876, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1867.318053] env[63279]: DEBUG nova.network.neutron [None req-fb63d81c-1d8d-4e24-b42b-e37e673a6c54 tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] [instance: 357f08c9-4de9-4b84-8384-6bf130872f40] Successfully created port: 4160b9e6-5e90-458c-bb0f-afc6be383dc1 {{(pid=63279) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1867.434113] env[63279]: DEBUG nova.compute.manager [req-608b7232-4f9b-4fd2-a5a8-05a38f00a9bc req-69c5e455-4b85-4cb1-8947-26ce1074060c service nova] [instance: 5d4be656-defe-4332-b97e-e88b107ca4a1] Received event network-changed-ec7f4196-e7c7-425d-aa69-d8a766edef7f {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 1867.435230] env[63279]: DEBUG nova.compute.manager [req-608b7232-4f9b-4fd2-a5a8-05a38f00a9bc req-69c5e455-4b85-4cb1-8947-26ce1074060c service nova] [instance: 5d4be656-defe-4332-b97e-e88b107ca4a1] Refreshing instance network info cache due to event network-changed-ec7f4196-e7c7-425d-aa69-d8a766edef7f. 
{{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11655}} [ 1867.435230] env[63279]: DEBUG oslo_concurrency.lockutils [req-608b7232-4f9b-4fd2-a5a8-05a38f00a9bc req-69c5e455-4b85-4cb1-8947-26ce1074060c service nova] Acquiring lock "refresh_cache-5d4be656-defe-4332-b97e-e88b107ca4a1" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1867.435230] env[63279]: DEBUG oslo_concurrency.lockutils [req-608b7232-4f9b-4fd2-a5a8-05a38f00a9bc req-69c5e455-4b85-4cb1-8947-26ce1074060c service nova] Acquired lock "refresh_cache-5d4be656-defe-4332-b97e-e88b107ca4a1" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1867.435230] env[63279]: DEBUG nova.network.neutron [req-608b7232-4f9b-4fd2-a5a8-05a38f00a9bc req-69c5e455-4b85-4cb1-8947-26ce1074060c service nova] [instance: 5d4be656-defe-4332-b97e-e88b107ca4a1] Refreshing network info cache for port ec7f4196-e7c7-425d-aa69-d8a766edef7f {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1867.706429] env[63279]: DEBUG oslo_vmware.api [None req-280cd432-3b9b-4c5d-8833-1157015e8549 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Task: {'id': task-2086875, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1867.725262] env[63279]: DEBUG oslo_vmware.api [None req-d9e47f76-071e-40ea-b4aa-ee775cfca50d tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52a900e2-b659-fdf5-f902-71b9c7d1127a, 'name': SearchDatastore_Task, 'duration_secs': 0.01027} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1867.725857] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-625ad803-66fe-47b9-9307-3d45a925899b {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1867.735574] env[63279]: DEBUG oslo_vmware.api [None req-d9e47f76-071e-40ea-b4aa-ee775cfca50d tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Waiting for the task: (returnval){ [ 1867.735574] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52be89b9-78b6-8c95-bccd-7eb8e409db2e" [ 1867.735574] env[63279]: _type = "Task" [ 1867.735574] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1867.745342] env[63279]: DEBUG oslo_vmware.api [None req-d9e47f76-071e-40ea-b4aa-ee775cfca50d tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52be89b9-78b6-8c95-bccd-7eb8e409db2e, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1867.779746] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-440da5e1-1feb-426c-ae06-75deaede0dd7 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1867.788066] env[63279]: INFO nova.compute.manager [None req-ae5168ae-dd58-4ce3-bb92-e8f68886bb1b tempest-ImagesOneServerTestJSON-859902932 tempest-ImagesOneServerTestJSON-859902932-project-member] [instance: 7cd673bb-3795-4c6f-a1e0-a8ea0da0a35f] instance snapshotting [ 1867.790558] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-789f6793-633e-44d5-a1fe-281df8079ac7 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1867.797215] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c5ba4bf-44b4-4286-b1ef-578108b8cfe8 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1867.806381] env[63279]: DEBUG oslo_vmware.api [None req-8d7e8546-3d55-4a5a-887c-fa75360a7a03 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Task: {'id': task-2086876, 'name': PowerOffVM_Task, 'duration_secs': 0.382684} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1867.844592] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-8d7e8546-3d55-4a5a-887c-fa75360a7a03 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] [instance: 5656c853-ac83-47be-83c4-979a9e87ab91] Powered off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1867.844592] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-8d7e8546-3d55-4a5a-887c-fa75360a7a03 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] [instance: 5656c853-ac83-47be-83c4-979a9e87ab91] Updating instance '5656c853-ac83-47be-83c4-979a9e87ab91' progress to 17 {{(pid=63279) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1867.852292] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e6ab474-1419-4d9d-b1b1-c66f5ff54762 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1867.860155] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9bf37f77-bbe1-4b66-bd71-a4e51193b824 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1867.867950] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70ae0df3-d2a6-4240-8ca2-ffb9967744b9 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1867.886890] env[63279]: DEBUG nova.compute.provider_tree [None req-1c67a5c6-4ab3-4770-b276-ecfec0564f8f tempest-ServersTestManualDisk-725019117 tempest-ServersTestManualDisk-725019117-project-member] Inventory has not changed in ProviderTree for provider: 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 
1868.045691] env[63279]: DEBUG nova.compute.manager [None req-fb63d81c-1d8d-4e24-b42b-e37e673a6c54 tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] [instance: 357f08c9-4de9-4b84-8384-6bf130872f40] Start spawning the instance on the hypervisor. {{(pid=63279) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1868.088788] env[63279]: DEBUG nova.virt.hardware [None req-fb63d81c-1d8d-4e24-b42b-e37e673a6c54 tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-13T17:30:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-13T17:30:01Z,direct_url=,disk_format='vmdk',id=30887889-e45b-4f67-8b3c-16216e594a90,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a935e86eee0a4c38adfe0367d2097a61',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-13T17:30:02Z,virtual_size=,visibility=), allow threads: False {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1868.088788] env[63279]: DEBUG nova.virt.hardware [None req-fb63d81c-1d8d-4e24-b42b-e37e673a6c54 tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] Flavor limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1868.088937] env[63279]: DEBUG nova.virt.hardware [None req-fb63d81c-1d8d-4e24-b42b-e37e673a6c54 tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] Image limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1868.089137] env[63279]: DEBUG nova.virt.hardware [None req-fb63d81c-1d8d-4e24-b42b-e37e673a6c54 tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] Flavor pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1868.091224] env[63279]: DEBUG nova.virt.hardware [None req-fb63d81c-1d8d-4e24-b42b-e37e673a6c54 tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] Image pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1868.091224] env[63279]: DEBUG nova.virt.hardware [None req-fb63d81c-1d8d-4e24-b42b-e37e673a6c54 tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1868.091224] env[63279]: DEBUG nova.virt.hardware [None req-fb63d81c-1d8d-4e24-b42b-e37e673a6c54 tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63279) 
_get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1868.091224] env[63279]: DEBUG nova.virt.hardware [None req-fb63d81c-1d8d-4e24-b42b-e37e673a6c54 tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1868.091224] env[63279]: DEBUG nova.virt.hardware [None req-fb63d81c-1d8d-4e24-b42b-e37e673a6c54 tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] Got 1 possible topologies {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1868.091467] env[63279]: DEBUG nova.virt.hardware [None req-fb63d81c-1d8d-4e24-b42b-e37e673a6c54 tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1868.091467] env[63279]: DEBUG nova.virt.hardware [None req-fb63d81c-1d8d-4e24-b42b-e37e673a6c54 tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1868.091467] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65531e71-2de2-4d5e-a1d7-330c7f4c8a91 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1868.106847] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00be469a-0934-43ec-94ca-14abe785881f {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1868.212308] env[63279]: DEBUG oslo_vmware.api [None req-280cd432-3b9b-4c5d-8833-1157015e8549 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Task: {'id': task-2086875, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.555088} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1868.212634] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-280cd432-3b9b-4c5d-8833-1157015e8549 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] 41654a82-af5e-463e-80f9-86ba13a5ad2e/41654a82-af5e-463e-80f9-86ba13a5ad2e.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1868.212860] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-280cd432-3b9b-4c5d-8833-1157015e8549 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: 41654a82-af5e-463e-80f9-86ba13a5ad2e] Extending root virtual disk to 1048576 {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1868.216367] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-0ad7250e-6a8f-49e3-a06a-d7e5bde58126 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1868.222477] env[63279]: DEBUG oslo_vmware.api [None req-280cd432-3b9b-4c5d-8833-1157015e8549 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Waiting for the task: (returnval){ [ 1868.222477] env[63279]: value = "task-2086877" [ 1868.222477] env[63279]: _type = "Task" [ 1868.222477] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1868.233095] env[63279]: DEBUG oslo_vmware.api [None req-280cd432-3b9b-4c5d-8833-1157015e8549 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Task: {'id': task-2086877, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1868.249151] env[63279]: DEBUG oslo_vmware.api [None req-d9e47f76-071e-40ea-b4aa-ee775cfca50d tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52be89b9-78b6-8c95-bccd-7eb8e409db2e, 'name': SearchDatastore_Task, 'duration_secs': 0.023412} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1868.249442] env[63279]: DEBUG oslo_concurrency.lockutils [None req-d9e47f76-071e-40ea-b4aa-ee775cfca50d tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1868.250274] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-d9e47f76-071e-40ea-b4aa-ee775cfca50d tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] 5d4be656-defe-4332-b97e-e88b107ca4a1/5d4be656-defe-4332-b97e-e88b107ca4a1.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1868.250575] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4e7c474a-8d90-4a5d-b77d-9c0e95643d46 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1868.265049] env[63279]: DEBUG oslo_vmware.api [None req-d9e47f76-071e-40ea-b4aa-ee775cfca50d tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Waiting for the task: (returnval){ [ 1868.265049] env[63279]: value = "task-2086878" [ 1868.265049] env[63279]: _type = "Task" [ 1868.265049] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1868.269280] env[63279]: DEBUG nova.network.neutron [req-608b7232-4f9b-4fd2-a5a8-05a38f00a9bc req-69c5e455-4b85-4cb1-8947-26ce1074060c service nova] [instance: 5d4be656-defe-4332-b97e-e88b107ca4a1] Updated VIF entry in instance network info cache for port ec7f4196-e7c7-425d-aa69-d8a766edef7f. 
{{(pid=63279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1868.270077] env[63279]: DEBUG nova.network.neutron [req-608b7232-4f9b-4fd2-a5a8-05a38f00a9bc req-69c5e455-4b85-4cb1-8947-26ce1074060c service nova] [instance: 5d4be656-defe-4332-b97e-e88b107ca4a1] Updating instance_info_cache with network_info: [{"id": "ec7f4196-e7c7-425d-aa69-d8a766edef7f", "address": "fa:16:3e:d8:61:e1", "network": {"id": "6b1ead98-699d-4ad0-bb54-2a4fa09faf6d", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-861811231-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3ea345c4bc9b4781acb6f774ac88c690", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7ab8d568-adb0-4f3b-b6cc-68413e6546ae", "external-id": "nsx-vlan-transportzone-86", "segmentation_id": 86, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapec7f4196-e7", "ovs_interfaceid": "ec7f4196-e7c7-425d-aa69-d8a766edef7f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1868.278675] env[63279]: DEBUG oslo_vmware.api [None req-d9e47f76-071e-40ea-b4aa-ee775cfca50d tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Task: {'id': task-2086878, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1868.354879] env[63279]: DEBUG nova.virt.hardware [None req-8d7e8546-3d55-4a5a-887c-fa75360a7a03 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-13T17:30:21Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=30887889-e45b-4f67-8b3c-16216e594a90,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1868.355324] env[63279]: DEBUG nova.virt.hardware [None req-8d7e8546-3d55-4a5a-887c-fa75360a7a03 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Flavor limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1868.355416] env[63279]: DEBUG nova.virt.hardware [None req-8d7e8546-3d55-4a5a-887c-fa75360a7a03 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Image limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1868.355667] env[63279]: DEBUG nova.virt.hardware [None req-8d7e8546-3d55-4a5a-887c-fa75360a7a03 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Flavor pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1868.355849] env[63279]: DEBUG nova.virt.hardware [None req-8d7e8546-3d55-4a5a-887c-fa75360a7a03 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Image pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1868.356128] env[63279]: DEBUG nova.virt.hardware [None req-8d7e8546-3d55-4a5a-887c-fa75360a7a03 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1868.356871] env[63279]: DEBUG nova.virt.hardware [None req-8d7e8546-3d55-4a5a-887c-fa75360a7a03 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1868.356871] env[63279]: DEBUG nova.virt.hardware [None req-8d7e8546-3d55-4a5a-887c-fa75360a7a03 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1868.356871] env[63279]: DEBUG nova.virt.hardware [None req-8d7e8546-3d55-4a5a-887c-fa75360a7a03 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Got 1 possible topologies {{(pid=63279) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1868.356871] env[63279]: DEBUG nova.virt.hardware [None req-8d7e8546-3d55-4a5a-887c-fa75360a7a03 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1868.357098] env[63279]: DEBUG nova.virt.hardware [None req-8d7e8546-3d55-4a5a-887c-fa75360a7a03 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1868.363022] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7602b19d-8b51-4cc1-86b7-c844bc7124ea {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1868.377126] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-ae5168ae-dd58-4ce3-bb92-e8f68886bb1b tempest-ImagesOneServerTestJSON-859902932 tempest-ImagesOneServerTestJSON-859902932-project-member] [instance: 7cd673bb-3795-4c6f-a1e0-a8ea0da0a35f] Creating Snapshot of the VM instance {{(pid=63279) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1868.377555] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-1653cb82-c654-48fa-a957-cda0567754f6 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1868.382355] env[63279]: DEBUG oslo_vmware.api [None req-8d7e8546-3d55-4a5a-887c-fa75360a7a03 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Waiting for the task: (returnval){ [ 1868.382355] env[63279]: value = "task-2086879" [ 1868.382355] env[63279]: _type = "Task" [ 1868.382355] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1868.387141] env[63279]: DEBUG oslo_vmware.api [None req-ae5168ae-dd58-4ce3-bb92-e8f68886bb1b tempest-ImagesOneServerTestJSON-859902932 tempest-ImagesOneServerTestJSON-859902932-project-member] Waiting for the task: (returnval){ [ 1868.387141] env[63279]: value = "task-2086880" [ 1868.387141] env[63279]: _type = "Task" [ 1868.387141] env[63279]: } to complete. 
{{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1868.399614] env[63279]: DEBUG nova.scheduler.client.report [None req-1c67a5c6-4ab3-4770-b276-ecfec0564f8f tempest-ServersTestManualDisk-725019117 tempest-ServersTestManualDisk-725019117-project-member] Inventory has not changed for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1868.411142] env[63279]: DEBUG oslo_vmware.api [None req-8d7e8546-3d55-4a5a-887c-fa75360a7a03 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Task: {'id': task-2086879, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1868.416389] env[63279]: DEBUG oslo_vmware.api [None req-ae5168ae-dd58-4ce3-bb92-e8f68886bb1b tempest-ImagesOneServerTestJSON-859902932 tempest-ImagesOneServerTestJSON-859902932-project-member] Task: {'id': task-2086880, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1868.734411] env[63279]: DEBUG oslo_vmware.api [None req-280cd432-3b9b-4c5d-8833-1157015e8549 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Task: {'id': task-2086877, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.082804} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1868.734731] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-280cd432-3b9b-4c5d-8833-1157015e8549 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: 41654a82-af5e-463e-80f9-86ba13a5ad2e] Extended root virtual disk {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1868.735657] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13c8459b-ea38-4e8a-be6d-bfe189e6a05c {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1868.758638] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-280cd432-3b9b-4c5d-8833-1157015e8549 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: 41654a82-af5e-463e-80f9-86ba13a5ad2e] Reconfiguring VM instance instance-00000012 to attach disk [datastore1] 41654a82-af5e-463e-80f9-86ba13a5ad2e/41654a82-af5e-463e-80f9-86ba13a5ad2e.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1868.758935] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-dd83f989-12be-48f1-bb8c-b3c68c0d2d2c {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1868.774253] env[63279]: DEBUG oslo_concurrency.lockutils [req-608b7232-4f9b-4fd2-a5a8-05a38f00a9bc req-69c5e455-4b85-4cb1-8947-26ce1074060c service nova] Releasing lock "refresh_cache-5d4be656-defe-4332-b97e-e88b107ca4a1" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1868.790026] env[63279]: DEBUG oslo_vmware.api [None req-d9e47f76-071e-40ea-b4aa-ee775cfca50d tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Task: {'id': task-2086878, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1868.790026] env[63279]: DEBUG oslo_vmware.api [None req-280cd432-3b9b-4c5d-8833-1157015e8549 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Waiting for the task: (returnval){ [ 1868.790026] env[63279]: value = "task-2086881" [ 1868.790026] env[63279]: _type = "Task" [ 1868.790026] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1868.799515] env[63279]: DEBUG oslo_vmware.api [None req-280cd432-3b9b-4c5d-8833-1157015e8549 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Task: {'id': task-2086881, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1868.896161] env[63279]: DEBUG oslo_vmware.api [None req-8d7e8546-3d55-4a5a-887c-fa75360a7a03 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Task: {'id': task-2086879, 'name': ReconfigVM_Task, 'duration_secs': 0.232235} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1868.896708] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-8d7e8546-3d55-4a5a-887c-fa75360a7a03 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] [instance: 5656c853-ac83-47be-83c4-979a9e87ab91] Updating instance '5656c853-ac83-47be-83c4-979a9e87ab91' progress to 33 {{(pid=63279) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1868.904793] env[63279]: DEBUG oslo_vmware.api [None req-ae5168ae-dd58-4ce3-bb92-e8f68886bb1b tempest-ImagesOneServerTestJSON-859902932 tempest-ImagesOneServerTestJSON-859902932-project-member] Task: {'id': task-2086880, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1868.918064] env[63279]: DEBUG oslo_concurrency.lockutils [None req-1c67a5c6-4ab3-4770-b276-ecfec0564f8f tempest-ServersTestManualDisk-725019117 tempest-ServersTestManualDisk-725019117-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.905s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1868.918983] env[63279]: DEBUG nova.compute.manager [None req-1c67a5c6-4ab3-4770-b276-ecfec0564f8f tempest-ServersTestManualDisk-725019117 tempest-ServersTestManualDisk-725019117-project-member] [instance: ecedded1-7169-49a4-8a9e-2fe4086db986] Start building networks asynchronously for instance. {{(pid=63279) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1868.923011] env[63279]: DEBUG oslo_concurrency.lockutils [None req-4eb26c5f-1d6c-44a0-9365-feefd9e1d5a1 tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 27.174s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1868.923982] env[63279]: INFO nova.compute.claims [None req-4eb26c5f-1d6c-44a0-9365-feefd9e1d5a1 tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] [instance: ff2f355a-9687-4491-b243-6133e4b7b866] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1869.209711] env[63279]: DEBUG nova.network.neutron [None req-fb63d81c-1d8d-4e24-b42b-e37e673a6c54 tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] [instance: 357f08c9-4de9-4b84-8384-6bf130872f40] Successfully updated port: 4160b9e6-5e90-458c-bb0f-afc6be383dc1 {{(pid=63279) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1869.284910] env[63279]: DEBUG oslo_vmware.api [None req-d9e47f76-071e-40ea-b4aa-ee775cfca50d tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Task: {'id': task-2086878, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.786733} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1869.285579] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-d9e47f76-071e-40ea-b4aa-ee775cfca50d tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] 5d4be656-defe-4332-b97e-e88b107ca4a1/5d4be656-defe-4332-b97e-e88b107ca4a1.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1869.285978] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-d9e47f76-071e-40ea-b4aa-ee775cfca50d tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] [instance: 5d4be656-defe-4332-b97e-e88b107ca4a1] Extending root virtual disk to 1048576 {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1869.286366] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-0e10f8fd-9238-4405-8d42-4f1335bedd4f {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1869.298462] env[63279]: DEBUG oslo_vmware.api [None req-280cd432-3b9b-4c5d-8833-1157015e8549 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Task: {'id': task-2086881, 'name': ReconfigVM_Task, 'duration_secs': 0.482819} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1869.300468] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-280cd432-3b9b-4c5d-8833-1157015e8549 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: 41654a82-af5e-463e-80f9-86ba13a5ad2e] Reconfigured VM instance instance-00000012 to attach disk [datastore1] 41654a82-af5e-463e-80f9-86ba13a5ad2e/41654a82-af5e-463e-80f9-86ba13a5ad2e.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1869.301179] env[63279]: DEBUG oslo_vmware.api [None req-d9e47f76-071e-40ea-b4aa-ee775cfca50d tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Waiting for the task: (returnval){ [ 1869.301179] env[63279]: value = "task-2086882" [ 1869.301179] env[63279]: _type = "Task" [ 1869.301179] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1869.301384] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b6d42b9b-5210-4ce2-8b25-ed3ed39fe0b6 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1869.311931] env[63279]: DEBUG oslo_vmware.api [None req-d9e47f76-071e-40ea-b4aa-ee775cfca50d tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Task: {'id': task-2086882, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1869.313590] env[63279]: DEBUG oslo_vmware.api [None req-280cd432-3b9b-4c5d-8833-1157015e8549 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Waiting for the task: (returnval){ [ 1869.313590] env[63279]: value = "task-2086883" [ 1869.313590] env[63279]: _type = "Task" [ 1869.313590] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1869.323818] env[63279]: DEBUG oslo_vmware.api [None req-280cd432-3b9b-4c5d-8833-1157015e8549 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Task: {'id': task-2086883, 'name': Rename_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1869.399429] env[63279]: DEBUG oslo_vmware.api [None req-ae5168ae-dd58-4ce3-bb92-e8f68886bb1b tempest-ImagesOneServerTestJSON-859902932 tempest-ImagesOneServerTestJSON-859902932-project-member] Task: {'id': task-2086880, 'name': CreateSnapshot_Task, 'duration_secs': 0.698246} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1869.399718] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-ae5168ae-dd58-4ce3-bb92-e8f68886bb1b tempest-ImagesOneServerTestJSON-859902932 tempest-ImagesOneServerTestJSON-859902932-project-member] [instance: 7cd673bb-3795-4c6f-a1e0-a8ea0da0a35f] Created Snapshot of the VM instance {{(pid=63279) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1869.400497] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4eaa8b06-578d-4805-aae5-5cae564c95f5 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1869.407624] env[63279]: DEBUG nova.virt.hardware [None req-8d7e8546-3d55-4a5a-887c-fa75360a7a03 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-13T17:30:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=30887889-e45b-4f67-8b3c-16216e594a90,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1869.407624] env[63279]: DEBUG nova.virt.hardware [None req-8d7e8546-3d55-4a5a-887c-fa75360a7a03 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Flavor limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1869.407624] env[63279]: DEBUG nova.virt.hardware [None req-8d7e8546-3d55-4a5a-887c-fa75360a7a03 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Image limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1869.407815] env[63279]: DEBUG nova.virt.hardware [None 
req-8d7e8546-3d55-4a5a-887c-fa75360a7a03 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Flavor pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1869.407943] env[63279]: DEBUG nova.virt.hardware [None req-8d7e8546-3d55-4a5a-887c-fa75360a7a03 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Image pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1869.408106] env[63279]: DEBUG nova.virt.hardware [None req-8d7e8546-3d55-4a5a-887c-fa75360a7a03 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1869.408362] env[63279]: DEBUG nova.virt.hardware [None req-8d7e8546-3d55-4a5a-887c-fa75360a7a03 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1869.408561] env[63279]: DEBUG nova.virt.hardware [None req-8d7e8546-3d55-4a5a-887c-fa75360a7a03 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1869.408742] env[63279]: DEBUG nova.virt.hardware [None req-8d7e8546-3d55-4a5a-887c-fa75360a7a03 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Got 1 possible topologies {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1869.408913] env[63279]: DEBUG nova.virt.hardware [None req-8d7e8546-3d55-4a5a-887c-fa75360a7a03 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1869.409226] env[63279]: DEBUG nova.virt.hardware [None req-8d7e8546-3d55-4a5a-887c-fa75360a7a03 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1869.414980] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-8d7e8546-3d55-4a5a-887c-fa75360a7a03 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] [instance: 5656c853-ac83-47be-83c4-979a9e87ab91] Reconfiguring VM instance instance-00000009 to detach disk 2000 {{(pid=63279) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1869.424365] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2c142a20-94fc-4cd5-97c7-77e35e137df7 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1869.451719] env[63279]: DEBUG nova.compute.utils [None req-1c67a5c6-4ab3-4770-b276-ecfec0564f8f tempest-ServersTestManualDisk-725019117 tempest-ServersTestManualDisk-725019117-project-member] Using /dev/sd instead of None {{(pid=63279) 
get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1869.456507] env[63279]: DEBUG nova.compute.manager [req-66946d77-784e-4363-b318-965af6101661 req-a24539ab-9e62-4eae-b938-f9cc36ef4760 service nova] [instance: 357f08c9-4de9-4b84-8384-6bf130872f40] Received event network-vif-plugged-4160b9e6-5e90-458c-bb0f-afc6be383dc1 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 1869.456758] env[63279]: DEBUG oslo_concurrency.lockutils [req-66946d77-784e-4363-b318-965af6101661 req-a24539ab-9e62-4eae-b938-f9cc36ef4760 service nova] Acquiring lock "357f08c9-4de9-4b84-8384-6bf130872f40-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1869.456999] env[63279]: DEBUG oslo_concurrency.lockutils [req-66946d77-784e-4363-b318-965af6101661 req-a24539ab-9e62-4eae-b938-f9cc36ef4760 service nova] Lock "357f08c9-4de9-4b84-8384-6bf130872f40-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1869.457210] env[63279]: DEBUG oslo_concurrency.lockutils [req-66946d77-784e-4363-b318-965af6101661 req-a24539ab-9e62-4eae-b938-f9cc36ef4760 service nova] Lock "357f08c9-4de9-4b84-8384-6bf130872f40-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1869.457386] env[63279]: DEBUG nova.compute.manager [req-66946d77-784e-4363-b318-965af6101661 req-a24539ab-9e62-4eae-b938-f9cc36ef4760 service nova] [instance: 357f08c9-4de9-4b84-8384-6bf130872f40] No waiting events found dispatching network-vif-plugged-4160b9e6-5e90-458c-bb0f-afc6be383dc1 {{(pid=63279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1869.457556] env[63279]: WARNING nova.compute.manager [req-66946d77-784e-4363-b318-965af6101661 req-a24539ab-9e62-4eae-b938-f9cc36ef4760 service nova] [instance: 357f08c9-4de9-4b84-8384-6bf130872f40] Received unexpected event network-vif-plugged-4160b9e6-5e90-458c-bb0f-afc6be383dc1 for instance with vm_state building and task_state spawning. [ 1869.458379] env[63279]: DEBUG nova.compute.manager [None req-1c67a5c6-4ab3-4770-b276-ecfec0564f8f tempest-ServersTestManualDisk-725019117 tempest-ServersTestManualDisk-725019117-project-member] [instance: ecedded1-7169-49a4-8a9e-2fe4086db986] Allocating IP information in the background. {{(pid=63279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1869.458556] env[63279]: DEBUG nova.network.neutron [None req-1c67a5c6-4ab3-4770-b276-ecfec0564f8f tempest-ServersTestManualDisk-725019117 tempest-ServersTestManualDisk-725019117-project-member] [instance: ecedded1-7169-49a4-8a9e-2fe4086db986] allocate_for_instance() {{(pid=63279) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1869.469919] env[63279]: DEBUG oslo_vmware.api [None req-8d7e8546-3d55-4a5a-887c-fa75360a7a03 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Waiting for the task: (returnval){ [ 1869.469919] env[63279]: value = "task-2086884" [ 1869.469919] env[63279]: _type = "Task" [ 1869.469919] env[63279]: } to complete. 
{{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1869.483282] env[63279]: DEBUG oslo_vmware.api [None req-8d7e8546-3d55-4a5a-887c-fa75360a7a03 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Task: {'id': task-2086884, 'name': ReconfigVM_Task} progress is 10%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1869.515509] env[63279]: DEBUG nova.policy [None req-1c67a5c6-4ab3-4770-b276-ecfec0564f8f tempest-ServersTestManualDisk-725019117 tempest-ServersTestManualDisk-725019117-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '05d54932ba5a43e5ab9cda4a7912aa3e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e271c0d8278c4192950ebc988485dd5f', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63279) authorize /opt/stack/nova/nova/policy.py:192}} [ 1869.712253] env[63279]: DEBUG oslo_concurrency.lockutils [None req-fb63d81c-1d8d-4e24-b42b-e37e673a6c54 tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] Acquiring lock "refresh_cache-357f08c9-4de9-4b84-8384-6bf130872f40" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1869.713307] env[63279]: DEBUG oslo_concurrency.lockutils [None req-fb63d81c-1d8d-4e24-b42b-e37e673a6c54 tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] Acquired lock "refresh_cache-357f08c9-4de9-4b84-8384-6bf130872f40" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1869.713307] env[63279]: DEBUG nova.network.neutron [None req-fb63d81c-1d8d-4e24-b42b-e37e673a6c54 tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] [instance: 357f08c9-4de9-4b84-8384-6bf130872f40] Building network info cache for instance {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1869.794428] env[63279]: DEBUG oslo_concurrency.lockutils [None req-930e8b72-e313-47d3-971d-85e95e45c359 tempest-ServersListShow296Test-756193091 tempest-ServersListShow296Test-756193091-project-member] Acquiring lock "d7eea629-0c82-4f56-8a6c-86d18a70814d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1869.794810] env[63279]: DEBUG oslo_concurrency.lockutils [None req-930e8b72-e313-47d3-971d-85e95e45c359 tempest-ServersListShow296Test-756193091 tempest-ServersListShow296Test-756193091-project-member] Lock "d7eea629-0c82-4f56-8a6c-86d18a70814d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1869.815898] env[63279]: DEBUG oslo_vmware.api [None req-d9e47f76-071e-40ea-b4aa-ee775cfca50d tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Task: {'id': task-2086882, 
'name': ExtendVirtualDisk_Task, 'duration_secs': 0.07455} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1869.821117] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-d9e47f76-071e-40ea-b4aa-ee775cfca50d tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] [instance: 5d4be656-defe-4332-b97e-e88b107ca4a1] Extended root virtual disk {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1869.822202] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b91a1f0a-e9d1-45c3-8bbc-db4004bb1efc {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1869.831460] env[63279]: DEBUG oslo_vmware.api [None req-280cd432-3b9b-4c5d-8833-1157015e8549 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Task: {'id': task-2086883, 'name': Rename_Task, 'duration_secs': 0.470225} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1869.844464] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-280cd432-3b9b-4c5d-8833-1157015e8549 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: 41654a82-af5e-463e-80f9-86ba13a5ad2e] Powering on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1869.845555] env[63279]: DEBUG nova.compute.manager [None req-2348a2ee-5449-455f-aa56-c04b59d9fc77 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] [instance: 00fb1bc8-2f70-4fa3-a6a2-54fd38ba89c7] Checking state {{(pid=63279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1869.854197] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-d9e47f76-071e-40ea-b4aa-ee775cfca50d tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] [instance: 5d4be656-defe-4332-b97e-e88b107ca4a1] Reconfiguring VM instance instance-00000016 to attach disk [datastore1] 5d4be656-defe-4332-b97e-e88b107ca4a1/5d4be656-defe-4332-b97e-e88b107ca4a1.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1869.855217] env[63279]: DEBUG nova.network.neutron [None req-1c67a5c6-4ab3-4770-b276-ecfec0564f8f tempest-ServersTestManualDisk-725019117 tempest-ServersTestManualDisk-725019117-project-member] [instance: ecedded1-7169-49a4-8a9e-2fe4086db986] Successfully created port: bbcf46a8-cec2-4c54-8c6a-c1ba126b1676 {{(pid=63279) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1869.857119] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e6be7446-2e1c-4d5a-9db1-b64e03e96c8f {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1869.859397] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-baa789e4-d39a-4c85-a645-6fcf616e59d8 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1869.861789] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with 
opID=oslo.vmware-c98e28ba-6b86-43a9-a2c5-bdcfa18430a3 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1869.887320] env[63279]: DEBUG oslo_vmware.api [None req-280cd432-3b9b-4c5d-8833-1157015e8549 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Waiting for the task: (returnval){ [ 1869.887320] env[63279]: value = "task-2086885" [ 1869.887320] env[63279]: _type = "Task" [ 1869.887320] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1869.887601] env[63279]: DEBUG oslo_vmware.api [None req-d9e47f76-071e-40ea-b4aa-ee775cfca50d tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Waiting for the task: (returnval){ [ 1869.887601] env[63279]: value = "task-2086886" [ 1869.887601] env[63279]: _type = "Task" [ 1869.887601] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1869.898979] env[63279]: DEBUG oslo_vmware.api [None req-280cd432-3b9b-4c5d-8833-1157015e8549 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Task: {'id': task-2086885, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1869.904992] env[63279]: DEBUG oslo_vmware.api [None req-d9e47f76-071e-40ea-b4aa-ee775cfca50d tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Task: {'id': task-2086886, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1869.955013] env[63279]: DEBUG nova.compute.manager [None req-1c67a5c6-4ab3-4770-b276-ecfec0564f8f tempest-ServersTestManualDisk-725019117 tempest-ServersTestManualDisk-725019117-project-member] [instance: ecedded1-7169-49a4-8a9e-2fe4086db986] Start building block device mappings for instance. {{(pid=63279) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1869.968412] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-ae5168ae-dd58-4ce3-bb92-e8f68886bb1b tempest-ImagesOneServerTestJSON-859902932 tempest-ImagesOneServerTestJSON-859902932-project-member] [instance: 7cd673bb-3795-4c6f-a1e0-a8ea0da0a35f] Creating linked-clone VM from snapshot {{(pid=63279) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1869.969273] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-9933aead-22e6-46c4-89df-bb53364b105b {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1869.985096] env[63279]: DEBUG oslo_vmware.api [None req-8d7e8546-3d55-4a5a-887c-fa75360a7a03 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Task: {'id': task-2086884, 'name': ReconfigVM_Task, 'duration_secs': 0.511111} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1869.986615] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-8d7e8546-3d55-4a5a-887c-fa75360a7a03 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] [instance: 5656c853-ac83-47be-83c4-979a9e87ab91] Reconfigured VM instance instance-00000009 to detach disk 2000 {{(pid=63279) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1869.987039] env[63279]: DEBUG oslo_vmware.api [None req-ae5168ae-dd58-4ce3-bb92-e8f68886bb1b tempest-ImagesOneServerTestJSON-859902932 tempest-ImagesOneServerTestJSON-859902932-project-member] Waiting for the task: (returnval){ [ 1869.987039] env[63279]: value = "task-2086887" [ 1869.987039] env[63279]: _type = "Task" [ 1869.987039] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1869.990902] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41a5b265-d4d6-47c8-95dc-c66d32312b93 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1870.005525] env[63279]: DEBUG oslo_vmware.api [None req-ae5168ae-dd58-4ce3-bb92-e8f68886bb1b tempest-ImagesOneServerTestJSON-859902932 tempest-ImagesOneServerTestJSON-859902932-project-member] Task: {'id': task-2086887, 'name': CloneVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1870.023648] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-8d7e8546-3d55-4a5a-887c-fa75360a7a03 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] [instance: 5656c853-ac83-47be-83c4-979a9e87ab91] Reconfiguring VM instance instance-00000009 to attach disk [datastore1] 5656c853-ac83-47be-83c4-979a9e87ab91/5656c853-ac83-47be-83c4-979a9e87ab91.vmdk or device None with type thin {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1870.027405] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b084fb1d-f3d3-483f-9e67-8edb2f644598 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1870.050174] env[63279]: DEBUG oslo_vmware.api [None req-8d7e8546-3d55-4a5a-887c-fa75360a7a03 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Waiting for the task: (returnval){ [ 1870.050174] env[63279]: value = "task-2086888" [ 1870.050174] env[63279]: _type = "Task" [ 1870.050174] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1870.063195] env[63279]: DEBUG oslo_vmware.api [None req-8d7e8546-3d55-4a5a-887c-fa75360a7a03 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Task: {'id': task-2086888, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1870.265424] env[63279]: DEBUG nova.network.neutron [None req-fb63d81c-1d8d-4e24-b42b-e37e673a6c54 tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] [instance: 357f08c9-4de9-4b84-8384-6bf130872f40] Instance cache missing network info. 
{{(pid=63279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1870.389315] env[63279]: INFO nova.compute.manager [None req-2348a2ee-5449-455f-aa56-c04b59d9fc77 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] [instance: 00fb1bc8-2f70-4fa3-a6a2-54fd38ba89c7] instance snapshotting [ 1870.402977] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2afe8d8-ae55-43fe-b49a-0b90f5bfa7e1 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1870.425100] env[63279]: DEBUG oslo_vmware.api [None req-d9e47f76-071e-40ea-b4aa-ee775cfca50d tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Task: {'id': task-2086886, 'name': ReconfigVM_Task} progress is 99%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1870.430712] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-447996a1-6af5-45f7-8b45-ca625c4eb326 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1870.433397] env[63279]: DEBUG oslo_vmware.api [None req-280cd432-3b9b-4c5d-8833-1157015e8549 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Task: {'id': task-2086885, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1870.486537] env[63279]: DEBUG nova.network.neutron [None req-fb63d81c-1d8d-4e24-b42b-e37e673a6c54 tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] [instance: 357f08c9-4de9-4b84-8384-6bf130872f40] Updating instance_info_cache with network_info: [{"id": "4160b9e6-5e90-458c-bb0f-afc6be383dc1", "address": "fa:16:3e:0b:30:a6", "network": {"id": "948d327b-554a-4c1d-a483-9a067d60f6bc", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1383523654-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ba92e8896d7a4605bec96ce7ee7d4a4d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "471f65a5-21ea-45e3-a722-4e204ed65673", "external-id": "nsx-vlan-transportzone-139", "segmentation_id": 139, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4160b9e6-5e", "ovs_interfaceid": "4160b9e6-5e90-458c-bb0f-afc6be383dc1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1870.513159] env[63279]: DEBUG oslo_vmware.api [None req-ae5168ae-dd58-4ce3-bb92-e8f68886bb1b tempest-ImagesOneServerTestJSON-859902932 tempest-ImagesOneServerTestJSON-859902932-project-member] Task: {'id': task-2086887, 'name': CloneVM_Task} progress is 94%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1870.561963] env[63279]: DEBUG oslo_vmware.api [None req-8d7e8546-3d55-4a5a-887c-fa75360a7a03 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Task: {'id': task-2086888, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1870.631952] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ded3020-3b3a-46fd-99d8-1dea57bb23e6 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1870.642041] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75df2e73-b5a0-4dd4-a88c-150ee0b5e54e {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1870.674685] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e5e7c1f-c888-43d2-9798-6e31122b9ab6 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1870.685199] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1060f1c6-97d8-4c20-8f64-efe7b93800fd {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1870.702162] env[63279]: DEBUG nova.compute.provider_tree [None req-4eb26c5f-1d6c-44a0-9365-feefd9e1d5a1 tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Updating inventory in ProviderTree for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1870.913392] env[63279]: DEBUG oslo_vmware.api [None req-280cd432-3b9b-4c5d-8833-1157015e8549 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Task: {'id': task-2086885, 'name': PowerOnVM_Task, 'duration_secs': 0.835342} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1870.913392] env[63279]: DEBUG oslo_vmware.api [None req-d9e47f76-071e-40ea-b4aa-ee775cfca50d tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Task: {'id': task-2086886, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1870.913392] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-280cd432-3b9b-4c5d-8833-1157015e8549 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: 41654a82-af5e-463e-80f9-86ba13a5ad2e] Powered on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1870.913392] env[63279]: DEBUG nova.compute.manager [None req-280cd432-3b9b-4c5d-8833-1157015e8549 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: 41654a82-af5e-463e-80f9-86ba13a5ad2e] Checking state {{(pid=63279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1870.913392] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3d6d1a0-d0dd-48e2-8f95-de1b58342a01 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1870.945968] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-2348a2ee-5449-455f-aa56-c04b59d9fc77 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] [instance: 00fb1bc8-2f70-4fa3-a6a2-54fd38ba89c7] Creating Snapshot of the VM instance {{(pid=63279) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1870.946379] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-92133660-ac81-4bb9-a3d2-cfb59c0a5a96 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1870.957802] env[63279]: DEBUG oslo_vmware.api [None req-2348a2ee-5449-455f-aa56-c04b59d9fc77 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Waiting for the task: (returnval){ [ 1870.957802] env[63279]: value = "task-2086889" [ 1870.957802] env[63279]: _type = "Task" [ 1870.957802] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1870.970450] env[63279]: DEBUG oslo_vmware.api [None req-2348a2ee-5449-455f-aa56-c04b59d9fc77 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Task: {'id': task-2086889, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1870.970450] env[63279]: DEBUG nova.compute.manager [None req-1c67a5c6-4ab3-4770-b276-ecfec0564f8f tempest-ServersTestManualDisk-725019117 tempest-ServersTestManualDisk-725019117-project-member] [instance: ecedded1-7169-49a4-8a9e-2fe4086db986] Start spawning the instance on the hypervisor. 
{{(pid=63279) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1870.992229] env[63279]: DEBUG oslo_concurrency.lockutils [None req-fb63d81c-1d8d-4e24-b42b-e37e673a6c54 tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] Releasing lock "refresh_cache-357f08c9-4de9-4b84-8384-6bf130872f40" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1870.992548] env[63279]: DEBUG nova.compute.manager [None req-fb63d81c-1d8d-4e24-b42b-e37e673a6c54 tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] [instance: 357f08c9-4de9-4b84-8384-6bf130872f40] Instance network_info: |[{"id": "4160b9e6-5e90-458c-bb0f-afc6be383dc1", "address": "fa:16:3e:0b:30:a6", "network": {"id": "948d327b-554a-4c1d-a483-9a067d60f6bc", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1383523654-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ba92e8896d7a4605bec96ce7ee7d4a4d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "471f65a5-21ea-45e3-a722-4e204ed65673", "external-id": "nsx-vlan-transportzone-139", "segmentation_id": 139, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4160b9e6-5e", "ovs_interfaceid": "4160b9e6-5e90-458c-bb0f-afc6be383dc1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1870.995179] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-fb63d81c-1d8d-4e24-b42b-e37e673a6c54 tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] [instance: 357f08c9-4de9-4b84-8384-6bf130872f40] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:0b:30:a6', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '471f65a5-21ea-45e3-a722-4e204ed65673', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '4160b9e6-5e90-458c-bb0f-afc6be383dc1', 'vif_model': 'vmxnet3'}] {{(pid=63279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1871.000933] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-fb63d81c-1d8d-4e24-b42b-e37e673a6c54 tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] Creating folder: Project (ba92e8896d7a4605bec96ce7ee7d4a4d). Parent ref: group-v427491. 
{{(pid=63279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1871.004082] env[63279]: DEBUG nova.virt.hardware [None req-1c67a5c6-4ab3-4770-b276-ecfec0564f8f tempest-ServersTestManualDisk-725019117 tempest-ServersTestManualDisk-725019117-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-13T17:30:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-13T17:30:01Z,direct_url=,disk_format='vmdk',id=30887889-e45b-4f67-8b3c-16216e594a90,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a935e86eee0a4c38adfe0367d2097a61',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-13T17:30:02Z,virtual_size=,visibility=), allow threads: False {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1871.004402] env[63279]: DEBUG nova.virt.hardware [None req-1c67a5c6-4ab3-4770-b276-ecfec0564f8f tempest-ServersTestManualDisk-725019117 tempest-ServersTestManualDisk-725019117-project-member] Flavor limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1871.004596] env[63279]: DEBUG nova.virt.hardware [None req-1c67a5c6-4ab3-4770-b276-ecfec0564f8f tempest-ServersTestManualDisk-725019117 tempest-ServersTestManualDisk-725019117-project-member] Image limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1871.004813] env[63279]: DEBUG nova.virt.hardware [None req-1c67a5c6-4ab3-4770-b276-ecfec0564f8f tempest-ServersTestManualDisk-725019117 tempest-ServersTestManualDisk-725019117-project-member] Flavor pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1871.004981] env[63279]: DEBUG nova.virt.hardware [None req-1c67a5c6-4ab3-4770-b276-ecfec0564f8f tempest-ServersTestManualDisk-725019117 tempest-ServersTestManualDisk-725019117-project-member] Image pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1871.005181] env[63279]: DEBUG nova.virt.hardware [None req-1c67a5c6-4ab3-4770-b276-ecfec0564f8f tempest-ServersTestManualDisk-725019117 tempest-ServersTestManualDisk-725019117-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1871.005570] env[63279]: DEBUG nova.virt.hardware [None req-1c67a5c6-4ab3-4770-b276-ecfec0564f8f tempest-ServersTestManualDisk-725019117 tempest-ServersTestManualDisk-725019117-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1871.005829] env[63279]: DEBUG nova.virt.hardware [None req-1c67a5c6-4ab3-4770-b276-ecfec0564f8f tempest-ServersTestManualDisk-725019117 tempest-ServersTestManualDisk-725019117-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1871.006048] env[63279]: DEBUG nova.virt.hardware [None 
req-1c67a5c6-4ab3-4770-b276-ecfec0564f8f tempest-ServersTestManualDisk-725019117 tempest-ServersTestManualDisk-725019117-project-member] Got 1 possible topologies {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1871.006274] env[63279]: DEBUG nova.virt.hardware [None req-1c67a5c6-4ab3-4770-b276-ecfec0564f8f tempest-ServersTestManualDisk-725019117 tempest-ServersTestManualDisk-725019117-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1871.006487] env[63279]: DEBUG nova.virt.hardware [None req-1c67a5c6-4ab3-4770-b276-ecfec0564f8f tempest-ServersTestManualDisk-725019117 tempest-ServersTestManualDisk-725019117-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1871.006822] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b4e464a5-498d-4229-9815-f851c508ca58 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1871.009405] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ebd559c9-609b-40fc-8194-6726a484ffcd {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1871.021325] env[63279]: DEBUG oslo_vmware.api [None req-ae5168ae-dd58-4ce3-bb92-e8f68886bb1b tempest-ImagesOneServerTestJSON-859902932 tempest-ImagesOneServerTestJSON-859902932-project-member] Task: {'id': task-2086887, 'name': CloneVM_Task} progress is 94%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1871.024448] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bbd5699f-5089-4ada-94e4-8294bba1cbc8 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1871.030303] env[63279]: INFO nova.virt.vmwareapi.vm_util [None req-fb63d81c-1d8d-4e24-b42b-e37e673a6c54 tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] Created folder: Project (ba92e8896d7a4605bec96ce7ee7d4a4d) in parent group-v427491. [ 1871.030503] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-fb63d81c-1d8d-4e24-b42b-e37e673a6c54 tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] Creating folder: Instances. Parent ref: group-v427558. {{(pid=63279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1871.031102] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a3e3e254-c639-47e2-ab08-a5652c4c386d {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1871.042669] env[63279]: INFO nova.virt.vmwareapi.vm_util [None req-fb63d81c-1d8d-4e24-b42b-e37e673a6c54 tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] Created folder: Instances in parent group-v427558. 
[ 1871.042669] env[63279]: DEBUG oslo.service.loopingcall [None req-fb63d81c-1d8d-4e24-b42b-e37e673a6c54 tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1871.042669] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 357f08c9-4de9-4b84-8384-6bf130872f40] Creating VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1871.042669] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e2baf237-da6b-4cae-bc98-7b3bb9047a4d {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1871.066064] env[63279]: DEBUG oslo_vmware.api [None req-8d7e8546-3d55-4a5a-887c-fa75360a7a03 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Task: {'id': task-2086888, 'name': ReconfigVM_Task, 'duration_secs': 0.584374} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1871.067469] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-8d7e8546-3d55-4a5a-887c-fa75360a7a03 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] [instance: 5656c853-ac83-47be-83c4-979a9e87ab91] Reconfigured VM instance instance-00000009 to attach disk [datastore1] 5656c853-ac83-47be-83c4-979a9e87ab91/5656c853-ac83-47be-83c4-979a9e87ab91.vmdk or device None with type thin {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1871.067734] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-8d7e8546-3d55-4a5a-887c-fa75360a7a03 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] [instance: 5656c853-ac83-47be-83c4-979a9e87ab91] Updating instance '5656c853-ac83-47be-83c4-979a9e87ab91' progress to 50 {{(pid=63279) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1871.070995] env[63279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1871.070995] env[63279]: value = "task-2086892" [ 1871.070995] env[63279]: _type = "Task" [ 1871.070995] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1871.081013] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2086892, 'name': CreateVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1871.231046] env[63279]: ERROR nova.scheduler.client.report [None req-4eb26c5f-1d6c-44a0-9365-feefd9e1d5a1 tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] [req-7a704355-a8b3-4aa2-bfad-434b005603f6] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 0ba7c625-a0fc-4d3c-b804-196d00f00137. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-7a704355-a8b3-4aa2-bfad-434b005603f6"}]} [ 1871.252404] env[63279]: DEBUG nova.scheduler.client.report [None req-4eb26c5f-1d6c-44a0-9365-feefd9e1d5a1 tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Refreshing inventories for resource provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1871.271110] env[63279]: DEBUG nova.scheduler.client.report [None req-4eb26c5f-1d6c-44a0-9365-feefd9e1d5a1 tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Updating ProviderTree inventory for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1871.271403] env[63279]: DEBUG nova.compute.provider_tree [None req-4eb26c5f-1d6c-44a0-9365-feefd9e1d5a1 tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Updating inventory in ProviderTree for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1871.287746] env[63279]: DEBUG nova.scheduler.client.report [None req-4eb26c5f-1d6c-44a0-9365-feefd9e1d5a1 tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Refreshing aggregate associations for resource provider 0ba7c625-a0fc-4d3c-b804-196d00f00137, aggregates: None {{(pid=63279) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1871.313899] env[63279]: DEBUG nova.scheduler.client.report [None req-4eb26c5f-1d6c-44a0-9365-feefd9e1d5a1 tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Refreshing trait associations for resource provider 0ba7c625-a0fc-4d3c-b804-196d00f00137, traits: COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK {{(pid=63279) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1871.406606] env[63279]: DEBUG oslo_vmware.api [None req-d9e47f76-071e-40ea-b4aa-ee775cfca50d tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Task: {'id': task-2086886, 'name': ReconfigVM_Task, 'duration_secs': 1.350596} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1871.406946] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-d9e47f76-071e-40ea-b4aa-ee775cfca50d tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] [instance: 5d4be656-defe-4332-b97e-e88b107ca4a1] Reconfigured VM instance instance-00000016 to attach disk [datastore1] 5d4be656-defe-4332-b97e-e88b107ca4a1/5d4be656-defe-4332-b97e-e88b107ca4a1.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1871.407649] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-2e45cb50-a1b0-4395-af0e-47cffe2119b8 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1871.420654] env[63279]: DEBUG oslo_vmware.api [None req-d9e47f76-071e-40ea-b4aa-ee775cfca50d tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Waiting for the task: (returnval){ [ 1871.420654] env[63279]: value = "task-2086893" [ 1871.420654] env[63279]: _type = "Task" [ 1871.420654] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1871.443036] env[63279]: DEBUG oslo_vmware.api [None req-d9e47f76-071e-40ea-b4aa-ee775cfca50d tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Task: {'id': task-2086893, 'name': Rename_Task} progress is 5%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1871.445704] env[63279]: DEBUG oslo_concurrency.lockutils [None req-280cd432-3b9b-4c5d-8833-1157015e8549 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1871.479981] env[63279]: DEBUG oslo_vmware.api [None req-2348a2ee-5449-455f-aa56-c04b59d9fc77 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Task: {'id': task-2086889, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1871.519760] env[63279]: DEBUG oslo_vmware.api [None req-ae5168ae-dd58-4ce3-bb92-e8f68886bb1b tempest-ImagesOneServerTestJSON-859902932 tempest-ImagesOneServerTestJSON-859902932-project-member] Task: {'id': task-2086887, 'name': CloneVM_Task} progress is 100%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1871.582852] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd92cc2d-fe2c-4c1c-9e9b-6e059166c940 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1871.594571] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2086892, 'name': CreateVM_Task, 'duration_secs': 0.493755} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1871.608160] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 357f08c9-4de9-4b84-8384-6bf130872f40] Created VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1871.615086] env[63279]: DEBUG oslo_concurrency.lockutils [None req-fb63d81c-1d8d-4e24-b42b-e37e673a6c54 tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1871.615086] env[63279]: DEBUG oslo_concurrency.lockutils [None req-fb63d81c-1d8d-4e24-b42b-e37e673a6c54 tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1871.615086] env[63279]: DEBUG oslo_concurrency.lockutils [None req-fb63d81c-1d8d-4e24-b42b-e37e673a6c54 tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1871.615086] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ea8602b-c9bf-439f-83e7-024fe8cc17b7 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1871.618838] env[63279]: DEBUG nova.compute.manager [req-a34bf990-9837-4b6a-99be-33397dd8b772 req-15c6c5c5-76ff-4e77-985b-b949dfaea635 service nova] [instance: 357f08c9-4de9-4b84-8384-6bf130872f40] Received event network-changed-4160b9e6-5e90-458c-bb0f-afc6be383dc1 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 1871.619040] env[63279]: DEBUG nova.compute.manager [req-a34bf990-9837-4b6a-99be-33397dd8b772 req-15c6c5c5-76ff-4e77-985b-b949dfaea635 service nova] [instance: 357f08c9-4de9-4b84-8384-6bf130872f40] Refreshing instance network info cache due to event network-changed-4160b9e6-5e90-458c-bb0f-afc6be383dc1. 
{{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11655}} [ 1871.619256] env[63279]: DEBUG oslo_concurrency.lockutils [req-a34bf990-9837-4b6a-99be-33397dd8b772 req-15c6c5c5-76ff-4e77-985b-b949dfaea635 service nova] Acquiring lock "refresh_cache-357f08c9-4de9-4b84-8384-6bf130872f40" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1871.619394] env[63279]: DEBUG oslo_concurrency.lockutils [req-a34bf990-9837-4b6a-99be-33397dd8b772 req-15c6c5c5-76ff-4e77-985b-b949dfaea635 service nova] Acquired lock "refresh_cache-357f08c9-4de9-4b84-8384-6bf130872f40" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1871.619553] env[63279]: DEBUG nova.network.neutron [req-a34bf990-9837-4b6a-99be-33397dd8b772 req-15c6c5c5-76ff-4e77-985b-b949dfaea635 service nova] [instance: 357f08c9-4de9-4b84-8384-6bf130872f40] Refreshing network info cache for port 4160b9e6-5e90-458c-bb0f-afc6be383dc1 {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1871.621360] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-47d5ee7f-aedf-40b3-90c1-270287800b65 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1871.642036] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-8d7e8546-3d55-4a5a-887c-fa75360a7a03 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] [instance: 5656c853-ac83-47be-83c4-979a9e87ab91] Updating instance '5656c853-ac83-47be-83c4-979a9e87ab91' progress to 67 {{(pid=63279) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1871.649769] env[63279]: DEBUG oslo_vmware.api [None req-fb63d81c-1d8d-4e24-b42b-e37e673a6c54 tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] Waiting for the task: (returnval){ [ 1871.649769] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52887059-73a8-b631-01f9-1114628ed8f3" [ 1871.649769] env[63279]: _type = "Task" [ 1871.649769] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1871.662294] env[63279]: DEBUG oslo_vmware.api [None req-fb63d81c-1d8d-4e24-b42b-e37e673a6c54 tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52887059-73a8-b631-01f9-1114628ed8f3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1871.764487] env[63279]: DEBUG nova.network.neutron [None req-1c67a5c6-4ab3-4770-b276-ecfec0564f8f tempest-ServersTestManualDisk-725019117 tempest-ServersTestManualDisk-725019117-project-member] [instance: ecedded1-7169-49a4-8a9e-2fe4086db986] Successfully updated port: bbcf46a8-cec2-4c54-8c6a-c1ba126b1676 {{(pid=63279) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1871.932292] env[63279]: DEBUG oslo_vmware.api [None req-d9e47f76-071e-40ea-b4aa-ee775cfca50d tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Task: {'id': task-2086893, 'name': Rename_Task, 'duration_secs': 0.17506} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1871.933598] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-d9e47f76-071e-40ea-b4aa-ee775cfca50d tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] [instance: 5d4be656-defe-4332-b97e-e88b107ca4a1] Powering on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1871.934295] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0b861db-9366-433d-aa68-78e3155e8e03 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1871.936983] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-72b47898-356d-4dde-b4d1-0f6d04d253e3 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1871.945054] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-170c8637-04fb-4884-9aa0-16e3d04fe06e {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1871.949908] env[63279]: DEBUG oslo_vmware.api [None req-d9e47f76-071e-40ea-b4aa-ee775cfca50d tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Waiting for the task: (returnval){ [ 1871.949908] env[63279]: value = "task-2086894" [ 1871.949908] env[63279]: _type = "Task" [ 1871.949908] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1871.950723] env[63279]: DEBUG oslo_concurrency.lockutils [None req-1d7a8d6f-e8c1-43ff-8d0d-10d4ddd940b9 tempest-ServersAdminNegativeTestJSON-968304097 tempest-ServersAdminNegativeTestJSON-968304097-project-member] Acquiring lock "32e84715-0345-4171-abb7-c034a501347e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1871.950966] env[63279]: DEBUG oslo_concurrency.lockutils [None req-1d7a8d6f-e8c1-43ff-8d0d-10d4ddd940b9 tempest-ServersAdminNegativeTestJSON-968304097 tempest-ServersAdminNegativeTestJSON-968304097-project-member] Lock "32e84715-0345-4171-abb7-c034a501347e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1871.951184] env[63279]: DEBUG oslo_concurrency.lockutils [None req-1d7a8d6f-e8c1-43ff-8d0d-10d4ddd940b9 tempest-ServersAdminNegativeTestJSON-968304097 tempest-ServersAdminNegativeTestJSON-968304097-project-member] Acquiring lock "32e84715-0345-4171-abb7-c034a501347e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1871.951398] env[63279]: DEBUG oslo_concurrency.lockutils [None req-1d7a8d6f-e8c1-43ff-8d0d-10d4ddd940b9 tempest-ServersAdminNegativeTestJSON-968304097 tempest-ServersAdminNegativeTestJSON-968304097-project-member] Lock "32e84715-0345-4171-abb7-c034a501347e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63279) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1871.952360] env[63279]: DEBUG oslo_concurrency.lockutils [None req-1d7a8d6f-e8c1-43ff-8d0d-10d4ddd940b9 tempest-ServersAdminNegativeTestJSON-968304097 tempest-ServersAdminNegativeTestJSON-968304097-project-member] Lock "32e84715-0345-4171-abb7-c034a501347e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1871.957213] env[63279]: INFO nova.compute.manager [None req-1d7a8d6f-e8c1-43ff-8d0d-10d4ddd940b9 tempest-ServersAdminNegativeTestJSON-968304097 tempest-ServersAdminNegativeTestJSON-968304097-project-member] [instance: 32e84715-0345-4171-abb7-c034a501347e] Terminating instance [ 1871.988744] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11730204-c052-450c-b490-f0c030a0b167 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1871.995894] env[63279]: DEBUG oslo_vmware.api [None req-d9e47f76-071e-40ea-b4aa-ee775cfca50d tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Task: {'id': task-2086894, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1872.004721] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89a769bd-60a0-4313-aa2e-9592f27fe17c {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1872.008872] env[63279]: DEBUG oslo_vmware.api [None req-2348a2ee-5449-455f-aa56-c04b59d9fc77 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Task: {'id': task-2086889, 'name': CreateSnapshot_Task, 'duration_secs': 0.658476} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1872.009521] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-2348a2ee-5449-455f-aa56-c04b59d9fc77 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] [instance: 00fb1bc8-2f70-4fa3-a6a2-54fd38ba89c7] Created Snapshot of the VM instance {{(pid=63279) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1872.013540] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-467e7717-3014-48fb-81aa-23a73780a741 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1872.024101] env[63279]: DEBUG nova.compute.provider_tree [None req-4eb26c5f-1d6c-44a0-9365-feefd9e1d5a1 tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Updating inventory in ProviderTree for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1872.031366] env[63279]: DEBUG oslo_vmware.api [None req-ae5168ae-dd58-4ce3-bb92-e8f68886bb1b tempest-ImagesOneServerTestJSON-859902932 tempest-ImagesOneServerTestJSON-859902932-project-member] Task: {'id': task-2086887, 'name': CloneVM_Task, 'duration_secs': 1.879109} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1872.034566] env[63279]: INFO nova.virt.vmwareapi.vmops [None req-ae5168ae-dd58-4ce3-bb92-e8f68886bb1b tempest-ImagesOneServerTestJSON-859902932 tempest-ImagesOneServerTestJSON-859902932-project-member] [instance: 7cd673bb-3795-4c6f-a1e0-a8ea0da0a35f] Created linked-clone VM from snapshot [ 1872.037953] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26c43794-1e58-4f6e-a284-8ac3a3f82876 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1872.046828] env[63279]: DEBUG nova.virt.vmwareapi.images [None req-ae5168ae-dd58-4ce3-bb92-e8f68886bb1b tempest-ImagesOneServerTestJSON-859902932 tempest-ImagesOneServerTestJSON-859902932-project-member] [instance: 7cd673bb-3795-4c6f-a1e0-a8ea0da0a35f] Uploading image 164c17e3-3c50-4370-ab43-45bf9ea752e5 {{(pid=63279) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1872.075932] env[63279]: DEBUG oslo_vmware.rw_handles [None req-ae5168ae-dd58-4ce3-bb92-e8f68886bb1b tempest-ImagesOneServerTestJSON-859902932 tempest-ImagesOneServerTestJSON-859902932-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1872.075932] env[63279]: value = "vm-427557" [ 1872.075932] env[63279]: _type = "VirtualMachine" [ 1872.075932] env[63279]: }. 
{{(pid=63279) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1872.076609] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-91bef987-bd5b-4ff9-a22b-e265953878a2 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1872.085325] env[63279]: DEBUG oslo_vmware.rw_handles [None req-ae5168ae-dd58-4ce3-bb92-e8f68886bb1b tempest-ImagesOneServerTestJSON-859902932 tempest-ImagesOneServerTestJSON-859902932-project-member] Lease: (returnval){ [ 1872.085325] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]529c7069-4791-fceb-5cbd-6917fc2a2852" [ 1872.085325] env[63279]: _type = "HttpNfcLease" [ 1872.085325] env[63279]: } obtained for exporting VM: (result){ [ 1872.085325] env[63279]: value = "vm-427557" [ 1872.085325] env[63279]: _type = "VirtualMachine" [ 1872.085325] env[63279]: }. {{(pid=63279) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1872.085563] env[63279]: DEBUG oslo_vmware.api [None req-ae5168ae-dd58-4ce3-bb92-e8f68886bb1b tempest-ImagesOneServerTestJSON-859902932 tempest-ImagesOneServerTestJSON-859902932-project-member] Waiting for the lease: (returnval){ [ 1872.085563] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]529c7069-4791-fceb-5cbd-6917fc2a2852" [ 1872.085563] env[63279]: _type = "HttpNfcLease" [ 1872.085563] env[63279]: } to be ready. {{(pid=63279) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1872.095349] env[63279]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1872.095349] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]529c7069-4791-fceb-5cbd-6917fc2a2852" [ 1872.095349] env[63279]: _type = "HttpNfcLease" [ 1872.095349] env[63279]: } is initializing. {{(pid=63279) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1872.169507] env[63279]: DEBUG oslo_vmware.api [None req-fb63d81c-1d8d-4e24-b42b-e37e673a6c54 tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52887059-73a8-b631-01f9-1114628ed8f3, 'name': SearchDatastore_Task, 'duration_secs': 0.051868} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1872.169809] env[63279]: DEBUG oslo_concurrency.lockutils [None req-fb63d81c-1d8d-4e24-b42b-e37e673a6c54 tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1872.170062] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-fb63d81c-1d8d-4e24-b42b-e37e673a6c54 tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] [instance: 357f08c9-4de9-4b84-8384-6bf130872f40] Processing image 30887889-e45b-4f67-8b3c-16216e594a90 {{(pid=63279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1872.170320] env[63279]: DEBUG oslo_concurrency.lockutils [None req-fb63d81c-1d8d-4e24-b42b-e37e673a6c54 tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1872.170453] env[63279]: DEBUG oslo_concurrency.lockutils [None req-fb63d81c-1d8d-4e24-b42b-e37e673a6c54 tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1872.170630] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-fb63d81c-1d8d-4e24-b42b-e37e673a6c54 tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1872.170895] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8af2c301-4815-4356-a076-43135aa4e2d7 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1872.182460] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-fb63d81c-1d8d-4e24-b42b-e37e673a6c54 tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1872.182662] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-fb63d81c-1d8d-4e24-b42b-e37e673a6c54 tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1872.183411] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d3581a20-4d04-489b-81e8-af873fd1cfe0 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1872.189672] env[63279]: DEBUG oslo_vmware.api [None req-fb63d81c-1d8d-4e24-b42b-e37e673a6c54 tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] Waiting for the task: (returnval){ [ 1872.189672] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]527f026b-efba-46ff-09a1-dbcad669c58d" [ 1872.189672] env[63279]: _type = "Task" [ 1872.189672] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1872.198912] env[63279]: DEBUG oslo_vmware.api [None req-fb63d81c-1d8d-4e24-b42b-e37e673a6c54 tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]527f026b-efba-46ff-09a1-dbcad669c58d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1872.199703] env[63279]: DEBUG nova.network.neutron [None req-8d7e8546-3d55-4a5a-887c-fa75360a7a03 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] [instance: 5656c853-ac83-47be-83c4-979a9e87ab91] Port 2cc4a33a-bd88-4aec-a588-7c821bebf971 binding to destination host cpu-1 is already ACTIVE {{(pid=63279) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1872.267324] env[63279]: DEBUG oslo_concurrency.lockutils [None req-1c67a5c6-4ab3-4770-b276-ecfec0564f8f tempest-ServersTestManualDisk-725019117 tempest-ServersTestManualDisk-725019117-project-member] Acquiring lock "refresh_cache-ecedded1-7169-49a4-8a9e-2fe4086db986" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1872.267488] env[63279]: DEBUG oslo_concurrency.lockutils [None req-1c67a5c6-4ab3-4770-b276-ecfec0564f8f tempest-ServersTestManualDisk-725019117 tempest-ServersTestManualDisk-725019117-project-member] Acquired lock "refresh_cache-ecedded1-7169-49a4-8a9e-2fe4086db986" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1872.267647] env[63279]: DEBUG nova.network.neutron [None req-1c67a5c6-4ab3-4770-b276-ecfec0564f8f tempest-ServersTestManualDisk-725019117 tempest-ServersTestManualDisk-725019117-project-member] [instance: ecedded1-7169-49a4-8a9e-2fe4086db986] Building network info cache for instance {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1872.384818] env[63279]: DEBUG nova.network.neutron [req-a34bf990-9837-4b6a-99be-33397dd8b772 req-15c6c5c5-76ff-4e77-985b-b949dfaea635 service nova] [instance: 357f08c9-4de9-4b84-8384-6bf130872f40] Updated VIF entry in instance network info cache for port 4160b9e6-5e90-458c-bb0f-afc6be383dc1. 
{{(pid=63279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1872.385205] env[63279]: DEBUG nova.network.neutron [req-a34bf990-9837-4b6a-99be-33397dd8b772 req-15c6c5c5-76ff-4e77-985b-b949dfaea635 service nova] [instance: 357f08c9-4de9-4b84-8384-6bf130872f40] Updating instance_info_cache with network_info: [{"id": "4160b9e6-5e90-458c-bb0f-afc6be383dc1", "address": "fa:16:3e:0b:30:a6", "network": {"id": "948d327b-554a-4c1d-a483-9a067d60f6bc", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1383523654-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ba92e8896d7a4605bec96ce7ee7d4a4d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "471f65a5-21ea-45e3-a722-4e204ed65673", "external-id": "nsx-vlan-transportzone-139", "segmentation_id": 139, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4160b9e6-5e", "ovs_interfaceid": "4160b9e6-5e90-458c-bb0f-afc6be383dc1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1872.461316] env[63279]: DEBUG oslo_vmware.api [None req-d9e47f76-071e-40ea-b4aa-ee775cfca50d tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Task: {'id': task-2086894, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1872.495674] env[63279]: DEBUG nova.compute.manager [None req-1d7a8d6f-e8c1-43ff-8d0d-10d4ddd940b9 tempest-ServersAdminNegativeTestJSON-968304097 tempest-ServersAdminNegativeTestJSON-968304097-project-member] [instance: 32e84715-0345-4171-abb7-c034a501347e] Start destroying the instance on the hypervisor. 
{{(pid=63279) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1872.495893] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-1d7a8d6f-e8c1-43ff-8d0d-10d4ddd940b9 tempest-ServersAdminNegativeTestJSON-968304097 tempest-ServersAdminNegativeTestJSON-968304097-project-member] [instance: 32e84715-0345-4171-abb7-c034a501347e] Destroying instance {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1872.496951] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-420b642a-ed64-4847-8e4c-edd33de07652 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1872.505248] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-1d7a8d6f-e8c1-43ff-8d0d-10d4ddd940b9 tempest-ServersAdminNegativeTestJSON-968304097 tempest-ServersAdminNegativeTestJSON-968304097-project-member] [instance: 32e84715-0345-4171-abb7-c034a501347e] Unregistering the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1872.505525] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-00eaf14f-0468-456a-ac03-cc0993d19041 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1872.549457] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-2348a2ee-5449-455f-aa56-c04b59d9fc77 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] [instance: 00fb1bc8-2f70-4fa3-a6a2-54fd38ba89c7] Creating linked-clone VM from snapshot {{(pid=63279) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1872.549996] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-1a0a5165-685c-41cb-a857-c004e2adedad {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1872.561666] env[63279]: DEBUG oslo_vmware.api [None req-2348a2ee-5449-455f-aa56-c04b59d9fc77 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Waiting for the task: (returnval){ [ 1872.561666] env[63279]: value = "task-2086897" [ 1872.561666] env[63279]: _type = "Task" [ 1872.561666] env[63279]: } to complete. 
{{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1872.566359] env[63279]: DEBUG nova.scheduler.client.report [None req-4eb26c5f-1d6c-44a0-9365-feefd9e1d5a1 tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Updated inventory for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 with generation 53 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1872.566613] env[63279]: DEBUG nova.compute.provider_tree [None req-4eb26c5f-1d6c-44a0-9365-feefd9e1d5a1 tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Updating resource provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 generation from 53 to 54 during operation: update_inventory {{(pid=63279) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1872.566796] env[63279]: DEBUG nova.compute.provider_tree [None req-4eb26c5f-1d6c-44a0-9365-feefd9e1d5a1 tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Updating inventory in ProviderTree for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1872.574710] env[63279]: DEBUG oslo_vmware.api [None req-2348a2ee-5449-455f-aa56-c04b59d9fc77 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Task: {'id': task-2086897, 'name': CloneVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1872.598481] env[63279]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1872.598481] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]529c7069-4791-fceb-5cbd-6917fc2a2852" [ 1872.598481] env[63279]: _type = "HttpNfcLease" [ 1872.598481] env[63279]: } is ready. {{(pid=63279) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1872.598719] env[63279]: DEBUG oslo_vmware.rw_handles [None req-ae5168ae-dd58-4ce3-bb92-e8f68886bb1b tempest-ImagesOneServerTestJSON-859902932 tempest-ImagesOneServerTestJSON-859902932-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1872.598719] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]529c7069-4791-fceb-5cbd-6917fc2a2852" [ 1872.598719] env[63279]: _type = "HttpNfcLease" [ 1872.598719] env[63279]: }. 
{{(pid=63279) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1872.599794] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6baf0c1c-bec6-4ff4-885d-9611c17baf4f {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1872.608120] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-1d7a8d6f-e8c1-43ff-8d0d-10d4ddd940b9 tempest-ServersAdminNegativeTestJSON-968304097 tempest-ServersAdminNegativeTestJSON-968304097-project-member] [instance: 32e84715-0345-4171-abb7-c034a501347e] Unregistered the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1872.608120] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-1d7a8d6f-e8c1-43ff-8d0d-10d4ddd940b9 tempest-ServersAdminNegativeTestJSON-968304097 tempest-ServersAdminNegativeTestJSON-968304097-project-member] [instance: 32e84715-0345-4171-abb7-c034a501347e] Deleting contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1872.608120] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-1d7a8d6f-e8c1-43ff-8d0d-10d4ddd940b9 tempest-ServersAdminNegativeTestJSON-968304097 tempest-ServersAdminNegativeTestJSON-968304097-project-member] Deleting the datastore file [datastore1] 32e84715-0345-4171-abb7-c034a501347e {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1872.608120] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-26825211-23a5-4076-b1a5-4ae4bd9696a8 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1872.611894] env[63279]: DEBUG oslo_vmware.rw_handles [None req-ae5168ae-dd58-4ce3-bb92-e8f68886bb1b tempest-ImagesOneServerTestJSON-859902932 tempest-ImagesOneServerTestJSON-859902932-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52e0f1db-9f73-a702-251e-169ec188cf05/disk-0.vmdk from lease info. {{(pid=63279) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1872.612346] env[63279]: DEBUG oslo_vmware.rw_handles [None req-ae5168ae-dd58-4ce3-bb92-e8f68886bb1b tempest-ImagesOneServerTestJSON-859902932 tempest-ImagesOneServerTestJSON-859902932-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52e0f1db-9f73-a702-251e-169ec188cf05/disk-0.vmdk for reading. {{(pid=63279) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1872.674596] env[63279]: DEBUG oslo_vmware.api [None req-1d7a8d6f-e8c1-43ff-8d0d-10d4ddd940b9 tempest-ServersAdminNegativeTestJSON-968304097 tempest-ServersAdminNegativeTestJSON-968304097-project-member] Waiting for the task: (returnval){ [ 1872.674596] env[63279]: value = "task-2086898" [ 1872.674596] env[63279]: _type = "Task" [ 1872.674596] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1872.686258] env[63279]: DEBUG oslo_vmware.api [None req-1d7a8d6f-e8c1-43ff-8d0d-10d4ddd940b9 tempest-ServersAdminNegativeTestJSON-968304097 tempest-ServersAdminNegativeTestJSON-968304097-project-member] Task: {'id': task-2086898, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1872.706684] env[63279]: DEBUG oslo_vmware.api [None req-fb63d81c-1d8d-4e24-b42b-e37e673a6c54 tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]527f026b-efba-46ff-09a1-dbcad669c58d, 'name': SearchDatastore_Task, 'duration_secs': 0.016294} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1872.708163] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-df9846a0-7513-459c-bcb4-f1350c6edf4b {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1872.720201] env[63279]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-53051b31-f2ca-4c32-a0ca-a9a28e03ede7 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1872.725556] env[63279]: DEBUG oslo_vmware.api [None req-fb63d81c-1d8d-4e24-b42b-e37e673a6c54 tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] Waiting for the task: (returnval){ [ 1872.725556] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52353919-7daf-fd56-79fd-e51b196ba85b" [ 1872.725556] env[63279]: _type = "Task" [ 1872.725556] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1872.741262] env[63279]: DEBUG oslo_vmware.api [None req-fb63d81c-1d8d-4e24-b42b-e37e673a6c54 tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52353919-7daf-fd56-79fd-e51b196ba85b, 'name': SearchDatastore_Task, 'duration_secs': 0.018584} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1872.741884] env[63279]: DEBUG oslo_concurrency.lockutils [None req-fb63d81c-1d8d-4e24-b42b-e37e673a6c54 tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1872.742163] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-fb63d81c-1d8d-4e24-b42b-e37e673a6c54 tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] 357f08c9-4de9-4b84-8384-6bf130872f40/357f08c9-4de9-4b84-8384-6bf130872f40.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1872.742606] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4c221ed0-05bc-4f37-8ebb-71db8926ab3c {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1872.759611] env[63279]: DEBUG oslo_vmware.api [None req-fb63d81c-1d8d-4e24-b42b-e37e673a6c54 tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] Waiting for the task: (returnval){ [ 1872.759611] env[63279]: value = "task-2086899" [ 1872.759611] env[63279]: _type = "Task" [ 1872.759611] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1872.770463] env[63279]: DEBUG oslo_vmware.api [None req-fb63d81c-1d8d-4e24-b42b-e37e673a6c54 tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] Task: {'id': task-2086899, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1872.827818] env[63279]: DEBUG nova.network.neutron [None req-1c67a5c6-4ab3-4770-b276-ecfec0564f8f tempest-ServersTestManualDisk-725019117 tempest-ServersTestManualDisk-725019117-project-member] [instance: ecedded1-7169-49a4-8a9e-2fe4086db986] Instance cache missing network info. 
{{(pid=63279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1872.844674] env[63279]: DEBUG oslo_concurrency.lockutils [None req-a1db1d2b-13f2-45f2-897f-ab8f0397bf37 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Acquiring lock "41654a82-af5e-463e-80f9-86ba13a5ad2e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1872.846357] env[63279]: DEBUG oslo_concurrency.lockutils [None req-a1db1d2b-13f2-45f2-897f-ab8f0397bf37 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Lock "41654a82-af5e-463e-80f9-86ba13a5ad2e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1872.846357] env[63279]: DEBUG oslo_concurrency.lockutils [None req-a1db1d2b-13f2-45f2-897f-ab8f0397bf37 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Acquiring lock "41654a82-af5e-463e-80f9-86ba13a5ad2e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1872.846357] env[63279]: DEBUG oslo_concurrency.lockutils [None req-a1db1d2b-13f2-45f2-897f-ab8f0397bf37 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Lock "41654a82-af5e-463e-80f9-86ba13a5ad2e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1872.846357] env[63279]: DEBUG oslo_concurrency.lockutils [None req-a1db1d2b-13f2-45f2-897f-ab8f0397bf37 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Lock "41654a82-af5e-463e-80f9-86ba13a5ad2e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1872.850061] env[63279]: INFO nova.compute.manager [None req-a1db1d2b-13f2-45f2-897f-ab8f0397bf37 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: 41654a82-af5e-463e-80f9-86ba13a5ad2e] Terminating instance [ 1872.888221] env[63279]: DEBUG oslo_concurrency.lockutils [req-a34bf990-9837-4b6a-99be-33397dd8b772 req-15c6c5c5-76ff-4e77-985b-b949dfaea635 service nova] Releasing lock "refresh_cache-357f08c9-4de9-4b84-8384-6bf130872f40" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1872.969024] env[63279]: DEBUG oslo_vmware.api [None req-d9e47f76-071e-40ea-b4aa-ee775cfca50d tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Task: {'id': task-2086894, 'name': PowerOnVM_Task, 'duration_secs': 0.753939} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1872.969024] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-d9e47f76-071e-40ea-b4aa-ee775cfca50d tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] [instance: 5d4be656-defe-4332-b97e-e88b107ca4a1] Powered on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1872.969256] env[63279]: INFO nova.compute.manager [None req-d9e47f76-071e-40ea-b4aa-ee775cfca50d tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] [instance: 5d4be656-defe-4332-b97e-e88b107ca4a1] Took 10.53 seconds to spawn the instance on the hypervisor. [ 1872.969704] env[63279]: DEBUG nova.compute.manager [None req-d9e47f76-071e-40ea-b4aa-ee775cfca50d tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] [instance: 5d4be656-defe-4332-b97e-e88b107ca4a1] Checking state {{(pid=63279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1872.971049] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9783a1ba-be11-4a89-b3c2-fb51d804bf98 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1873.043716] env[63279]: DEBUG nova.network.neutron [None req-1c67a5c6-4ab3-4770-b276-ecfec0564f8f tempest-ServersTestManualDisk-725019117 tempest-ServersTestManualDisk-725019117-project-member] [instance: ecedded1-7169-49a4-8a9e-2fe4086db986] Updating instance_info_cache with network_info: [{"id": "bbcf46a8-cec2-4c54-8c6a-c1ba126b1676", "address": "fa:16:3e:e1:30:df", "network": {"id": "08d905b3-a951-4c93-b0ed-a9e73ab6b443", "bridge": "br-int", "label": "tempest-ServersTestManualDisk-1358149569-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e271c0d8278c4192950ebc988485dd5f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7b2049d7-f99e-425a-afdb-2c95ca88e483", "external-id": "nsx-vlan-transportzone-803", "segmentation_id": 803, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbbcf46a8-ce", "ovs_interfaceid": "bbcf46a8-cec2-4c54-8c6a-c1ba126b1676", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1873.074486] env[63279]: DEBUG oslo_vmware.api [None req-2348a2ee-5449-455f-aa56-c04b59d9fc77 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Task: {'id': task-2086897, 'name': CloneVM_Task} progress is 94%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1873.075497] env[63279]: DEBUG oslo_concurrency.lockutils [None req-4eb26c5f-1d6c-44a0-9365-feefd9e1d5a1 tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 4.153s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1873.075897] env[63279]: DEBUG nova.compute.manager [None req-4eb26c5f-1d6c-44a0-9365-feefd9e1d5a1 tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] [instance: ff2f355a-9687-4491-b243-6133e4b7b866] Start building networks asynchronously for instance. {{(pid=63279) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1873.078594] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 28.852s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1873.078825] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1873.078917] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=63279) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1873.079240] env[63279]: DEBUG oslo_concurrency.lockutils [None req-3e128472-436c-4de9-a273-dfc54e42830f tempest-ServersV294TestFqdnHostnames-110312040 tempest-ServersV294TestFqdnHostnames-110312040-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 27.419s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1873.080872] env[63279]: INFO nova.compute.claims [None req-3e128472-436c-4de9-a273-dfc54e42830f tempest-ServersV294TestFqdnHostnames-110312040 tempest-ServersV294TestFqdnHostnames-110312040-project-member] [instance: 32dbef6d-d314-4fa6-972a-e7b1f22eb11d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1873.085746] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a71ac8e2-6963-4288-b6fd-5b050ff774d7 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1873.094748] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0d433dc-2dec-40e9-a150-b7a7910597d7 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1873.110594] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3eb13d7c-d135-4c03-a677-f93d0ddf0037 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
1873.118913] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44abaa0e-a991-4131-86c2-30ec4524c6f6 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1873.155152] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181193MB free_disk=170GB free_vcpus=48 pci_devices=None {{(pid=63279) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1873.155332] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1873.187341] env[63279]: DEBUG oslo_vmware.api [None req-1d7a8d6f-e8c1-43ff-8d0d-10d4ddd940b9 tempest-ServersAdminNegativeTestJSON-968304097 tempest-ServersAdminNegativeTestJSON-968304097-project-member] Task: {'id': task-2086898, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.308223} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1873.187341] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-1d7a8d6f-e8c1-43ff-8d0d-10d4ddd940b9 tempest-ServersAdminNegativeTestJSON-968304097 tempest-ServersAdminNegativeTestJSON-968304097-project-member] Deleted the datastore file {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1873.187341] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-1d7a8d6f-e8c1-43ff-8d0d-10d4ddd940b9 tempest-ServersAdminNegativeTestJSON-968304097 tempest-ServersAdminNegativeTestJSON-968304097-project-member] [instance: 32e84715-0345-4171-abb7-c034a501347e] Deleted contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1873.187341] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-1d7a8d6f-e8c1-43ff-8d0d-10d4ddd940b9 tempest-ServersAdminNegativeTestJSON-968304097 tempest-ServersAdminNegativeTestJSON-968304097-project-member] [instance: 32e84715-0345-4171-abb7-c034a501347e] Instance destroyed {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1873.187341] env[63279]: INFO nova.compute.manager [None req-1d7a8d6f-e8c1-43ff-8d0d-10d4ddd940b9 tempest-ServersAdminNegativeTestJSON-968304097 tempest-ServersAdminNegativeTestJSON-968304097-project-member] [instance: 32e84715-0345-4171-abb7-c034a501347e] Took 0.69 seconds to destroy the instance on the hypervisor. [ 1873.187691] env[63279]: DEBUG oslo.service.loopingcall [None req-1d7a8d6f-e8c1-43ff-8d0d-10d4ddd940b9 tempest-ServersAdminNegativeTestJSON-968304097 tempest-ServersAdminNegativeTestJSON-968304097-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1873.187691] env[63279]: DEBUG nova.compute.manager [-] [instance: 32e84715-0345-4171-abb7-c034a501347e] Deallocating network for instance {{(pid=63279) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1873.187691] env[63279]: DEBUG nova.network.neutron [-] [instance: 32e84715-0345-4171-abb7-c034a501347e] deallocate_for_instance() {{(pid=63279) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1873.226402] env[63279]: DEBUG oslo_concurrency.lockutils [None req-8d7e8546-3d55-4a5a-887c-fa75360a7a03 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Acquiring lock "5656c853-ac83-47be-83c4-979a9e87ab91-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1873.226788] env[63279]: DEBUG oslo_concurrency.lockutils [None req-8d7e8546-3d55-4a5a-887c-fa75360a7a03 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Lock "5656c853-ac83-47be-83c4-979a9e87ab91-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1873.226788] env[63279]: DEBUG oslo_concurrency.lockutils [None req-8d7e8546-3d55-4a5a-887c-fa75360a7a03 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Lock "5656c853-ac83-47be-83c4-979a9e87ab91-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1873.272671] env[63279]: DEBUG oslo_vmware.api [None req-fb63d81c-1d8d-4e24-b42b-e37e673a6c54 tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] Task: {'id': task-2086899, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1873.356025] env[63279]: DEBUG nova.compute.manager [None req-a1db1d2b-13f2-45f2-897f-ab8f0397bf37 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: 41654a82-af5e-463e-80f9-86ba13a5ad2e] Start destroying the instance on the hypervisor. 
{{(pid=63279) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1873.356025] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-a1db1d2b-13f2-45f2-897f-ab8f0397bf37 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: 41654a82-af5e-463e-80f9-86ba13a5ad2e] Destroying instance {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1873.356025] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2a718aa-12fd-4738-b007-9a1546294030 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1873.366625] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-a1db1d2b-13f2-45f2-897f-ab8f0397bf37 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: 41654a82-af5e-463e-80f9-86ba13a5ad2e] Powering off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1873.367366] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2cea02e4-9635-4a8f-9a69-52f4b320b108 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1873.377607] env[63279]: DEBUG oslo_vmware.api [None req-a1db1d2b-13f2-45f2-897f-ab8f0397bf37 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Waiting for the task: (returnval){ [ 1873.377607] env[63279]: value = "task-2086900" [ 1873.377607] env[63279]: _type = "Task" [ 1873.377607] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1873.385609] env[63279]: DEBUG oslo_vmware.api [None req-a1db1d2b-13f2-45f2-897f-ab8f0397bf37 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Task: {'id': task-2086900, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1873.409339] env[63279]: DEBUG oslo_concurrency.lockutils [None req-a0e5192a-b319-4ffb-924f-54f949fdcc17 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Acquiring lock "ad435281-55a0-418a-8400-5c461a5c15ef" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1873.410593] env[63279]: DEBUG oslo_concurrency.lockutils [None req-a0e5192a-b319-4ffb-924f-54f949fdcc17 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Lock "ad435281-55a0-418a-8400-5c461a5c15ef" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1873.499938] env[63279]: INFO nova.compute.manager [None req-d9e47f76-071e-40ea-b4aa-ee775cfca50d tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] [instance: 5d4be656-defe-4332-b97e-e88b107ca4a1] Took 38.19 seconds to build instance. 
[ 1873.545900] env[63279]: DEBUG oslo_concurrency.lockutils [None req-1c67a5c6-4ab3-4770-b276-ecfec0564f8f tempest-ServersTestManualDisk-725019117 tempest-ServersTestManualDisk-725019117-project-member] Releasing lock "refresh_cache-ecedded1-7169-49a4-8a9e-2fe4086db986" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1873.545900] env[63279]: DEBUG nova.compute.manager [None req-1c67a5c6-4ab3-4770-b276-ecfec0564f8f tempest-ServersTestManualDisk-725019117 tempest-ServersTestManualDisk-725019117-project-member] [instance: ecedded1-7169-49a4-8a9e-2fe4086db986] Instance network_info: |[{"id": "bbcf46a8-cec2-4c54-8c6a-c1ba126b1676", "address": "fa:16:3e:e1:30:df", "network": {"id": "08d905b3-a951-4c93-b0ed-a9e73ab6b443", "bridge": "br-int", "label": "tempest-ServersTestManualDisk-1358149569-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e271c0d8278c4192950ebc988485dd5f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7b2049d7-f99e-425a-afdb-2c95ca88e483", "external-id": "nsx-vlan-transportzone-803", "segmentation_id": 803, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbbcf46a8-ce", "ovs_interfaceid": "bbcf46a8-cec2-4c54-8c6a-c1ba126b1676", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1873.547909] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-1c67a5c6-4ab3-4770-b276-ecfec0564f8f tempest-ServersTestManualDisk-725019117 tempest-ServersTestManualDisk-725019117-project-member] [instance: ecedded1-7169-49a4-8a9e-2fe4086db986] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e1:30:df', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '7b2049d7-f99e-425a-afdb-2c95ca88e483', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'bbcf46a8-cec2-4c54-8c6a-c1ba126b1676', 'vif_model': 'vmxnet3'}] {{(pid=63279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1873.558632] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-1c67a5c6-4ab3-4770-b276-ecfec0564f8f tempest-ServersTestManualDisk-725019117 tempest-ServersTestManualDisk-725019117-project-member] Creating folder: Project (e271c0d8278c4192950ebc988485dd5f). Parent ref: group-v427491. {{(pid=63279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1873.565729] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-846f2fe3-f9db-40e6-9b12-082a4005f93a {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1873.577645] env[63279]: DEBUG oslo_vmware.api [None req-2348a2ee-5449-455f-aa56-c04b59d9fc77 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Task: {'id': task-2086897, 'name': CloneVM_Task} progress is 94%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1873.579987] env[63279]: INFO nova.virt.vmwareapi.vm_util [None req-1c67a5c6-4ab3-4770-b276-ecfec0564f8f tempest-ServersTestManualDisk-725019117 tempest-ServersTestManualDisk-725019117-project-member] Created folder: Project (e271c0d8278c4192950ebc988485dd5f) in parent group-v427491. [ 1873.580229] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-1c67a5c6-4ab3-4770-b276-ecfec0564f8f tempest-ServersTestManualDisk-725019117 tempest-ServersTestManualDisk-725019117-project-member] Creating folder: Instances. Parent ref: group-v427563. {{(pid=63279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1873.580494] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d34cd1ab-e2ae-47e8-800f-62f5b19fbfaa {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1873.590704] env[63279]: DEBUG nova.compute.utils [None req-4eb26c5f-1d6c-44a0-9365-feefd9e1d5a1 tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Using /dev/sd instead of None {{(pid=63279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1873.592391] env[63279]: DEBUG nova.compute.manager [None req-4eb26c5f-1d6c-44a0-9365-feefd9e1d5a1 tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] [instance: ff2f355a-9687-4491-b243-6133e4b7b866] Allocating IP information in the background. {{(pid=63279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1873.592700] env[63279]: DEBUG nova.network.neutron [None req-4eb26c5f-1d6c-44a0-9365-feefd9e1d5a1 tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] [instance: ff2f355a-9687-4491-b243-6133e4b7b866] allocate_for_instance() {{(pid=63279) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1873.595167] env[63279]: INFO nova.virt.vmwareapi.vm_util [None req-1c67a5c6-4ab3-4770-b276-ecfec0564f8f tempest-ServersTestManualDisk-725019117 tempest-ServersTestManualDisk-725019117-project-member] Created folder: Instances in parent group-v427563. [ 1873.595484] env[63279]: DEBUG oslo.service.loopingcall [None req-1c67a5c6-4ab3-4770-b276-ecfec0564f8f tempest-ServersTestManualDisk-725019117 tempest-ServersTestManualDisk-725019117-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1873.596763] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ecedded1-7169-49a4-8a9e-2fe4086db986] Creating VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1873.597639] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-bc01ed69-1a9c-4e5c-b162-6ec95b87a26f {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1873.626020] env[63279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1873.626020] env[63279]: value = "task-2086903" [ 1873.626020] env[63279]: _type = "Task" [ 1873.626020] env[63279]: } to complete. 
{{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1873.634163] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2086903, 'name': CreateVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1873.657343] env[63279]: DEBUG nova.policy [None req-4eb26c5f-1d6c-44a0-9365-feefd9e1d5a1 tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '92ab38bbf74d4cccb2637786a651ed5d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3ea345c4bc9b4781acb6f774ac88c690', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63279) authorize /opt/stack/nova/nova/policy.py:192}} [ 1873.693383] env[63279]: DEBUG nova.compute.manager [req-d3176472-1ae3-47a7-93b4-3526e78b873b req-47b2046d-7d67-4dce-96e6-2230ebaf358c service nova] [instance: ecedded1-7169-49a4-8a9e-2fe4086db986] Received event network-vif-plugged-bbcf46a8-cec2-4c54-8c6a-c1ba126b1676 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 1873.693914] env[63279]: DEBUG oslo_concurrency.lockutils [req-d3176472-1ae3-47a7-93b4-3526e78b873b req-47b2046d-7d67-4dce-96e6-2230ebaf358c service nova] Acquiring lock "ecedded1-7169-49a4-8a9e-2fe4086db986-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1873.697086] env[63279]: DEBUG oslo_concurrency.lockutils [req-d3176472-1ae3-47a7-93b4-3526e78b873b req-47b2046d-7d67-4dce-96e6-2230ebaf358c service nova] Lock "ecedded1-7169-49a4-8a9e-2fe4086db986-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1873.697086] env[63279]: DEBUG oslo_concurrency.lockutils [req-d3176472-1ae3-47a7-93b4-3526e78b873b req-47b2046d-7d67-4dce-96e6-2230ebaf358c service nova] Lock "ecedded1-7169-49a4-8a9e-2fe4086db986-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1873.697086] env[63279]: DEBUG nova.compute.manager [req-d3176472-1ae3-47a7-93b4-3526e78b873b req-47b2046d-7d67-4dce-96e6-2230ebaf358c service nova] [instance: ecedded1-7169-49a4-8a9e-2fe4086db986] No waiting events found dispatching network-vif-plugged-bbcf46a8-cec2-4c54-8c6a-c1ba126b1676 {{(pid=63279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1873.697086] env[63279]: WARNING nova.compute.manager [req-d3176472-1ae3-47a7-93b4-3526e78b873b req-47b2046d-7d67-4dce-96e6-2230ebaf358c service nova] [instance: ecedded1-7169-49a4-8a9e-2fe4086db986] Received unexpected event network-vif-plugged-bbcf46a8-cec2-4c54-8c6a-c1ba126b1676 for instance with vm_state building and task_state spawning. 
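(Aside, not part of the captured log.) The entries just above show the recurring oslo.vmware pattern: a vCenter task is issued (here CreateVM_Task, task-2086903), the caller logs "Waiting for the task ... to complete", and _poll_task then reports progress until the task finishes. A minimal sketch of that wait-and-poll loop follows; the session object and get_task_info helper are hypothetical placeholders for illustration, not the real oslo.vmware client API.

# Illustrative sketch only (assumed helpers, not oslo.vmware internals):
# issue a vCenter task, then poll it until success or error, mirroring the
# "Waiting for the task ..." / "progress is N%" entries above.
import time

POLL_INTERVAL = 0.5  # seconds between progress checks

def wait_for_task(session, task_ref, timeout=300):
    """Poll a task reference until it reaches a terminal state."""
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        info = session.get_task_info(task_ref)   # hypothetical helper
        if info.state == 'success':
            return info.result
        if info.state == 'error':
            raise RuntimeError(f"Task {task_ref} failed: {info.error}")
        # 'queued' or 'running': report progress and poll again
        print(f"Task {task_ref} progress is {info.progress or 0}%")
        time.sleep(POLL_INTERVAL)
    raise TimeoutError(f"Task {task_ref} did not complete within {timeout}s")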
[ 1873.697086] env[63279]: DEBUG nova.compute.manager [req-d3176472-1ae3-47a7-93b4-3526e78b873b req-47b2046d-7d67-4dce-96e6-2230ebaf358c service nova] [instance: ecedded1-7169-49a4-8a9e-2fe4086db986] Received event network-changed-bbcf46a8-cec2-4c54-8c6a-c1ba126b1676 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 1873.697311] env[63279]: DEBUG nova.compute.manager [req-d3176472-1ae3-47a7-93b4-3526e78b873b req-47b2046d-7d67-4dce-96e6-2230ebaf358c service nova] [instance: ecedded1-7169-49a4-8a9e-2fe4086db986] Refreshing instance network info cache due to event network-changed-bbcf46a8-cec2-4c54-8c6a-c1ba126b1676. {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11655}} [ 1873.697311] env[63279]: DEBUG oslo_concurrency.lockutils [req-d3176472-1ae3-47a7-93b4-3526e78b873b req-47b2046d-7d67-4dce-96e6-2230ebaf358c service nova] Acquiring lock "refresh_cache-ecedded1-7169-49a4-8a9e-2fe4086db986" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1873.697311] env[63279]: DEBUG oslo_concurrency.lockutils [req-d3176472-1ae3-47a7-93b4-3526e78b873b req-47b2046d-7d67-4dce-96e6-2230ebaf358c service nova] Acquired lock "refresh_cache-ecedded1-7169-49a4-8a9e-2fe4086db986" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1873.697311] env[63279]: DEBUG nova.network.neutron [req-d3176472-1ae3-47a7-93b4-3526e78b873b req-47b2046d-7d67-4dce-96e6-2230ebaf358c service nova] [instance: ecedded1-7169-49a4-8a9e-2fe4086db986] Refreshing network info cache for port bbcf46a8-cec2-4c54-8c6a-c1ba126b1676 {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1873.772041] env[63279]: DEBUG oslo_vmware.api [None req-fb63d81c-1d8d-4e24-b42b-e37e673a6c54 tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] Task: {'id': task-2086899, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.600569} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1873.772468] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-fb63d81c-1d8d-4e24-b42b-e37e673a6c54 tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] 357f08c9-4de9-4b84-8384-6bf130872f40/357f08c9-4de9-4b84-8384-6bf130872f40.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1873.773410] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-fb63d81c-1d8d-4e24-b42b-e37e673a6c54 tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] [instance: 357f08c9-4de9-4b84-8384-6bf130872f40] Extending root virtual disk to 1048576 {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1873.773410] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d5debd93-907d-4e6c-b92b-1618c0a786b6 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1873.780637] env[63279]: DEBUG oslo_vmware.api [None req-fb63d81c-1d8d-4e24-b42b-e37e673a6c54 tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] Waiting for the task: (returnval){ [ 1873.780637] env[63279]: value = "task-2086904" [ 1873.780637] env[63279]: _type = "Task" [ 1873.780637] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1873.790590] env[63279]: DEBUG oslo_vmware.api [None req-fb63d81c-1d8d-4e24-b42b-e37e673a6c54 tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] Task: {'id': task-2086904, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1873.886782] env[63279]: DEBUG oslo_vmware.api [None req-a1db1d2b-13f2-45f2-897f-ab8f0397bf37 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Task: {'id': task-2086900, 'name': PowerOffVM_Task, 'duration_secs': 0.156209} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1873.887801] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-a1db1d2b-13f2-45f2-897f-ab8f0397bf37 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: 41654a82-af5e-463e-80f9-86ba13a5ad2e] Powered off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1873.888091] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-a1db1d2b-13f2-45f2-897f-ab8f0397bf37 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: 41654a82-af5e-463e-80f9-86ba13a5ad2e] Unregistering the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1873.888418] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2ea6d66f-b929-4db2-acf5-388ccf65a351 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1874.002678] env[63279]: DEBUG oslo_concurrency.lockutils [None req-d9e47f76-071e-40ea-b4aa-ee775cfca50d tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Lock "5d4be656-defe-4332-b97e-e88b107ca4a1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 50.194s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1874.041609] env[63279]: DEBUG nova.network.neutron [-] [instance: 32e84715-0345-4171-abb7-c034a501347e] Updating instance_info_cache with network_info: [] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1874.048876] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-a1db1d2b-13f2-45f2-897f-ab8f0397bf37 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: 41654a82-af5e-463e-80f9-86ba13a5ad2e] Unregistered the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1874.049279] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-a1db1d2b-13f2-45f2-897f-ab8f0397bf37 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: 41654a82-af5e-463e-80f9-86ba13a5ad2e] Deleting contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1874.049357] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-a1db1d2b-13f2-45f2-897f-ab8f0397bf37 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Deleting the datastore file [datastore1] 41654a82-af5e-463e-80f9-86ba13a5ad2e {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1874.049620] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d25bb1ae-9875-473e-8f17-3fe79a80799d {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1874.062610] env[63279]: DEBUG oslo_vmware.api [None req-a1db1d2b-13f2-45f2-897f-ab8f0397bf37 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Waiting for the task: (returnval){ [ 1874.062610] env[63279]: value = "task-2086906" [ 1874.062610] env[63279]: _type = "Task" [ 
1874.062610] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1874.075627] env[63279]: DEBUG oslo_vmware.api [None req-a1db1d2b-13f2-45f2-897f-ab8f0397bf37 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Task: {'id': task-2086906, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1874.081877] env[63279]: DEBUG oslo_vmware.api [None req-2348a2ee-5449-455f-aa56-c04b59d9fc77 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Task: {'id': task-2086897, 'name': CloneVM_Task} progress is 95%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1874.098141] env[63279]: DEBUG nova.compute.manager [None req-4eb26c5f-1d6c-44a0-9365-feefd9e1d5a1 tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] [instance: ff2f355a-9687-4491-b243-6133e4b7b866] Start building block device mappings for instance. {{(pid=63279) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1874.140165] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2086903, 'name': CreateVM_Task} progress is 99%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1874.240037] env[63279]: DEBUG nova.network.neutron [None req-4eb26c5f-1d6c-44a0-9365-feefd9e1d5a1 tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] [instance: ff2f355a-9687-4491-b243-6133e4b7b866] Successfully created port: e3bb5b3e-6ab2-41f8-95fb-5e459b56bec8 {{(pid=63279) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1874.292409] env[63279]: DEBUG oslo_vmware.api [None req-fb63d81c-1d8d-4e24-b42b-e37e673a6c54 tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] Task: {'id': task-2086904, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.114219} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1874.295968] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-fb63d81c-1d8d-4e24-b42b-e37e673a6c54 tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] [instance: 357f08c9-4de9-4b84-8384-6bf130872f40] Extended root virtual disk {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1874.297042] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0253a33a-a107-4e18-b0d4-86737dc35f91 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1874.325640] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-fb63d81c-1d8d-4e24-b42b-e37e673a6c54 tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] [instance: 357f08c9-4de9-4b84-8384-6bf130872f40] Reconfiguring VM instance instance-00000015 to attach disk [datastore1] 357f08c9-4de9-4b84-8384-6bf130872f40/357f08c9-4de9-4b84-8384-6bf130872f40.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1874.326950] env[63279]: DEBUG oslo_concurrency.lockutils [None req-8d7e8546-3d55-4a5a-887c-fa75360a7a03 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Acquiring lock "refresh_cache-5656c853-ac83-47be-83c4-979a9e87ab91" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1874.327814] env[63279]: DEBUG oslo_concurrency.lockutils [None req-8d7e8546-3d55-4a5a-887c-fa75360a7a03 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Acquired lock "refresh_cache-5656c853-ac83-47be-83c4-979a9e87ab91" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1874.327814] env[63279]: DEBUG nova.network.neutron [None req-8d7e8546-3d55-4a5a-887c-fa75360a7a03 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] [instance: 5656c853-ac83-47be-83c4-979a9e87ab91] Building network info cache for instance {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1874.331660] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-51f9623a-2894-4798-94d3-d1d35b601b97 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1874.359902] env[63279]: DEBUG oslo_vmware.api [None req-fb63d81c-1d8d-4e24-b42b-e37e673a6c54 tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] Waiting for the task: (returnval){ [ 1874.359902] env[63279]: value = "task-2086907" [ 1874.359902] env[63279]: _type = "Task" [ 1874.359902] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1874.375370] env[63279]: DEBUG oslo_vmware.api [None req-fb63d81c-1d8d-4e24-b42b-e37e673a6c54 tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] Task: {'id': task-2086907, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1874.506102] env[63279]: DEBUG nova.compute.manager [None req-8d990ec3-fde0-427d-b132-b45041bfcd25 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] [instance: 5bb445d3-1b12-4a1b-ad2a-cbc929b13aee] Starting instance... {{(pid=63279) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1874.540038] env[63279]: DEBUG nova.network.neutron [req-d3176472-1ae3-47a7-93b4-3526e78b873b req-47b2046d-7d67-4dce-96e6-2230ebaf358c service nova] [instance: ecedded1-7169-49a4-8a9e-2fe4086db986] Updated VIF entry in instance network info cache for port bbcf46a8-cec2-4c54-8c6a-c1ba126b1676. {{(pid=63279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1874.540550] env[63279]: DEBUG nova.network.neutron [req-d3176472-1ae3-47a7-93b4-3526e78b873b req-47b2046d-7d67-4dce-96e6-2230ebaf358c service nova] [instance: ecedded1-7169-49a4-8a9e-2fe4086db986] Updating instance_info_cache with network_info: [{"id": "bbcf46a8-cec2-4c54-8c6a-c1ba126b1676", "address": "fa:16:3e:e1:30:df", "network": {"id": "08d905b3-a951-4c93-b0ed-a9e73ab6b443", "bridge": "br-int", "label": "tempest-ServersTestManualDisk-1358149569-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e271c0d8278c4192950ebc988485dd5f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7b2049d7-f99e-425a-afdb-2c95ca88e483", "external-id": "nsx-vlan-transportzone-803", "segmentation_id": 803, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbbcf46a8-ce", "ovs_interfaceid": "bbcf46a8-cec2-4c54-8c6a-c1ba126b1676", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1874.548246] env[63279]: INFO nova.compute.manager [-] [instance: 32e84715-0345-4171-abb7-c034a501347e] Took 1.36 seconds to deallocate network for instance. [ 1874.579498] env[63279]: DEBUG oslo_vmware.api [None req-a1db1d2b-13f2-45f2-897f-ab8f0397bf37 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Task: {'id': task-2086906, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.179383} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1874.579733] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-a1db1d2b-13f2-45f2-897f-ab8f0397bf37 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Deleted the datastore file {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1874.579960] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-a1db1d2b-13f2-45f2-897f-ab8f0397bf37 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: 41654a82-af5e-463e-80f9-86ba13a5ad2e] Deleted contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1874.580305] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-a1db1d2b-13f2-45f2-897f-ab8f0397bf37 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: 41654a82-af5e-463e-80f9-86ba13a5ad2e] Instance destroyed {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1874.580569] env[63279]: INFO nova.compute.manager [None req-a1db1d2b-13f2-45f2-897f-ab8f0397bf37 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: 41654a82-af5e-463e-80f9-86ba13a5ad2e] Took 1.23 seconds to destroy the instance on the hypervisor. [ 1874.580841] env[63279]: DEBUG oslo.service.loopingcall [None req-a1db1d2b-13f2-45f2-897f-ab8f0397bf37 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1874.581402] env[63279]: DEBUG nova.compute.manager [-] [instance: 41654a82-af5e-463e-80f9-86ba13a5ad2e] Deallocating network for instance {{(pid=63279) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1874.581496] env[63279]: DEBUG nova.network.neutron [-] [instance: 41654a82-af5e-463e-80f9-86ba13a5ad2e] deallocate_for_instance() {{(pid=63279) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1874.586987] env[63279]: DEBUG oslo_vmware.api [None req-2348a2ee-5449-455f-aa56-c04b59d9fc77 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Task: {'id': task-2086897, 'name': CloneVM_Task, 'duration_secs': 1.894709} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1874.590259] env[63279]: INFO nova.virt.vmwareapi.vmops [None req-2348a2ee-5449-455f-aa56-c04b59d9fc77 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] [instance: 00fb1bc8-2f70-4fa3-a6a2-54fd38ba89c7] Created linked-clone VM from snapshot [ 1874.592032] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-841ecd26-8612-4eb3-b0e6-f0064d1db946 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1874.601151] env[63279]: DEBUG nova.virt.vmwareapi.images [None req-2348a2ee-5449-455f-aa56-c04b59d9fc77 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] [instance: 00fb1bc8-2f70-4fa3-a6a2-54fd38ba89c7] Uploading image abaee334-d484-43f2-a4a9-fbaf18115898 {{(pid=63279) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1874.615532] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-2348a2ee-5449-455f-aa56-c04b59d9fc77 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] [instance: 00fb1bc8-2f70-4fa3-a6a2-54fd38ba89c7] Destroying the VM {{(pid=63279) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1874.615860] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-f1262118-c23e-4b87-8325-ba8670af8100 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1874.632905] env[63279]: DEBUG oslo_vmware.api [None req-2348a2ee-5449-455f-aa56-c04b59d9fc77 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Waiting for the task: (returnval){ [ 1874.632905] env[63279]: value = "task-2086908" [ 1874.632905] env[63279]: _type = "Task" [ 1874.632905] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1874.645109] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2086903, 'name': CreateVM_Task, 'duration_secs': 0.562322} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1874.646378] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ecedded1-7169-49a4-8a9e-2fe4086db986] Created VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1874.647056] env[63279]: DEBUG oslo_concurrency.lockutils [None req-1c67a5c6-4ab3-4770-b276-ecfec0564f8f tempest-ServersTestManualDisk-725019117 tempest-ServersTestManualDisk-725019117-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1874.647358] env[63279]: DEBUG oslo_concurrency.lockutils [None req-1c67a5c6-4ab3-4770-b276-ecfec0564f8f tempest-ServersTestManualDisk-725019117 tempest-ServersTestManualDisk-725019117-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1874.647742] env[63279]: DEBUG oslo_concurrency.lockutils [None req-1c67a5c6-4ab3-4770-b276-ecfec0564f8f tempest-ServersTestManualDisk-725019117 tempest-ServersTestManualDisk-725019117-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1874.651148] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a72a000c-a8e3-4bdb-8465-0e560cf77609 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1874.653501] env[63279]: DEBUG oslo_vmware.api [None req-2348a2ee-5449-455f-aa56-c04b59d9fc77 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Task: {'id': task-2086908, 'name': Destroy_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1874.660906] env[63279]: DEBUG oslo_vmware.api [None req-1c67a5c6-4ab3-4770-b276-ecfec0564f8f tempest-ServersTestManualDisk-725019117 tempest-ServersTestManualDisk-725019117-project-member] Waiting for the task: (returnval){ [ 1874.660906] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52d11c9f-5034-90fb-e1d8-45e77127d827" [ 1874.660906] env[63279]: _type = "Task" [ 1874.660906] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1874.671340] env[63279]: DEBUG oslo_vmware.api [None req-1c67a5c6-4ab3-4770-b276-ecfec0564f8f tempest-ServersTestManualDisk-725019117 tempest-ServersTestManualDisk-725019117-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52d11c9f-5034-90fb-e1d8-45e77127d827, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1874.796460] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d61940c4-2811-4077-8165-d442defad3b9 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1874.803632] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e968d018-d8be-4412-934a-b9747b4aac11 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1874.837103] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe0c2f0c-731f-4941-a7fb-00166fd42d4f {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1874.845266] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f42c6d32-202f-4a61-a621-241e1aa27392 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1874.862492] env[63279]: DEBUG nova.compute.provider_tree [None req-3e128472-436c-4de9-a273-dfc54e42830f tempest-ServersV294TestFqdnHostnames-110312040 tempest-ServersV294TestFqdnHostnames-110312040-project-member] Updating inventory in ProviderTree for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1874.872650] env[63279]: DEBUG oslo_vmware.api [None req-fb63d81c-1d8d-4e24-b42b-e37e673a6c54 tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] Task: {'id': task-2086907, 'name': ReconfigVM_Task, 'duration_secs': 0.404686} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1874.876022] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-fb63d81c-1d8d-4e24-b42b-e37e673a6c54 tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] [instance: 357f08c9-4de9-4b84-8384-6bf130872f40] Reconfigured VM instance instance-00000015 to attach disk [datastore1] 357f08c9-4de9-4b84-8384-6bf130872f40/357f08c9-4de9-4b84-8384-6bf130872f40.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1874.876022] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-7b0a7e8c-6a85-4a8e-a7a5-50bc760df852 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1874.881018] env[63279]: DEBUG oslo_vmware.api [None req-fb63d81c-1d8d-4e24-b42b-e37e673a6c54 tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] Waiting for the task: (returnval){ [ 1874.881018] env[63279]: value = "task-2086909" [ 1874.881018] env[63279]: _type = "Task" [ 1874.881018] env[63279]: } to complete. 
{{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1874.890109] env[63279]: DEBUG oslo_vmware.api [None req-fb63d81c-1d8d-4e24-b42b-e37e673a6c54 tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] Task: {'id': task-2086909, 'name': Rename_Task} progress is 5%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1875.031663] env[63279]: DEBUG oslo_concurrency.lockutils [None req-8d990ec3-fde0-427d-b132-b45041bfcd25 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1875.046096] env[63279]: DEBUG oslo_concurrency.lockutils [req-d3176472-1ae3-47a7-93b4-3526e78b873b req-47b2046d-7d67-4dce-96e6-2230ebaf358c service nova] Releasing lock "refresh_cache-ecedded1-7169-49a4-8a9e-2fe4086db986" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1875.046096] env[63279]: DEBUG nova.compute.manager [req-d3176472-1ae3-47a7-93b4-3526e78b873b req-47b2046d-7d67-4dce-96e6-2230ebaf358c service nova] [instance: 32e84715-0345-4171-abb7-c034a501347e] Received event network-vif-deleted-9f0ded4b-2531-4688-a7fd-e27055112a5c {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 1875.046096] env[63279]: INFO nova.compute.manager [req-d3176472-1ae3-47a7-93b4-3526e78b873b req-47b2046d-7d67-4dce-96e6-2230ebaf358c service nova] [instance: 32e84715-0345-4171-abb7-c034a501347e] Neutron deleted interface 9f0ded4b-2531-4688-a7fd-e27055112a5c; detaching it from the instance and deleting it from the info cache [ 1875.046096] env[63279]: DEBUG nova.network.neutron [req-d3176472-1ae3-47a7-93b4-3526e78b873b req-47b2046d-7d67-4dce-96e6-2230ebaf358c service nova] [instance: 32e84715-0345-4171-abb7-c034a501347e] Updating instance_info_cache with network_info: [] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1875.058416] env[63279]: DEBUG oslo_concurrency.lockutils [None req-1d7a8d6f-e8c1-43ff-8d0d-10d4ddd940b9 tempest-ServersAdminNegativeTestJSON-968304097 tempest-ServersAdminNegativeTestJSON-968304097-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1875.069659] env[63279]: DEBUG nova.compute.manager [req-558d1103-aae9-45fc-b30c-287c0e0a6a82 req-18ebfec1-b628-4637-a285-5c15678b025d service nova] [instance: 41654a82-af5e-463e-80f9-86ba13a5ad2e] Received event network-vif-deleted-7d76225b-f1ff-4793-9c29-9a2cd30c6b43 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 1875.069847] env[63279]: INFO nova.compute.manager [req-558d1103-aae9-45fc-b30c-287c0e0a6a82 req-18ebfec1-b628-4637-a285-5c15678b025d service nova] [instance: 41654a82-af5e-463e-80f9-86ba13a5ad2e] Neutron deleted interface 7d76225b-f1ff-4793-9c29-9a2cd30c6b43; detaching it from the instance and deleting it from the info cache [ 1875.070127] env[63279]: DEBUG nova.network.neutron [req-558d1103-aae9-45fc-b30c-287c0e0a6a82 req-18ebfec1-b628-4637-a285-5c15678b025d service nova] [instance: 
41654a82-af5e-463e-80f9-86ba13a5ad2e] Updating instance_info_cache with network_info: [] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1875.109222] env[63279]: DEBUG nova.compute.manager [None req-4eb26c5f-1d6c-44a0-9365-feefd9e1d5a1 tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] [instance: ff2f355a-9687-4491-b243-6133e4b7b866] Start spawning the instance on the hypervisor. {{(pid=63279) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1875.137512] env[63279]: DEBUG nova.virt.hardware [None req-4eb26c5f-1d6c-44a0-9365-feefd9e1d5a1 tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-13T17:30:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-13T17:30:01Z,direct_url=,disk_format='vmdk',id=30887889-e45b-4f67-8b3c-16216e594a90,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a935e86eee0a4c38adfe0367d2097a61',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-13T17:30:02Z,virtual_size=,visibility=), allow threads: False {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1875.137754] env[63279]: DEBUG nova.virt.hardware [None req-4eb26c5f-1d6c-44a0-9365-feefd9e1d5a1 tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Flavor limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1875.137911] env[63279]: DEBUG nova.virt.hardware [None req-4eb26c5f-1d6c-44a0-9365-feefd9e1d5a1 tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Image limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1875.138107] env[63279]: DEBUG nova.virt.hardware [None req-4eb26c5f-1d6c-44a0-9365-feefd9e1d5a1 tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Flavor pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1875.138257] env[63279]: DEBUG nova.virt.hardware [None req-4eb26c5f-1d6c-44a0-9365-feefd9e1d5a1 tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Image pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1875.138433] env[63279]: DEBUG nova.virt.hardware [None req-4eb26c5f-1d6c-44a0-9365-feefd9e1d5a1 tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1875.138660] env[63279]: DEBUG nova.virt.hardware [None req-4eb26c5f-1d6c-44a0-9365-feefd9e1d5a1 tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Topology preferred 
VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1875.138821] env[63279]: DEBUG nova.virt.hardware [None req-4eb26c5f-1d6c-44a0-9365-feefd9e1d5a1 tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1875.138987] env[63279]: DEBUG nova.virt.hardware [None req-4eb26c5f-1d6c-44a0-9365-feefd9e1d5a1 tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Got 1 possible topologies {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1875.139235] env[63279]: DEBUG nova.virt.hardware [None req-4eb26c5f-1d6c-44a0-9365-feefd9e1d5a1 tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1875.139411] env[63279]: DEBUG nova.virt.hardware [None req-4eb26c5f-1d6c-44a0-9365-feefd9e1d5a1 tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1875.140702] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9af57d0-5879-4ba6-ae1c-a213add610ff {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1875.155069] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa426da6-abb5-47cb-8502-e0acc90abad9 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1875.159916] env[63279]: DEBUG oslo_vmware.api [None req-2348a2ee-5449-455f-aa56-c04b59d9fc77 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Task: {'id': task-2086908, 'name': Destroy_Task} progress is 33%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1875.178896] env[63279]: DEBUG oslo_vmware.api [None req-1c67a5c6-4ab3-4770-b276-ecfec0564f8f tempest-ServersTestManualDisk-725019117 tempest-ServersTestManualDisk-725019117-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52d11c9f-5034-90fb-e1d8-45e77127d827, 'name': SearchDatastore_Task, 'duration_secs': 0.018676} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1875.179244] env[63279]: DEBUG oslo_concurrency.lockutils [None req-1c67a5c6-4ab3-4770-b276-ecfec0564f8f tempest-ServersTestManualDisk-725019117 tempest-ServersTestManualDisk-725019117-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1875.179486] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-1c67a5c6-4ab3-4770-b276-ecfec0564f8f tempest-ServersTestManualDisk-725019117 tempest-ServersTestManualDisk-725019117-project-member] [instance: ecedded1-7169-49a4-8a9e-2fe4086db986] Processing image 30887889-e45b-4f67-8b3c-16216e594a90 {{(pid=63279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1875.179725] env[63279]: DEBUG oslo_concurrency.lockutils [None req-1c67a5c6-4ab3-4770-b276-ecfec0564f8f tempest-ServersTestManualDisk-725019117 tempest-ServersTestManualDisk-725019117-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1875.179874] env[63279]: DEBUG oslo_concurrency.lockutils [None req-1c67a5c6-4ab3-4770-b276-ecfec0564f8f tempest-ServersTestManualDisk-725019117 tempest-ServersTestManualDisk-725019117-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1875.180065] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-1c67a5c6-4ab3-4770-b276-ecfec0564f8f tempest-ServersTestManualDisk-725019117 tempest-ServersTestManualDisk-725019117-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1875.180395] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9aee4189-053d-4dbf-a503-7bd9b15a5eb0 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1875.183526] env[63279]: DEBUG nova.network.neutron [None req-8d7e8546-3d55-4a5a-887c-fa75360a7a03 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] [instance: 5656c853-ac83-47be-83c4-979a9e87ab91] Updating instance_info_cache with network_info: [{"id": "2cc4a33a-bd88-4aec-a588-7c821bebf971", "address": "fa:16:3e:de:77:2e", "network": {"id": "1e875730-8e0e-4907-a6d2-776025ed7ab9", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "a935e86eee0a4c38adfe0367d2097a61", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "32028d02-abaa-4071-bc65-1460f5c772a8", "external-id": "nsx-vlan-transportzone-558", "segmentation_id": 558, "bound_drivers": 
{"0": "nsxv3"}}, "devname": "tap2cc4a33a-bd", "ovs_interfaceid": "2cc4a33a-bd88-4aec-a588-7c821bebf971", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1875.188886] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-1c67a5c6-4ab3-4770-b276-ecfec0564f8f tempest-ServersTestManualDisk-725019117 tempest-ServersTestManualDisk-725019117-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1875.189102] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-1c67a5c6-4ab3-4770-b276-ecfec0564f8f tempest-ServersTestManualDisk-725019117 tempest-ServersTestManualDisk-725019117-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1875.189829] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1e80115d-5a45-49bb-a2ec-7b2f08d64aac {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1875.198163] env[63279]: DEBUG oslo_vmware.api [None req-1c67a5c6-4ab3-4770-b276-ecfec0564f8f tempest-ServersTestManualDisk-725019117 tempest-ServersTestManualDisk-725019117-project-member] Waiting for the task: (returnval){ [ 1875.198163] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52cf81b0-edf5-13e5-b3ef-b29d1a7e3e17" [ 1875.198163] env[63279]: _type = "Task" [ 1875.198163] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1875.206831] env[63279]: DEBUG oslo_vmware.api [None req-1c67a5c6-4ab3-4770-b276-ecfec0564f8f tempest-ServersTestManualDisk-725019117 tempest-ServersTestManualDisk-725019117-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52cf81b0-edf5-13e5-b3ef-b29d1a7e3e17, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1875.392534] env[63279]: DEBUG oslo_vmware.api [None req-fb63d81c-1d8d-4e24-b42b-e37e673a6c54 tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] Task: {'id': task-2086909, 'name': Rename_Task, 'duration_secs': 0.184383} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1875.393793] env[63279]: ERROR nova.scheduler.client.report [None req-3e128472-436c-4de9-a273-dfc54e42830f tempest-ServersV294TestFqdnHostnames-110312040 tempest-ServersV294TestFqdnHostnames-110312040-project-member] [req-4830935f-0cbd-4251-a2bb-fc3e4bdaf23f] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 0ba7c625-a0fc-4d3c-b804-196d00f00137. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-4830935f-0cbd-4251-a2bb-fc3e4bdaf23f"}]} [ 1875.394251] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-fb63d81c-1d8d-4e24-b42b-e37e673a6c54 tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] [instance: 357f08c9-4de9-4b84-8384-6bf130872f40] Powering on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1875.396479] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5ba6729c-ac32-4b1a-927e-061d33932155 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1875.405568] env[63279]: DEBUG oslo_vmware.api [None req-fb63d81c-1d8d-4e24-b42b-e37e673a6c54 tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] Waiting for the task: (returnval){ [ 1875.405568] env[63279]: value = "task-2086910" [ 1875.405568] env[63279]: _type = "Task" [ 1875.405568] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1875.421649] env[63279]: DEBUG oslo_vmware.api [None req-fb63d81c-1d8d-4e24-b42b-e37e673a6c54 tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] Task: {'id': task-2086910, 'name': PowerOnVM_Task} progress is 33%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1875.422485] env[63279]: DEBUG nova.scheduler.client.report [None req-3e128472-436c-4de9-a273-dfc54e42830f tempest-ServersV294TestFqdnHostnames-110312040 tempest-ServersV294TestFqdnHostnames-110312040-project-member] Refreshing inventories for resource provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1875.444582] env[63279]: DEBUG nova.scheduler.client.report [None req-3e128472-436c-4de9-a273-dfc54e42830f tempest-ServersV294TestFqdnHostnames-110312040 tempest-ServersV294TestFqdnHostnames-110312040-project-member] Updating ProviderTree inventory for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1875.444808] env[63279]: DEBUG nova.compute.provider_tree [None req-3e128472-436c-4de9-a273-dfc54e42830f tempest-ServersV294TestFqdnHostnames-110312040 tempest-ServersV294TestFqdnHostnames-110312040-project-member] Updating inventory in ProviderTree for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 
'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1875.458498] env[63279]: DEBUG nova.scheduler.client.report [None req-3e128472-436c-4de9-a273-dfc54e42830f tempest-ServersV294TestFqdnHostnames-110312040 tempest-ServersV294TestFqdnHostnames-110312040-project-member] Refreshing aggregate associations for resource provider 0ba7c625-a0fc-4d3c-b804-196d00f00137, aggregates: None {{(pid=63279) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1875.479068] env[63279]: DEBUG nova.scheduler.client.report [None req-3e128472-436c-4de9-a273-dfc54e42830f tempest-ServersV294TestFqdnHostnames-110312040 tempest-ServersV294TestFqdnHostnames-110312040-project-member] Refreshing trait associations for resource provider 0ba7c625-a0fc-4d3c-b804-196d00f00137, traits: COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK {{(pid=63279) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1875.489087] env[63279]: DEBUG nova.network.neutron [-] [instance: 41654a82-af5e-463e-80f9-86ba13a5ad2e] Updating instance_info_cache with network_info: [] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1875.551219] env[63279]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-60cde57a-bf8d-42d1-9377-14e5e8904cb1 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1875.563729] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8b428ff-0664-4ebf-94eb-b5f82034ec16 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1875.579278] env[63279]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e953919c-7ee4-4a92-ac69-45fe9b2e65bd {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1875.587974] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2636dbc4-f6eb-4331-a91a-c87c8ee1c988 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1875.613342] env[63279]: DEBUG nova.compute.manager [req-d3176472-1ae3-47a7-93b4-3526e78b873b req-47b2046d-7d67-4dce-96e6-2230ebaf358c service nova] [instance: 32e84715-0345-4171-abb7-c034a501347e] Detach interface failed, port_id=9f0ded4b-2531-4688-a7fd-e27055112a5c, reason: Instance 32e84715-0345-4171-abb7-c034a501347e could not be found. {{(pid=63279) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11484}} [ 1875.637230] env[63279]: DEBUG nova.compute.manager [req-558d1103-aae9-45fc-b30c-287c0e0a6a82 req-18ebfec1-b628-4637-a285-5c15678b025d service nova] [instance: 41654a82-af5e-463e-80f9-86ba13a5ad2e] Detach interface failed, port_id=7d76225b-f1ff-4793-9c29-9a2cd30c6b43, reason: Instance 41654a82-af5e-463e-80f9-86ba13a5ad2e could not be found. 
{{(pid=63279) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11484}} [ 1875.653095] env[63279]: DEBUG oslo_vmware.api [None req-2348a2ee-5449-455f-aa56-c04b59d9fc77 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Task: {'id': task-2086908, 'name': Destroy_Task, 'duration_secs': 0.663975} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1875.655775] env[63279]: INFO nova.virt.vmwareapi.vm_util [None req-2348a2ee-5449-455f-aa56-c04b59d9fc77 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] [instance: 00fb1bc8-2f70-4fa3-a6a2-54fd38ba89c7] Destroyed the VM [ 1875.656130] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-2348a2ee-5449-455f-aa56-c04b59d9fc77 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] [instance: 00fb1bc8-2f70-4fa3-a6a2-54fd38ba89c7] Deleting Snapshot of the VM instance {{(pid=63279) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1875.656885] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-f8e0e996-187c-45b7-b925-f5ae13bce947 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1875.664061] env[63279]: DEBUG oslo_vmware.api [None req-2348a2ee-5449-455f-aa56-c04b59d9fc77 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Waiting for the task: (returnval){ [ 1875.664061] env[63279]: value = "task-2086911" [ 1875.664061] env[63279]: _type = "Task" [ 1875.664061] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1875.675043] env[63279]: DEBUG oslo_vmware.api [None req-2348a2ee-5449-455f-aa56-c04b59d9fc77 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Task: {'id': task-2086911, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1875.685821] env[63279]: DEBUG oslo_concurrency.lockutils [None req-8d7e8546-3d55-4a5a-887c-fa75360a7a03 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Releasing lock "refresh_cache-5656c853-ac83-47be-83c4-979a9e87ab91" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1875.705738] env[63279]: DEBUG oslo_vmware.api [None req-1c67a5c6-4ab3-4770-b276-ecfec0564f8f tempest-ServersTestManualDisk-725019117 tempest-ServersTestManualDisk-725019117-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52cf81b0-edf5-13e5-b3ef-b29d1a7e3e17, 'name': SearchDatastore_Task, 'duration_secs': 0.011268} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1875.709574] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-81b7c7e5-f5fa-458e-9abf-76797a7c4cd0 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1875.716882] env[63279]: DEBUG oslo_vmware.api [None req-1c67a5c6-4ab3-4770-b276-ecfec0564f8f tempest-ServersTestManualDisk-725019117 tempest-ServersTestManualDisk-725019117-project-member] Waiting for the task: (returnval){ [ 1875.716882] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]529471e6-edc1-2f34-7d46-c2ce8ab56593" [ 1875.716882] env[63279]: _type = "Task" [ 1875.716882] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1875.726775] env[63279]: DEBUG oslo_vmware.api [None req-1c67a5c6-4ab3-4770-b276-ecfec0564f8f tempest-ServersTestManualDisk-725019117 tempest-ServersTestManualDisk-725019117-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]529471e6-edc1-2f34-7d46-c2ce8ab56593, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1875.916646] env[63279]: DEBUG oslo_vmware.api [None req-fb63d81c-1d8d-4e24-b42b-e37e673a6c54 tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] Task: {'id': task-2086910, 'name': PowerOnVM_Task} progress is 89%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1875.977009] env[63279]: DEBUG nova.network.neutron [None req-4eb26c5f-1d6c-44a0-9365-feefd9e1d5a1 tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] [instance: ff2f355a-9687-4491-b243-6133e4b7b866] Successfully updated port: e3bb5b3e-6ab2-41f8-95fb-5e459b56bec8 {{(pid=63279) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1875.994618] env[63279]: INFO nova.compute.manager [-] [instance: 41654a82-af5e-463e-80f9-86ba13a5ad2e] Took 1.41 seconds to deallocate network for instance. 
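[editor's note] The Destroy_Task, RemoveSnapshot_Task and SearchDatastore_Task entries above all follow the same shape: a vCenter task is submitted, then wait_for_task/_poll_task entries report progress until the task completes successfully. The snippet below is a minimal illustrative sketch of that submit-then-poll pattern only; it is not the oslo.vmware implementation, and the get_task_info callable, TaskFailed exception and poll_interval argument are hypothetical stand-ins.

# Illustrative "submit task, poll until done" loop, mirroring the
# wait_for_task/_poll_task log entries above. All names here are
# hypothetical; this is not the oslo.vmware API.
import time
import logging

LOG = logging.getLogger(__name__)


class TaskFailed(Exception):
    """Raised when the remote task ends in an error state or times out."""


def wait_for_task(get_task_info, poll_interval=0.5, timeout=300.0):
    """Poll a remote task until it succeeds, fails, or times out.

    get_task_info() must return a dict with at least a 'state' key
    ('running', 'success' or 'error') and optionally 'id' and 'progress'.
    """
    deadline = time.monotonic() + timeout
    while True:
        info = get_task_info()
        state = info.get("state")
        if state == "success":
            LOG.debug("Task %s completed successfully", info.get("id"))
            return info
        if state == "error":
            raise TaskFailed(info.get("error", "unknown error"))
        # Log progress the same way the entries above do ("progress is N%").
        LOG.debug("Task %s progress is %s%%",
                  info.get("id"), info.get("progress", 0))
        if time.monotonic() > deadline:
            raise TaskFailed("timed out waiting for task %s" % info.get("id"))
        time.sleep(poll_interval)


if __name__ == "__main__":
    # Fake task that finishes after three polls, just to exercise the loop.
    polls = iter([{"id": "task-1", "state": "running", "progress": 0},
                  {"id": "task-1", "state": "running", "progress": 76},
                  {"id": "task-1", "state": "success"}])
    logging.basicConfig(level=logging.DEBUG)
    wait_for_task(lambda: next(polls), poll_interval=0.01)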
[ 1876.070178] env[63279]: DEBUG nova.compute.manager [req-714af486-b8de-4d45-8e12-f31bb31264f1 req-bdfa72ca-4a74-451d-a1a3-bac588d82c1e service nova] [instance: ff2f355a-9687-4491-b243-6133e4b7b866] Received event network-vif-plugged-e3bb5b3e-6ab2-41f8-95fb-5e459b56bec8 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 1876.070412] env[63279]: DEBUG oslo_concurrency.lockutils [req-714af486-b8de-4d45-8e12-f31bb31264f1 req-bdfa72ca-4a74-451d-a1a3-bac588d82c1e service nova] Acquiring lock "ff2f355a-9687-4491-b243-6133e4b7b866-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1876.070647] env[63279]: DEBUG oslo_concurrency.lockutils [req-714af486-b8de-4d45-8e12-f31bb31264f1 req-bdfa72ca-4a74-451d-a1a3-bac588d82c1e service nova] Lock "ff2f355a-9687-4491-b243-6133e4b7b866-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1876.070789] env[63279]: DEBUG oslo_concurrency.lockutils [req-714af486-b8de-4d45-8e12-f31bb31264f1 req-bdfa72ca-4a74-451d-a1a3-bac588d82c1e service nova] Lock "ff2f355a-9687-4491-b243-6133e4b7b866-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1876.070961] env[63279]: DEBUG nova.compute.manager [req-714af486-b8de-4d45-8e12-f31bb31264f1 req-bdfa72ca-4a74-451d-a1a3-bac588d82c1e service nova] [instance: ff2f355a-9687-4491-b243-6133e4b7b866] No waiting events found dispatching network-vif-plugged-e3bb5b3e-6ab2-41f8-95fb-5e459b56bec8 {{(pid=63279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1876.071139] env[63279]: WARNING nova.compute.manager [req-714af486-b8de-4d45-8e12-f31bb31264f1 req-bdfa72ca-4a74-451d-a1a3-bac588d82c1e service nova] [instance: ff2f355a-9687-4491-b243-6133e4b7b866] Received unexpected event network-vif-plugged-e3bb5b3e-6ab2-41f8-95fb-5e459b56bec8 for instance with vm_state building and task_state spawning. 
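[editor's note] The block above shows how an external network-vif-plugged event is dispatched: the per-instance "...-events" lock is acquired, any waiter registered for that event is popped and signalled, and a WARNING is emitted when nothing was waiting (as here, because the instance is still building/spawning). Below is a minimal sketch of that pop-or-warn pattern using a plain threading.Lock and dict as stand-ins for Nova's lockutils-based InstanceEvents bookkeeping; the class and method names are illustrative only.

# Illustrative per-instance event registry mirroring the pop_instance_event
# behaviour visible in the log above. threading primitives stand in for the
# real lockutils-based synchronization; all names are hypothetical.
import logging
import threading

LOG = logging.getLogger(__name__)


class InstanceEventRegistry(object):
    def __init__(self):
        self._lock = threading.Lock()
        # Maps (instance_uuid, event_name) -> threading.Event for a waiter.
        self._waiters = {}

    def prepare(self, instance_uuid, event_name):
        """Register interest in an event before starting the operation."""
        waiter = threading.Event()
        with self._lock:  # analogous to acquiring "<uuid>-events"
            self._waiters[(instance_uuid, event_name)] = waiter
        return waiter

    def pop_event(self, instance_uuid, event_name):
        """Signal a waiter if one exists; otherwise warn, as in the log."""
        with self._lock:
            waiter = self._waiters.pop((instance_uuid, event_name), None)
        if waiter is None:
            LOG.warning("Received unexpected event %s for instance %s",
                        event_name, instance_uuid)
            return False
        waiter.set()
        return True


if __name__ == "__main__":
    logging.basicConfig(level=logging.WARNING)
    registry = InstanceEventRegistry()
    # No waiter registered yet -> warning path, like the entry above.
    registry.pop_event("ff2f355a-9687-4491-b243-6133e4b7b866",
                       "network-vif-plugged")
    # Register, then deliver -> the waiter is released instead of warning.
    w = registry.prepare("ff2f355a", "network-vif-plugged")
    registry.pop_event("ff2f355a", "network-vif-plugged")
    assert w.is_set()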
[ 1876.073919] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f303e034-7936-4321-960b-ff71e86278e5 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1876.082265] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d22769ec-2130-4598-b568-634d7173f892 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1876.116293] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60b9680c-a85b-4e1d-938f-33849ae15a7a {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1876.124217] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e93531d-0b5e-474e-934b-e5b269de01ee {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1876.139757] env[63279]: DEBUG nova.compute.provider_tree [None req-3e128472-436c-4de9-a273-dfc54e42830f tempest-ServersV294TestFqdnHostnames-110312040 tempest-ServersV294TestFqdnHostnames-110312040-project-member] Updating inventory in ProviderTree for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1876.174201] env[63279]: DEBUG oslo_vmware.api [None req-2348a2ee-5449-455f-aa56-c04b59d9fc77 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Task: {'id': task-2086911, 'name': RemoveSnapshot_Task} progress is 76%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1876.212781] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60146487-1fed-493b-ac69-c42c0dc780ca {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1876.233494] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48d47f9a-ba13-40f5-9201-43d0f5d41737 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1876.239707] env[63279]: DEBUG oslo_vmware.api [None req-1c67a5c6-4ab3-4770-b276-ecfec0564f8f tempest-ServersTestManualDisk-725019117 tempest-ServersTestManualDisk-725019117-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]529471e6-edc1-2f34-7d46-c2ce8ab56593, 'name': SearchDatastore_Task, 'duration_secs': 0.012548} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1876.240392] env[63279]: DEBUG oslo_concurrency.lockutils [None req-1c67a5c6-4ab3-4770-b276-ecfec0564f8f tempest-ServersTestManualDisk-725019117 tempest-ServersTestManualDisk-725019117-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1876.240874] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-1c67a5c6-4ab3-4770-b276-ecfec0564f8f tempest-ServersTestManualDisk-725019117 tempest-ServersTestManualDisk-725019117-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] ecedded1-7169-49a4-8a9e-2fe4086db986/ecedded1-7169-49a4-8a9e-2fe4086db986.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1876.240981] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9d53081b-7ead-4a50-bf7c-7a1df2fceacf {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1876.245365] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-8d7e8546-3d55-4a5a-887c-fa75360a7a03 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] [instance: 5656c853-ac83-47be-83c4-979a9e87ab91] Updating instance '5656c853-ac83-47be-83c4-979a9e87ab91' progress to 83 {{(pid=63279) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1876.253745] env[63279]: DEBUG oslo_vmware.api [None req-1c67a5c6-4ab3-4770-b276-ecfec0564f8f tempest-ServersTestManualDisk-725019117 tempest-ServersTestManualDisk-725019117-project-member] Waiting for the task: (returnval){ [ 1876.253745] env[63279]: value = "task-2086912" [ 1876.253745] env[63279]: _type = "Task" [ 1876.253745] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1876.261601] env[63279]: DEBUG oslo_vmware.api [None req-1c67a5c6-4ab3-4770-b276-ecfec0564f8f tempest-ServersTestManualDisk-725019117 tempest-ServersTestManualDisk-725019117-project-member] Task: {'id': task-2086912, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1876.420888] env[63279]: DEBUG oslo_vmware.api [None req-fb63d81c-1d8d-4e24-b42b-e37e673a6c54 tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] Task: {'id': task-2086910, 'name': PowerOnVM_Task, 'duration_secs': 0.554356} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1876.420888] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-fb63d81c-1d8d-4e24-b42b-e37e673a6c54 tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] [instance: 357f08c9-4de9-4b84-8384-6bf130872f40] Powered on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1876.420888] env[63279]: INFO nova.compute.manager [None req-fb63d81c-1d8d-4e24-b42b-e37e673a6c54 tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] [instance: 357f08c9-4de9-4b84-8384-6bf130872f40] Took 8.37 seconds to spawn the instance on the hypervisor. [ 1876.420888] env[63279]: DEBUG nova.compute.manager [None req-fb63d81c-1d8d-4e24-b42b-e37e673a6c54 tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] [instance: 357f08c9-4de9-4b84-8384-6bf130872f40] Checking state {{(pid=63279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1876.420888] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-973758dd-ae3e-470f-817b-07f7fee94654 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1876.480503] env[63279]: DEBUG oslo_concurrency.lockutils [None req-4eb26c5f-1d6c-44a0-9365-feefd9e1d5a1 tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Acquiring lock "refresh_cache-ff2f355a-9687-4491-b243-6133e4b7b866" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1876.480697] env[63279]: DEBUG oslo_concurrency.lockutils [None req-4eb26c5f-1d6c-44a0-9365-feefd9e1d5a1 tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Acquired lock "refresh_cache-ff2f355a-9687-4491-b243-6133e4b7b866" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1876.480852] env[63279]: DEBUG nova.network.neutron [None req-4eb26c5f-1d6c-44a0-9365-feefd9e1d5a1 tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] [instance: ff2f355a-9687-4491-b243-6133e4b7b866] Building network info cache for instance {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1876.501010] env[63279]: DEBUG oslo_concurrency.lockutils [None req-a1db1d2b-13f2-45f2-897f-ab8f0397bf37 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1876.677510] env[63279]: DEBUG oslo_vmware.api [None req-2348a2ee-5449-455f-aa56-c04b59d9fc77 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Task: {'id': task-2086911, 'name': RemoveSnapshot_Task, 'duration_secs': 0.87169} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1876.680306] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-2348a2ee-5449-455f-aa56-c04b59d9fc77 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] [instance: 00fb1bc8-2f70-4fa3-a6a2-54fd38ba89c7] Deleted Snapshot of the VM instance {{(pid=63279) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1876.684023] env[63279]: DEBUG nova.scheduler.client.report [None req-3e128472-436c-4de9-a273-dfc54e42830f tempest-ServersV294TestFqdnHostnames-110312040 tempest-ServersV294TestFqdnHostnames-110312040-project-member] Updated inventory for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 with generation 55 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1876.684547] env[63279]: DEBUG nova.compute.provider_tree [None req-3e128472-436c-4de9-a273-dfc54e42830f tempest-ServersV294TestFqdnHostnames-110312040 tempest-ServersV294TestFqdnHostnames-110312040-project-member] Updating resource provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 generation from 55 to 56 during operation: update_inventory {{(pid=63279) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1876.684838] env[63279]: DEBUG nova.compute.provider_tree [None req-3e128472-436c-4de9-a273-dfc54e42830f tempest-ServersV294TestFqdnHostnames-110312040 tempest-ServersV294TestFqdnHostnames-110312040-project-member] Updating inventory in ProviderTree for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1876.752840] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-8d7e8546-3d55-4a5a-887c-fa75360a7a03 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] [instance: 5656c853-ac83-47be-83c4-979a9e87ab91] Powering on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1876.753653] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-7e393c0a-5392-4a17-90e1-8d958e6dcc03 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1876.768423] env[63279]: DEBUG oslo_vmware.api [None req-1c67a5c6-4ab3-4770-b276-ecfec0564f8f tempest-ServersTestManualDisk-725019117 tempest-ServersTestManualDisk-725019117-project-member] Task: {'id': task-2086912, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1876.770021] env[63279]: DEBUG oslo_vmware.api [None req-8d7e8546-3d55-4a5a-887c-fa75360a7a03 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Waiting for the task: (returnval){ [ 1876.770021] env[63279]: value = "task-2086913" [ 1876.770021] env[63279]: _type = "Task" [ 1876.770021] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1876.778115] env[63279]: DEBUG oslo_vmware.api [None req-8d7e8546-3d55-4a5a-887c-fa75360a7a03 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Task: {'id': task-2086913, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1876.943250] env[63279]: INFO nova.compute.manager [None req-fb63d81c-1d8d-4e24-b42b-e37e673a6c54 tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] [instance: 357f08c9-4de9-4b84-8384-6bf130872f40] Took 39.03 seconds to build instance. [ 1877.024900] env[63279]: DEBUG nova.network.neutron [None req-4eb26c5f-1d6c-44a0-9365-feefd9e1d5a1 tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] [instance: ff2f355a-9687-4491-b243-6133e4b7b866] Instance cache missing network info. {{(pid=63279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1877.193018] env[63279]: WARNING nova.compute.manager [None req-2348a2ee-5449-455f-aa56-c04b59d9fc77 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] [instance: 00fb1bc8-2f70-4fa3-a6a2-54fd38ba89c7] Image not found during snapshot: nova.exception.ImageNotFound: Image abaee334-d484-43f2-a4a9-fbaf18115898 could not be found. [ 1877.193134] env[63279]: DEBUG oslo_concurrency.lockutils [None req-3e128472-436c-4de9-a273-dfc54e42830f tempest-ServersV294TestFqdnHostnames-110312040 tempest-ServersV294TestFqdnHostnames-110312040-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 4.114s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1877.193730] env[63279]: DEBUG nova.compute.manager [None req-3e128472-436c-4de9-a273-dfc54e42830f tempest-ServersV294TestFqdnHostnames-110312040 tempest-ServersV294TestFqdnHostnames-110312040-project-member] [instance: 32dbef6d-d314-4fa6-972a-e7b1f22eb11d] Start building networks asynchronously for instance. 
{{(pid=63279) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1877.197882] env[63279]: DEBUG nova.network.neutron [None req-4eb26c5f-1d6c-44a0-9365-feefd9e1d5a1 tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] [instance: ff2f355a-9687-4491-b243-6133e4b7b866] Updating instance_info_cache with network_info: [{"id": "e3bb5b3e-6ab2-41f8-95fb-5e459b56bec8", "address": "fa:16:3e:65:bd:d9", "network": {"id": "6b1ead98-699d-4ad0-bb54-2a4fa09faf6d", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-861811231-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3ea345c4bc9b4781acb6f774ac88c690", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7ab8d568-adb0-4f3b-b6cc-68413e6546ae", "external-id": "nsx-vlan-transportzone-86", "segmentation_id": 86, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape3bb5b3e-6a", "ovs_interfaceid": "e3bb5b3e-6ab2-41f8-95fb-5e459b56bec8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1877.199287] env[63279]: DEBUG oslo_concurrency.lockutils [None req-90efc3b8-a656-4eb0-8ac7-af93c9b6bce9 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 30.137s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1877.201466] env[63279]: INFO nova.compute.claims [None req-90efc3b8-a656-4eb0-8ac7-af93c9b6bce9 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] [instance: 2f5e22f6-ba70-4848-965b-eb1553115323] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1877.268951] env[63279]: DEBUG oslo_vmware.api [None req-1c67a5c6-4ab3-4770-b276-ecfec0564f8f tempest-ServersTestManualDisk-725019117 tempest-ServersTestManualDisk-725019117-project-member] Task: {'id': task-2086912, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.650461} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1877.269256] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-1c67a5c6-4ab3-4770-b276-ecfec0564f8f tempest-ServersTestManualDisk-725019117 tempest-ServersTestManualDisk-725019117-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] ecedded1-7169-49a4-8a9e-2fe4086db986/ecedded1-7169-49a4-8a9e-2fe4086db986.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1877.269504] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-1c67a5c6-4ab3-4770-b276-ecfec0564f8f tempest-ServersTestManualDisk-725019117 tempest-ServersTestManualDisk-725019117-project-member] [instance: ecedded1-7169-49a4-8a9e-2fe4086db986] Extending root virtual disk to 1048576 {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1877.270151] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-bf697c66-5116-4732-86fe-a4f5407fea07 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1877.281603] env[63279]: DEBUG oslo_vmware.api [None req-8d7e8546-3d55-4a5a-887c-fa75360a7a03 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Task: {'id': task-2086913, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1877.282841] env[63279]: DEBUG oslo_vmware.api [None req-1c67a5c6-4ab3-4770-b276-ecfec0564f8f tempest-ServersTestManualDisk-725019117 tempest-ServersTestManualDisk-725019117-project-member] Waiting for the task: (returnval){ [ 1877.282841] env[63279]: value = "task-2086914" [ 1877.282841] env[63279]: _type = "Task" [ 1877.282841] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1877.293158] env[63279]: DEBUG oslo_vmware.api [None req-1c67a5c6-4ab3-4770-b276-ecfec0564f8f tempest-ServersTestManualDisk-725019117 tempest-ServersTestManualDisk-725019117-project-member] Task: {'id': task-2086914, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1877.445460] env[63279]: DEBUG oslo_concurrency.lockutils [None req-fb63d81c-1d8d-4e24-b42b-e37e673a6c54 tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] Lock "357f08c9-4de9-4b84-8384-6bf130872f40" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 53.612s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1877.706743] env[63279]: DEBUG nova.compute.utils [None req-3e128472-436c-4de9-a273-dfc54e42830f tempest-ServersV294TestFqdnHostnames-110312040 tempest-ServersV294TestFqdnHostnames-110312040-project-member] Using /dev/sd instead of None {{(pid=63279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1877.708434] env[63279]: DEBUG oslo_concurrency.lockutils [None req-4eb26c5f-1d6c-44a0-9365-feefd9e1d5a1 tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Releasing lock "refresh_cache-ff2f355a-9687-4491-b243-6133e4b7b866" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1877.708434] env[63279]: DEBUG nova.compute.manager [None req-4eb26c5f-1d6c-44a0-9365-feefd9e1d5a1 tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] [instance: ff2f355a-9687-4491-b243-6133e4b7b866] Instance network_info: |[{"id": "e3bb5b3e-6ab2-41f8-95fb-5e459b56bec8", "address": "fa:16:3e:65:bd:d9", "network": {"id": "6b1ead98-699d-4ad0-bb54-2a4fa09faf6d", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-861811231-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3ea345c4bc9b4781acb6f774ac88c690", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7ab8d568-adb0-4f3b-b6cc-68413e6546ae", "external-id": "nsx-vlan-transportzone-86", "segmentation_id": 86, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape3bb5b3e-6a", "ovs_interfaceid": "e3bb5b3e-6ab2-41f8-95fb-5e459b56bec8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1877.713148] env[63279]: DEBUG nova.compute.manager [None req-3e128472-436c-4de9-a273-dfc54e42830f tempest-ServersV294TestFqdnHostnames-110312040 tempest-ServersV294TestFqdnHostnames-110312040-project-member] [instance: 32dbef6d-d314-4fa6-972a-e7b1f22eb11d] Allocating IP information in the background. 
{{(pid=63279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1877.713148] env[63279]: DEBUG nova.network.neutron [None req-3e128472-436c-4de9-a273-dfc54e42830f tempest-ServersV294TestFqdnHostnames-110312040 tempest-ServersV294TestFqdnHostnames-110312040-project-member] [instance: 32dbef6d-d314-4fa6-972a-e7b1f22eb11d] allocate_for_instance() {{(pid=63279) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1877.713148] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-4eb26c5f-1d6c-44a0-9365-feefd9e1d5a1 tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] [instance: ff2f355a-9687-4491-b243-6133e4b7b866] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:65:bd:d9', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '7ab8d568-adb0-4f3b-b6cc-68413e6546ae', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e3bb5b3e-6ab2-41f8-95fb-5e459b56bec8', 'vif_model': 'vmxnet3'}] {{(pid=63279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1877.720914] env[63279]: DEBUG oslo.service.loopingcall [None req-4eb26c5f-1d6c-44a0-9365-feefd9e1d5a1 tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1877.725110] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ff2f355a-9687-4491-b243-6133e4b7b866] Creating VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1877.726183] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d20c51f5-6376-41b0-a6d9-6aad007b0476 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1877.748647] env[63279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1877.748647] env[63279]: value = "task-2086915" [ 1877.748647] env[63279]: _type = "Task" [ 1877.748647] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1877.762728] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2086915, 'name': CreateVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1877.785913] env[63279]: DEBUG oslo_vmware.api [None req-8d7e8546-3d55-4a5a-887c-fa75360a7a03 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Task: {'id': task-2086913, 'name': PowerOnVM_Task, 'duration_secs': 0.600181} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1877.789438] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-8d7e8546-3d55-4a5a-887c-fa75360a7a03 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] [instance: 5656c853-ac83-47be-83c4-979a9e87ab91] Powered on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1877.789555] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-8d7e8546-3d55-4a5a-887c-fa75360a7a03 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] [instance: 5656c853-ac83-47be-83c4-979a9e87ab91] Updating instance '5656c853-ac83-47be-83c4-979a9e87ab91' progress to 100 {{(pid=63279) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1877.796849] env[63279]: DEBUG nova.policy [None req-3e128472-436c-4de9-a273-dfc54e42830f tempest-ServersV294TestFqdnHostnames-110312040 tempest-ServersV294TestFqdnHostnames-110312040-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'da15736f9ab6473496caca06bcd2e8b5', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd496723f0a4e4e3aac1cc2895854d3a5', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63279) authorize /opt/stack/nova/nova/policy.py:192}} [ 1877.807706] env[63279]: DEBUG oslo_vmware.api [None req-1c67a5c6-4ab3-4770-b276-ecfec0564f8f tempest-ServersTestManualDisk-725019117 tempest-ServersTestManualDisk-725019117-project-member] Task: {'id': task-2086914, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.088723} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1877.807706] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-1c67a5c6-4ab3-4770-b276-ecfec0564f8f tempest-ServersTestManualDisk-725019117 tempest-ServersTestManualDisk-725019117-project-member] [instance: ecedded1-7169-49a4-8a9e-2fe4086db986] Extended root virtual disk {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1877.808688] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23101f68-9729-428e-9977-df21392043c5 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1877.837382] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-1c67a5c6-4ab3-4770-b276-ecfec0564f8f tempest-ServersTestManualDisk-725019117 tempest-ServersTestManualDisk-725019117-project-member] [instance: ecedded1-7169-49a4-8a9e-2fe4086db986] Reconfiguring VM instance instance-00000017 to attach disk [datastore1] ecedded1-7169-49a4-8a9e-2fe4086db986/ecedded1-7169-49a4-8a9e-2fe4086db986.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1877.840402] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-49a0ad2b-20df-49ce-b668-4b398e6f0b83 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1877.864015] env[63279]: DEBUG oslo_vmware.api [None req-1c67a5c6-4ab3-4770-b276-ecfec0564f8f tempest-ServersTestManualDisk-725019117 tempest-ServersTestManualDisk-725019117-project-member] Waiting for the task: (returnval){ [ 1877.864015] env[63279]: value = "task-2086916" [ 1877.864015] env[63279]: _type = "Task" [ 1877.864015] env[63279]: } to complete. 
{{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1877.870594] env[63279]: DEBUG oslo_concurrency.lockutils [None req-30faeabd-f04e-4cfe-bb97-6e2391a53b66 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Acquiring lock "00fb1bc8-2f70-4fa3-a6a2-54fd38ba89c7" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1877.870853] env[63279]: DEBUG oslo_concurrency.lockutils [None req-30faeabd-f04e-4cfe-bb97-6e2391a53b66 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Lock "00fb1bc8-2f70-4fa3-a6a2-54fd38ba89c7" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1877.871076] env[63279]: DEBUG oslo_concurrency.lockutils [None req-30faeabd-f04e-4cfe-bb97-6e2391a53b66 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Acquiring lock "00fb1bc8-2f70-4fa3-a6a2-54fd38ba89c7-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1877.871265] env[63279]: DEBUG oslo_concurrency.lockutils [None req-30faeabd-f04e-4cfe-bb97-6e2391a53b66 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Lock "00fb1bc8-2f70-4fa3-a6a2-54fd38ba89c7-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1877.871441] env[63279]: DEBUG oslo_concurrency.lockutils [None req-30faeabd-f04e-4cfe-bb97-6e2391a53b66 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Lock "00fb1bc8-2f70-4fa3-a6a2-54fd38ba89c7-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1877.876964] env[63279]: INFO nova.compute.manager [None req-30faeabd-f04e-4cfe-bb97-6e2391a53b66 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] [instance: 00fb1bc8-2f70-4fa3-a6a2-54fd38ba89c7] Terminating instance [ 1877.883287] env[63279]: DEBUG oslo_vmware.api [None req-1c67a5c6-4ab3-4770-b276-ecfec0564f8f tempest-ServersTestManualDisk-725019117 tempest-ServersTestManualDisk-725019117-project-member] Task: {'id': task-2086916, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1877.949161] env[63279]: DEBUG nova.compute.manager [None req-96273fea-e321-4566-9745-1792b9a7cc32 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] [instance: acf95fad-316c-4605-9008-24d4d7c05892] Starting instance... 
{{(pid=63279) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1878.174153] env[63279]: DEBUG nova.compute.manager [req-d9b188b9-0613-49c8-ae5a-c3a4d0e5acd2 req-8527af14-8587-410a-be8c-0ba9773e9b7a service nova] [instance: ff2f355a-9687-4491-b243-6133e4b7b866] Received event network-changed-e3bb5b3e-6ab2-41f8-95fb-5e459b56bec8 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 1878.174153] env[63279]: DEBUG nova.compute.manager [req-d9b188b9-0613-49c8-ae5a-c3a4d0e5acd2 req-8527af14-8587-410a-be8c-0ba9773e9b7a service nova] [instance: ff2f355a-9687-4491-b243-6133e4b7b866] Refreshing instance network info cache due to event network-changed-e3bb5b3e-6ab2-41f8-95fb-5e459b56bec8. {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11655}} [ 1878.174415] env[63279]: DEBUG oslo_concurrency.lockutils [req-d9b188b9-0613-49c8-ae5a-c3a4d0e5acd2 req-8527af14-8587-410a-be8c-0ba9773e9b7a service nova] Acquiring lock "refresh_cache-ff2f355a-9687-4491-b243-6133e4b7b866" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1878.174459] env[63279]: DEBUG oslo_concurrency.lockutils [req-d9b188b9-0613-49c8-ae5a-c3a4d0e5acd2 req-8527af14-8587-410a-be8c-0ba9773e9b7a service nova] Acquired lock "refresh_cache-ff2f355a-9687-4491-b243-6133e4b7b866" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1878.174640] env[63279]: DEBUG nova.network.neutron [req-d9b188b9-0613-49c8-ae5a-c3a4d0e5acd2 req-8527af14-8587-410a-be8c-0ba9773e9b7a service nova] [instance: ff2f355a-9687-4491-b243-6133e4b7b866] Refreshing network info cache for port e3bb5b3e-6ab2-41f8-95fb-5e459b56bec8 {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1878.213231] env[63279]: DEBUG nova.compute.manager [None req-3e128472-436c-4de9-a273-dfc54e42830f tempest-ServersV294TestFqdnHostnames-110312040 tempest-ServersV294TestFqdnHostnames-110312040-project-member] [instance: 32dbef6d-d314-4fa6-972a-e7b1f22eb11d] Start building block device mappings for instance. {{(pid=63279) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1878.268653] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2086915, 'name': CreateVM_Task} progress is 99%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1878.314524] env[63279]: DEBUG nova.network.neutron [None req-3e128472-436c-4de9-a273-dfc54e42830f tempest-ServersV294TestFqdnHostnames-110312040 tempest-ServersV294TestFqdnHostnames-110312040-project-member] [instance: 32dbef6d-d314-4fa6-972a-e7b1f22eb11d] Successfully created port: d88bfcf3-27a0-4cbc-b13c-12cb6a612415 {{(pid=63279) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1878.378576] env[63279]: DEBUG oslo_vmware.api [None req-1c67a5c6-4ab3-4770-b276-ecfec0564f8f tempest-ServersTestManualDisk-725019117 tempest-ServersTestManualDisk-725019117-project-member] Task: {'id': task-2086916, 'name': ReconfigVM_Task, 'duration_secs': 0.501022} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1878.378859] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-1c67a5c6-4ab3-4770-b276-ecfec0564f8f tempest-ServersTestManualDisk-725019117 tempest-ServersTestManualDisk-725019117-project-member] [instance: ecedded1-7169-49a4-8a9e-2fe4086db986] Reconfigured VM instance instance-00000017 to attach disk [datastore1] ecedded1-7169-49a4-8a9e-2fe4086db986/ecedded1-7169-49a4-8a9e-2fe4086db986.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1878.379535] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-8997f922-01a6-467c-8d27-df64545f7c02 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1878.387593] env[63279]: DEBUG nova.compute.manager [None req-30faeabd-f04e-4cfe-bb97-6e2391a53b66 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] [instance: 00fb1bc8-2f70-4fa3-a6a2-54fd38ba89c7] Start destroying the instance on the hypervisor. {{(pid=63279) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1878.387803] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-30faeabd-f04e-4cfe-bb97-6e2391a53b66 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] [instance: 00fb1bc8-2f70-4fa3-a6a2-54fd38ba89c7] Destroying instance {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1878.394778] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-819ad083-2bcc-49a1-aec2-6ef97bbfd620 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1878.398539] env[63279]: DEBUG oslo_vmware.api [None req-1c67a5c6-4ab3-4770-b276-ecfec0564f8f tempest-ServersTestManualDisk-725019117 tempest-ServersTestManualDisk-725019117-project-member] Waiting for the task: (returnval){ [ 1878.398539] env[63279]: value = "task-2086917" [ 1878.398539] env[63279]: _type = "Task" [ 1878.398539] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1878.408093] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-30faeabd-f04e-4cfe-bb97-6e2391a53b66 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] [instance: 00fb1bc8-2f70-4fa3-a6a2-54fd38ba89c7] Powering off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1878.410056] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e273361d-463d-469a-b3f3-36c991710759 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1878.417289] env[63279]: DEBUG oslo_vmware.api [None req-1c67a5c6-4ab3-4770-b276-ecfec0564f8f tempest-ServersTestManualDisk-725019117 tempest-ServersTestManualDisk-725019117-project-member] Task: {'id': task-2086917, 'name': Rename_Task} progress is 14%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1878.420034] env[63279]: DEBUG oslo_vmware.api [None req-30faeabd-f04e-4cfe-bb97-6e2391a53b66 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Waiting for the task: (returnval){ [ 1878.420034] env[63279]: value = "task-2086918" [ 1878.420034] env[63279]: _type = "Task" [ 1878.420034] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1878.430912] env[63279]: DEBUG oslo_vmware.api [None req-30faeabd-f04e-4cfe-bb97-6e2391a53b66 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Task: {'id': task-2086918, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1878.438890] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b877bb5-da32-49ad-946d-d2c0e2bc93ff {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1878.448368] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0136cf64-7a43-468f-ae9e-9e266c8f6d70 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1878.490366] env[63279]: DEBUG oslo_concurrency.lockutils [None req-96273fea-e321-4566-9745-1792b9a7cc32 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1878.490366] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c979ba0e-1b7a-434c-9237-bc7ca9f7dcf1 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1878.499686] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d0adb5b-ae12-48b7-8716-52e79f2575b0 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1878.515648] env[63279]: DEBUG nova.compute.provider_tree [None req-90efc3b8-a656-4eb0-8ac7-af93c9b6bce9 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Inventory has not changed in ProviderTree for provider: 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1878.760319] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2086915, 'name': CreateVM_Task, 'duration_secs': 0.662243} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1878.760811] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ff2f355a-9687-4491-b243-6133e4b7b866] Created VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1878.763016] env[63279]: DEBUG oslo_concurrency.lockutils [None req-4eb26c5f-1d6c-44a0-9365-feefd9e1d5a1 tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1878.763016] env[63279]: DEBUG oslo_concurrency.lockutils [None req-4eb26c5f-1d6c-44a0-9365-feefd9e1d5a1 tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1878.763016] env[63279]: DEBUG oslo_concurrency.lockutils [None req-4eb26c5f-1d6c-44a0-9365-feefd9e1d5a1 tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1878.763016] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dafca4a7-3783-450c-b060-2c8e002bc47e {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1878.767016] env[63279]: DEBUG oslo_vmware.api [None req-4eb26c5f-1d6c-44a0-9365-feefd9e1d5a1 tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Waiting for the task: (returnval){ [ 1878.767016] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52c4a136-de08-6e9f-86bb-f6710833fbb7" [ 1878.767016] env[63279]: _type = "Task" [ 1878.767016] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1878.777253] env[63279]: DEBUG oslo_vmware.api [None req-4eb26c5f-1d6c-44a0-9365-feefd9e1d5a1 tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52c4a136-de08-6e9f-86bb-f6710833fbb7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1878.910974] env[63279]: DEBUG oslo_vmware.api [None req-1c67a5c6-4ab3-4770-b276-ecfec0564f8f tempest-ServersTestManualDisk-725019117 tempest-ServersTestManualDisk-725019117-project-member] Task: {'id': task-2086917, 'name': Rename_Task, 'duration_secs': 0.186985} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1878.911326] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-1c67a5c6-4ab3-4770-b276-ecfec0564f8f tempest-ServersTestManualDisk-725019117 tempest-ServersTestManualDisk-725019117-project-member] [instance: ecedded1-7169-49a4-8a9e-2fe4086db986] Powering on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1878.911585] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-dec815af-2f6e-4509-b31b-867e58905b30 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1878.918284] env[63279]: DEBUG oslo_vmware.api [None req-1c67a5c6-4ab3-4770-b276-ecfec0564f8f tempest-ServersTestManualDisk-725019117 tempest-ServersTestManualDisk-725019117-project-member] Waiting for the task: (returnval){ [ 1878.918284] env[63279]: value = "task-2086919" [ 1878.918284] env[63279]: _type = "Task" [ 1878.918284] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1878.934050] env[63279]: DEBUG oslo_vmware.api [None req-1c67a5c6-4ab3-4770-b276-ecfec0564f8f tempest-ServersTestManualDisk-725019117 tempest-ServersTestManualDisk-725019117-project-member] Task: {'id': task-2086919, 'name': PowerOnVM_Task} progress is 33%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1878.937395] env[63279]: DEBUG oslo_vmware.api [None req-30faeabd-f04e-4cfe-bb97-6e2391a53b66 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Task: {'id': task-2086918, 'name': PowerOffVM_Task, 'duration_secs': 0.266922} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1878.937732] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-30faeabd-f04e-4cfe-bb97-6e2391a53b66 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] [instance: 00fb1bc8-2f70-4fa3-a6a2-54fd38ba89c7] Powered off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1878.937944] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-30faeabd-f04e-4cfe-bb97-6e2391a53b66 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] [instance: 00fb1bc8-2f70-4fa3-a6a2-54fd38ba89c7] Unregistering the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1878.938283] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-deeb5779-0db6-4989-9e1c-d2c80a0ad8fb {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1878.974762] env[63279]: DEBUG nova.network.neutron [req-d9b188b9-0613-49c8-ae5a-c3a4d0e5acd2 req-8527af14-8587-410a-be8c-0ba9773e9b7a service nova] [instance: ff2f355a-9687-4491-b243-6133e4b7b866] Updated VIF entry in instance network info cache for port e3bb5b3e-6ab2-41f8-95fb-5e459b56bec8. 
{{(pid=63279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1878.975153] env[63279]: DEBUG nova.network.neutron [req-d9b188b9-0613-49c8-ae5a-c3a4d0e5acd2 req-8527af14-8587-410a-be8c-0ba9773e9b7a service nova] [instance: ff2f355a-9687-4491-b243-6133e4b7b866] Updating instance_info_cache with network_info: [{"id": "e3bb5b3e-6ab2-41f8-95fb-5e459b56bec8", "address": "fa:16:3e:65:bd:d9", "network": {"id": "6b1ead98-699d-4ad0-bb54-2a4fa09faf6d", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-861811231-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3ea345c4bc9b4781acb6f774ac88c690", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7ab8d568-adb0-4f3b-b6cc-68413e6546ae", "external-id": "nsx-vlan-transportzone-86", "segmentation_id": 86, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape3bb5b3e-6a", "ovs_interfaceid": "e3bb5b3e-6ab2-41f8-95fb-5e459b56bec8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1879.018856] env[63279]: DEBUG nova.scheduler.client.report [None req-90efc3b8-a656-4eb0-8ac7-af93c9b6bce9 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Inventory has not changed for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1879.033823] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-30faeabd-f04e-4cfe-bb97-6e2391a53b66 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] [instance: 00fb1bc8-2f70-4fa3-a6a2-54fd38ba89c7] Unregistered the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1879.034300] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-30faeabd-f04e-4cfe-bb97-6e2391a53b66 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] [instance: 00fb1bc8-2f70-4fa3-a6a2-54fd38ba89c7] Deleting contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1879.034509] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-30faeabd-f04e-4cfe-bb97-6e2391a53b66 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Deleting the datastore file [datastore1] 00fb1bc8-2f70-4fa3-a6a2-54fd38ba89c7 {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1879.035483] 
env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-63d2a346-8867-44f7-94a3-1be6c535d4fd {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1879.042812] env[63279]: DEBUG oslo_vmware.api [None req-30faeabd-f04e-4cfe-bb97-6e2391a53b66 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Waiting for the task: (returnval){ [ 1879.042812] env[63279]: value = "task-2086921" [ 1879.042812] env[63279]: _type = "Task" [ 1879.042812] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1879.051569] env[63279]: DEBUG oslo_vmware.api [None req-30faeabd-f04e-4cfe-bb97-6e2391a53b66 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Task: {'id': task-2086921, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1879.230474] env[63279]: DEBUG nova.compute.manager [None req-3e128472-436c-4de9-a273-dfc54e42830f tempest-ServersV294TestFqdnHostnames-110312040 tempest-ServersV294TestFqdnHostnames-110312040-project-member] [instance: 32dbef6d-d314-4fa6-972a-e7b1f22eb11d] Start spawning the instance on the hypervisor. {{(pid=63279) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1879.252924] env[63279]: DEBUG nova.virt.hardware [None req-3e128472-436c-4de9-a273-dfc54e42830f tempest-ServersV294TestFqdnHostnames-110312040 tempest-ServersV294TestFqdnHostnames-110312040-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-13T17:30:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-13T17:30:01Z,direct_url=,disk_format='vmdk',id=30887889-e45b-4f67-8b3c-16216e594a90,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a935e86eee0a4c38adfe0367d2097a61',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-13T17:30:02Z,virtual_size=,visibility=), allow threads: False {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1879.253204] env[63279]: DEBUG nova.virt.hardware [None req-3e128472-436c-4de9-a273-dfc54e42830f tempest-ServersV294TestFqdnHostnames-110312040 tempest-ServersV294TestFqdnHostnames-110312040-project-member] Flavor limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1879.253371] env[63279]: DEBUG nova.virt.hardware [None req-3e128472-436c-4de9-a273-dfc54e42830f tempest-ServersV294TestFqdnHostnames-110312040 tempest-ServersV294TestFqdnHostnames-110312040-project-member] Image limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1879.253555] env[63279]: DEBUG nova.virt.hardware [None req-3e128472-436c-4de9-a273-dfc54e42830f tempest-ServersV294TestFqdnHostnames-110312040 tempest-ServersV294TestFqdnHostnames-110312040-project-member] Flavor pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:392}} [ 1879.255072] env[63279]: DEBUG nova.virt.hardware [None req-3e128472-436c-4de9-a273-dfc54e42830f tempest-ServersV294TestFqdnHostnames-110312040 tempest-ServersV294TestFqdnHostnames-110312040-project-member] Image pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1879.255072] env[63279]: DEBUG nova.virt.hardware [None req-3e128472-436c-4de9-a273-dfc54e42830f tempest-ServersV294TestFqdnHostnames-110312040 tempest-ServersV294TestFqdnHostnames-110312040-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1879.255072] env[63279]: DEBUG nova.virt.hardware [None req-3e128472-436c-4de9-a273-dfc54e42830f tempest-ServersV294TestFqdnHostnames-110312040 tempest-ServersV294TestFqdnHostnames-110312040-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1879.255072] env[63279]: DEBUG nova.virt.hardware [None req-3e128472-436c-4de9-a273-dfc54e42830f tempest-ServersV294TestFqdnHostnames-110312040 tempest-ServersV294TestFqdnHostnames-110312040-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1879.255072] env[63279]: DEBUG nova.virt.hardware [None req-3e128472-436c-4de9-a273-dfc54e42830f tempest-ServersV294TestFqdnHostnames-110312040 tempest-ServersV294TestFqdnHostnames-110312040-project-member] Got 1 possible topologies {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1879.255275] env[63279]: DEBUG nova.virt.hardware [None req-3e128472-436c-4de9-a273-dfc54e42830f tempest-ServersV294TestFqdnHostnames-110312040 tempest-ServersV294TestFqdnHostnames-110312040-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1879.255275] env[63279]: DEBUG nova.virt.hardware [None req-3e128472-436c-4de9-a273-dfc54e42830f tempest-ServersV294TestFqdnHostnames-110312040 tempest-ServersV294TestFqdnHostnames-110312040-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1879.256591] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-343e7405-4659-4d84-82bf-4b7c1ca125ed {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1879.265162] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62dc4cc3-c8e5-49a5-adda-9f3e521b1edc {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1879.291414] env[63279]: DEBUG oslo_vmware.api [None req-4eb26c5f-1d6c-44a0-9365-feefd9e1d5a1 tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52c4a136-de08-6e9f-86bb-f6710833fbb7, 'name': SearchDatastore_Task, 'duration_secs': 0.014451} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1879.291585] env[63279]: DEBUG oslo_concurrency.lockutils [None req-4eb26c5f-1d6c-44a0-9365-feefd9e1d5a1 tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1879.291845] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-4eb26c5f-1d6c-44a0-9365-feefd9e1d5a1 tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] [instance: ff2f355a-9687-4491-b243-6133e4b7b866] Processing image 30887889-e45b-4f67-8b3c-16216e594a90 {{(pid=63279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1879.292248] env[63279]: DEBUG oslo_concurrency.lockutils [None req-4eb26c5f-1d6c-44a0-9365-feefd9e1d5a1 tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1879.292652] env[63279]: DEBUG oslo_concurrency.lockutils [None req-4eb26c5f-1d6c-44a0-9365-feefd9e1d5a1 tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1879.293182] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-4eb26c5f-1d6c-44a0-9365-feefd9e1d5a1 tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1879.293477] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e4ecbcca-b318-45c7-bcd3-3ca7f2a19952 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1879.302255] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-4eb26c5f-1d6c-44a0-9365-feefd9e1d5a1 tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1879.302454] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-4eb26c5f-1d6c-44a0-9365-feefd9e1d5a1 tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1879.303556] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3a8d8946-09ac-42e7-83aa-352dd53cbf8d {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1879.309427] env[63279]: DEBUG oslo_vmware.api [None req-4eb26c5f-1d6c-44a0-9365-feefd9e1d5a1 tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Waiting for the task: (returnval){ [ 1879.309427] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52b4f1b4-5510-9f88-3c28-e0e8d83e862b" [ 1879.309427] env[63279]: _type = "Task" [ 1879.309427] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1879.318647] env[63279]: DEBUG oslo_vmware.api [None req-4eb26c5f-1d6c-44a0-9365-feefd9e1d5a1 tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52b4f1b4-5510-9f88-3c28-e0e8d83e862b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1879.428955] env[63279]: DEBUG oslo_vmware.api [None req-1c67a5c6-4ab3-4770-b276-ecfec0564f8f tempest-ServersTestManualDisk-725019117 tempest-ServersTestManualDisk-725019117-project-member] Task: {'id': task-2086919, 'name': PowerOnVM_Task} progress is 89%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1879.477787] env[63279]: DEBUG oslo_concurrency.lockutils [req-d9b188b9-0613-49c8-ae5a-c3a4d0e5acd2 req-8527af14-8587-410a-be8c-0ba9773e9b7a service nova] Releasing lock "refresh_cache-ff2f355a-9687-4491-b243-6133e4b7b866" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1879.524743] env[63279]: DEBUG oslo_concurrency.lockutils [None req-90efc3b8-a656-4eb0-8ac7-af93c9b6bce9 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.325s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1879.525393] env[63279]: DEBUG nova.compute.manager [None req-90efc3b8-a656-4eb0-8ac7-af93c9b6bce9 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] [instance: 2f5e22f6-ba70-4848-965b-eb1553115323] Start building networks asynchronously for instance. 
{{(pid=63279) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1879.528479] env[63279]: DEBUG oslo_concurrency.lockutils [None req-2dcf1f85-69d5-43a3-b223-97481fe4bb2a tempest-ServerDiagnosticsV248Test-703192613 tempest-ServerDiagnosticsV248Test-703192613-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 29.082s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1879.528748] env[63279]: DEBUG nova.objects.instance [None req-2dcf1f85-69d5-43a3-b223-97481fe4bb2a tempest-ServerDiagnosticsV248Test-703192613 tempest-ServerDiagnosticsV248Test-703192613-project-member] Lazy-loading 'resources' on Instance uuid abd63285-ee3c-4546-b86d-6d4388765d94 {{(pid=63279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1879.553576] env[63279]: DEBUG oslo_vmware.api [None req-30faeabd-f04e-4cfe-bb97-6e2391a53b66 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Task: {'id': task-2086921, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.272075} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1879.553849] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-30faeabd-f04e-4cfe-bb97-6e2391a53b66 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Deleted the datastore file {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1879.554105] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-30faeabd-f04e-4cfe-bb97-6e2391a53b66 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] [instance: 00fb1bc8-2f70-4fa3-a6a2-54fd38ba89c7] Deleted contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1879.554312] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-30faeabd-f04e-4cfe-bb97-6e2391a53b66 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] [instance: 00fb1bc8-2f70-4fa3-a6a2-54fd38ba89c7] Instance destroyed {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1879.554513] env[63279]: INFO nova.compute.manager [None req-30faeabd-f04e-4cfe-bb97-6e2391a53b66 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] [instance: 00fb1bc8-2f70-4fa3-a6a2-54fd38ba89c7] Took 1.17 seconds to destroy the instance on the hypervisor. [ 1879.554763] env[63279]: DEBUG oslo.service.loopingcall [None req-30faeabd-f04e-4cfe-bb97-6e2391a53b66 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1879.555261] env[63279]: DEBUG nova.compute.manager [-] [instance: 00fb1bc8-2f70-4fa3-a6a2-54fd38ba89c7] Deallocating network for instance {{(pid=63279) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1879.555382] env[63279]: DEBUG nova.network.neutron [-] [instance: 00fb1bc8-2f70-4fa3-a6a2-54fd38ba89c7] deallocate_for_instance() {{(pid=63279) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1879.830998] env[63279]: DEBUG oslo_vmware.api [None req-4eb26c5f-1d6c-44a0-9365-feefd9e1d5a1 tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52b4f1b4-5510-9f88-3c28-e0e8d83e862b, 'name': SearchDatastore_Task, 'duration_secs': 0.013935} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1879.834758] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-90e414ea-5047-412b-af56-a6c8fef075f2 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1879.850560] env[63279]: DEBUG oslo_vmware.api [None req-4eb26c5f-1d6c-44a0-9365-feefd9e1d5a1 tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Waiting for the task: (returnval){ [ 1879.850560] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52513a65-7155-2895-e5ae-dd11d5858b6f" [ 1879.850560] env[63279]: _type = "Task" [ 1879.850560] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1879.861049] env[63279]: DEBUG oslo_vmware.api [None req-4eb26c5f-1d6c-44a0-9365-feefd9e1d5a1 tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52513a65-7155-2895-e5ae-dd11d5858b6f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1879.929236] env[63279]: DEBUG oslo_vmware.api [None req-1c67a5c6-4ab3-4770-b276-ecfec0564f8f tempest-ServersTestManualDisk-725019117 tempest-ServersTestManualDisk-725019117-project-member] Task: {'id': task-2086919, 'name': PowerOnVM_Task, 'duration_secs': 0.638897} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1879.929554] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-1c67a5c6-4ab3-4770-b276-ecfec0564f8f tempest-ServersTestManualDisk-725019117 tempest-ServersTestManualDisk-725019117-project-member] [instance: ecedded1-7169-49a4-8a9e-2fe4086db986] Powered on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1879.929740] env[63279]: INFO nova.compute.manager [None req-1c67a5c6-4ab3-4770-b276-ecfec0564f8f tempest-ServersTestManualDisk-725019117 tempest-ServersTestManualDisk-725019117-project-member] [instance: ecedded1-7169-49a4-8a9e-2fe4086db986] Took 8.96 seconds to spawn the instance on the hypervisor. 
[ 1879.931031] env[63279]: DEBUG nova.compute.manager [None req-1c67a5c6-4ab3-4770-b276-ecfec0564f8f tempest-ServersTestManualDisk-725019117 tempest-ServersTestManualDisk-725019117-project-member] [instance: ecedded1-7169-49a4-8a9e-2fe4086db986] Checking state {{(pid=63279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1879.931031] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-350e7149-b6f0-4b10-b56a-9b57f56e98dd {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1879.956769] env[63279]: DEBUG nova.compute.manager [req-dde60f3a-3b7e-4bca-90c1-6d00205be3c1 req-bf47ef5b-82d4-455d-82cc-424636873b0d service nova] [instance: 00fb1bc8-2f70-4fa3-a6a2-54fd38ba89c7] Received event network-vif-deleted-c1420e26-8471-4d0e-868c-9ff9ead67869 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 1879.956769] env[63279]: INFO nova.compute.manager [req-dde60f3a-3b7e-4bca-90c1-6d00205be3c1 req-bf47ef5b-82d4-455d-82cc-424636873b0d service nova] [instance: 00fb1bc8-2f70-4fa3-a6a2-54fd38ba89c7] Neutron deleted interface c1420e26-8471-4d0e-868c-9ff9ead67869; detaching it from the instance and deleting it from the info cache [ 1879.956848] env[63279]: DEBUG nova.network.neutron [req-dde60f3a-3b7e-4bca-90c1-6d00205be3c1 req-bf47ef5b-82d4-455d-82cc-424636873b0d service nova] [instance: 00fb1bc8-2f70-4fa3-a6a2-54fd38ba89c7] Updating instance_info_cache with network_info: [] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1880.032509] env[63279]: DEBUG nova.compute.utils [None req-90efc3b8-a656-4eb0-8ac7-af93c9b6bce9 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Using /dev/sd instead of None {{(pid=63279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1880.037876] env[63279]: DEBUG nova.compute.manager [None req-90efc3b8-a656-4eb0-8ac7-af93c9b6bce9 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] [instance: 2f5e22f6-ba70-4848-965b-eb1553115323] Allocating IP information in the background. 
{{(pid=63279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1880.038128] env[63279]: DEBUG nova.network.neutron [None req-90efc3b8-a656-4eb0-8ac7-af93c9b6bce9 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] [instance: 2f5e22f6-ba70-4848-965b-eb1553115323] allocate_for_instance() {{(pid=63279) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1880.080774] env[63279]: DEBUG oslo_concurrency.lockutils [None req-d078f7b9-924c-4992-81c3-367c64f2edff tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] Acquiring lock "79032b2a-74f7-4c6d-8f71-f848fe372ba2" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1880.081308] env[63279]: DEBUG oslo_concurrency.lockutils [None req-d078f7b9-924c-4992-81c3-367c64f2edff tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] Lock "79032b2a-74f7-4c6d-8f71-f848fe372ba2" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1880.106646] env[63279]: DEBUG nova.policy [None req-90efc3b8-a656-4eb0-8ac7-af93c9b6bce9 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a8e9485721a642f6b056e27f42c73958', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '0fd8bdc7d8664db698236f56d82adcf0', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63279) authorize /opt/stack/nova/nova/policy.py:192}} [ 1880.179358] env[63279]: DEBUG nova.network.neutron [None req-3e128472-436c-4de9-a273-dfc54e42830f tempest-ServersV294TestFqdnHostnames-110312040 tempest-ServersV294TestFqdnHostnames-110312040-project-member] [instance: 32dbef6d-d314-4fa6-972a-e7b1f22eb11d] Successfully updated port: d88bfcf3-27a0-4cbc-b13c-12cb6a612415 {{(pid=63279) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1880.313065] env[63279]: DEBUG nova.compute.manager [req-e02a620c-84cc-45ac-84b3-aca3ac5f80cd req-e5553590-aeff-45d7-ac9d-2f1d6b8fb363 service nova] [instance: 32dbef6d-d314-4fa6-972a-e7b1f22eb11d] Received event network-vif-plugged-d88bfcf3-27a0-4cbc-b13c-12cb6a612415 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 1880.313385] env[63279]: DEBUG oslo_concurrency.lockutils [req-e02a620c-84cc-45ac-84b3-aca3ac5f80cd req-e5553590-aeff-45d7-ac9d-2f1d6b8fb363 service nova] Acquiring lock "32dbef6d-d314-4fa6-972a-e7b1f22eb11d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1880.313606] env[63279]: DEBUG oslo_concurrency.lockutils [req-e02a620c-84cc-45ac-84b3-aca3ac5f80cd req-e5553590-aeff-45d7-ac9d-2f1d6b8fb363 service nova] Lock "32dbef6d-d314-4fa6-972a-e7b1f22eb11d-events" acquired by 
"nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1880.313774] env[63279]: DEBUG oslo_concurrency.lockutils [req-e02a620c-84cc-45ac-84b3-aca3ac5f80cd req-e5553590-aeff-45d7-ac9d-2f1d6b8fb363 service nova] Lock "32dbef6d-d314-4fa6-972a-e7b1f22eb11d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1880.313935] env[63279]: DEBUG nova.compute.manager [req-e02a620c-84cc-45ac-84b3-aca3ac5f80cd req-e5553590-aeff-45d7-ac9d-2f1d6b8fb363 service nova] [instance: 32dbef6d-d314-4fa6-972a-e7b1f22eb11d] No waiting events found dispatching network-vif-plugged-d88bfcf3-27a0-4cbc-b13c-12cb6a612415 {{(pid=63279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1880.314269] env[63279]: WARNING nova.compute.manager [req-e02a620c-84cc-45ac-84b3-aca3ac5f80cd req-e5553590-aeff-45d7-ac9d-2f1d6b8fb363 service nova] [instance: 32dbef6d-d314-4fa6-972a-e7b1f22eb11d] Received unexpected event network-vif-plugged-d88bfcf3-27a0-4cbc-b13c-12cb6a612415 for instance with vm_state building and task_state spawning. [ 1880.314500] env[63279]: DEBUG nova.compute.manager [req-e02a620c-84cc-45ac-84b3-aca3ac5f80cd req-e5553590-aeff-45d7-ac9d-2f1d6b8fb363 service nova] [instance: 32dbef6d-d314-4fa6-972a-e7b1f22eb11d] Received event network-changed-d88bfcf3-27a0-4cbc-b13c-12cb6a612415 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 1880.314732] env[63279]: DEBUG nova.compute.manager [req-e02a620c-84cc-45ac-84b3-aca3ac5f80cd req-e5553590-aeff-45d7-ac9d-2f1d6b8fb363 service nova] [instance: 32dbef6d-d314-4fa6-972a-e7b1f22eb11d] Refreshing instance network info cache due to event network-changed-d88bfcf3-27a0-4cbc-b13c-12cb6a612415. 
{{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11655}} [ 1880.315351] env[63279]: DEBUG oslo_concurrency.lockutils [req-e02a620c-84cc-45ac-84b3-aca3ac5f80cd req-e5553590-aeff-45d7-ac9d-2f1d6b8fb363 service nova] Acquiring lock "refresh_cache-32dbef6d-d314-4fa6-972a-e7b1f22eb11d" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1880.315351] env[63279]: DEBUG oslo_concurrency.lockutils [req-e02a620c-84cc-45ac-84b3-aca3ac5f80cd req-e5553590-aeff-45d7-ac9d-2f1d6b8fb363 service nova] Acquired lock "refresh_cache-32dbef6d-d314-4fa6-972a-e7b1f22eb11d" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1880.315351] env[63279]: DEBUG nova.network.neutron [req-e02a620c-84cc-45ac-84b3-aca3ac5f80cd req-e5553590-aeff-45d7-ac9d-2f1d6b8fb363 service nova] [instance: 32dbef6d-d314-4fa6-972a-e7b1f22eb11d] Refreshing network info cache for port d88bfcf3-27a0-4cbc-b13c-12cb6a612415 {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1880.351540] env[63279]: DEBUG nova.network.neutron [-] [instance: 00fb1bc8-2f70-4fa3-a6a2-54fd38ba89c7] Updating instance_info_cache with network_info: [] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1880.368950] env[63279]: DEBUG oslo_vmware.api [None req-4eb26c5f-1d6c-44a0-9365-feefd9e1d5a1 tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52513a65-7155-2895-e5ae-dd11d5858b6f, 'name': SearchDatastore_Task, 'duration_secs': 0.017415} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1880.372183] env[63279]: DEBUG oslo_concurrency.lockutils [None req-4eb26c5f-1d6c-44a0-9365-feefd9e1d5a1 tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1880.373341] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-4eb26c5f-1d6c-44a0-9365-feefd9e1d5a1 tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] ff2f355a-9687-4491-b243-6133e4b7b866/ff2f355a-9687-4491-b243-6133e4b7b866.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1880.374279] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5433b452-5079-4ba2-b13e-a0b231d4b2b6 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1880.386085] env[63279]: DEBUG oslo_vmware.api [None req-4eb26c5f-1d6c-44a0-9365-feefd9e1d5a1 tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Waiting for the task: (returnval){ [ 1880.386085] env[63279]: value = "task-2086922" [ 1880.386085] env[63279]: _type = "Task" [ 1880.386085] env[63279]: } to complete. 
{{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1880.397238] env[63279]: DEBUG oslo_vmware.api [None req-4eb26c5f-1d6c-44a0-9365-feefd9e1d5a1 tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Task: {'id': task-2086922, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1880.452399] env[63279]: INFO nova.compute.manager [None req-1c67a5c6-4ab3-4770-b276-ecfec0564f8f tempest-ServersTestManualDisk-725019117 tempest-ServersTestManualDisk-725019117-project-member] [instance: ecedded1-7169-49a4-8a9e-2fe4086db986] Took 39.81 seconds to build instance. [ 1880.463908] env[63279]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c2e4fde1-2457-443d-b6ae-8c73c2f7e8bf {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1880.475737] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4261e54-345f-4e3b-b3d1-8f1686a07cc0 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1880.500602] env[63279]: DEBUG oslo_concurrency.lockutils [None req-f091a69a-b91d-4949-b27c-36c5e3e3f986 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Acquiring lock "5656c853-ac83-47be-83c4-979a9e87ab91" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1880.501253] env[63279]: DEBUG oslo_concurrency.lockutils [None req-f091a69a-b91d-4949-b27c-36c5e3e3f986 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Lock "5656c853-ac83-47be-83c4-979a9e87ab91" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1880.501454] env[63279]: DEBUG nova.compute.manager [None req-f091a69a-b91d-4949-b27c-36c5e3e3f986 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] [instance: 5656c853-ac83-47be-83c4-979a9e87ab91] Going to confirm migration 1 {{(pid=63279) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:5250}} [ 1880.514904] env[63279]: DEBUG nova.compute.manager [req-dde60f3a-3b7e-4bca-90c1-6d00205be3c1 req-bf47ef5b-82d4-455d-82cc-424636873b0d service nova] [instance: 00fb1bc8-2f70-4fa3-a6a2-54fd38ba89c7] Detach interface failed, port_id=c1420e26-8471-4d0e-868c-9ff9ead67869, reason: Instance 00fb1bc8-2f70-4fa3-a6a2-54fd38ba89c7 could not be found. 
{{(pid=63279) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11484}} [ 1880.521860] env[63279]: DEBUG nova.network.neutron [None req-90efc3b8-a656-4eb0-8ac7-af93c9b6bce9 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] [instance: 2f5e22f6-ba70-4848-965b-eb1553115323] Successfully created port: 8383ed07-21e7-43e0-82a2-0afc179ca995 {{(pid=63279) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1880.538322] env[63279]: DEBUG nova.compute.manager [None req-90efc3b8-a656-4eb0-8ac7-af93c9b6bce9 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] [instance: 2f5e22f6-ba70-4848-965b-eb1553115323] Start building block device mappings for instance. {{(pid=63279) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1880.684564] env[63279]: DEBUG oslo_concurrency.lockutils [None req-3e128472-436c-4de9-a273-dfc54e42830f tempest-ServersV294TestFqdnHostnames-110312040 tempest-ServersV294TestFqdnHostnames-110312040-project-member] Acquiring lock "refresh_cache-32dbef6d-d314-4fa6-972a-e7b1f22eb11d" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1880.728260] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba0745a2-de0f-4965-bd02-39e295c4f069 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1880.736627] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6b3f7a5-df0e-4138-a5b6-f8827609d099 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1880.769129] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d979163-2693-4ec0-8e5a-771c9ed5cc08 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1880.776929] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2cccbd01-b724-4b28-b8e2-8a03d8e1c693 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1880.792667] env[63279]: DEBUG nova.compute.provider_tree [None req-2dcf1f85-69d5-43a3-b223-97481fe4bb2a tempest-ServerDiagnosticsV248Test-703192613 tempest-ServerDiagnosticsV248Test-703192613-project-member] Inventory has not changed in ProviderTree for provider: 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1880.856899] env[63279]: INFO nova.compute.manager [-] [instance: 00fb1bc8-2f70-4fa3-a6a2-54fd38ba89c7] Took 1.30 seconds to deallocate network for instance. [ 1880.868405] env[63279]: DEBUG nova.network.neutron [req-e02a620c-84cc-45ac-84b3-aca3ac5f80cd req-e5553590-aeff-45d7-ac9d-2f1d6b8fb363 service nova] [instance: 32dbef6d-d314-4fa6-972a-e7b1f22eb11d] Instance cache missing network info. {{(pid=63279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1880.896547] env[63279]: DEBUG oslo_vmware.api [None req-4eb26c5f-1d6c-44a0-9365-feefd9e1d5a1 tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Task: {'id': task-2086922, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1880.959066] env[63279]: DEBUG oslo_concurrency.lockutils [None req-1c67a5c6-4ab3-4770-b276-ecfec0564f8f tempest-ServersTestManualDisk-725019117 tempest-ServersTestManualDisk-725019117-project-member] Lock "ecedded1-7169-49a4-8a9e-2fe4086db986" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 55.376s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1880.998653] env[63279]: DEBUG nova.network.neutron [req-e02a620c-84cc-45ac-84b3-aca3ac5f80cd req-e5553590-aeff-45d7-ac9d-2f1d6b8fb363 service nova] [instance: 32dbef6d-d314-4fa6-972a-e7b1f22eb11d] Updating instance_info_cache with network_info: [] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1881.095723] env[63279]: DEBUG oslo_concurrency.lockutils [None req-f091a69a-b91d-4949-b27c-36c5e3e3f986 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Acquiring lock "refresh_cache-5656c853-ac83-47be-83c4-979a9e87ab91" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1881.095926] env[63279]: DEBUG oslo_concurrency.lockutils [None req-f091a69a-b91d-4949-b27c-36c5e3e3f986 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Acquired lock "refresh_cache-5656c853-ac83-47be-83c4-979a9e87ab91" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1881.096184] env[63279]: DEBUG nova.network.neutron [None req-f091a69a-b91d-4949-b27c-36c5e3e3f986 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] [instance: 5656c853-ac83-47be-83c4-979a9e87ab91] Building network info cache for instance {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1881.096386] env[63279]: DEBUG nova.objects.instance [None req-f091a69a-b91d-4949-b27c-36c5e3e3f986 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Lazy-loading 'info_cache' on Instance uuid 5656c853-ac83-47be-83c4-979a9e87ab91 {{(pid=63279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1881.297625] env[63279]: DEBUG nova.scheduler.client.report [None req-2dcf1f85-69d5-43a3-b223-97481fe4bb2a tempest-ServerDiagnosticsV248Test-703192613 tempest-ServerDiagnosticsV248Test-703192613-project-member] Inventory has not changed for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1881.362517] env[63279]: DEBUG oslo_concurrency.lockutils [None req-30faeabd-f04e-4cfe-bb97-6e2391a53b66 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63279) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1881.401018] env[63279]: DEBUG oslo_vmware.api [None req-4eb26c5f-1d6c-44a0-9365-feefd9e1d5a1 tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Task: {'id': task-2086922, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.853962} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1881.401018] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-4eb26c5f-1d6c-44a0-9365-feefd9e1d5a1 tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] ff2f355a-9687-4491-b243-6133e4b7b866/ff2f355a-9687-4491-b243-6133e4b7b866.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1881.401018] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-4eb26c5f-1d6c-44a0-9365-feefd9e1d5a1 tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] [instance: ff2f355a-9687-4491-b243-6133e4b7b866] Extending root virtual disk to 1048576 {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1881.401018] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-9c1802e6-e1ef-4e1e-9509-5f116f44b92b {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1881.406798] env[63279]: DEBUG oslo_vmware.api [None req-4eb26c5f-1d6c-44a0-9365-feefd9e1d5a1 tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Waiting for the task: (returnval){ [ 1881.406798] env[63279]: value = "task-2086923" [ 1881.406798] env[63279]: _type = "Task" [ 1881.406798] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1881.414824] env[63279]: DEBUG oslo_vmware.api [None req-4eb26c5f-1d6c-44a0-9365-feefd9e1d5a1 tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Task: {'id': task-2086923, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1881.461258] env[63279]: DEBUG nova.compute.manager [None req-1085f46f-382d-442a-8127-97a2236452f2 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: 977fa519-2db3-4ee5-981d-c46820a8c72e] Starting instance... 
{{(pid=63279) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1881.501087] env[63279]: DEBUG oslo_concurrency.lockutils [req-e02a620c-84cc-45ac-84b3-aca3ac5f80cd req-e5553590-aeff-45d7-ac9d-2f1d6b8fb363 service nova] Releasing lock "refresh_cache-32dbef6d-d314-4fa6-972a-e7b1f22eb11d" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1881.501501] env[63279]: DEBUG oslo_concurrency.lockutils [None req-3e128472-436c-4de9-a273-dfc54e42830f tempest-ServersV294TestFqdnHostnames-110312040 tempest-ServersV294TestFqdnHostnames-110312040-project-member] Acquired lock "refresh_cache-32dbef6d-d314-4fa6-972a-e7b1f22eb11d" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1881.501663] env[63279]: DEBUG nova.network.neutron [None req-3e128472-436c-4de9-a273-dfc54e42830f tempest-ServersV294TestFqdnHostnames-110312040 tempest-ServersV294TestFqdnHostnames-110312040-project-member] [instance: 32dbef6d-d314-4fa6-972a-e7b1f22eb11d] Building network info cache for instance {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1881.549269] env[63279]: DEBUG nova.compute.manager [None req-90efc3b8-a656-4eb0-8ac7-af93c9b6bce9 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] [instance: 2f5e22f6-ba70-4848-965b-eb1553115323] Start spawning the instance on the hypervisor. {{(pid=63279) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1881.576813] env[63279]: DEBUG nova.virt.hardware [None req-90efc3b8-a656-4eb0-8ac7-af93c9b6bce9 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-13T17:30:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-13T17:30:01Z,direct_url=,disk_format='vmdk',id=30887889-e45b-4f67-8b3c-16216e594a90,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a935e86eee0a4c38adfe0367d2097a61',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-13T17:30:02Z,virtual_size=,visibility=), allow threads: False {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1881.577089] env[63279]: DEBUG nova.virt.hardware [None req-90efc3b8-a656-4eb0-8ac7-af93c9b6bce9 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Flavor limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1881.577301] env[63279]: DEBUG nova.virt.hardware [None req-90efc3b8-a656-4eb0-8ac7-af93c9b6bce9 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Image limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1881.577542] env[63279]: DEBUG nova.virt.hardware [None req-90efc3b8-a656-4eb0-8ac7-af93c9b6bce9 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Flavor pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:392}} [ 1881.577697] env[63279]: DEBUG nova.virt.hardware [None req-90efc3b8-a656-4eb0-8ac7-af93c9b6bce9 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Image pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1881.577847] env[63279]: DEBUG nova.virt.hardware [None req-90efc3b8-a656-4eb0-8ac7-af93c9b6bce9 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1881.580032] env[63279]: DEBUG nova.virt.hardware [None req-90efc3b8-a656-4eb0-8ac7-af93c9b6bce9 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1881.580267] env[63279]: DEBUG nova.virt.hardware [None req-90efc3b8-a656-4eb0-8ac7-af93c9b6bce9 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1881.584119] env[63279]: DEBUG nova.virt.hardware [None req-90efc3b8-a656-4eb0-8ac7-af93c9b6bce9 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Got 1 possible topologies {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1881.584119] env[63279]: DEBUG nova.virt.hardware [None req-90efc3b8-a656-4eb0-8ac7-af93c9b6bce9 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1881.584119] env[63279]: DEBUG nova.virt.hardware [None req-90efc3b8-a656-4eb0-8ac7-af93c9b6bce9 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1881.584119] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e0ed1ab-1a41-4c61-b568-2c59dab4f9f3 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1881.591247] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf823f91-81a9-4fd7-83e3-723c4c05f5c0 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1881.805795] env[63279]: DEBUG oslo_concurrency.lockutils [None req-2dcf1f85-69d5-43a3-b223-97481fe4bb2a tempest-ServerDiagnosticsV248Test-703192613 tempest-ServerDiagnosticsV248Test-703192613-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.275s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1881.810956] env[63279]: DEBUG oslo_concurrency.lockutils [None 
req-bd33fd20-d1db-4c92-86ab-9afd74102e3e tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 30.239s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1881.811491] env[63279]: DEBUG nova.objects.instance [None req-bd33fd20-d1db-4c92-86ab-9afd74102e3e tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Lazy-loading 'resources' on Instance uuid c05e9aaa-e251-480c-94d6-56c29bb6282d {{(pid=63279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1881.837967] env[63279]: INFO nova.scheduler.client.report [None req-2dcf1f85-69d5-43a3-b223-97481fe4bb2a tempest-ServerDiagnosticsV248Test-703192613 tempest-ServerDiagnosticsV248Test-703192613-project-member] Deleted allocations for instance abd63285-ee3c-4546-b86d-6d4388765d94 [ 1881.921517] env[63279]: DEBUG oslo_vmware.api [None req-4eb26c5f-1d6c-44a0-9365-feefd9e1d5a1 tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Task: {'id': task-2086923, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.074493} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1881.921517] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-4eb26c5f-1d6c-44a0-9365-feefd9e1d5a1 tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] [instance: ff2f355a-9687-4491-b243-6133e4b7b866] Extended root virtual disk {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1881.921517] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de5db6d5-e0dc-4a34-9fcd-973f07dccdab {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1881.947209] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-4eb26c5f-1d6c-44a0-9365-feefd9e1d5a1 tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] [instance: ff2f355a-9687-4491-b243-6133e4b7b866] Reconfiguring VM instance instance-00000018 to attach disk [datastore1] ff2f355a-9687-4491-b243-6133e4b7b866/ff2f355a-9687-4491-b243-6133e4b7b866.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1881.947706] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9aabde0e-e513-4368-b51b-ab46e2904a22 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1881.972350] env[63279]: DEBUG oslo_vmware.api [None req-4eb26c5f-1d6c-44a0-9365-feefd9e1d5a1 tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Waiting for the task: (returnval){ [ 1881.972350] env[63279]: value = "task-2086924" [ 1881.972350] env[63279]: _type = "Task" [ 1881.972350] env[63279]: } to complete. 
{{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1881.981810] env[63279]: DEBUG oslo_vmware.api [None req-4eb26c5f-1d6c-44a0-9365-feefd9e1d5a1 tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Task: {'id': task-2086924, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1881.982766] env[63279]: DEBUG oslo_concurrency.lockutils [None req-1085f46f-382d-442a-8127-97a2236452f2 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1881.987357] env[63279]: DEBUG nova.compute.manager [req-4e4bcfde-3f6e-4311-b703-23fd946f0ea2 req-03a97606-9b2a-4575-9ac3-1f2db40a52be service nova] [instance: ecedded1-7169-49a4-8a9e-2fe4086db986] Received event network-changed-bbcf46a8-cec2-4c54-8c6a-c1ba126b1676 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 1881.987588] env[63279]: DEBUG nova.compute.manager [req-4e4bcfde-3f6e-4311-b703-23fd946f0ea2 req-03a97606-9b2a-4575-9ac3-1f2db40a52be service nova] [instance: ecedded1-7169-49a4-8a9e-2fe4086db986] Refreshing instance network info cache due to event network-changed-bbcf46a8-cec2-4c54-8c6a-c1ba126b1676. {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11655}} [ 1881.987915] env[63279]: DEBUG oslo_concurrency.lockutils [req-4e4bcfde-3f6e-4311-b703-23fd946f0ea2 req-03a97606-9b2a-4575-9ac3-1f2db40a52be service nova] Acquiring lock "refresh_cache-ecedded1-7169-49a4-8a9e-2fe4086db986" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1881.988027] env[63279]: DEBUG oslo_concurrency.lockutils [req-4e4bcfde-3f6e-4311-b703-23fd946f0ea2 req-03a97606-9b2a-4575-9ac3-1f2db40a52be service nova] Acquired lock "refresh_cache-ecedded1-7169-49a4-8a9e-2fe4086db986" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1881.988209] env[63279]: DEBUG nova.network.neutron [req-4e4bcfde-3f6e-4311-b703-23fd946f0ea2 req-03a97606-9b2a-4575-9ac3-1f2db40a52be service nova] [instance: ecedded1-7169-49a4-8a9e-2fe4086db986] Refreshing network info cache for port bbcf46a8-cec2-4c54-8c6a-c1ba126b1676 {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1882.055160] env[63279]: DEBUG nova.network.neutron [None req-3e128472-436c-4de9-a273-dfc54e42830f tempest-ServersV294TestFqdnHostnames-110312040 tempest-ServersV294TestFqdnHostnames-110312040-project-member] [instance: 32dbef6d-d314-4fa6-972a-e7b1f22eb11d] Instance cache missing network info. {{(pid=63279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1882.082654] env[63279]: DEBUG oslo_vmware.rw_handles [None req-ae5168ae-dd58-4ce3-bb92-e8f68886bb1b tempest-ImagesOneServerTestJSON-859902932 tempest-ImagesOneServerTestJSON-859902932-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52e0f1db-9f73-a702-251e-169ec188cf05/disk-0.vmdk. 
{{(pid=63279) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1882.083608] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b91bd66-db73-4366-8ce8-f99e2453ed48 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1882.090392] env[63279]: DEBUG oslo_vmware.rw_handles [None req-ae5168ae-dd58-4ce3-bb92-e8f68886bb1b tempest-ImagesOneServerTestJSON-859902932 tempest-ImagesOneServerTestJSON-859902932-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52e0f1db-9f73-a702-251e-169ec188cf05/disk-0.vmdk is in state: ready. {{(pid=63279) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1882.090565] env[63279]: ERROR oslo_vmware.rw_handles [None req-ae5168ae-dd58-4ce3-bb92-e8f68886bb1b tempest-ImagesOneServerTestJSON-859902932 tempest-ImagesOneServerTestJSON-859902932-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52e0f1db-9f73-a702-251e-169ec188cf05/disk-0.vmdk due to incomplete transfer. [ 1882.090792] env[63279]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-9baefdaf-f37d-4a42-8005-7f1b277388e0 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1882.098851] env[63279]: DEBUG oslo_vmware.rw_handles [None req-ae5168ae-dd58-4ce3-bb92-e8f68886bb1b tempest-ImagesOneServerTestJSON-859902932 tempest-ImagesOneServerTestJSON-859902932-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52e0f1db-9f73-a702-251e-169ec188cf05/disk-0.vmdk. {{(pid=63279) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1882.099102] env[63279]: DEBUG nova.virt.vmwareapi.images [None req-ae5168ae-dd58-4ce3-bb92-e8f68886bb1b tempest-ImagesOneServerTestJSON-859902932 tempest-ImagesOneServerTestJSON-859902932-project-member] [instance: 7cd673bb-3795-4c6f-a1e0-a8ea0da0a35f] Uploaded image 164c17e3-3c50-4370-ab43-45bf9ea752e5 to the Glance image server {{(pid=63279) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1882.101454] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-ae5168ae-dd58-4ce3-bb92-e8f68886bb1b tempest-ImagesOneServerTestJSON-859902932 tempest-ImagesOneServerTestJSON-859902932-project-member] [instance: 7cd673bb-3795-4c6f-a1e0-a8ea0da0a35f] Destroying the VM {{(pid=63279) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1882.101854] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-9cc17168-5f5a-4adf-93d5-040aeb76b48e {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1882.112378] env[63279]: DEBUG oslo_vmware.api [None req-ae5168ae-dd58-4ce3-bb92-e8f68886bb1b tempest-ImagesOneServerTestJSON-859902932 tempest-ImagesOneServerTestJSON-859902932-project-member] Waiting for the task: (returnval){ [ 1882.112378] env[63279]: value = "task-2086925" [ 1882.112378] env[63279]: _type = "Task" [ 1882.112378] env[63279]: } to complete. 
{{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1882.126016] env[63279]: DEBUG oslo_vmware.api [None req-ae5168ae-dd58-4ce3-bb92-e8f68886bb1b tempest-ImagesOneServerTestJSON-859902932 tempest-ImagesOneServerTestJSON-859902932-project-member] Task: {'id': task-2086925, 'name': Destroy_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1882.249118] env[63279]: DEBUG nova.network.neutron [None req-3e128472-436c-4de9-a273-dfc54e42830f tempest-ServersV294TestFqdnHostnames-110312040 tempest-ServersV294TestFqdnHostnames-110312040-project-member] [instance: 32dbef6d-d314-4fa6-972a-e7b1f22eb11d] Updating instance_info_cache with network_info: [{"id": "d88bfcf3-27a0-4cbc-b13c-12cb6a612415", "address": "fa:16:3e:e7:de:d2", "network": {"id": "05023788-bf00-479a-85fa-7f5839b525d8", "bridge": "br-int", "label": "tempest-ServersV294TestFqdnHostnames-1571793632-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d496723f0a4e4e3aac1cc2895854d3a5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0d7a2b2f-3b49-4dc8-9096-af16144b27a9", "external-id": "nsx-vlan-transportzone-492", "segmentation_id": 492, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd88bfcf3-27", "ovs_interfaceid": "d88bfcf3-27a0-4cbc-b13c-12cb6a612415", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1882.291513] env[63279]: DEBUG nova.network.neutron [None req-90efc3b8-a656-4eb0-8ac7-af93c9b6bce9 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] [instance: 2f5e22f6-ba70-4848-965b-eb1553115323] Successfully updated port: 8383ed07-21e7-43e0-82a2-0afc179ca995 {{(pid=63279) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1882.346164] env[63279]: DEBUG oslo_concurrency.lockutils [None req-2dcf1f85-69d5-43a3-b223-97481fe4bb2a tempest-ServerDiagnosticsV248Test-703192613 tempest-ServerDiagnosticsV248Test-703192613-project-member] Lock "abd63285-ee3c-4546-b86d-6d4388765d94" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 36.217s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1882.471216] env[63279]: DEBUG nova.network.neutron [None req-f091a69a-b91d-4949-b27c-36c5e3e3f986 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] [instance: 5656c853-ac83-47be-83c4-979a9e87ab91] Updating instance_info_cache with network_info: [{"id": "2cc4a33a-bd88-4aec-a588-7c821bebf971", "address": "fa:16:3e:de:77:2e", "network": {"id": "1e875730-8e0e-4907-a6d2-776025ed7ab9", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": 
"192.168.233.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "a935e86eee0a4c38adfe0367d2097a61", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "32028d02-abaa-4071-bc65-1460f5c772a8", "external-id": "nsx-vlan-transportzone-558", "segmentation_id": 558, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2cc4a33a-bd", "ovs_interfaceid": "2cc4a33a-bd88-4aec-a588-7c821bebf971", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1882.485852] env[63279]: DEBUG oslo_vmware.api [None req-4eb26c5f-1d6c-44a0-9365-feefd9e1d5a1 tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Task: {'id': task-2086924, 'name': ReconfigVM_Task} progress is 99%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1882.623792] env[63279]: DEBUG oslo_vmware.api [None req-ae5168ae-dd58-4ce3-bb92-e8f68886bb1b tempest-ImagesOneServerTestJSON-859902932 tempest-ImagesOneServerTestJSON-859902932-project-member] Task: {'id': task-2086925, 'name': Destroy_Task} progress is 33%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1882.751895] env[63279]: DEBUG oslo_concurrency.lockutils [None req-3e128472-436c-4de9-a273-dfc54e42830f tempest-ServersV294TestFqdnHostnames-110312040 tempest-ServersV294TestFqdnHostnames-110312040-project-member] Releasing lock "refresh_cache-32dbef6d-d314-4fa6-972a-e7b1f22eb11d" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1882.752266] env[63279]: DEBUG nova.compute.manager [None req-3e128472-436c-4de9-a273-dfc54e42830f tempest-ServersV294TestFqdnHostnames-110312040 tempest-ServersV294TestFqdnHostnames-110312040-project-member] [instance: 32dbef6d-d314-4fa6-972a-e7b1f22eb11d] Instance network_info: |[{"id": "d88bfcf3-27a0-4cbc-b13c-12cb6a612415", "address": "fa:16:3e:e7:de:d2", "network": {"id": "05023788-bf00-479a-85fa-7f5839b525d8", "bridge": "br-int", "label": "tempest-ServersV294TestFqdnHostnames-1571793632-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d496723f0a4e4e3aac1cc2895854d3a5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0d7a2b2f-3b49-4dc8-9096-af16144b27a9", "external-id": "nsx-vlan-transportzone-492", "segmentation_id": 492, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd88bfcf3-27", "ovs_interfaceid": "d88bfcf3-27a0-4cbc-b13c-12cb6a612415", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63279) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:2003}} [ 1882.755353] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-3e128472-436c-4de9-a273-dfc54e42830f tempest-ServersV294TestFqdnHostnames-110312040 tempest-ServersV294TestFqdnHostnames-110312040-project-member] [instance: 32dbef6d-d314-4fa6-972a-e7b1f22eb11d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e7:de:d2', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '0d7a2b2f-3b49-4dc8-9096-af16144b27a9', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd88bfcf3-27a0-4cbc-b13c-12cb6a612415', 'vif_model': 'vmxnet3'}] {{(pid=63279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1882.762546] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-3e128472-436c-4de9-a273-dfc54e42830f tempest-ServersV294TestFqdnHostnames-110312040 tempest-ServersV294TestFqdnHostnames-110312040-project-member] Creating folder: Project (d496723f0a4e4e3aac1cc2895854d3a5). Parent ref: group-v427491. {{(pid=63279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1882.765325] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-84323db2-02ed-4039-bae0-98b0c8889de9 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1882.777059] env[63279]: INFO nova.virt.vmwareapi.vm_util [None req-3e128472-436c-4de9-a273-dfc54e42830f tempest-ServersV294TestFqdnHostnames-110312040 tempest-ServersV294TestFqdnHostnames-110312040-project-member] Created folder: Project (d496723f0a4e4e3aac1cc2895854d3a5) in parent group-v427491. [ 1882.777059] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-3e128472-436c-4de9-a273-dfc54e42830f tempest-ServersV294TestFqdnHostnames-110312040 tempest-ServersV294TestFqdnHostnames-110312040-project-member] Creating folder: Instances. Parent ref: group-v427567. {{(pid=63279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1882.777059] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3f6beaba-2ab7-40a9-a9b5-918371203345 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1882.788609] env[63279]: INFO nova.virt.vmwareapi.vm_util [None req-3e128472-436c-4de9-a273-dfc54e42830f tempest-ServersV294TestFqdnHostnames-110312040 tempest-ServersV294TestFqdnHostnames-110312040-project-member] Created folder: Instances in parent group-v427567. [ 1882.788852] env[63279]: DEBUG oslo.service.loopingcall [None req-3e128472-436c-4de9-a273-dfc54e42830f tempest-ServersV294TestFqdnHostnames-110312040 tempest-ServersV294TestFqdnHostnames-110312040-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
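The "Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return" line above comes from oslo.service's loopingcall module. A small, generic sketch of how a caller drives such a loop with oslo_service.loopingcall; this is illustrative only, and the exact wrapper Nova places around create_vm is not shown by the log:

    import itertools

    from oslo_service import loopingcall

    _polls = itertools.count()

    def _poll():
        # Pretend the work finishes after three polls; a real poller would
        # check a task or RPC result instead.
        if next(_polls) >= 3:
            raise loopingcall.LoopingCallDone(retvalue='created')

    timer = loopingcall.FixedIntervalLoopingCall(_poll)
    result = timer.start(interval=0.5).wait()   # blocks until LoopingCallDone
    print(result)                               # 'created'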
{{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1882.789055] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 32dbef6d-d314-4fa6-972a-e7b1f22eb11d] Creating VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1882.789265] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3b6771c4-a86e-4a7e-8fef-cf9e4d1810a5 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1882.810077] env[63279]: DEBUG oslo_concurrency.lockutils [None req-90efc3b8-a656-4eb0-8ac7-af93c9b6bce9 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Acquiring lock "refresh_cache-2f5e22f6-ba70-4848-965b-eb1553115323" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1882.810232] env[63279]: DEBUG oslo_concurrency.lockutils [None req-90efc3b8-a656-4eb0-8ac7-af93c9b6bce9 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Acquired lock "refresh_cache-2f5e22f6-ba70-4848-965b-eb1553115323" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1882.810381] env[63279]: DEBUG nova.network.neutron [None req-90efc3b8-a656-4eb0-8ac7-af93c9b6bce9 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] [instance: 2f5e22f6-ba70-4848-965b-eb1553115323] Building network info cache for instance {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1882.816874] env[63279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1882.816874] env[63279]: value = "task-2086928" [ 1882.816874] env[63279]: _type = "Task" [ 1882.816874] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1882.825796] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2086928, 'name': CreateVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1882.857357] env[63279]: DEBUG nova.network.neutron [req-4e4bcfde-3f6e-4311-b703-23fd946f0ea2 req-03a97606-9b2a-4575-9ac3-1f2db40a52be service nova] [instance: ecedded1-7169-49a4-8a9e-2fe4086db986] Updated VIF entry in instance network info cache for port bbcf46a8-cec2-4c54-8c6a-c1ba126b1676. 
{{(pid=63279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1882.857783] env[63279]: DEBUG nova.network.neutron [req-4e4bcfde-3f6e-4311-b703-23fd946f0ea2 req-03a97606-9b2a-4575-9ac3-1f2db40a52be service nova] [instance: ecedded1-7169-49a4-8a9e-2fe4086db986] Updating instance_info_cache with network_info: [{"id": "bbcf46a8-cec2-4c54-8c6a-c1ba126b1676", "address": "fa:16:3e:e1:30:df", "network": {"id": "08d905b3-a951-4c93-b0ed-a9e73ab6b443", "bridge": "br-int", "label": "tempest-ServersTestManualDisk-1358149569-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.163", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e271c0d8278c4192950ebc988485dd5f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7b2049d7-f99e-425a-afdb-2c95ca88e483", "external-id": "nsx-vlan-transportzone-803", "segmentation_id": 803, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbbcf46a8-ce", "ovs_interfaceid": "bbcf46a8-cec2-4c54-8c6a-c1ba126b1676", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1882.914072] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea6468e3-4ab0-4187-ade4-eed345c34acf {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1882.923651] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f921c681-93cd-4b36-803f-c3c626ca5c0f {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1882.954723] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a1fb8a7-eb4f-46ef-931b-032000d11aa0 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1882.962658] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-738de8cb-12e4-4cf5-bf96-6112e3f55141 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1882.979301] env[63279]: DEBUG oslo_concurrency.lockutils [None req-f091a69a-b91d-4949-b27c-36c5e3e3f986 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Releasing lock "refresh_cache-5656c853-ac83-47be-83c4-979a9e87ab91" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1882.979301] env[63279]: DEBUG nova.objects.instance [None req-f091a69a-b91d-4949-b27c-36c5e3e3f986 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Lazy-loading 'migration_context' on Instance uuid 5656c853-ac83-47be-83c4-979a9e87ab91 {{(pid=63279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1882.980009] env[63279]: DEBUG 
nova.compute.provider_tree [None req-bd33fd20-d1db-4c92-86ab-9afd74102e3e tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Inventory has not changed in ProviderTree for provider: 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1882.990283] env[63279]: DEBUG oslo_vmware.api [None req-4eb26c5f-1d6c-44a0-9365-feefd9e1d5a1 tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Task: {'id': task-2086924, 'name': ReconfigVM_Task, 'duration_secs': 0.581375} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1882.991291] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-4eb26c5f-1d6c-44a0-9365-feefd9e1d5a1 tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] [instance: ff2f355a-9687-4491-b243-6133e4b7b866] Reconfigured VM instance instance-00000018 to attach disk [datastore1] ff2f355a-9687-4491-b243-6133e4b7b866/ff2f355a-9687-4491-b243-6133e4b7b866.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1882.992204] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-99440e82-2f1b-4373-988a-5c12d97ff953 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1882.998927] env[63279]: DEBUG oslo_vmware.api [None req-4eb26c5f-1d6c-44a0-9365-feefd9e1d5a1 tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Waiting for the task: (returnval){ [ 1882.998927] env[63279]: value = "task-2086929" [ 1882.998927] env[63279]: _type = "Task" [ 1882.998927] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1883.007406] env[63279]: DEBUG oslo_vmware.api [None req-4eb26c5f-1d6c-44a0-9365-feefd9e1d5a1 tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Task: {'id': task-2086929, 'name': Rename_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1883.125955] env[63279]: DEBUG oslo_vmware.api [None req-ae5168ae-dd58-4ce3-bb92-e8f68886bb1b tempest-ImagesOneServerTestJSON-859902932 tempest-ImagesOneServerTestJSON-859902932-project-member] Task: {'id': task-2086925, 'name': Destroy_Task} progress is 100%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1883.328763] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2086928, 'name': CreateVM_Task, 'duration_secs': 0.467992} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1883.328900] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 32dbef6d-d314-4fa6-972a-e7b1f22eb11d] Created VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1883.329621] env[63279]: DEBUG oslo_concurrency.lockutils [None req-3e128472-436c-4de9-a273-dfc54e42830f tempest-ServersV294TestFqdnHostnames-110312040 tempest-ServersV294TestFqdnHostnames-110312040-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1883.329878] env[63279]: DEBUG oslo_concurrency.lockutils [None req-3e128472-436c-4de9-a273-dfc54e42830f tempest-ServersV294TestFqdnHostnames-110312040 tempest-ServersV294TestFqdnHostnames-110312040-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1883.330521] env[63279]: DEBUG oslo_concurrency.lockutils [None req-3e128472-436c-4de9-a273-dfc54e42830f tempest-ServersV294TestFqdnHostnames-110312040 tempest-ServersV294TestFqdnHostnames-110312040-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1883.330521] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8c7d1f72-54ea-4fce-a433-44cb93d7700e {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1883.335529] env[63279]: DEBUG oslo_vmware.api [None req-3e128472-436c-4de9-a273-dfc54e42830f tempest-ServersV294TestFqdnHostnames-110312040 tempest-ServersV294TestFqdnHostnames-110312040-project-member] Waiting for the task: (returnval){ [ 1883.335529] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52d195d3-f20b-3271-576b-07db72f63e67" [ 1883.335529] env[63279]: _type = "Task" [ 1883.335529] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1883.343761] env[63279]: DEBUG oslo_vmware.api [None req-3e128472-436c-4de9-a273-dfc54e42830f tempest-ServersV294TestFqdnHostnames-110312040 tempest-ServersV294TestFqdnHostnames-110312040-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52d195d3-f20b-3271-576b-07db72f63e67, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1883.353946] env[63279]: DEBUG nova.network.neutron [None req-90efc3b8-a656-4eb0-8ac7-af93c9b6bce9 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] [instance: 2f5e22f6-ba70-4848-965b-eb1553115323] Instance cache missing network info. 
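The Acquiring / Acquired / Releasing lock lines above (the "[datastore1] devstack-image-cache_base" entry here, and the earlier "compute_resources" and "refresh_cache-<uuid>" locks) are oslo.concurrency's lockutils. A minimal sketch of both forms it offers, with placeholder lock names and stub bodies:

    from oslo_concurrency import lockutils

    def _refresh_cache():
        pass  # stand-in for the real cache refresh work

    # Context-manager form: serializes access to one shared resource within
    # the process (Nova builds names like "refresh_cache-<instance uuid>").
    with lockutils.lock('refresh_cache-<instance-uuid>'):
        _refresh_cache()

    # Decorator form, roughly how critical sections such as the
    # "compute_resources" claims above are serialized.
    @lockutils.synchronized('compute_resources')
    def instance_claim(instance):
        pass  # claim resources for the instance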
{{(pid=63279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1883.363626] env[63279]: DEBUG oslo_concurrency.lockutils [req-4e4bcfde-3f6e-4311-b703-23fd946f0ea2 req-03a97606-9b2a-4575-9ac3-1f2db40a52be service nova] Releasing lock "refresh_cache-ecedded1-7169-49a4-8a9e-2fe4086db986" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1883.483207] env[63279]: DEBUG nova.objects.base [None req-f091a69a-b91d-4949-b27c-36c5e3e3f986 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Object Instance<5656c853-ac83-47be-83c4-979a9e87ab91> lazy-loaded attributes: info_cache,migration_context {{(pid=63279) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1883.484255] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e2fb48e-8ea5-4f68-82c1-c4480f48eb4b {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1883.487555] env[63279]: DEBUG nova.scheduler.client.report [None req-bd33fd20-d1db-4c92-86ab-9afd74102e3e tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Inventory has not changed for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1883.512274] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3c4c359d-4efe-4066-a2dc-0169b8ac6884 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1883.522051] env[63279]: DEBUG oslo_vmware.api [None req-4eb26c5f-1d6c-44a0-9365-feefd9e1d5a1 tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Task: {'id': task-2086929, 'name': Rename_Task, 'duration_secs': 0.304861} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1883.522396] env[63279]: DEBUG oslo_vmware.api [None req-f091a69a-b91d-4949-b27c-36c5e3e3f986 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Waiting for the task: (returnval){ [ 1883.522396] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]521d2ffc-1ecb-a580-dfa8-376b3bd605c6" [ 1883.522396] env[63279]: _type = "Task" [ 1883.522396] env[63279]: } to complete. 
{{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1883.522688] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-4eb26c5f-1d6c-44a0-9365-feefd9e1d5a1 tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] [instance: ff2f355a-9687-4491-b243-6133e4b7b866] Powering on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1883.523310] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d1150d71-c2ea-4b0e-80d0-6794d8681f7b {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1883.536022] env[63279]: DEBUG oslo_vmware.api [None req-f091a69a-b91d-4949-b27c-36c5e3e3f986 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]521d2ffc-1ecb-a580-dfa8-376b3bd605c6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1883.536022] env[63279]: DEBUG oslo_vmware.api [None req-4eb26c5f-1d6c-44a0-9365-feefd9e1d5a1 tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Waiting for the task: (returnval){ [ 1883.536022] env[63279]: value = "task-2086930" [ 1883.536022] env[63279]: _type = "Task" [ 1883.536022] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1883.543874] env[63279]: DEBUG oslo_vmware.api [None req-4eb26c5f-1d6c-44a0-9365-feefd9e1d5a1 tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Task: {'id': task-2086930, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1883.551543] env[63279]: DEBUG nova.network.neutron [None req-90efc3b8-a656-4eb0-8ac7-af93c9b6bce9 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] [instance: 2f5e22f6-ba70-4848-965b-eb1553115323] Updating instance_info_cache with network_info: [{"id": "8383ed07-21e7-43e0-82a2-0afc179ca995", "address": "fa:16:3e:b6:12:ab", "network": {"id": "a7b83e75-3b16-41db-9395-90dead128e80", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-756195345-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0fd8bdc7d8664db698236f56d82adcf0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "53955f0e-c162-4cef-8bd5-335b369c36b6", "external-id": "nsx-vlan-transportzone-623", "segmentation_id": 623, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8383ed07-21", "ovs_interfaceid": "8383ed07-21e7-43e0-82a2-0afc179ca995", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1883.626687] env[63279]: DEBUG oslo_vmware.api [None req-ae5168ae-dd58-4ce3-bb92-e8f68886bb1b tempest-ImagesOneServerTestJSON-859902932 tempest-ImagesOneServerTestJSON-859902932-project-member] Task: {'id': task-2086925, 'name': Destroy_Task} progress is 100%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1883.847416] env[63279]: DEBUG oslo_vmware.api [None req-3e128472-436c-4de9-a273-dfc54e42830f tempest-ServersV294TestFqdnHostnames-110312040 tempest-ServersV294TestFqdnHostnames-110312040-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52d195d3-f20b-3271-576b-07db72f63e67, 'name': SearchDatastore_Task, 'duration_secs': 0.022933} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1883.847917] env[63279]: DEBUG oslo_concurrency.lockutils [None req-3e128472-436c-4de9-a273-dfc54e42830f tempest-ServersV294TestFqdnHostnames-110312040 tempest-ServersV294TestFqdnHostnames-110312040-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1883.848046] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-3e128472-436c-4de9-a273-dfc54e42830f tempest-ServersV294TestFqdnHostnames-110312040 tempest-ServersV294TestFqdnHostnames-110312040-project-member] [instance: 32dbef6d-d314-4fa6-972a-e7b1f22eb11d] Processing image 30887889-e45b-4f67-8b3c-16216e594a90 {{(pid=63279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1883.848316] env[63279]: DEBUG oslo_concurrency.lockutils [None req-3e128472-436c-4de9-a273-dfc54e42830f tempest-ServersV294TestFqdnHostnames-110312040 tempest-ServersV294TestFqdnHostnames-110312040-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1883.848550] env[63279]: DEBUG oslo_concurrency.lockutils [None req-3e128472-436c-4de9-a273-dfc54e42830f tempest-ServersV294TestFqdnHostnames-110312040 tempest-ServersV294TestFqdnHostnames-110312040-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1883.848671] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-3e128472-436c-4de9-a273-dfc54e42830f tempest-ServersV294TestFqdnHostnames-110312040 tempest-ServersV294TestFqdnHostnames-110312040-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1883.849086] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-925719ab-dd49-4ed9-856d-0edba867b851 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1883.858513] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-3e128472-436c-4de9-a273-dfc54e42830f tempest-ServersV294TestFqdnHostnames-110312040 tempest-ServersV294TestFqdnHostnames-110312040-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1883.858775] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-3e128472-436c-4de9-a273-dfc54e42830f tempest-ServersV294TestFqdnHostnames-110312040 tempest-ServersV294TestFqdnHostnames-110312040-project-member] Folder [datastore1] devstack-image-cache_base created. 
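The image-cache handling above works in terms of datastore paths of the form [datastore1] devstack-image-cache_base/<image-id>/<image-id>.vmdk. A tiny illustrative helper (not Nova's ds_util) that builds that path shape from the pieces visible in the log:

    def image_cache_vmdk_path(datastore, image_id,
                              cache_dir='devstack-image-cache_base'):
        """Build '[<datastore>] <cache_dir>/<image_id>/<image_id>.vmdk'."""
        return '[%s] %s/%s/%s.vmdk' % (datastore, cache_dir, image_id, image_id)

    # image_cache_vmdk_path('datastore1', '30887889-e45b-4f67-8b3c-16216e594a90')
    # -> '[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/
    #     30887889-e45b-4f67-8b3c-16216e594a90.vmdk'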
{{(pid=63279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1883.859466] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ac2012e4-f452-4670-9fcc-0236ef5aaf80 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1883.866217] env[63279]: DEBUG oslo_vmware.api [None req-3e128472-436c-4de9-a273-dfc54e42830f tempest-ServersV294TestFqdnHostnames-110312040 tempest-ServersV294TestFqdnHostnames-110312040-project-member] Waiting for the task: (returnval){ [ 1883.866217] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]5299e88d-7dfd-287d-6279-a1b37c2d18af" [ 1883.866217] env[63279]: _type = "Task" [ 1883.866217] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1883.874658] env[63279]: DEBUG oslo_vmware.api [None req-3e128472-436c-4de9-a273-dfc54e42830f tempest-ServersV294TestFqdnHostnames-110312040 tempest-ServersV294TestFqdnHostnames-110312040-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]5299e88d-7dfd-287d-6279-a1b37c2d18af, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1884.011608] env[63279]: DEBUG oslo_concurrency.lockutils [None req-bd33fd20-d1db-4c92-86ab-9afd74102e3e tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.199s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1884.013010] env[63279]: DEBUG oslo_concurrency.lockutils [None req-018b6b0c-2b29-423d-bb8d-c1cfe503a363 tempest-InstanceActionsNegativeTestJSON-35594143 tempest-InstanceActionsNegativeTestJSON-35594143-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 31.729s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1884.014469] env[63279]: INFO nova.compute.claims [None req-018b6b0c-2b29-423d-bb8d-c1cfe503a363 tempest-InstanceActionsNegativeTestJSON-35594143 tempest-InstanceActionsNegativeTestJSON-35594143-project-member] [instance: a8107fa5-9e8b-41dd-9679-8e106a3496a5] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1884.034445] env[63279]: DEBUG oslo_vmware.api [None req-f091a69a-b91d-4949-b27c-36c5e3e3f986 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]521d2ffc-1ecb-a580-dfa8-376b3bd605c6, 'name': SearchDatastore_Task, 'duration_secs': 0.016524} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1884.036087] env[63279]: DEBUG oslo_concurrency.lockutils [None req-f091a69a-b91d-4949-b27c-36c5e3e3f986 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1884.037650] env[63279]: DEBUG nova.compute.manager [req-103a5a47-90d3-4269-82e1-00ddbd52a162 req-c4953e80-6556-41bf-9f23-c3f658c16744 service nova] [instance: 2f5e22f6-ba70-4848-965b-eb1553115323] Received event network-vif-plugged-8383ed07-21e7-43e0-82a2-0afc179ca995 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 1884.037853] env[63279]: DEBUG oslo_concurrency.lockutils [req-103a5a47-90d3-4269-82e1-00ddbd52a162 req-c4953e80-6556-41bf-9f23-c3f658c16744 service nova] Acquiring lock "2f5e22f6-ba70-4848-965b-eb1553115323-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1884.038076] env[63279]: DEBUG oslo_concurrency.lockutils [req-103a5a47-90d3-4269-82e1-00ddbd52a162 req-c4953e80-6556-41bf-9f23-c3f658c16744 service nova] Lock "2f5e22f6-ba70-4848-965b-eb1553115323-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1884.038255] env[63279]: DEBUG oslo_concurrency.lockutils [req-103a5a47-90d3-4269-82e1-00ddbd52a162 req-c4953e80-6556-41bf-9f23-c3f658c16744 service nova] Lock "2f5e22f6-ba70-4848-965b-eb1553115323-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1884.038428] env[63279]: DEBUG nova.compute.manager [req-103a5a47-90d3-4269-82e1-00ddbd52a162 req-c4953e80-6556-41bf-9f23-c3f658c16744 service nova] [instance: 2f5e22f6-ba70-4848-965b-eb1553115323] No waiting events found dispatching network-vif-plugged-8383ed07-21e7-43e0-82a2-0afc179ca995 {{(pid=63279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1884.038597] env[63279]: WARNING nova.compute.manager [req-103a5a47-90d3-4269-82e1-00ddbd52a162 req-c4953e80-6556-41bf-9f23-c3f658c16744 service nova] [instance: 2f5e22f6-ba70-4848-965b-eb1553115323] Received unexpected event network-vif-plugged-8383ed07-21e7-43e0-82a2-0afc179ca995 for instance with vm_state building and task_state spawning. [ 1884.038756] env[63279]: DEBUG nova.compute.manager [req-103a5a47-90d3-4269-82e1-00ddbd52a162 req-c4953e80-6556-41bf-9f23-c3f658c16744 service nova] [instance: 2f5e22f6-ba70-4848-965b-eb1553115323] Received event network-changed-8383ed07-21e7-43e0-82a2-0afc179ca995 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 1884.038908] env[63279]: DEBUG nova.compute.manager [req-103a5a47-90d3-4269-82e1-00ddbd52a162 req-c4953e80-6556-41bf-9f23-c3f658c16744 service nova] [instance: 2f5e22f6-ba70-4848-965b-eb1553115323] Refreshing instance network info cache due to event network-changed-8383ed07-21e7-43e0-82a2-0afc179ca995. 
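The network-vif-plugged / network-changed traffic above ("No waiting events found dispatching ...", "Received unexpected event ...") is Nova's external-event plumbing: Neutron notifications are matched against per-instance waiters keyed by event name and port. A deliberately simplified sketch of that idea, for orientation only; this is not Nova's actual implementation:

    import threading

    class InstanceEvents:
        """Toy model: match external events to waiters by (name, tag)."""

        def __init__(self):
            self._waiters = {}               # (event_name, port_id) -> Event
            self._lock = threading.Lock()

        def prepare(self, name, tag):
            ev = threading.Event()
            with self._lock:
                self._waiters[(name, tag)] = ev
            return ev

        def pop_event(self, name, tag):
            with self._lock:
                ev = self._waiters.pop((name, tag), None)
            if ev is None:
                # Mirrors the WARNING above: the event arrived before any
                # waiter was registered for it.
                print('Received unexpected event %s-%s' % (name, tag))
            else:
                ev.set()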
{{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11655}} [ 1884.039088] env[63279]: DEBUG oslo_concurrency.lockutils [req-103a5a47-90d3-4269-82e1-00ddbd52a162 req-c4953e80-6556-41bf-9f23-c3f658c16744 service nova] Acquiring lock "refresh_cache-2f5e22f6-ba70-4848-965b-eb1553115323" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1884.040969] env[63279]: INFO nova.scheduler.client.report [None req-bd33fd20-d1db-4c92-86ab-9afd74102e3e tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Deleted allocations for instance c05e9aaa-e251-480c-94d6-56c29bb6282d [ 1884.053157] env[63279]: DEBUG oslo_concurrency.lockutils [None req-90efc3b8-a656-4eb0-8ac7-af93c9b6bce9 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Releasing lock "refresh_cache-2f5e22f6-ba70-4848-965b-eb1553115323" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1884.053157] env[63279]: DEBUG nova.compute.manager [None req-90efc3b8-a656-4eb0-8ac7-af93c9b6bce9 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] [instance: 2f5e22f6-ba70-4848-965b-eb1553115323] Instance network_info: |[{"id": "8383ed07-21e7-43e0-82a2-0afc179ca995", "address": "fa:16:3e:b6:12:ab", "network": {"id": "a7b83e75-3b16-41db-9395-90dead128e80", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-756195345-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0fd8bdc7d8664db698236f56d82adcf0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "53955f0e-c162-4cef-8bd5-335b369c36b6", "external-id": "nsx-vlan-transportzone-623", "segmentation_id": 623, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8383ed07-21", "ovs_interfaceid": "8383ed07-21e7-43e0-82a2-0afc179ca995", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1884.054515] env[63279]: DEBUG oslo_concurrency.lockutils [req-103a5a47-90d3-4269-82e1-00ddbd52a162 req-c4953e80-6556-41bf-9f23-c3f658c16744 service nova] Acquired lock "refresh_cache-2f5e22f6-ba70-4848-965b-eb1553115323" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1884.057855] env[63279]: DEBUG nova.network.neutron [req-103a5a47-90d3-4269-82e1-00ddbd52a162 req-c4953e80-6556-41bf-9f23-c3f658c16744 service nova] [instance: 2f5e22f6-ba70-4848-965b-eb1553115323] Refreshing network info cache for port 8383ed07-21e7-43e0-82a2-0afc179ca995 {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1884.057855] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-90efc3b8-a656-4eb0-8ac7-af93c9b6bce9 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] [instance: 
2f5e22f6-ba70-4848-965b-eb1553115323] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b6:12:ab', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '53955f0e-c162-4cef-8bd5-335b369c36b6', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '8383ed07-21e7-43e0-82a2-0afc179ca995', 'vif_model': 'vmxnet3'}] {{(pid=63279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1884.066242] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-90efc3b8-a656-4eb0-8ac7-af93c9b6bce9 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Creating folder: Project (0fd8bdc7d8664db698236f56d82adcf0). Parent ref: group-v427491. {{(pid=63279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1884.072131] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-27c7a1b9-0927-4b38-85ec-9d70457368fd {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1884.074804] env[63279]: DEBUG oslo_vmware.api [None req-4eb26c5f-1d6c-44a0-9365-feefd9e1d5a1 tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Task: {'id': task-2086930, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1884.086686] env[63279]: INFO nova.virt.vmwareapi.vm_util [None req-90efc3b8-a656-4eb0-8ac7-af93c9b6bce9 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Created folder: Project (0fd8bdc7d8664db698236f56d82adcf0) in parent group-v427491. [ 1884.086916] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-90efc3b8-a656-4eb0-8ac7-af93c9b6bce9 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Creating folder: Instances. Parent ref: group-v427570. {{(pid=63279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1884.087434] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d764e488-070c-4719-ab7a-06355a99e06b {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1884.098425] env[63279]: INFO nova.virt.vmwareapi.vm_util [None req-90efc3b8-a656-4eb0-8ac7-af93c9b6bce9 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Created folder: Instances in parent group-v427570. [ 1884.098684] env[63279]: DEBUG oslo.service.loopingcall [None req-90efc3b8-a656-4eb0-8ac7-af93c9b6bce9 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1884.098881] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2f5e22f6-ba70-4848-965b-eb1553115323] Creating VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1884.099389] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-44647679-98ed-4b1c-8a00-b400b0e3cd90 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1884.129507] env[63279]: DEBUG oslo_vmware.api [None req-ae5168ae-dd58-4ce3-bb92-e8f68886bb1b tempest-ImagesOneServerTestJSON-859902932 tempest-ImagesOneServerTestJSON-859902932-project-member] Task: {'id': task-2086925, 'name': Destroy_Task, 'duration_secs': 1.736256} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1884.130837] env[63279]: INFO nova.virt.vmwareapi.vm_util [None req-ae5168ae-dd58-4ce3-bb92-e8f68886bb1b tempest-ImagesOneServerTestJSON-859902932 tempest-ImagesOneServerTestJSON-859902932-project-member] [instance: 7cd673bb-3795-4c6f-a1e0-a8ea0da0a35f] Destroyed the VM [ 1884.131103] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-ae5168ae-dd58-4ce3-bb92-e8f68886bb1b tempest-ImagesOneServerTestJSON-859902932 tempest-ImagesOneServerTestJSON-859902932-project-member] [instance: 7cd673bb-3795-4c6f-a1e0-a8ea0da0a35f] Deleting Snapshot of the VM instance {{(pid=63279) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1884.131610] env[63279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1884.131610] env[63279]: value = "task-2086933" [ 1884.131610] env[63279]: _type = "Task" [ 1884.131610] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1884.131798] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-bffbf7db-d952-48c5-8910-509d185aa7e3 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1884.144439] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2086933, 'name': CreateVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1884.147534] env[63279]: DEBUG oslo_vmware.api [None req-ae5168ae-dd58-4ce3-bb92-e8f68886bb1b tempest-ImagesOneServerTestJSON-859902932 tempest-ImagesOneServerTestJSON-859902932-project-member] Waiting for the task: (returnval){ [ 1884.147534] env[63279]: value = "task-2086934" [ 1884.147534] env[63279]: _type = "Task" [ 1884.147534] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1884.155839] env[63279]: DEBUG oslo_vmware.api [None req-ae5168ae-dd58-4ce3-bb92-e8f68886bb1b tempest-ImagesOneServerTestJSON-859902932 tempest-ImagesOneServerTestJSON-859902932-project-member] Task: {'id': task-2086934, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1884.377739] env[63279]: DEBUG oslo_vmware.api [None req-3e128472-436c-4de9-a273-dfc54e42830f tempest-ServersV294TestFqdnHostnames-110312040 tempest-ServersV294TestFqdnHostnames-110312040-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]5299e88d-7dfd-287d-6279-a1b37c2d18af, 'name': SearchDatastore_Task, 'duration_secs': 0.010694} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1884.377847] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ff5f2cf2-80b2-4dc1-9370-da4f8e182ff2 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1884.383604] env[63279]: DEBUG oslo_vmware.api [None req-3e128472-436c-4de9-a273-dfc54e42830f tempest-ServersV294TestFqdnHostnames-110312040 tempest-ServersV294TestFqdnHostnames-110312040-project-member] Waiting for the task: (returnval){ [ 1884.383604] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]525544b2-e6f8-cf06-bb21-0a2bb463321d" [ 1884.383604] env[63279]: _type = "Task" [ 1884.383604] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1884.393055] env[63279]: DEBUG oslo_vmware.api [None req-3e128472-436c-4de9-a273-dfc54e42830f tempest-ServersV294TestFqdnHostnames-110312040 tempest-ServersV294TestFqdnHostnames-110312040-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]525544b2-e6f8-cf06-bb21-0a2bb463321d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1884.553041] env[63279]: DEBUG oslo_vmware.api [None req-4eb26c5f-1d6c-44a0-9365-feefd9e1d5a1 tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Task: {'id': task-2086930, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1884.558835] env[63279]: DEBUG oslo_concurrency.lockutils [None req-bd33fd20-d1db-4c92-86ab-9afd74102e3e tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Lock "c05e9aaa-e251-480c-94d6-56c29bb6282d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 36.779s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1884.646237] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2086933, 'name': CreateVM_Task} progress is 99%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1884.657080] env[63279]: DEBUG oslo_vmware.api [None req-ae5168ae-dd58-4ce3-bb92-e8f68886bb1b tempest-ImagesOneServerTestJSON-859902932 tempest-ImagesOneServerTestJSON-859902932-project-member] Task: {'id': task-2086934, 'name': RemoveSnapshot_Task} progress is 12%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1884.895308] env[63279]: DEBUG oslo_vmware.api [None req-3e128472-436c-4de9-a273-dfc54e42830f tempest-ServersV294TestFqdnHostnames-110312040 tempest-ServersV294TestFqdnHostnames-110312040-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]525544b2-e6f8-cf06-bb21-0a2bb463321d, 'name': SearchDatastore_Task, 'duration_secs': 0.010182} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1884.895614] env[63279]: DEBUG oslo_concurrency.lockutils [None req-3e128472-436c-4de9-a273-dfc54e42830f tempest-ServersV294TestFqdnHostnames-110312040 tempest-ServersV294TestFqdnHostnames-110312040-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1884.896657] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-3e128472-436c-4de9-a273-dfc54e42830f tempest-ServersV294TestFqdnHostnames-110312040 tempest-ServersV294TestFqdnHostnames-110312040-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] 32dbef6d-d314-4fa6-972a-e7b1f22eb11d/32dbef6d-d314-4fa6-972a-e7b1f22eb11d.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1884.896974] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-edba1ece-3e13-429c-a2be-f3d98e69ac12 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1884.904329] env[63279]: DEBUG oslo_vmware.api [None req-3e128472-436c-4de9-a273-dfc54e42830f tempest-ServersV294TestFqdnHostnames-110312040 tempest-ServersV294TestFqdnHostnames-110312040-project-member] Waiting for the task: (returnval){ [ 1884.904329] env[63279]: value = "task-2086935" [ 1884.904329] env[63279]: _type = "Task" [ 1884.904329] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1884.913320] env[63279]: DEBUG oslo_vmware.api [None req-3e128472-436c-4de9-a273-dfc54e42830f tempest-ServersV294TestFqdnHostnames-110312040 tempest-ServersV294TestFqdnHostnames-110312040-project-member] Task: {'id': task-2086935, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1885.064188] env[63279]: DEBUG oslo_vmware.api [None req-4eb26c5f-1d6c-44a0-9365-feefd9e1d5a1 tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Task: {'id': task-2086930, 'name': PowerOnVM_Task, 'duration_secs': 1.212402} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1885.067080] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-4eb26c5f-1d6c-44a0-9365-feefd9e1d5a1 tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] [instance: ff2f355a-9687-4491-b243-6133e4b7b866] Powered on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1885.067337] env[63279]: INFO nova.compute.manager [None req-4eb26c5f-1d6c-44a0-9365-feefd9e1d5a1 tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] [instance: ff2f355a-9687-4491-b243-6133e4b7b866] Took 9.96 seconds to spawn the instance on the hypervisor. [ 1885.067675] env[63279]: DEBUG nova.compute.manager [None req-4eb26c5f-1d6c-44a0-9365-feefd9e1d5a1 tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] [instance: ff2f355a-9687-4491-b243-6133e4b7b866] Checking state {{(pid=63279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1885.072708] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9335eb55-6b54-4971-8516-206e001bde83 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1885.151453] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2086933, 'name': CreateVM_Task, 'duration_secs': 0.558072} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1885.154678] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2f5e22f6-ba70-4848-965b-eb1553115323] Created VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1885.157715] env[63279]: DEBUG oslo_concurrency.lockutils [None req-90efc3b8-a656-4eb0-8ac7-af93c9b6bce9 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1885.157849] env[63279]: DEBUG oslo_concurrency.lockutils [None req-90efc3b8-a656-4eb0-8ac7-af93c9b6bce9 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1885.158177] env[63279]: DEBUG oslo_concurrency.lockutils [None req-90efc3b8-a656-4eb0-8ac7-af93c9b6bce9 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1885.158940] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e0eb6784-c26b-4afa-8c74-91d3b089d72d {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1885.164214] env[63279]: DEBUG oslo_vmware.api [None req-ae5168ae-dd58-4ce3-bb92-e8f68886bb1b tempest-ImagesOneServerTestJSON-859902932 
tempest-ImagesOneServerTestJSON-859902932-project-member] Task: {'id': task-2086934, 'name': RemoveSnapshot_Task, 'duration_secs': 0.87423} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1885.164771] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-ae5168ae-dd58-4ce3-bb92-e8f68886bb1b tempest-ImagesOneServerTestJSON-859902932 tempest-ImagesOneServerTestJSON-859902932-project-member] [instance: 7cd673bb-3795-4c6f-a1e0-a8ea0da0a35f] Deleted Snapshot of the VM instance {{(pid=63279) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1885.165038] env[63279]: INFO nova.compute.manager [None req-ae5168ae-dd58-4ce3-bb92-e8f68886bb1b tempest-ImagesOneServerTestJSON-859902932 tempest-ImagesOneServerTestJSON-859902932-project-member] [instance: 7cd673bb-3795-4c6f-a1e0-a8ea0da0a35f] Took 17.37 seconds to snapshot the instance on the hypervisor. [ 1885.168936] env[63279]: DEBUG oslo_vmware.api [None req-90efc3b8-a656-4eb0-8ac7-af93c9b6bce9 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Waiting for the task: (returnval){ [ 1885.168936] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]522efce8-90cc-36c3-d30d-7b529fd604fb" [ 1885.168936] env[63279]: _type = "Task" [ 1885.168936] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1885.177128] env[63279]: DEBUG oslo_vmware.api [None req-90efc3b8-a656-4eb0-8ac7-af93c9b6bce9 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]522efce8-90cc-36c3-d30d-7b529fd604fb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1885.207368] env[63279]: DEBUG nova.network.neutron [req-103a5a47-90d3-4269-82e1-00ddbd52a162 req-c4953e80-6556-41bf-9f23-c3f658c16744 service nova] [instance: 2f5e22f6-ba70-4848-965b-eb1553115323] Updated VIF entry in instance network info cache for port 8383ed07-21e7-43e0-82a2-0afc179ca995. 
{{(pid=63279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1885.207368] env[63279]: DEBUG nova.network.neutron [req-103a5a47-90d3-4269-82e1-00ddbd52a162 req-c4953e80-6556-41bf-9f23-c3f658c16744 service nova] [instance: 2f5e22f6-ba70-4848-965b-eb1553115323] Updating instance_info_cache with network_info: [{"id": "8383ed07-21e7-43e0-82a2-0afc179ca995", "address": "fa:16:3e:b6:12:ab", "network": {"id": "a7b83e75-3b16-41db-9395-90dead128e80", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-756195345-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0fd8bdc7d8664db698236f56d82adcf0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "53955f0e-c162-4cef-8bd5-335b369c36b6", "external-id": "nsx-vlan-transportzone-623", "segmentation_id": 623, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8383ed07-21", "ovs_interfaceid": "8383ed07-21e7-43e0-82a2-0afc179ca995", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1885.418661] env[63279]: DEBUG oslo_vmware.api [None req-3e128472-436c-4de9-a273-dfc54e42830f tempest-ServersV294TestFqdnHostnames-110312040 tempest-ServersV294TestFqdnHostnames-110312040-project-member] Task: {'id': task-2086935, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1885.597335] env[63279]: INFO nova.compute.manager [None req-4eb26c5f-1d6c-44a0-9365-feefd9e1d5a1 tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] [instance: ff2f355a-9687-4491-b243-6133e4b7b866] Took 43.87 seconds to build instance. [ 1885.653230] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee739957-c03a-4d61-b156-407720a9efd9 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1885.659753] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc510a88-785e-479e-add7-b04f9532c0de {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1885.697241] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a479e34f-1d35-4f7d-b29d-c9a1713856a7 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1885.708500] env[63279]: DEBUG oslo_vmware.api [None req-90efc3b8-a656-4eb0-8ac7-af93c9b6bce9 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]522efce8-90cc-36c3-d30d-7b529fd604fb, 'name': SearchDatastore_Task, 'duration_secs': 0.024178} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1885.710985] env[63279]: DEBUG oslo_concurrency.lockutils [None req-90efc3b8-a656-4eb0-8ac7-af93c9b6bce9 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1885.711267] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-90efc3b8-a656-4eb0-8ac7-af93c9b6bce9 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] [instance: 2f5e22f6-ba70-4848-965b-eb1553115323] Processing image 30887889-e45b-4f67-8b3c-16216e594a90 {{(pid=63279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1885.712379] env[63279]: DEBUG oslo_concurrency.lockutils [None req-90efc3b8-a656-4eb0-8ac7-af93c9b6bce9 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1885.712379] env[63279]: DEBUG oslo_concurrency.lockutils [None req-90efc3b8-a656-4eb0-8ac7-af93c9b6bce9 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1885.712379] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-90efc3b8-a656-4eb0-8ac7-af93c9b6bce9 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1885.712379] env[63279]: DEBUG oslo_concurrency.lockutils [req-103a5a47-90d3-4269-82e1-00ddbd52a162 req-c4953e80-6556-41bf-9f23-c3f658c16744 service nova] Releasing lock "refresh_cache-2f5e22f6-ba70-4848-965b-eb1553115323" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1885.712667] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-703a27fb-8ea2-4604-adae-875f3581ec86 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1885.715577] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a0f0268-e1b2-4a03-9753-1c7c7588ced2 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1885.733866] env[63279]: DEBUG nova.compute.provider_tree [None req-018b6b0c-2b29-423d-bb8d-c1cfe503a363 tempest-InstanceActionsNegativeTestJSON-35594143 tempest-InstanceActionsNegativeTestJSON-35594143-project-member] Inventory has not changed in ProviderTree for provider: 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1885.740452] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-90efc3b8-a656-4eb0-8ac7-af93c9b6bce9 
tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1885.740452] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-90efc3b8-a656-4eb0-8ac7-af93c9b6bce9 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1885.740452] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-66811185-01be-4dc1-804f-8298db583545 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1885.745640] env[63279]: DEBUG oslo_vmware.api [None req-90efc3b8-a656-4eb0-8ac7-af93c9b6bce9 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Waiting for the task: (returnval){ [ 1885.745640] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52e8bcdc-3dbf-c717-6c0d-31ee2a2950a1" [ 1885.745640] env[63279]: _type = "Task" [ 1885.745640] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1885.754303] env[63279]: DEBUG oslo_vmware.api [None req-90efc3b8-a656-4eb0-8ac7-af93c9b6bce9 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52e8bcdc-3dbf-c717-6c0d-31ee2a2950a1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1885.916138] env[63279]: DEBUG oslo_vmware.api [None req-3e128472-436c-4de9-a273-dfc54e42830f tempest-ServersV294TestFqdnHostnames-110312040 tempest-ServersV294TestFqdnHostnames-110312040-project-member] Task: {'id': task-2086935, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.629369} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1885.916440] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-3e128472-436c-4de9-a273-dfc54e42830f tempest-ServersV294TestFqdnHostnames-110312040 tempest-ServersV294TestFqdnHostnames-110312040-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] 32dbef6d-d314-4fa6-972a-e7b1f22eb11d/32dbef6d-d314-4fa6-972a-e7b1f22eb11d.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1885.916656] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-3e128472-436c-4de9-a273-dfc54e42830f tempest-ServersV294TestFqdnHostnames-110312040 tempest-ServersV294TestFqdnHostnames-110312040-project-member] [instance: 32dbef6d-d314-4fa6-972a-e7b1f22eb11d] Extending root virtual disk to 1048576 {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1885.916906] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f6e7fb9e-625a-4bfd-8a14-a467bee6f93a {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1885.924280] env[63279]: DEBUG oslo_vmware.api [None req-3e128472-436c-4de9-a273-dfc54e42830f tempest-ServersV294TestFqdnHostnames-110312040 tempest-ServersV294TestFqdnHostnames-110312040-project-member] Waiting for the task: (returnval){ [ 1885.924280] env[63279]: value = "task-2086936" [ 1885.924280] env[63279]: _type = "Task" [ 1885.924280] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1885.932817] env[63279]: DEBUG oslo_vmware.api [None req-3e128472-436c-4de9-a273-dfc54e42830f tempest-ServersV294TestFqdnHostnames-110312040 tempest-ServersV294TestFqdnHostnames-110312040-project-member] Task: {'id': task-2086936, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1886.101542] env[63279]: DEBUG oslo_concurrency.lockutils [None req-4eb26c5f-1d6c-44a0-9365-feefd9e1d5a1 tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Lock "ff2f355a-9687-4491-b243-6133e4b7b866" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 59.696s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1886.242195] env[63279]: DEBUG nova.scheduler.client.report [None req-018b6b0c-2b29-423d-bb8d-c1cfe503a363 tempest-InstanceActionsNegativeTestJSON-35594143 tempest-InstanceActionsNegativeTestJSON-35594143-project-member] Inventory has not changed for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1886.258274] env[63279]: DEBUG oslo_vmware.api [None req-90efc3b8-a656-4eb0-8ac7-af93c9b6bce9 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52e8bcdc-3dbf-c717-6c0d-31ee2a2950a1, 'name': SearchDatastore_Task, 'duration_secs': 0.058787} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1886.259113] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5204d6c8-d556-4ac6-b60c-1c2b86bfffb2 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1886.264517] env[63279]: DEBUG oslo_vmware.api [None req-90efc3b8-a656-4eb0-8ac7-af93c9b6bce9 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Waiting for the task: (returnval){ [ 1886.264517] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]529f89f2-48a9-1866-67e6-71f92dd42662" [ 1886.264517] env[63279]: _type = "Task" [ 1886.264517] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1886.276355] env[63279]: DEBUG oslo_vmware.api [None req-90efc3b8-a656-4eb0-8ac7-af93c9b6bce9 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]529f89f2-48a9-1866-67e6-71f92dd42662, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1886.434526] env[63279]: DEBUG oslo_vmware.api [None req-3e128472-436c-4de9-a273-dfc54e42830f tempest-ServersV294TestFqdnHostnames-110312040 tempest-ServersV294TestFqdnHostnames-110312040-project-member] Task: {'id': task-2086936, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.065255} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1886.434802] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-3e128472-436c-4de9-a273-dfc54e42830f tempest-ServersV294TestFqdnHostnames-110312040 tempest-ServersV294TestFqdnHostnames-110312040-project-member] [instance: 32dbef6d-d314-4fa6-972a-e7b1f22eb11d] Extended root virtual disk {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1886.435849] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19915578-8409-4290-90be-cf2590c243f0 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1886.462641] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-3e128472-436c-4de9-a273-dfc54e42830f tempest-ServersV294TestFqdnHostnames-110312040 tempest-ServersV294TestFqdnHostnames-110312040-project-member] [instance: 32dbef6d-d314-4fa6-972a-e7b1f22eb11d] Reconfiguring VM instance instance-00000019 to attach disk [datastore1] 32dbef6d-d314-4fa6-972a-e7b1f22eb11d/32dbef6d-d314-4fa6-972a-e7b1f22eb11d.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1886.462641] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ee06c8b7-f646-4a76-89d4-e80036812b11 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1886.482342] env[63279]: DEBUG oslo_vmware.api [None req-3e128472-436c-4de9-a273-dfc54e42830f tempest-ServersV294TestFqdnHostnames-110312040 tempest-ServersV294TestFqdnHostnames-110312040-project-member] Waiting for the task: (returnval){ [ 1886.482342] env[63279]: value = "task-2086937" [ 1886.482342] env[63279]: _type = "Task" [ 1886.482342] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1886.492380] env[63279]: DEBUG oslo_vmware.api [None req-3e128472-436c-4de9-a273-dfc54e42830f tempest-ServersV294TestFqdnHostnames-110312040 tempest-ServersV294TestFqdnHostnames-110312040-project-member] Task: {'id': task-2086937, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1886.608247] env[63279]: DEBUG nova.compute.manager [None req-e6033c0c-bf3a-4f54-ac44-9ee1b876d8b8 tempest-InstanceActionsTestJSON-1583999850 tempest-InstanceActionsTestJSON-1583999850-project-member] [instance: de543869-8ab1-40ed-8f6d-dc506c257843] Starting instance... {{(pid=63279) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1886.747961] env[63279]: DEBUG oslo_concurrency.lockutils [None req-018b6b0c-2b29-423d-bb8d-c1cfe503a363 tempest-InstanceActionsNegativeTestJSON-35594143 tempest-InstanceActionsNegativeTestJSON-35594143-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.735s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1886.749926] env[63279]: DEBUG nova.compute.manager [None req-018b6b0c-2b29-423d-bb8d-c1cfe503a363 tempest-InstanceActionsNegativeTestJSON-35594143 tempest-InstanceActionsNegativeTestJSON-35594143-project-member] [instance: a8107fa5-9e8b-41dd-9679-8e106a3496a5] Start building networks asynchronously for instance. 
{{(pid=63279) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1886.752074] env[63279]: DEBUG oslo_concurrency.lockutils [None req-57ab2a25-ce86-4ce7-837f-a1aef411ae6f tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 32.275s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1886.752251] env[63279]: DEBUG nova.objects.instance [None req-57ab2a25-ce86-4ce7-837f-a1aef411ae6f tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Lazy-loading 'resources' on Instance uuid 3d4db43f-5784-46e1-9710-f6becec011e2 {{(pid=63279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1886.779995] env[63279]: DEBUG oslo_vmware.api [None req-90efc3b8-a656-4eb0-8ac7-af93c9b6bce9 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]529f89f2-48a9-1866-67e6-71f92dd42662, 'name': SearchDatastore_Task, 'duration_secs': 0.013595} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1886.785086] env[63279]: DEBUG oslo_concurrency.lockutils [None req-90efc3b8-a656-4eb0-8ac7-af93c9b6bce9 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1886.785086] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-90efc3b8-a656-4eb0-8ac7-af93c9b6bce9 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] 2f5e22f6-ba70-4848-965b-eb1553115323/2f5e22f6-ba70-4848-965b-eb1553115323.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1886.785086] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-594d9c73-d7d0-4224-8864-89afefd111a5 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1886.787824] env[63279]: DEBUG oslo_concurrency.lockutils [None req-5b1a09f8-d41f-4fc7-9c4b-8c6edcf3ec86 tempest-ImagesOneServerTestJSON-859902932 tempest-ImagesOneServerTestJSON-859902932-project-member] Acquiring lock "7cd673bb-3795-4c6f-a1e0-a8ea0da0a35f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1886.788102] env[63279]: DEBUG oslo_concurrency.lockutils [None req-5b1a09f8-d41f-4fc7-9c4b-8c6edcf3ec86 tempest-ImagesOneServerTestJSON-859902932 tempest-ImagesOneServerTestJSON-859902932-project-member] Lock "7cd673bb-3795-4c6f-a1e0-a8ea0da0a35f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1886.788330] env[63279]: DEBUG 
oslo_concurrency.lockutils [None req-5b1a09f8-d41f-4fc7-9c4b-8c6edcf3ec86 tempest-ImagesOneServerTestJSON-859902932 tempest-ImagesOneServerTestJSON-859902932-project-member] Acquiring lock "7cd673bb-3795-4c6f-a1e0-a8ea0da0a35f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1886.788523] env[63279]: DEBUG oslo_concurrency.lockutils [None req-5b1a09f8-d41f-4fc7-9c4b-8c6edcf3ec86 tempest-ImagesOneServerTestJSON-859902932 tempest-ImagesOneServerTestJSON-859902932-project-member] Lock "7cd673bb-3795-4c6f-a1e0-a8ea0da0a35f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1886.789298] env[63279]: DEBUG oslo_concurrency.lockutils [None req-5b1a09f8-d41f-4fc7-9c4b-8c6edcf3ec86 tempest-ImagesOneServerTestJSON-859902932 tempest-ImagesOneServerTestJSON-859902932-project-member] Lock "7cd673bb-3795-4c6f-a1e0-a8ea0da0a35f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1886.792422] env[63279]: DEBUG oslo_vmware.api [None req-90efc3b8-a656-4eb0-8ac7-af93c9b6bce9 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Waiting for the task: (returnval){ [ 1886.792422] env[63279]: value = "task-2086938" [ 1886.792422] env[63279]: _type = "Task" [ 1886.792422] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1886.793360] env[63279]: INFO nova.compute.manager [None req-5b1a09f8-d41f-4fc7-9c4b-8c6edcf3ec86 tempest-ImagesOneServerTestJSON-859902932 tempest-ImagesOneServerTestJSON-859902932-project-member] [instance: 7cd673bb-3795-4c6f-a1e0-a8ea0da0a35f] Terminating instance [ 1886.808506] env[63279]: DEBUG oslo_vmware.api [None req-90efc3b8-a656-4eb0-8ac7-af93c9b6bce9 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Task: {'id': task-2086938, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1886.996018] env[63279]: DEBUG oslo_vmware.api [None req-3e128472-436c-4de9-a273-dfc54e42830f tempest-ServersV294TestFqdnHostnames-110312040 tempest-ServersV294TestFqdnHostnames-110312040-project-member] Task: {'id': task-2086937, 'name': ReconfigVM_Task, 'duration_secs': 0.437822} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1886.996018] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-3e128472-436c-4de9-a273-dfc54e42830f tempest-ServersV294TestFqdnHostnames-110312040 tempest-ServersV294TestFqdnHostnames-110312040-project-member] [instance: 32dbef6d-d314-4fa6-972a-e7b1f22eb11d] Reconfigured VM instance instance-00000019 to attach disk [datastore1] 32dbef6d-d314-4fa6-972a-e7b1f22eb11d/32dbef6d-d314-4fa6-972a-e7b1f22eb11d.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1886.996018] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-7c8409ca-17ad-49d7-913f-314e6fd6688c {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1887.001190] env[63279]: DEBUG oslo_vmware.api [None req-3e128472-436c-4de9-a273-dfc54e42830f tempest-ServersV294TestFqdnHostnames-110312040 tempest-ServersV294TestFqdnHostnames-110312040-project-member] Waiting for the task: (returnval){ [ 1887.001190] env[63279]: value = "task-2086939" [ 1887.001190] env[63279]: _type = "Task" [ 1887.001190] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1887.009759] env[63279]: DEBUG oslo_vmware.api [None req-3e128472-436c-4de9-a273-dfc54e42830f tempest-ServersV294TestFqdnHostnames-110312040 tempest-ServersV294TestFqdnHostnames-110312040-project-member] Task: {'id': task-2086939, 'name': Rename_Task} progress is 5%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1887.134957] env[63279]: DEBUG oslo_concurrency.lockutils [None req-e6033c0c-bf3a-4f54-ac44-9ee1b876d8b8 tempest-InstanceActionsTestJSON-1583999850 tempest-InstanceActionsTestJSON-1583999850-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1887.256034] env[63279]: DEBUG nova.compute.utils [None req-018b6b0c-2b29-423d-bb8d-c1cfe503a363 tempest-InstanceActionsNegativeTestJSON-35594143 tempest-InstanceActionsNegativeTestJSON-35594143-project-member] Using /dev/sd instead of None {{(pid=63279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1887.257564] env[63279]: DEBUG nova.compute.manager [None req-018b6b0c-2b29-423d-bb8d-c1cfe503a363 tempest-InstanceActionsNegativeTestJSON-35594143 tempest-InstanceActionsNegativeTestJSON-35594143-project-member] [instance: a8107fa5-9e8b-41dd-9679-8e106a3496a5] Allocating IP information in the background. {{(pid=63279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1887.257776] env[63279]: DEBUG nova.network.neutron [None req-018b6b0c-2b29-423d-bb8d-c1cfe503a363 tempest-InstanceActionsNegativeTestJSON-35594143 tempest-InstanceActionsNegativeTestJSON-35594143-project-member] [instance: a8107fa5-9e8b-41dd-9679-8e106a3496a5] allocate_for_instance() {{(pid=63279) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1887.307060] env[63279]: DEBUG nova.compute.manager [None req-5b1a09f8-d41f-4fc7-9c4b-8c6edcf3ec86 tempest-ImagesOneServerTestJSON-859902932 tempest-ImagesOneServerTestJSON-859902932-project-member] [instance: 7cd673bb-3795-4c6f-a1e0-a8ea0da0a35f] Start destroying the instance on the hypervisor. 
{{(pid=63279) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1887.307060] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-5b1a09f8-d41f-4fc7-9c4b-8c6edcf3ec86 tempest-ImagesOneServerTestJSON-859902932 tempest-ImagesOneServerTestJSON-859902932-project-member] [instance: 7cd673bb-3795-4c6f-a1e0-a8ea0da0a35f] Destroying instance {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1887.309326] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2cdc099-c41d-4023-8d51-13d10a9d1837 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1887.323229] env[63279]: DEBUG oslo_vmware.api [None req-90efc3b8-a656-4eb0-8ac7-af93c9b6bce9 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Task: {'id': task-2086938, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1887.328758] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-5b1a09f8-d41f-4fc7-9c4b-8c6edcf3ec86 tempest-ImagesOneServerTestJSON-859902932 tempest-ImagesOneServerTestJSON-859902932-project-member] [instance: 7cd673bb-3795-4c6f-a1e0-a8ea0da0a35f] Powering off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1887.329070] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-46a54ce8-eb1a-47e9-aea5-b6ac677503a3 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1887.339610] env[63279]: DEBUG oslo_vmware.api [None req-5b1a09f8-d41f-4fc7-9c4b-8c6edcf3ec86 tempest-ImagesOneServerTestJSON-859902932 tempest-ImagesOneServerTestJSON-859902932-project-member] Waiting for the task: (returnval){ [ 1887.339610] env[63279]: value = "task-2086940" [ 1887.339610] env[63279]: _type = "Task" [ 1887.339610] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1887.341194] env[63279]: DEBUG nova.policy [None req-018b6b0c-2b29-423d-bb8d-c1cfe503a363 tempest-InstanceActionsNegativeTestJSON-35594143 tempest-InstanceActionsNegativeTestJSON-35594143-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b100bacb3d1a4c76a486ce4b994917b3', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'bef567ba43944c65bc5564e099d883c3', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63279) authorize /opt/stack/nova/nova/policy.py:192}} [ 1887.355264] env[63279]: DEBUG oslo_vmware.api [None req-5b1a09f8-d41f-4fc7-9c4b-8c6edcf3ec86 tempest-ImagesOneServerTestJSON-859902932 tempest-ImagesOneServerTestJSON-859902932-project-member] Task: {'id': task-2086940, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1887.512925] env[63279]: DEBUG oslo_vmware.api [None req-3e128472-436c-4de9-a273-dfc54e42830f tempest-ServersV294TestFqdnHostnames-110312040 tempest-ServersV294TestFqdnHostnames-110312040-project-member] Task: {'id': task-2086939, 'name': Rename_Task, 'duration_secs': 0.287848} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1887.513383] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-3e128472-436c-4de9-a273-dfc54e42830f tempest-ServersV294TestFqdnHostnames-110312040 tempest-ServersV294TestFqdnHostnames-110312040-project-member] [instance: 32dbef6d-d314-4fa6-972a-e7b1f22eb11d] Powering on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1887.513802] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b9fbc055-bceb-4acf-9c8d-a54ed7cb696d {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1887.524036] env[63279]: DEBUG oslo_vmware.api [None req-3e128472-436c-4de9-a273-dfc54e42830f tempest-ServersV294TestFqdnHostnames-110312040 tempest-ServersV294TestFqdnHostnames-110312040-project-member] Waiting for the task: (returnval){ [ 1887.524036] env[63279]: value = "task-2086941" [ 1887.524036] env[63279]: _type = "Task" [ 1887.524036] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1887.536869] env[63279]: DEBUG oslo_vmware.api [None req-3e128472-436c-4de9-a273-dfc54e42830f tempest-ServersV294TestFqdnHostnames-110312040 tempest-ServersV294TestFqdnHostnames-110312040-project-member] Task: {'id': task-2086941, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1887.766720] env[63279]: DEBUG nova.compute.manager [None req-018b6b0c-2b29-423d-bb8d-c1cfe503a363 tempest-InstanceActionsNegativeTestJSON-35594143 tempest-InstanceActionsNegativeTestJSON-35594143-project-member] [instance: a8107fa5-9e8b-41dd-9679-8e106a3496a5] Start building block device mappings for instance. {{(pid=63279) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1887.787530] env[63279]: DEBUG nova.network.neutron [None req-018b6b0c-2b29-423d-bb8d-c1cfe503a363 tempest-InstanceActionsNegativeTestJSON-35594143 tempest-InstanceActionsNegativeTestJSON-35594143-project-member] [instance: a8107fa5-9e8b-41dd-9679-8e106a3496a5] Successfully created port: 6516d120-aae5-4b20-b18c-9d1fcfbb6f13 {{(pid=63279) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1887.806190] env[63279]: DEBUG oslo_vmware.api [None req-90efc3b8-a656-4eb0-8ac7-af93c9b6bce9 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Task: {'id': task-2086938, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.781667} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1887.806546] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-90efc3b8-a656-4eb0-8ac7-af93c9b6bce9 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] 2f5e22f6-ba70-4848-965b-eb1553115323/2f5e22f6-ba70-4848-965b-eb1553115323.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1887.806759] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-90efc3b8-a656-4eb0-8ac7-af93c9b6bce9 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] [instance: 2f5e22f6-ba70-4848-965b-eb1553115323] Extending root virtual disk to 1048576 {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1887.810564] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-77cf0b28-077f-4556-bd4b-228a09c0d8a8 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1887.821030] env[63279]: DEBUG oslo_vmware.api [None req-90efc3b8-a656-4eb0-8ac7-af93c9b6bce9 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Waiting for the task: (returnval){ [ 1887.821030] env[63279]: value = "task-2086942" [ 1887.821030] env[63279]: _type = "Task" [ 1887.821030] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1887.829616] env[63279]: DEBUG oslo_vmware.api [None req-90efc3b8-a656-4eb0-8ac7-af93c9b6bce9 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Task: {'id': task-2086942, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1887.858285] env[63279]: DEBUG oslo_vmware.api [None req-5b1a09f8-d41f-4fc7-9c4b-8c6edcf3ec86 tempest-ImagesOneServerTestJSON-859902932 tempest-ImagesOneServerTestJSON-859902932-project-member] Task: {'id': task-2086940, 'name': PowerOffVM_Task, 'duration_secs': 0.437329} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1887.858983] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-5b1a09f8-d41f-4fc7-9c4b-8c6edcf3ec86 tempest-ImagesOneServerTestJSON-859902932 tempest-ImagesOneServerTestJSON-859902932-project-member] [instance: 7cd673bb-3795-4c6f-a1e0-a8ea0da0a35f] Powered off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1887.858983] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-5b1a09f8-d41f-4fc7-9c4b-8c6edcf3ec86 tempest-ImagesOneServerTestJSON-859902932 tempest-ImagesOneServerTestJSON-859902932-project-member] [instance: 7cd673bb-3795-4c6f-a1e0-a8ea0da0a35f] Unregistering the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1887.859306] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-753798ca-3710-476c-9079-44842e8a824c {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1887.953248] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e88b56c3-01f6-4561-b913-97a1f96b8448 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1887.961982] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b5e7ae4-5ad9-4448-836b-02454c632cb3 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1887.996243] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-789ada5e-8780-4abb-bf84-43b6f1798cdd {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1888.004956] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-516ad3c0-a7d1-4b33-94dd-3c431d1a8cb2 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1888.021715] env[63279]: DEBUG nova.compute.provider_tree [None req-57ab2a25-ce86-4ce7-837f-a1aef411ae6f tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Updating inventory in ProviderTree for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1888.031670] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-5b1a09f8-d41f-4fc7-9c4b-8c6edcf3ec86 tempest-ImagesOneServerTestJSON-859902932 tempest-ImagesOneServerTestJSON-859902932-project-member] [instance: 7cd673bb-3795-4c6f-a1e0-a8ea0da0a35f] Unregistered the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1888.032241] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-5b1a09f8-d41f-4fc7-9c4b-8c6edcf3ec86 tempest-ImagesOneServerTestJSON-859902932 tempest-ImagesOneServerTestJSON-859902932-project-member] [instance: 7cd673bb-3795-4c6f-a1e0-a8ea0da0a35f] Deleting contents of the 
VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1888.032241] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-5b1a09f8-d41f-4fc7-9c4b-8c6edcf3ec86 tempest-ImagesOneServerTestJSON-859902932 tempest-ImagesOneServerTestJSON-859902932-project-member] Deleting the datastore file [datastore1] 7cd673bb-3795-4c6f-a1e0-a8ea0da0a35f {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1888.032418] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ce49ab43-fdf4-4fad-b773-14ed98fdfd77 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1888.039649] env[63279]: DEBUG oslo_vmware.api [None req-3e128472-436c-4de9-a273-dfc54e42830f tempest-ServersV294TestFqdnHostnames-110312040 tempest-ServersV294TestFqdnHostnames-110312040-project-member] Task: {'id': task-2086941, 'name': PowerOnVM_Task} progress is 100%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1888.041042] env[63279]: DEBUG oslo_vmware.api [None req-5b1a09f8-d41f-4fc7-9c4b-8c6edcf3ec86 tempest-ImagesOneServerTestJSON-859902932 tempest-ImagesOneServerTestJSON-859902932-project-member] Waiting for the task: (returnval){ [ 1888.041042] env[63279]: value = "task-2086944" [ 1888.041042] env[63279]: _type = "Task" [ 1888.041042] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1888.050462] env[63279]: DEBUG oslo_vmware.api [None req-5b1a09f8-d41f-4fc7-9c4b-8c6edcf3ec86 tempest-ImagesOneServerTestJSON-859902932 tempest-ImagesOneServerTestJSON-859902932-project-member] Task: {'id': task-2086944, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1888.329291] env[63279]: DEBUG oslo_vmware.api [None req-90efc3b8-a656-4eb0-8ac7-af93c9b6bce9 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Task: {'id': task-2086942, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.071221} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1888.329597] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-90efc3b8-a656-4eb0-8ac7-af93c9b6bce9 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] [instance: 2f5e22f6-ba70-4848-965b-eb1553115323] Extended root virtual disk {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1888.330385] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0699e87-4e1d-41f2-a4ae-df27b747b1e6 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1888.353777] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-90efc3b8-a656-4eb0-8ac7-af93c9b6bce9 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] [instance: 2f5e22f6-ba70-4848-965b-eb1553115323] Reconfiguring VM instance instance-0000001a to attach disk [datastore1] 2f5e22f6-ba70-4848-965b-eb1553115323/2f5e22f6-ba70-4848-965b-eb1553115323.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1888.354380] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b14356e2-c03f-4d21-957f-ac69a4a8e340 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1888.374444] env[63279]: DEBUG oslo_vmware.api [None req-90efc3b8-a656-4eb0-8ac7-af93c9b6bce9 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Waiting for the task: (returnval){ [ 1888.374444] env[63279]: value = "task-2086945" [ 1888.374444] env[63279]: _type = "Task" [ 1888.374444] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1888.382598] env[63279]: DEBUG oslo_vmware.api [None req-90efc3b8-a656-4eb0-8ac7-af93c9b6bce9 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Task: {'id': task-2086945, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1888.535131] env[63279]: DEBUG oslo_vmware.api [None req-3e128472-436c-4de9-a273-dfc54e42830f tempest-ServersV294TestFqdnHostnames-110312040 tempest-ServersV294TestFqdnHostnames-110312040-project-member] Task: {'id': task-2086941, 'name': PowerOnVM_Task, 'duration_secs': 0.529291} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1888.535418] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-3e128472-436c-4de9-a273-dfc54e42830f tempest-ServersV294TestFqdnHostnames-110312040 tempest-ServersV294TestFqdnHostnames-110312040-project-member] [instance: 32dbef6d-d314-4fa6-972a-e7b1f22eb11d] Powered on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1888.535804] env[63279]: INFO nova.compute.manager [None req-3e128472-436c-4de9-a273-dfc54e42830f tempest-ServersV294TestFqdnHostnames-110312040 tempest-ServersV294TestFqdnHostnames-110312040-project-member] [instance: 32dbef6d-d314-4fa6-972a-e7b1f22eb11d] Took 9.31 seconds to spawn the instance on the hypervisor. 
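The CopyVirtualDisk_Task, ExtendVirtualDisk_Task, ReconfigVM_Task and PowerOnVM_Task entries above all follow the same poll-until-complete loop that the wait_for_task/_poll_task frames record: submit a long-running vCenter task, then poll it, logging "progress is N%" until it reports success or error. A minimal, stdlib-only sketch of that pattern follows; the TaskInfo type and fetch_task_info() stub are hypothetical stand-ins for the vCenter query, not the oslo.vmware API itself.

# Sketch of the poll-until-complete pattern reflected in the Task/progress
# log entries above. TaskInfo and fetch_task_info() are hypothetical.
import time
from dataclasses import dataclass

@dataclass
class TaskInfo:
    state: str            # "running", "success" or "error"
    progress: int         # 0-100, as in the "progress is N%" entries
    error: str | None = None

def fetch_task_info(task_id: str) -> TaskInfo:
    """Hypothetical stand-in for querying the hypervisor about a task."""
    raise NotImplementedError

def wait_for_task(task_id: str, poll_interval: float = 0.5, timeout: float = 300.0) -> None:
    """Poll a long-running task until it succeeds, fails or times out."""
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        info = fetch_task_info(task_id)
        if info.state == "success":
            return
        if info.state == "error":
            raise RuntimeError(f"task {task_id} failed: {info.error}")
        # Still running: report progress and poll again, like the
        # "Task: {'id': task-..., ...} progress is N%" entries above.
        print(f"task {task_id} progress is {info.progress}%")
        time.sleep(poll_interval)
    raise TimeoutError(f"task {task_id} did not complete within {timeout}s")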
[ 1888.535804] env[63279]: DEBUG nova.compute.manager [None req-3e128472-436c-4de9-a273-dfc54e42830f tempest-ServersV294TestFqdnHostnames-110312040 tempest-ServersV294TestFqdnHostnames-110312040-project-member] [instance: 32dbef6d-d314-4fa6-972a-e7b1f22eb11d] Checking state {{(pid=63279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1888.536599] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65d8789b-2e70-4ff5-a315-df02d2b67a93 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1888.555542] env[63279]: ERROR nova.scheduler.client.report [None req-57ab2a25-ce86-4ce7-837f-a1aef411ae6f tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] [req-37b15392-7995-412c-b964-61022355d7bc] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 0ba7c625-a0fc-4d3c-b804-196d00f00137. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-37b15392-7995-412c-b964-61022355d7bc"}]} [ 1888.565096] env[63279]: DEBUG oslo_vmware.api [None req-5b1a09f8-d41f-4fc7-9c4b-8c6edcf3ec86 tempest-ImagesOneServerTestJSON-859902932 tempest-ImagesOneServerTestJSON-859902932-project-member] Task: {'id': task-2086944, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1888.578041] env[63279]: DEBUG nova.scheduler.client.report [None req-57ab2a25-ce86-4ce7-837f-a1aef411ae6f tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Refreshing inventories for resource provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1888.593282] env[63279]: DEBUG nova.scheduler.client.report [None req-57ab2a25-ce86-4ce7-837f-a1aef411ae6f tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Updating ProviderTree inventory for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1888.593516] env[63279]: DEBUG nova.compute.provider_tree [None req-57ab2a25-ce86-4ce7-837f-a1aef411ae6f tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Updating inventory in ProviderTree for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1888.605987] env[63279]: DEBUG nova.scheduler.client.report [None req-57ab2a25-ce86-4ce7-837f-a1aef411ae6f tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Refreshing aggregate associations for resource provider 0ba7c625-a0fc-4d3c-b804-196d00f00137, aggregates: None {{(pid=63279) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1888.633570] env[63279]: DEBUG nova.scheduler.client.report [None req-57ab2a25-ce86-4ce7-837f-a1aef411ae6f tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Refreshing trait associations for resource provider 0ba7c625-a0fc-4d3c-b804-196d00f00137, traits: COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK {{(pid=63279) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1888.773892] env[63279]: DEBUG nova.compute.manager [None req-018b6b0c-2b29-423d-bb8d-c1cfe503a363 tempest-InstanceActionsNegativeTestJSON-35594143 tempest-InstanceActionsNegativeTestJSON-35594143-project-member] [instance: a8107fa5-9e8b-41dd-9679-8e106a3496a5] Start spawning the instance on the hypervisor. 
{{(pid=63279) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1888.796352] env[63279]: DEBUG nova.virt.hardware [None req-018b6b0c-2b29-423d-bb8d-c1cfe503a363 tempest-InstanceActionsNegativeTestJSON-35594143 tempest-InstanceActionsNegativeTestJSON-35594143-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-13T17:30:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-13T17:30:01Z,direct_url=,disk_format='vmdk',id=30887889-e45b-4f67-8b3c-16216e594a90,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a935e86eee0a4c38adfe0367d2097a61',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-13T17:30:02Z,virtual_size=,visibility=), allow threads: False {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1888.796623] env[63279]: DEBUG nova.virt.hardware [None req-018b6b0c-2b29-423d-bb8d-c1cfe503a363 tempest-InstanceActionsNegativeTestJSON-35594143 tempest-InstanceActionsNegativeTestJSON-35594143-project-member] Flavor limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1888.796779] env[63279]: DEBUG nova.virt.hardware [None req-018b6b0c-2b29-423d-bb8d-c1cfe503a363 tempest-InstanceActionsNegativeTestJSON-35594143 tempest-InstanceActionsNegativeTestJSON-35594143-project-member] Image limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1888.796968] env[63279]: DEBUG nova.virt.hardware [None req-018b6b0c-2b29-423d-bb8d-c1cfe503a363 tempest-InstanceActionsNegativeTestJSON-35594143 tempest-InstanceActionsNegativeTestJSON-35594143-project-member] Flavor pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1888.797164] env[63279]: DEBUG nova.virt.hardware [None req-018b6b0c-2b29-423d-bb8d-c1cfe503a363 tempest-InstanceActionsNegativeTestJSON-35594143 tempest-InstanceActionsNegativeTestJSON-35594143-project-member] Image pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1888.797340] env[63279]: DEBUG nova.virt.hardware [None req-018b6b0c-2b29-423d-bb8d-c1cfe503a363 tempest-InstanceActionsNegativeTestJSON-35594143 tempest-InstanceActionsNegativeTestJSON-35594143-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1888.797596] env[63279]: DEBUG nova.virt.hardware [None req-018b6b0c-2b29-423d-bb8d-c1cfe503a363 tempest-InstanceActionsNegativeTestJSON-35594143 tempest-InstanceActionsNegativeTestJSON-35594143-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1888.797781] env[63279]: DEBUG nova.virt.hardware [None req-018b6b0c-2b29-423d-bb8d-c1cfe503a363 tempest-InstanceActionsNegativeTestJSON-35594143 tempest-InstanceActionsNegativeTestJSON-35594143-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63279) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1888.797953] env[63279]: DEBUG nova.virt.hardware [None req-018b6b0c-2b29-423d-bb8d-c1cfe503a363 tempest-InstanceActionsNegativeTestJSON-35594143 tempest-InstanceActionsNegativeTestJSON-35594143-project-member] Got 1 possible topologies {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1888.798153] env[63279]: DEBUG nova.virt.hardware [None req-018b6b0c-2b29-423d-bb8d-c1cfe503a363 tempest-InstanceActionsNegativeTestJSON-35594143 tempest-InstanceActionsNegativeTestJSON-35594143-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1888.798376] env[63279]: DEBUG nova.virt.hardware [None req-018b6b0c-2b29-423d-bb8d-c1cfe503a363 tempest-InstanceActionsNegativeTestJSON-35594143 tempest-InstanceActionsNegativeTestJSON-35594143-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1888.799265] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b5e1d7e-3a3c-488f-966a-9674c4d0de28 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1888.807537] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7c7e25b-2167-4735-bef5-3e1b2ca342e7 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1888.883753] env[63279]: DEBUG oslo_vmware.api [None req-90efc3b8-a656-4eb0-8ac7-af93c9b6bce9 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Task: {'id': task-2086945, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1889.054042] env[63279]: DEBUG oslo_vmware.api [None req-5b1a09f8-d41f-4fc7-9c4b-8c6edcf3ec86 tempest-ImagesOneServerTestJSON-859902932 tempest-ImagesOneServerTestJSON-859902932-project-member] Task: {'id': task-2086944, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.6222} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1889.054341] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-5b1a09f8-d41f-4fc7-9c4b-8c6edcf3ec86 tempest-ImagesOneServerTestJSON-859902932 tempest-ImagesOneServerTestJSON-859902932-project-member] Deleted the datastore file {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1889.054528] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-5b1a09f8-d41f-4fc7-9c4b-8c6edcf3ec86 tempest-ImagesOneServerTestJSON-859902932 tempest-ImagesOneServerTestJSON-859902932-project-member] [instance: 7cd673bb-3795-4c6f-a1e0-a8ea0da0a35f] Deleted contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1889.054711] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-5b1a09f8-d41f-4fc7-9c4b-8c6edcf3ec86 tempest-ImagesOneServerTestJSON-859902932 tempest-ImagesOneServerTestJSON-859902932-project-member] [instance: 7cd673bb-3795-4c6f-a1e0-a8ea0da0a35f] Instance destroyed {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1889.054886] env[63279]: INFO nova.compute.manager [None req-5b1a09f8-d41f-4fc7-9c4b-8c6edcf3ec86 tempest-ImagesOneServerTestJSON-859902932 tempest-ImagesOneServerTestJSON-859902932-project-member] [instance: 7cd673bb-3795-4c6f-a1e0-a8ea0da0a35f] Took 1.75 seconds to destroy the instance on the hypervisor. [ 1889.055150] env[63279]: DEBUG oslo.service.loopingcall [None req-5b1a09f8-d41f-4fc7-9c4b-8c6edcf3ec86 tempest-ImagesOneServerTestJSON-859902932 tempest-ImagesOneServerTestJSON-859902932-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1889.059343] env[63279]: DEBUG nova.compute.manager [-] [instance: 7cd673bb-3795-4c6f-a1e0-a8ea0da0a35f] Deallocating network for instance {{(pid=63279) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1889.059442] env[63279]: DEBUG nova.network.neutron [-] [instance: 7cd673bb-3795-4c6f-a1e0-a8ea0da0a35f] deallocate_for_instance() {{(pid=63279) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1889.064306] env[63279]: INFO nova.compute.manager [None req-3e128472-436c-4de9-a273-dfc54e42830f tempest-ServersV294TestFqdnHostnames-110312040 tempest-ServersV294TestFqdnHostnames-110312040-project-member] [instance: 32dbef6d-d314-4fa6-972a-e7b1f22eb11d] Took 43.45 seconds to build instance. 
[ 1889.222529] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da967f58-ee9b-45bb-b4e4-1d64ff02f674 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1889.232970] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4b14032-ad65-4556-8d0f-bb96cabdc463 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1889.274236] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b776e05e-1308-400a-9d40-7c5804aebde8 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1889.283238] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-574d0cba-2134-45d5-aa3c-d0074c4dd7ee {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1889.298996] env[63279]: DEBUG nova.compute.provider_tree [None req-57ab2a25-ce86-4ce7-837f-a1aef411ae6f tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Inventory has not changed in ProviderTree for provider: 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1889.336864] env[63279]: DEBUG oslo_concurrency.lockutils [None req-387411dd-d774-4ddb-8c67-e6827dd0c0cb tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Acquiring lock "eca98392-98be-405b-b799-463ef9ee3dc8" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1889.337174] env[63279]: DEBUG oslo_concurrency.lockutils [None req-387411dd-d774-4ddb-8c67-e6827dd0c0cb tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Lock "eca98392-98be-405b-b799-463ef9ee3dc8" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1889.384895] env[63279]: DEBUG oslo_vmware.api [None req-90efc3b8-a656-4eb0-8ac7-af93c9b6bce9 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Task: {'id': task-2086945, 'name': ReconfigVM_Task, 'duration_secs': 0.984787} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1889.385687] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-90efc3b8-a656-4eb0-8ac7-af93c9b6bce9 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] [instance: 2f5e22f6-ba70-4848-965b-eb1553115323] Reconfigured VM instance instance-0000001a to attach disk [datastore1] 2f5e22f6-ba70-4848-965b-eb1553115323/2f5e22f6-ba70-4848-965b-eb1553115323.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1889.386858] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b740668d-8a58-4589-9d29-91badc41edf3 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1889.394838] env[63279]: DEBUG oslo_vmware.api [None req-90efc3b8-a656-4eb0-8ac7-af93c9b6bce9 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Waiting for the task: (returnval){ [ 1889.394838] env[63279]: value = "task-2086946" [ 1889.394838] env[63279]: _type = "Task" [ 1889.394838] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1889.404466] env[63279]: DEBUG oslo_vmware.api [None req-90efc3b8-a656-4eb0-8ac7-af93c9b6bce9 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Task: {'id': task-2086946, 'name': Rename_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1889.566790] env[63279]: DEBUG oslo_concurrency.lockutils [None req-3e128472-436c-4de9-a273-dfc54e42830f tempest-ServersV294TestFqdnHostnames-110312040 tempest-ServersV294TestFqdnHostnames-110312040-project-member] Lock "32dbef6d-d314-4fa6-972a-e7b1f22eb11d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 61.700s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1889.736113] env[63279]: DEBUG nova.compute.manager [req-0d0151e3-3268-40c2-92c2-b6e73fc8656f req-80f173dc-bddf-4050-a425-d763b46dd31d service nova] [instance: a8107fa5-9e8b-41dd-9679-8e106a3496a5] Received event network-vif-plugged-6516d120-aae5-4b20-b18c-9d1fcfbb6f13 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 1889.736413] env[63279]: DEBUG oslo_concurrency.lockutils [req-0d0151e3-3268-40c2-92c2-b6e73fc8656f req-80f173dc-bddf-4050-a425-d763b46dd31d service nova] Acquiring lock "a8107fa5-9e8b-41dd-9679-8e106a3496a5-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1889.736644] env[63279]: DEBUG oslo_concurrency.lockutils [req-0d0151e3-3268-40c2-92c2-b6e73fc8656f req-80f173dc-bddf-4050-a425-d763b46dd31d service nova] Lock "a8107fa5-9e8b-41dd-9679-8e106a3496a5-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1889.736822] env[63279]: DEBUG oslo_concurrency.lockutils [req-0d0151e3-3268-40c2-92c2-b6e73fc8656f req-80f173dc-bddf-4050-a425-d763b46dd31d service nova] Lock 
"a8107fa5-9e8b-41dd-9679-8e106a3496a5-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1889.736996] env[63279]: DEBUG nova.compute.manager [req-0d0151e3-3268-40c2-92c2-b6e73fc8656f req-80f173dc-bddf-4050-a425-d763b46dd31d service nova] [instance: a8107fa5-9e8b-41dd-9679-8e106a3496a5] No waiting events found dispatching network-vif-plugged-6516d120-aae5-4b20-b18c-9d1fcfbb6f13 {{(pid=63279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1889.737195] env[63279]: WARNING nova.compute.manager [req-0d0151e3-3268-40c2-92c2-b6e73fc8656f req-80f173dc-bddf-4050-a425-d763b46dd31d service nova] [instance: a8107fa5-9e8b-41dd-9679-8e106a3496a5] Received unexpected event network-vif-plugged-6516d120-aae5-4b20-b18c-9d1fcfbb6f13 for instance with vm_state building and task_state spawning. [ 1889.801572] env[63279]: DEBUG nova.scheduler.client.report [None req-57ab2a25-ce86-4ce7-837f-a1aef411ae6f tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Inventory has not changed for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1889.904972] env[63279]: DEBUG oslo_vmware.api [None req-90efc3b8-a656-4eb0-8ac7-af93c9b6bce9 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Task: {'id': task-2086946, 'name': Rename_Task, 'duration_secs': 0.32794} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1889.904972] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-90efc3b8-a656-4eb0-8ac7-af93c9b6bce9 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] [instance: 2f5e22f6-ba70-4848-965b-eb1553115323] Powering on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1889.904972] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b79c890f-5163-43d2-b69f-e88980e1de99 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1889.913711] env[63279]: DEBUG oslo_vmware.api [None req-90efc3b8-a656-4eb0-8ac7-af93c9b6bce9 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Waiting for the task: (returnval){ [ 1889.913711] env[63279]: value = "task-2086947" [ 1889.913711] env[63279]: _type = "Task" [ 1889.913711] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1889.923149] env[63279]: DEBUG oslo_vmware.api [None req-90efc3b8-a656-4eb0-8ac7-af93c9b6bce9 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Task: {'id': task-2086947, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1890.072940] env[63279]: DEBUG nova.compute.manager [None req-0a937d49-2bb5-4c2d-b540-fcaea2c0fe97 tempest-DeleteServersAdminTestJSON-778262859 tempest-DeleteServersAdminTestJSON-778262859-project-member] [instance: 81103d53-99fe-4d1a-816f-7685c59c80ee] Starting instance... {{(pid=63279) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1890.170269] env[63279]: DEBUG nova.network.neutron [None req-018b6b0c-2b29-423d-bb8d-c1cfe503a363 tempest-InstanceActionsNegativeTestJSON-35594143 tempest-InstanceActionsNegativeTestJSON-35594143-project-member] [instance: a8107fa5-9e8b-41dd-9679-8e106a3496a5] Successfully updated port: 6516d120-aae5-4b20-b18c-9d1fcfbb6f13 {{(pid=63279) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1890.231152] env[63279]: DEBUG nova.network.neutron [-] [instance: 7cd673bb-3795-4c6f-a1e0-a8ea0da0a35f] Updating instance_info_cache with network_info: [] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1890.307260] env[63279]: DEBUG oslo_concurrency.lockutils [None req-57ab2a25-ce86-4ce7-837f-a1aef411ae6f tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 3.555s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1890.310251] env[63279]: DEBUG oslo_concurrency.lockutils [None req-517f7fbc-4234-4bef-826e-a666eac17f9f tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 30.279s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1890.311978] env[63279]: INFO nova.compute.claims [None req-517f7fbc-4234-4bef-826e-a666eac17f9f tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] [instance: f20e8eb2-847b-4988-abf9-ed5f9f65c25c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1890.337577] env[63279]: INFO nova.scheduler.client.report [None req-57ab2a25-ce86-4ce7-837f-a1aef411ae6f tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Deleted allocations for instance 3d4db43f-5784-46e1-9710-f6becec011e2 [ 1890.425414] env[63279]: DEBUG oslo_vmware.api [None req-90efc3b8-a656-4eb0-8ac7-af93c9b6bce9 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Task: {'id': task-2086947, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1890.595304] env[63279]: DEBUG oslo_concurrency.lockutils [None req-0a937d49-2bb5-4c2d-b540-fcaea2c0fe97 tempest-DeleteServersAdminTestJSON-778262859 tempest-DeleteServersAdminTestJSON-778262859-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1890.673056] env[63279]: DEBUG oslo_concurrency.lockutils [None req-018b6b0c-2b29-423d-bb8d-c1cfe503a363 tempest-InstanceActionsNegativeTestJSON-35594143 tempest-InstanceActionsNegativeTestJSON-35594143-project-member] Acquiring lock "refresh_cache-a8107fa5-9e8b-41dd-9679-8e106a3496a5" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1890.673056] env[63279]: DEBUG oslo_concurrency.lockutils [None req-018b6b0c-2b29-423d-bb8d-c1cfe503a363 tempest-InstanceActionsNegativeTestJSON-35594143 tempest-InstanceActionsNegativeTestJSON-35594143-project-member] Acquired lock "refresh_cache-a8107fa5-9e8b-41dd-9679-8e106a3496a5" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1890.673056] env[63279]: DEBUG nova.network.neutron [None req-018b6b0c-2b29-423d-bb8d-c1cfe503a363 tempest-InstanceActionsNegativeTestJSON-35594143 tempest-InstanceActionsNegativeTestJSON-35594143-project-member] [instance: a8107fa5-9e8b-41dd-9679-8e106a3496a5] Building network info cache for instance {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1890.733296] env[63279]: INFO nova.compute.manager [-] [instance: 7cd673bb-3795-4c6f-a1e0-a8ea0da0a35f] Took 1.67 seconds to deallocate network for instance. [ 1890.845636] env[63279]: DEBUG oslo_concurrency.lockutils [None req-57ab2a25-ce86-4ce7-837f-a1aef411ae6f tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Lock "3d4db43f-5784-46e1-9710-f6becec011e2" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 41.620s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1890.925777] env[63279]: DEBUG oslo_vmware.api [None req-90efc3b8-a656-4eb0-8ac7-af93c9b6bce9 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Task: {'id': task-2086947, 'name': PowerOnVM_Task, 'duration_secs': 0.782937} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1890.926068] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-90efc3b8-a656-4eb0-8ac7-af93c9b6bce9 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] [instance: 2f5e22f6-ba70-4848-965b-eb1553115323] Powered on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1890.926319] env[63279]: INFO nova.compute.manager [None req-90efc3b8-a656-4eb0-8ac7-af93c9b6bce9 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] [instance: 2f5e22f6-ba70-4848-965b-eb1553115323] Took 9.38 seconds to spawn the instance on the hypervisor. 
[ 1890.926483] env[63279]: DEBUG nova.compute.manager [None req-90efc3b8-a656-4eb0-8ac7-af93c9b6bce9 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] [instance: 2f5e22f6-ba70-4848-965b-eb1553115323] Checking state {{(pid=63279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1890.927241] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7d120d8-3968-4dd7-a72a-c31b08c3911a {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1891.207524] env[63279]: DEBUG nova.network.neutron [None req-018b6b0c-2b29-423d-bb8d-c1cfe503a363 tempest-InstanceActionsNegativeTestJSON-35594143 tempest-InstanceActionsNegativeTestJSON-35594143-project-member] [instance: a8107fa5-9e8b-41dd-9679-8e106a3496a5] Instance cache missing network info. {{(pid=63279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1891.240170] env[63279]: DEBUG oslo_concurrency.lockutils [None req-5b1a09f8-d41f-4fc7-9c4b-8c6edcf3ec86 tempest-ImagesOneServerTestJSON-859902932 tempest-ImagesOneServerTestJSON-859902932-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1891.372328] env[63279]: DEBUG nova.network.neutron [None req-018b6b0c-2b29-423d-bb8d-c1cfe503a363 tempest-InstanceActionsNegativeTestJSON-35594143 tempest-InstanceActionsNegativeTestJSON-35594143-project-member] [instance: a8107fa5-9e8b-41dd-9679-8e106a3496a5] Updating instance_info_cache with network_info: [{"id": "6516d120-aae5-4b20-b18c-9d1fcfbb6f13", "address": "fa:16:3e:f4:81:f1", "network": {"id": "ecb5710e-afd1-413e-b38d-cc34e5ebd28e", "bridge": "br-int", "label": "tempest-InstanceActionsNegativeTestJSON-1841189191-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bef567ba43944c65bc5564e099d883c3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9630cae2-7dd9-42b7-8b53-91ab254af243", "external-id": "nsx-vlan-transportzone-900", "segmentation_id": 900, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6516d120-aa", "ovs_interfaceid": "6516d120-aae5-4b20-b18c-9d1fcfbb6f13", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1891.445771] env[63279]: INFO nova.compute.manager [None req-90efc3b8-a656-4eb0-8ac7-af93c9b6bce9 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] [instance: 2f5e22f6-ba70-4848-965b-eb1553115323] Took 44.46 seconds to build instance. 
[ 1891.780838] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dffb7793-e07c-4410-9e73-773ff1c1eda2 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1891.787328] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3187f4de-cf49-42e4-8175-48ebbe9fcdec {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1891.820068] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-117a41c8-6c63-40e5-9de6-2bdd8d192e35 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1891.827756] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47c9c930-4367-4318-92bd-8450c30e530f {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1891.849420] env[63279]: DEBUG nova.compute.provider_tree [None req-517f7fbc-4234-4bef-826e-a666eac17f9f tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Inventory has not changed in ProviderTree for provider: 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1891.875831] env[63279]: DEBUG oslo_concurrency.lockutils [None req-018b6b0c-2b29-423d-bb8d-c1cfe503a363 tempest-InstanceActionsNegativeTestJSON-35594143 tempest-InstanceActionsNegativeTestJSON-35594143-project-member] Releasing lock "refresh_cache-a8107fa5-9e8b-41dd-9679-8e106a3496a5" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1891.879160] env[63279]: DEBUG nova.compute.manager [None req-018b6b0c-2b29-423d-bb8d-c1cfe503a363 tempest-InstanceActionsNegativeTestJSON-35594143 tempest-InstanceActionsNegativeTestJSON-35594143-project-member] [instance: a8107fa5-9e8b-41dd-9679-8e106a3496a5] Instance network_info: |[{"id": "6516d120-aae5-4b20-b18c-9d1fcfbb6f13", "address": "fa:16:3e:f4:81:f1", "network": {"id": "ecb5710e-afd1-413e-b38d-cc34e5ebd28e", "bridge": "br-int", "label": "tempest-InstanceActionsNegativeTestJSON-1841189191-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bef567ba43944c65bc5564e099d883c3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9630cae2-7dd9-42b7-8b53-91ab254af243", "external-id": "nsx-vlan-transportzone-900", "segmentation_id": 900, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6516d120-aa", "ovs_interfaceid": "6516d120-aae5-4b20-b18c-9d1fcfbb6f13", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1891.879455] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-018b6b0c-2b29-423d-bb8d-c1cfe503a363 
tempest-InstanceActionsNegativeTestJSON-35594143 tempest-InstanceActionsNegativeTestJSON-35594143-project-member] [instance: a8107fa5-9e8b-41dd-9679-8e106a3496a5] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f4:81:f1', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '9630cae2-7dd9-42b7-8b53-91ab254af243', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '6516d120-aae5-4b20-b18c-9d1fcfbb6f13', 'vif_model': 'vmxnet3'}] {{(pid=63279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1891.884405] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-018b6b0c-2b29-423d-bb8d-c1cfe503a363 tempest-InstanceActionsNegativeTestJSON-35594143 tempest-InstanceActionsNegativeTestJSON-35594143-project-member] Creating folder: Project (bef567ba43944c65bc5564e099d883c3). Parent ref: group-v427491. {{(pid=63279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1891.885014] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-96202bbc-31e6-4fba-8395-845796b2c3ba {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1891.901633] env[63279]: INFO nova.virt.vmwareapi.vm_util [None req-018b6b0c-2b29-423d-bb8d-c1cfe503a363 tempest-InstanceActionsNegativeTestJSON-35594143 tempest-InstanceActionsNegativeTestJSON-35594143-project-member] Created folder: Project (bef567ba43944c65bc5564e099d883c3) in parent group-v427491. [ 1891.901840] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-018b6b0c-2b29-423d-bb8d-c1cfe503a363 tempest-InstanceActionsNegativeTestJSON-35594143 tempest-InstanceActionsNegativeTestJSON-35594143-project-member] Creating folder: Instances. Parent ref: group-v427573. {{(pid=63279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1891.902097] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6152fc47-8ff9-4c9d-8efd-fda16db8c5e9 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1891.911605] env[63279]: INFO nova.virt.vmwareapi.vm_util [None req-018b6b0c-2b29-423d-bb8d-c1cfe503a363 tempest-InstanceActionsNegativeTestJSON-35594143 tempest-InstanceActionsNegativeTestJSON-35594143-project-member] Created folder: Instances in parent group-v427573. [ 1891.911757] env[63279]: DEBUG oslo.service.loopingcall [None req-018b6b0c-2b29-423d-bb8d-c1cfe503a363 tempest-InstanceActionsNegativeTestJSON-35594143 tempest-InstanceActionsNegativeTestJSON-35594143-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1891.912014] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a8107fa5-9e8b-41dd-9679-8e106a3496a5] Creating VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1891.912246] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a4697fa5-2a9f-4252-bc0e-633519ac7763 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1891.931378] env[63279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1891.931378] env[63279]: value = "task-2086950" [ 1891.931378] env[63279]: _type = "Task" [ 1891.931378] env[63279]: } to complete. 
{{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1891.945389] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2086950, 'name': CreateVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1891.948437] env[63279]: DEBUG oslo_concurrency.lockutils [None req-90efc3b8-a656-4eb0-8ac7-af93c9b6bce9 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Lock "2f5e22f6-ba70-4848-965b-eb1553115323" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 63.320s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1891.957822] env[63279]: DEBUG nova.compute.manager [req-9f24cdec-5342-4ee7-aa9d-912b3d259d1a req-023d3a72-3d74-43e8-8a38-1f4c1a3136ef service nova] [instance: 7cd673bb-3795-4c6f-a1e0-a8ea0da0a35f] Received event network-vif-deleted-dc489c36-2ef9-4f47-aadb-4f6503bb9d1b {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 1891.957822] env[63279]: DEBUG nova.compute.manager [req-9f24cdec-5342-4ee7-aa9d-912b3d259d1a req-023d3a72-3d74-43e8-8a38-1f4c1a3136ef service nova] [instance: a8107fa5-9e8b-41dd-9679-8e106a3496a5] Received event network-changed-6516d120-aae5-4b20-b18c-9d1fcfbb6f13 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 1891.957822] env[63279]: DEBUG nova.compute.manager [req-9f24cdec-5342-4ee7-aa9d-912b3d259d1a req-023d3a72-3d74-43e8-8a38-1f4c1a3136ef service nova] [instance: a8107fa5-9e8b-41dd-9679-8e106a3496a5] Refreshing instance network info cache due to event network-changed-6516d120-aae5-4b20-b18c-9d1fcfbb6f13. 
{{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11655}} [ 1891.958168] env[63279]: DEBUG oslo_concurrency.lockutils [req-9f24cdec-5342-4ee7-aa9d-912b3d259d1a req-023d3a72-3d74-43e8-8a38-1f4c1a3136ef service nova] Acquiring lock "refresh_cache-a8107fa5-9e8b-41dd-9679-8e106a3496a5" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1891.958168] env[63279]: DEBUG oslo_concurrency.lockutils [req-9f24cdec-5342-4ee7-aa9d-912b3d259d1a req-023d3a72-3d74-43e8-8a38-1f4c1a3136ef service nova] Acquired lock "refresh_cache-a8107fa5-9e8b-41dd-9679-8e106a3496a5" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1891.958168] env[63279]: DEBUG nova.network.neutron [req-9f24cdec-5342-4ee7-aa9d-912b3d259d1a req-023d3a72-3d74-43e8-8a38-1f4c1a3136ef service nova] [instance: a8107fa5-9e8b-41dd-9679-8e106a3496a5] Refreshing network info cache for port 6516d120-aae5-4b20-b18c-9d1fcfbb6f13 {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1892.352350] env[63279]: DEBUG nova.scheduler.client.report [None req-517f7fbc-4234-4bef-826e-a666eac17f9f tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Inventory has not changed for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1892.406805] env[63279]: DEBUG oslo_concurrency.lockutils [None req-ccac180c-f8b1-4fd5-a765-713ff5939c3d tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Acquiring lock "32789822-cb54-43e7-beae-b5ed3002f4ad" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1892.407061] env[63279]: DEBUG oslo_concurrency.lockutils [None req-ccac180c-f8b1-4fd5-a765-713ff5939c3d tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Lock "32789822-cb54-43e7-beae-b5ed3002f4ad" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1892.441373] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2086950, 'name': CreateVM_Task, 'duration_secs': 0.444918} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1892.441538] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a8107fa5-9e8b-41dd-9679-8e106a3496a5] Created VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1892.442178] env[63279]: DEBUG oslo_concurrency.lockutils [None req-018b6b0c-2b29-423d-bb8d-c1cfe503a363 tempest-InstanceActionsNegativeTestJSON-35594143 tempest-InstanceActionsNegativeTestJSON-35594143-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1892.442341] env[63279]: DEBUG oslo_concurrency.lockutils [None req-018b6b0c-2b29-423d-bb8d-c1cfe503a363 tempest-InstanceActionsNegativeTestJSON-35594143 tempest-InstanceActionsNegativeTestJSON-35594143-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1892.442648] env[63279]: DEBUG oslo_concurrency.lockutils [None req-018b6b0c-2b29-423d-bb8d-c1cfe503a363 tempest-InstanceActionsNegativeTestJSON-35594143 tempest-InstanceActionsNegativeTestJSON-35594143-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1892.442882] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-59e4d041-0c13-49c1-8692-2cff162cf27d {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1892.448105] env[63279]: DEBUG oslo_concurrency.lockutils [None req-2fef03c7-ebe2-4875-b420-6389c982d497 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Acquiring lock "1b2ca21b-feea-4fc1-9ddc-99f144e4241a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1892.448332] env[63279]: DEBUG oslo_concurrency.lockutils [None req-2fef03c7-ebe2-4875-b420-6389c982d497 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Lock "1b2ca21b-feea-4fc1-9ddc-99f144e4241a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1892.450603] env[63279]: DEBUG nova.compute.manager [None req-42b39ba1-997a-4f62-aa19-97a6fd24e99a tempest-ServerActionsV293TestJSON-360186716 tempest-ServerActionsV293TestJSON-360186716-project-member] [instance: f375b54b-f9de-4529-b752-52c240aed532] Starting instance... 
{{(pid=63279) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1892.453521] env[63279]: DEBUG oslo_vmware.api [None req-018b6b0c-2b29-423d-bb8d-c1cfe503a363 tempest-InstanceActionsNegativeTestJSON-35594143 tempest-InstanceActionsNegativeTestJSON-35594143-project-member] Waiting for the task: (returnval){ [ 1892.453521] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]5232b73f-0535-4fc7-5c5a-4777d5ad0f80" [ 1892.453521] env[63279]: _type = "Task" [ 1892.453521] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1892.463030] env[63279]: DEBUG oslo_vmware.api [None req-018b6b0c-2b29-423d-bb8d-c1cfe503a363 tempest-InstanceActionsNegativeTestJSON-35594143 tempest-InstanceActionsNegativeTestJSON-35594143-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]5232b73f-0535-4fc7-5c5a-4777d5ad0f80, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1892.470427] env[63279]: DEBUG oslo_concurrency.lockutils [None req-2fef03c7-ebe2-4875-b420-6389c982d497 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Acquiring lock "fb124cfa-24b4-4712-b8cc-c87df5d6231b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1892.470686] env[63279]: DEBUG oslo_concurrency.lockutils [None req-2fef03c7-ebe2-4875-b420-6389c982d497 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Lock "fb124cfa-24b4-4712-b8cc-c87df5d6231b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1892.694151] env[63279]: DEBUG nova.network.neutron [req-9f24cdec-5342-4ee7-aa9d-912b3d259d1a req-023d3a72-3d74-43e8-8a38-1f4c1a3136ef service nova] [instance: a8107fa5-9e8b-41dd-9679-8e106a3496a5] Updated VIF entry in instance network info cache for port 6516d120-aae5-4b20-b18c-9d1fcfbb6f13. 
{{(pid=63279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1892.694770] env[63279]: DEBUG nova.network.neutron [req-9f24cdec-5342-4ee7-aa9d-912b3d259d1a req-023d3a72-3d74-43e8-8a38-1f4c1a3136ef service nova] [instance: a8107fa5-9e8b-41dd-9679-8e106a3496a5] Updating instance_info_cache with network_info: [{"id": "6516d120-aae5-4b20-b18c-9d1fcfbb6f13", "address": "fa:16:3e:f4:81:f1", "network": {"id": "ecb5710e-afd1-413e-b38d-cc34e5ebd28e", "bridge": "br-int", "label": "tempest-InstanceActionsNegativeTestJSON-1841189191-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bef567ba43944c65bc5564e099d883c3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9630cae2-7dd9-42b7-8b53-91ab254af243", "external-id": "nsx-vlan-transportzone-900", "segmentation_id": 900, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6516d120-aa", "ovs_interfaceid": "6516d120-aae5-4b20-b18c-9d1fcfbb6f13", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1892.857681] env[63279]: DEBUG oslo_concurrency.lockutils [None req-517f7fbc-4234-4bef-826e-a666eac17f9f tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.548s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1892.858576] env[63279]: DEBUG nova.compute.manager [None req-517f7fbc-4234-4bef-826e-a666eac17f9f tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] [instance: f20e8eb2-847b-4988-abf9-ed5f9f65c25c] Start building networks asynchronously for instance. {{(pid=63279) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1892.861237] env[63279]: DEBUG oslo_concurrency.lockutils [None req-4432b190-b723-4c54-ac54-2b64412dd5dc tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 29.915s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1892.862971] env[63279]: INFO nova.compute.claims [None req-4432b190-b723-4c54-ac54-2b64412dd5dc tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] [instance: 5572bb1c-b378-4531-8459-10c2a2b7afdf] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1892.975312] env[63279]: DEBUG oslo_vmware.api [None req-018b6b0c-2b29-423d-bb8d-c1cfe503a363 tempest-InstanceActionsNegativeTestJSON-35594143 tempest-InstanceActionsNegativeTestJSON-35594143-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]5232b73f-0535-4fc7-5c5a-4777d5ad0f80, 'name': SearchDatastore_Task, 'duration_secs': 0.014149} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1892.975633] env[63279]: DEBUG oslo_concurrency.lockutils [None req-018b6b0c-2b29-423d-bb8d-c1cfe503a363 tempest-InstanceActionsNegativeTestJSON-35594143 tempest-InstanceActionsNegativeTestJSON-35594143-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1892.975950] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-018b6b0c-2b29-423d-bb8d-c1cfe503a363 tempest-InstanceActionsNegativeTestJSON-35594143 tempest-InstanceActionsNegativeTestJSON-35594143-project-member] [instance: a8107fa5-9e8b-41dd-9679-8e106a3496a5] Processing image 30887889-e45b-4f67-8b3c-16216e594a90 {{(pid=63279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1892.976371] env[63279]: DEBUG oslo_concurrency.lockutils [None req-018b6b0c-2b29-423d-bb8d-c1cfe503a363 tempest-InstanceActionsNegativeTestJSON-35594143 tempest-InstanceActionsNegativeTestJSON-35594143-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1892.976620] env[63279]: DEBUG oslo_concurrency.lockutils [None req-018b6b0c-2b29-423d-bb8d-c1cfe503a363 tempest-InstanceActionsNegativeTestJSON-35594143 tempest-InstanceActionsNegativeTestJSON-35594143-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1892.976918] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-018b6b0c-2b29-423d-bb8d-c1cfe503a363 tempest-InstanceActionsNegativeTestJSON-35594143 tempest-InstanceActionsNegativeTestJSON-35594143-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1892.977285] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c74fddec-8116-47f6-b388-95ff12f2353b {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1892.981070] env[63279]: DEBUG oslo_concurrency.lockutils [None req-42b39ba1-997a-4f62-aa19-97a6fd24e99a tempest-ServerActionsV293TestJSON-360186716 tempest-ServerActionsV293TestJSON-360186716-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1892.992220] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-018b6b0c-2b29-423d-bb8d-c1cfe503a363 tempest-InstanceActionsNegativeTestJSON-35594143 tempest-InstanceActionsNegativeTestJSON-35594143-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1892.992519] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-018b6b0c-2b29-423d-bb8d-c1cfe503a363 tempest-InstanceActionsNegativeTestJSON-35594143 tempest-InstanceActionsNegativeTestJSON-35594143-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1892.993689] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0fd87c0a-ff82-4435-96ef-da0e6ca7c68e {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1893.001143] env[63279]: DEBUG oslo_vmware.api [None req-018b6b0c-2b29-423d-bb8d-c1cfe503a363 tempest-InstanceActionsNegativeTestJSON-35594143 tempest-InstanceActionsNegativeTestJSON-35594143-project-member] Waiting for the task: (returnval){ [ 1893.001143] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52fecc11-fcb9-1f33-04f0-54ec462ac514" [ 1893.001143] env[63279]: _type = "Task" [ 1893.001143] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1893.012661] env[63279]: DEBUG oslo_vmware.api [None req-018b6b0c-2b29-423d-bb8d-c1cfe503a363 tempest-InstanceActionsNegativeTestJSON-35594143 tempest-InstanceActionsNegativeTestJSON-35594143-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52fecc11-fcb9-1f33-04f0-54ec462ac514, 'name': SearchDatastore_Task, 'duration_secs': 0.009272} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1893.013446] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a5ec9bdb-d2ba-4cbb-9252-16d7bfbcb545 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1893.018606] env[63279]: DEBUG oslo_vmware.api [None req-018b6b0c-2b29-423d-bb8d-c1cfe503a363 tempest-InstanceActionsNegativeTestJSON-35594143 tempest-InstanceActionsNegativeTestJSON-35594143-project-member] Waiting for the task: (returnval){ [ 1893.018606] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52db7d30-d96c-3d88-549e-10fed6a77118" [ 1893.018606] env[63279]: _type = "Task" [ 1893.018606] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1893.026556] env[63279]: DEBUG oslo_vmware.api [None req-018b6b0c-2b29-423d-bb8d-c1cfe503a363 tempest-InstanceActionsNegativeTestJSON-35594143 tempest-InstanceActionsNegativeTestJSON-35594143-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52db7d30-d96c-3d88-549e-10fed6a77118, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1893.197690] env[63279]: DEBUG oslo_concurrency.lockutils [req-9f24cdec-5342-4ee7-aa9d-912b3d259d1a req-023d3a72-3d74-43e8-8a38-1f4c1a3136ef service nova] Releasing lock "refresh_cache-a8107fa5-9e8b-41dd-9679-8e106a3496a5" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1893.197976] env[63279]: DEBUG nova.compute.manager [req-9f24cdec-5342-4ee7-aa9d-912b3d259d1a req-023d3a72-3d74-43e8-8a38-1f4c1a3136ef service nova] [instance: 32dbef6d-d314-4fa6-972a-e7b1f22eb11d] Received event network-changed-d88bfcf3-27a0-4cbc-b13c-12cb6a612415 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 1893.198163] env[63279]: DEBUG nova.compute.manager [req-9f24cdec-5342-4ee7-aa9d-912b3d259d1a req-023d3a72-3d74-43e8-8a38-1f4c1a3136ef service nova] [instance: 32dbef6d-d314-4fa6-972a-e7b1f22eb11d] Refreshing instance network info cache due to event network-changed-d88bfcf3-27a0-4cbc-b13c-12cb6a612415. {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11655}} [ 1893.198384] env[63279]: DEBUG oslo_concurrency.lockutils [req-9f24cdec-5342-4ee7-aa9d-912b3d259d1a req-023d3a72-3d74-43e8-8a38-1f4c1a3136ef service nova] Acquiring lock "refresh_cache-32dbef6d-d314-4fa6-972a-e7b1f22eb11d" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1893.198529] env[63279]: DEBUG oslo_concurrency.lockutils [req-9f24cdec-5342-4ee7-aa9d-912b3d259d1a req-023d3a72-3d74-43e8-8a38-1f4c1a3136ef service nova] Acquired lock "refresh_cache-32dbef6d-d314-4fa6-972a-e7b1f22eb11d" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1893.198690] env[63279]: DEBUG nova.network.neutron [req-9f24cdec-5342-4ee7-aa9d-912b3d259d1a req-023d3a72-3d74-43e8-8a38-1f4c1a3136ef service nova] [instance: 32dbef6d-d314-4fa6-972a-e7b1f22eb11d] Refreshing network info cache for port d88bfcf3-27a0-4cbc-b13c-12cb6a612415 {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1893.368183] env[63279]: DEBUG nova.compute.utils [None req-517f7fbc-4234-4bef-826e-a666eac17f9f tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Using /dev/sd instead of None {{(pid=63279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1893.371682] env[63279]: DEBUG nova.compute.manager [None req-517f7fbc-4234-4bef-826e-a666eac17f9f tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] [instance: f20e8eb2-847b-4988-abf9-ed5f9f65c25c] Allocating IP information in the background. 
{{(pid=63279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1893.371931] env[63279]: DEBUG nova.network.neutron [None req-517f7fbc-4234-4bef-826e-a666eac17f9f tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] [instance: f20e8eb2-847b-4988-abf9-ed5f9f65c25c] allocate_for_instance() {{(pid=63279) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1893.433638] env[63279]: DEBUG nova.policy [None req-517f7fbc-4234-4bef-826e-a666eac17f9f tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '92ab38bbf74d4cccb2637786a651ed5d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3ea345c4bc9b4781acb6f774ac88c690', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63279) authorize /opt/stack/nova/nova/policy.py:192}} [ 1893.533556] env[63279]: DEBUG oslo_vmware.api [None req-018b6b0c-2b29-423d-bb8d-c1cfe503a363 tempest-InstanceActionsNegativeTestJSON-35594143 tempest-InstanceActionsNegativeTestJSON-35594143-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52db7d30-d96c-3d88-549e-10fed6a77118, 'name': SearchDatastore_Task, 'duration_secs': 0.008026} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1893.533949] env[63279]: DEBUG oslo_concurrency.lockutils [None req-018b6b0c-2b29-423d-bb8d-c1cfe503a363 tempest-InstanceActionsNegativeTestJSON-35594143 tempest-InstanceActionsNegativeTestJSON-35594143-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1893.534353] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-018b6b0c-2b29-423d-bb8d-c1cfe503a363 tempest-InstanceActionsNegativeTestJSON-35594143 tempest-InstanceActionsNegativeTestJSON-35594143-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] a8107fa5-9e8b-41dd-9679-8e106a3496a5/a8107fa5-9e8b-41dd-9679-8e106a3496a5.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1893.534717] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-07d4294a-4d47-4d99-af5e-d6f20abcf3e9 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1893.543585] env[63279]: DEBUG oslo_vmware.api [None req-018b6b0c-2b29-423d-bb8d-c1cfe503a363 tempest-InstanceActionsNegativeTestJSON-35594143 tempest-InstanceActionsNegativeTestJSON-35594143-project-member] Waiting for the task: (returnval){ [ 1893.543585] env[63279]: value = "task-2086951" [ 1893.543585] env[63279]: _type = "Task" [ 1893.543585] env[63279]: } to complete. 
{{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1893.555278] env[63279]: DEBUG oslo_vmware.api [None req-018b6b0c-2b29-423d-bb8d-c1cfe503a363 tempest-InstanceActionsNegativeTestJSON-35594143 tempest-InstanceActionsNegativeTestJSON-35594143-project-member] Task: {'id': task-2086951, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1893.770213] env[63279]: DEBUG nova.network.neutron [None req-517f7fbc-4234-4bef-826e-a666eac17f9f tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] [instance: f20e8eb2-847b-4988-abf9-ed5f9f65c25c] Successfully created port: 180a3189-be6a-456b-a89c-d5ab98c3e461 {{(pid=63279) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1893.879206] env[63279]: DEBUG nova.compute.manager [None req-517f7fbc-4234-4bef-826e-a666eac17f9f tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] [instance: f20e8eb2-847b-4988-abf9-ed5f9f65c25c] Start building block device mappings for instance. {{(pid=63279) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1894.056413] env[63279]: DEBUG oslo_vmware.api [None req-018b6b0c-2b29-423d-bb8d-c1cfe503a363 tempest-InstanceActionsNegativeTestJSON-35594143 tempest-InstanceActionsNegativeTestJSON-35594143-project-member] Task: {'id': task-2086951, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1894.139503] env[63279]: DEBUG nova.network.neutron [req-9f24cdec-5342-4ee7-aa9d-912b3d259d1a req-023d3a72-3d74-43e8-8a38-1f4c1a3136ef service nova] [instance: 32dbef6d-d314-4fa6-972a-e7b1f22eb11d] Updated VIF entry in instance network info cache for port d88bfcf3-27a0-4cbc-b13c-12cb6a612415. 
{{(pid=63279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1894.139503] env[63279]: DEBUG nova.network.neutron [req-9f24cdec-5342-4ee7-aa9d-912b3d259d1a req-023d3a72-3d74-43e8-8a38-1f4c1a3136ef service nova] [instance: 32dbef6d-d314-4fa6-972a-e7b1f22eb11d] Updating instance_info_cache with network_info: [{"id": "d88bfcf3-27a0-4cbc-b13c-12cb6a612415", "address": "fa:16:3e:e7:de:d2", "network": {"id": "05023788-bf00-479a-85fa-7f5839b525d8", "bridge": "br-int", "label": "tempest-ServersV294TestFqdnHostnames-1571793632-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.222", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d496723f0a4e4e3aac1cc2895854d3a5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0d7a2b2f-3b49-4dc8-9096-af16144b27a9", "external-id": "nsx-vlan-transportzone-492", "segmentation_id": 492, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd88bfcf3-27", "ovs_interfaceid": "d88bfcf3-27a0-4cbc-b13c-12cb6a612415", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1894.507689] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0bfa6656-d708-42a2-8a8a-ae97a894f863 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1894.518332] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1995e6ec-6b4c-486a-a486-de9fda33acab {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1894.556625] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36a4847b-6f4c-4aac-8db4-31cfa3891940 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1894.565986] env[63279]: DEBUG oslo_vmware.api [None req-018b6b0c-2b29-423d-bb8d-c1cfe503a363 tempest-InstanceActionsNegativeTestJSON-35594143 tempest-InstanceActionsNegativeTestJSON-35594143-project-member] Task: {'id': task-2086951, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.79951} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1894.568070] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-018b6b0c-2b29-423d-bb8d-c1cfe503a363 tempest-InstanceActionsNegativeTestJSON-35594143 tempest-InstanceActionsNegativeTestJSON-35594143-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] a8107fa5-9e8b-41dd-9679-8e106a3496a5/a8107fa5-9e8b-41dd-9679-8e106a3496a5.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1894.568298] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-018b6b0c-2b29-423d-bb8d-c1cfe503a363 tempest-InstanceActionsNegativeTestJSON-35594143 tempest-InstanceActionsNegativeTestJSON-35594143-project-member] [instance: a8107fa5-9e8b-41dd-9679-8e106a3496a5] Extending root virtual disk to 1048576 {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1894.568580] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-61afd8c7-bcb5-41d6-8d81-75bdb24f1c28 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1894.571268] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ddd1386-cfd7-43ce-ae2f-fe1fcd709fc4 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1894.585156] env[63279]: DEBUG nova.compute.provider_tree [None req-4432b190-b723-4c54-ac54-2b64412dd5dc tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Inventory has not changed in ProviderTree for provider: 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1894.587677] env[63279]: DEBUG oslo_vmware.api [None req-018b6b0c-2b29-423d-bb8d-c1cfe503a363 tempest-InstanceActionsNegativeTestJSON-35594143 tempest-InstanceActionsNegativeTestJSON-35594143-project-member] Waiting for the task: (returnval){ [ 1894.587677] env[63279]: value = "task-2086952" [ 1894.587677] env[63279]: _type = "Task" [ 1894.587677] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1894.595234] env[63279]: DEBUG oslo_vmware.api [None req-018b6b0c-2b29-423d-bb8d-c1cfe503a363 tempest-InstanceActionsNegativeTestJSON-35594143 tempest-InstanceActionsNegativeTestJSON-35594143-project-member] Task: {'id': task-2086952, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1894.642587] env[63279]: DEBUG oslo_concurrency.lockutils [req-9f24cdec-5342-4ee7-aa9d-912b3d259d1a req-023d3a72-3d74-43e8-8a38-1f4c1a3136ef service nova] Releasing lock "refresh_cache-32dbef6d-d314-4fa6-972a-e7b1f22eb11d" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1894.891096] env[63279]: DEBUG nova.compute.manager [None req-517f7fbc-4234-4bef-826e-a666eac17f9f tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] [instance: f20e8eb2-847b-4988-abf9-ed5f9f65c25c] Start spawning the instance on the hypervisor. 
{{(pid=63279) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1894.915736] env[63279]: DEBUG nova.virt.hardware [None req-517f7fbc-4234-4bef-826e-a666eac17f9f tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-13T17:30:21Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-13T17:30:01Z,direct_url=,disk_format='vmdk',id=30887889-e45b-4f67-8b3c-16216e594a90,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a935e86eee0a4c38adfe0367d2097a61',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-13T17:30:02Z,virtual_size=,visibility=), allow threads: False {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1894.915990] env[63279]: DEBUG nova.virt.hardware [None req-517f7fbc-4234-4bef-826e-a666eac17f9f tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Flavor limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1894.916165] env[63279]: DEBUG nova.virt.hardware [None req-517f7fbc-4234-4bef-826e-a666eac17f9f tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Image limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1894.916378] env[63279]: DEBUG nova.virt.hardware [None req-517f7fbc-4234-4bef-826e-a666eac17f9f tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Flavor pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1894.916530] env[63279]: DEBUG nova.virt.hardware [None req-517f7fbc-4234-4bef-826e-a666eac17f9f tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Image pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1894.916685] env[63279]: DEBUG nova.virt.hardware [None req-517f7fbc-4234-4bef-826e-a666eac17f9f tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1894.916900] env[63279]: DEBUG nova.virt.hardware [None req-517f7fbc-4234-4bef-826e-a666eac17f9f tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1894.917075] env[63279]: DEBUG nova.virt.hardware [None req-517f7fbc-4234-4bef-826e-a666eac17f9f tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} 
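The nova.virt.hardware records above walk through CPU-topology selection for the m1.micro flavor: with no topology settings on the flavor or image (the 0:0:0 limits and preferences), the limits fall back to 65536 and the enumeration for a single vCPU yields exactly one candidate, 1 socket x 1 core x 1 thread. Below is a minimal, self-contained sketch of that enumeration step; it is an illustration only, not Nova's implementation, and the function name and loop bounds are assumptions.

# Illustrative stand-in for the topology enumeration logged above by
# nova.virt.hardware (_get_possible_cpu_topologies). The 65536 default limits
# are taken from the "limits were sockets=65536, cores=65536, threads=65536"
# record; everything else here is an assumption for readability.
from collections import namedtuple

VirtCPUTopology = namedtuple("VirtCPUTopology", "sockets cores threads")

def possible_cpu_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
    """Enumerate sockets*cores*threads factorizations that exactly cover vcpus."""
    topologies = []
    for sockets in range(1, min(vcpus, max_sockets) + 1):
        for cores in range(1, min(vcpus, max_cores) + 1):
            for threads in range(1, min(vcpus, max_threads) + 1):
                if sockets * cores * threads == vcpus:
                    topologies.append(VirtCPUTopology(sockets, cores, threads))
    return topologies

# For the 1-vCPU flavors in this run (m1.micro, m1.nano) this yields a single
# candidate, matching the "Got 1 possible topologies" records above:
# possible_cpu_topologies(1) -> [VirtCPUTopology(sockets=1, cores=1, threads=1)]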
[ 1894.917249] env[63279]: DEBUG nova.virt.hardware [None req-517f7fbc-4234-4bef-826e-a666eac17f9f tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Got 1 possible topologies {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1894.917453] env[63279]: DEBUG nova.virt.hardware [None req-517f7fbc-4234-4bef-826e-a666eac17f9f tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1894.917667] env[63279]: DEBUG nova.virt.hardware [None req-517f7fbc-4234-4bef-826e-a666eac17f9f tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1894.918813] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79553c1c-34e3-4d73-89a2-57e315d2a6bf {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1894.927363] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a20a1ce-4647-43e4-91d7-2f6129824873 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1895.089791] env[63279]: DEBUG nova.scheduler.client.report [None req-4432b190-b723-4c54-ac54-2b64412dd5dc tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Inventory has not changed for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1895.102127] env[63279]: DEBUG oslo_vmware.api [None req-018b6b0c-2b29-423d-bb8d-c1cfe503a363 tempest-InstanceActionsNegativeTestJSON-35594143 tempest-InstanceActionsNegativeTestJSON-35594143-project-member] Task: {'id': task-2086952, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.074257} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1895.102267] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-018b6b0c-2b29-423d-bb8d-c1cfe503a363 tempest-InstanceActionsNegativeTestJSON-35594143 tempest-InstanceActionsNegativeTestJSON-35594143-project-member] [instance: a8107fa5-9e8b-41dd-9679-8e106a3496a5] Extended root virtual disk {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1895.103130] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b6cb76b-ad2e-41ee-bce8-7af17b40ab1a {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1895.126818] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-018b6b0c-2b29-423d-bb8d-c1cfe503a363 tempest-InstanceActionsNegativeTestJSON-35594143 tempest-InstanceActionsNegativeTestJSON-35594143-project-member] [instance: a8107fa5-9e8b-41dd-9679-8e106a3496a5] Reconfiguring VM instance instance-0000001b to attach disk [datastore1] a8107fa5-9e8b-41dd-9679-8e106a3496a5/a8107fa5-9e8b-41dd-9679-8e106a3496a5.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1895.127306] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f85dc095-b133-4bb7-a5fa-ccb4e265c1ef {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1895.148016] env[63279]: DEBUG oslo_vmware.api [None req-018b6b0c-2b29-423d-bb8d-c1cfe503a363 tempest-InstanceActionsNegativeTestJSON-35594143 tempest-InstanceActionsNegativeTestJSON-35594143-project-member] Waiting for the task: (returnval){ [ 1895.148016] env[63279]: value = "task-2086953" [ 1895.148016] env[63279]: _type = "Task" [ 1895.148016] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1895.157795] env[63279]: DEBUG oslo_vmware.api [None req-018b6b0c-2b29-423d-bb8d-c1cfe503a363 tempest-InstanceActionsNegativeTestJSON-35594143 tempest-InstanceActionsNegativeTestJSON-35594143-project-member] Task: {'id': task-2086953, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1895.435912] env[63279]: DEBUG nova.compute.manager [req-3c239c2c-c5b6-4497-a3e4-e7fc96b56051 req-2cb92f98-bdd6-47b3-bf33-cbbce3a4fd92 service nova] [instance: f20e8eb2-847b-4988-abf9-ed5f9f65c25c] Received event network-vif-plugged-180a3189-be6a-456b-a89c-d5ab98c3e461 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 1895.436149] env[63279]: DEBUG oslo_concurrency.lockutils [req-3c239c2c-c5b6-4497-a3e4-e7fc96b56051 req-2cb92f98-bdd6-47b3-bf33-cbbce3a4fd92 service nova] Acquiring lock "f20e8eb2-847b-4988-abf9-ed5f9f65c25c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1895.436380] env[63279]: DEBUG oslo_concurrency.lockutils [req-3c239c2c-c5b6-4497-a3e4-e7fc96b56051 req-2cb92f98-bdd6-47b3-bf33-cbbce3a4fd92 service nova] Lock "f20e8eb2-847b-4988-abf9-ed5f9f65c25c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1895.436554] env[63279]: DEBUG oslo_concurrency.lockutils [req-3c239c2c-c5b6-4497-a3e4-e7fc96b56051 req-2cb92f98-bdd6-47b3-bf33-cbbce3a4fd92 service nova] Lock "f20e8eb2-847b-4988-abf9-ed5f9f65c25c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1895.436746] env[63279]: DEBUG nova.compute.manager [req-3c239c2c-c5b6-4497-a3e4-e7fc96b56051 req-2cb92f98-bdd6-47b3-bf33-cbbce3a4fd92 service nova] [instance: f20e8eb2-847b-4988-abf9-ed5f9f65c25c] No waiting events found dispatching network-vif-plugged-180a3189-be6a-456b-a89c-d5ab98c3e461 {{(pid=63279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1895.436870] env[63279]: WARNING nova.compute.manager [req-3c239c2c-c5b6-4497-a3e4-e7fc96b56051 req-2cb92f98-bdd6-47b3-bf33-cbbce3a4fd92 service nova] [instance: f20e8eb2-847b-4988-abf9-ed5f9f65c25c] Received unexpected event network-vif-plugged-180a3189-be6a-456b-a89c-d5ab98c3e461 for instance with vm_state building and task_state spawning. [ 1895.570930] env[63279]: DEBUG nova.network.neutron [None req-517f7fbc-4234-4bef-826e-a666eac17f9f tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] [instance: f20e8eb2-847b-4988-abf9-ed5f9f65c25c] Successfully updated port: 180a3189-be6a-456b-a89c-d5ab98c3e461 {{(pid=63279) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1895.597832] env[63279]: DEBUG oslo_concurrency.lockutils [None req-4432b190-b723-4c54-ac54-2b64412dd5dc tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.736s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1895.598370] env[63279]: DEBUG nova.compute.manager [None req-4432b190-b723-4c54-ac54-2b64412dd5dc tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] [instance: 5572bb1c-b378-4531-8459-10c2a2b7afdf] Start building networks asynchronously for instance. 
{{(pid=63279) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1895.606619] env[63279]: DEBUG oslo_concurrency.lockutils [None req-60555cc8-e311-4c21-b52c-fa487660eaaf tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 31.554s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1895.608775] env[63279]: INFO nova.compute.claims [None req-60555cc8-e311-4c21-b52c-fa487660eaaf tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] [instance: c1ac4af5-b01e-4175-844f-7a67b2ef7526] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1895.659388] env[63279]: DEBUG oslo_vmware.api [None req-018b6b0c-2b29-423d-bb8d-c1cfe503a363 tempest-InstanceActionsNegativeTestJSON-35594143 tempest-InstanceActionsNegativeTestJSON-35594143-project-member] Task: {'id': task-2086953, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1896.074462] env[63279]: DEBUG oslo_concurrency.lockutils [None req-517f7fbc-4234-4bef-826e-a666eac17f9f tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Acquiring lock "refresh_cache-f20e8eb2-847b-4988-abf9-ed5f9f65c25c" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1896.074462] env[63279]: DEBUG oslo_concurrency.lockutils [None req-517f7fbc-4234-4bef-826e-a666eac17f9f tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Acquired lock "refresh_cache-f20e8eb2-847b-4988-abf9-ed5f9f65c25c" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1896.074641] env[63279]: DEBUG nova.network.neutron [None req-517f7fbc-4234-4bef-826e-a666eac17f9f tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] [instance: f20e8eb2-847b-4988-abf9-ed5f9f65c25c] Building network info cache for instance {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1896.107955] env[63279]: DEBUG nova.compute.utils [None req-4432b190-b723-4c54-ac54-2b64412dd5dc tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Using /dev/sd instead of None {{(pid=63279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1896.109606] env[63279]: DEBUG nova.compute.manager [None req-4432b190-b723-4c54-ac54-2b64412dd5dc tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] [instance: 5572bb1c-b378-4531-8459-10c2a2b7afdf] Allocating IP information in the background. 
{{(pid=63279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1896.109841] env[63279]: DEBUG nova.network.neutron [None req-4432b190-b723-4c54-ac54-2b64412dd5dc tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] [instance: 5572bb1c-b378-4531-8459-10c2a2b7afdf] allocate_for_instance() {{(pid=63279) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1896.154540] env[63279]: DEBUG nova.policy [None req-4432b190-b723-4c54-ac54-2b64412dd5dc tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '17fbc0127a5944ac933232873f282980', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '77ee2145dda94e2b85eeb7379ed98e26', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63279) authorize /opt/stack/nova/nova/policy.py:192}} [ 1896.161950] env[63279]: DEBUG oslo_vmware.api [None req-018b6b0c-2b29-423d-bb8d-c1cfe503a363 tempest-InstanceActionsNegativeTestJSON-35594143 tempest-InstanceActionsNegativeTestJSON-35594143-project-member] Task: {'id': task-2086953, 'name': ReconfigVM_Task, 'duration_secs': 0.694337} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1896.161999] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-018b6b0c-2b29-423d-bb8d-c1cfe503a363 tempest-InstanceActionsNegativeTestJSON-35594143 tempest-InstanceActionsNegativeTestJSON-35594143-project-member] [instance: a8107fa5-9e8b-41dd-9679-8e106a3496a5] Reconfigured VM instance instance-0000001b to attach disk [datastore1] a8107fa5-9e8b-41dd-9679-8e106a3496a5/a8107fa5-9e8b-41dd-9679-8e106a3496a5.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1896.162673] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f24f1db6-30a8-416b-b1d2-b850dd7856e8 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1896.170225] env[63279]: DEBUG oslo_vmware.api [None req-018b6b0c-2b29-423d-bb8d-c1cfe503a363 tempest-InstanceActionsNegativeTestJSON-35594143 tempest-InstanceActionsNegativeTestJSON-35594143-project-member] Waiting for the task: (returnval){ [ 1896.170225] env[63279]: value = "task-2086954" [ 1896.170225] env[63279]: _type = "Task" [ 1896.170225] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1896.180483] env[63279]: DEBUG oslo_vmware.api [None req-018b6b0c-2b29-423d-bb8d-c1cfe503a363 tempest-InstanceActionsNegativeTestJSON-35594143 tempest-InstanceActionsNegativeTestJSON-35594143-project-member] Task: {'id': task-2086954, 'name': Rename_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1896.471727] env[63279]: DEBUG nova.network.neutron [None req-4432b190-b723-4c54-ac54-2b64412dd5dc tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] [instance: 5572bb1c-b378-4531-8459-10c2a2b7afdf] Successfully created port: 61aa2656-cef9-4e23-b276-4328c1aeab6b {{(pid=63279) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1896.613204] env[63279]: DEBUG nova.compute.manager [None req-4432b190-b723-4c54-ac54-2b64412dd5dc tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] [instance: 5572bb1c-b378-4531-8459-10c2a2b7afdf] Start building block device mappings for instance. {{(pid=63279) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1896.630585] env[63279]: DEBUG nova.network.neutron [None req-517f7fbc-4234-4bef-826e-a666eac17f9f tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] [instance: f20e8eb2-847b-4988-abf9-ed5f9f65c25c] Instance cache missing network info. {{(pid=63279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1896.686746] env[63279]: DEBUG oslo_vmware.api [None req-018b6b0c-2b29-423d-bb8d-c1cfe503a363 tempest-InstanceActionsNegativeTestJSON-35594143 tempest-InstanceActionsNegativeTestJSON-35594143-project-member] Task: {'id': task-2086954, 'name': Rename_Task, 'duration_secs': 0.138339} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1896.690269] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-018b6b0c-2b29-423d-bb8d-c1cfe503a363 tempest-InstanceActionsNegativeTestJSON-35594143 tempest-InstanceActionsNegativeTestJSON-35594143-project-member] [instance: a8107fa5-9e8b-41dd-9679-8e106a3496a5] Powering on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1896.692874] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-7133e077-9368-46e4-a7ca-f3273f334bd8 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1896.699558] env[63279]: DEBUG oslo_vmware.api [None req-018b6b0c-2b29-423d-bb8d-c1cfe503a363 tempest-InstanceActionsNegativeTestJSON-35594143 tempest-InstanceActionsNegativeTestJSON-35594143-project-member] Waiting for the task: (returnval){ [ 1896.699558] env[63279]: value = "task-2086955" [ 1896.699558] env[63279]: _type = "Task" [ 1896.699558] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1896.714351] env[63279]: DEBUG oslo_vmware.api [None req-018b6b0c-2b29-423d-bb8d-c1cfe503a363 tempest-InstanceActionsNegativeTestJSON-35594143 tempest-InstanceActionsNegativeTestJSON-35594143-project-member] Task: {'id': task-2086955, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1896.916753] env[63279]: DEBUG nova.network.neutron [None req-517f7fbc-4234-4bef-826e-a666eac17f9f tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] [instance: f20e8eb2-847b-4988-abf9-ed5f9f65c25c] Updating instance_info_cache with network_info: [{"id": "180a3189-be6a-456b-a89c-d5ab98c3e461", "address": "fa:16:3e:59:0b:d8", "network": {"id": "6b1ead98-699d-4ad0-bb54-2a4fa09faf6d", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-861811231-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3ea345c4bc9b4781acb6f774ac88c690", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7ab8d568-adb0-4f3b-b6cc-68413e6546ae", "external-id": "nsx-vlan-transportzone-86", "segmentation_id": 86, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap180a3189-be", "ovs_interfaceid": "180a3189-be6a-456b-a89c-d5ab98c3e461", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1897.178488] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f739a5a7-bf76-458d-9bd2-e11ace37da7d {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1897.185917] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f954f5e6-75ed-4b65-af3e-efcf999eff1c {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1897.219955] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9fa876e-807f-4f6b-a5ae-8cef73a70056 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1897.230170] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50b09732-2a1a-4b96-bc8d-4e8cb168c637 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1897.234023] env[63279]: DEBUG oslo_vmware.api [None req-018b6b0c-2b29-423d-bb8d-c1cfe503a363 tempest-InstanceActionsNegativeTestJSON-35594143 tempest-InstanceActionsNegativeTestJSON-35594143-project-member] Task: {'id': task-2086955, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1897.243713] env[63279]: DEBUG nova.compute.provider_tree [None req-60555cc8-e311-4c21-b52c-fa487660eaaf tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Inventory has not changed in ProviderTree for provider: 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1897.425224] env[63279]: DEBUG oslo_concurrency.lockutils [None req-517f7fbc-4234-4bef-826e-a666eac17f9f tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Releasing lock "refresh_cache-f20e8eb2-847b-4988-abf9-ed5f9f65c25c" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1897.425575] env[63279]: DEBUG nova.compute.manager [None req-517f7fbc-4234-4bef-826e-a666eac17f9f tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] [instance: f20e8eb2-847b-4988-abf9-ed5f9f65c25c] Instance network_info: |[{"id": "180a3189-be6a-456b-a89c-d5ab98c3e461", "address": "fa:16:3e:59:0b:d8", "network": {"id": "6b1ead98-699d-4ad0-bb54-2a4fa09faf6d", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-861811231-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3ea345c4bc9b4781acb6f774ac88c690", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7ab8d568-adb0-4f3b-b6cc-68413e6546ae", "external-id": "nsx-vlan-transportzone-86", "segmentation_id": 86, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap180a3189-be", "ovs_interfaceid": "180a3189-be6a-456b-a89c-d5ab98c3e461", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1897.426024] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-517f7fbc-4234-4bef-826e-a666eac17f9f tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] [instance: f20e8eb2-847b-4988-abf9-ed5f9f65c25c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:59:0b:d8', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '7ab8d568-adb0-4f3b-b6cc-68413e6546ae', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '180a3189-be6a-456b-a89c-d5ab98c3e461', 'vif_model': 'vmxnet3'}] {{(pid=63279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1897.433357] env[63279]: DEBUG oslo.service.loopingcall [None req-517f7fbc-4234-4bef-826e-a666eac17f9f tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1897.433568] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f20e8eb2-847b-4988-abf9-ed5f9f65c25c] Creating VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1897.433789] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8624402f-3ae8-4932-adb9-baea095d2092 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1897.454527] env[63279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1897.454527] env[63279]: value = "task-2086956" [ 1897.454527] env[63279]: _type = "Task" [ 1897.454527] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1897.460051] env[63279]: DEBUG nova.compute.manager [req-a7151979-8f5e-4ea0-8769-5defcacad70a req-8ae52ef5-b988-4f28-b64a-52018678e069 service nova] [instance: f20e8eb2-847b-4988-abf9-ed5f9f65c25c] Received event network-changed-180a3189-be6a-456b-a89c-d5ab98c3e461 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 1897.460051] env[63279]: DEBUG nova.compute.manager [req-a7151979-8f5e-4ea0-8769-5defcacad70a req-8ae52ef5-b988-4f28-b64a-52018678e069 service nova] [instance: f20e8eb2-847b-4988-abf9-ed5f9f65c25c] Refreshing instance network info cache due to event network-changed-180a3189-be6a-456b-a89c-d5ab98c3e461. {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11655}} [ 1897.460160] env[63279]: DEBUG oslo_concurrency.lockutils [req-a7151979-8f5e-4ea0-8769-5defcacad70a req-8ae52ef5-b988-4f28-b64a-52018678e069 service nova] Acquiring lock "refresh_cache-f20e8eb2-847b-4988-abf9-ed5f9f65c25c" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1897.460305] env[63279]: DEBUG oslo_concurrency.lockutils [req-a7151979-8f5e-4ea0-8769-5defcacad70a req-8ae52ef5-b988-4f28-b64a-52018678e069 service nova] Acquired lock "refresh_cache-f20e8eb2-847b-4988-abf9-ed5f9f65c25c" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1897.460465] env[63279]: DEBUG nova.network.neutron [req-a7151979-8f5e-4ea0-8769-5defcacad70a req-8ae52ef5-b988-4f28-b64a-52018678e069 service nova] [instance: f20e8eb2-847b-4988-abf9-ed5f9f65c25c] Refreshing network info cache for port 180a3189-be6a-456b-a89c-d5ab98c3e461 {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1897.466923] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2086956, 'name': CreateVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1897.631163] env[63279]: DEBUG nova.compute.manager [None req-4432b190-b723-4c54-ac54-2b64412dd5dc tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] [instance: 5572bb1c-b378-4531-8459-10c2a2b7afdf] Start spawning the instance on the hypervisor. 
{{(pid=63279) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1897.657386] env[63279]: DEBUG nova.virt.hardware [None req-4432b190-b723-4c54-ac54-2b64412dd5dc tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-13T17:30:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-13T17:30:01Z,direct_url=,disk_format='vmdk',id=30887889-e45b-4f67-8b3c-16216e594a90,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a935e86eee0a4c38adfe0367d2097a61',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-13T17:30:02Z,virtual_size=,visibility=), allow threads: False {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1897.657643] env[63279]: DEBUG nova.virt.hardware [None req-4432b190-b723-4c54-ac54-2b64412dd5dc tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Flavor limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1897.657816] env[63279]: DEBUG nova.virt.hardware [None req-4432b190-b723-4c54-ac54-2b64412dd5dc tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Image limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1897.658041] env[63279]: DEBUG nova.virt.hardware [None req-4432b190-b723-4c54-ac54-2b64412dd5dc tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Flavor pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1897.658220] env[63279]: DEBUG nova.virt.hardware [None req-4432b190-b723-4c54-ac54-2b64412dd5dc tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Image pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1897.658459] env[63279]: DEBUG nova.virt.hardware [None req-4432b190-b723-4c54-ac54-2b64412dd5dc tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1897.658717] env[63279]: DEBUG nova.virt.hardware [None req-4432b190-b723-4c54-ac54-2b64412dd5dc tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1897.658906] env[63279]: DEBUG nova.virt.hardware [None req-4432b190-b723-4c54-ac54-2b64412dd5dc tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1897.659119] env[63279]: DEBUG nova.virt.hardware [None req-4432b190-b723-4c54-ac54-2b64412dd5dc tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Got 1 
possible topologies {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1897.659315] env[63279]: DEBUG nova.virt.hardware [None req-4432b190-b723-4c54-ac54-2b64412dd5dc tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1897.659523] env[63279]: DEBUG nova.virt.hardware [None req-4432b190-b723-4c54-ac54-2b64412dd5dc tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1897.660428] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-212a7d60-8542-46bd-bfa0-3d8a35055aa0 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1897.668558] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b64946f-4610-48de-bd0f-9d61ba32cbba {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1897.725681] env[63279]: DEBUG oslo_vmware.api [None req-018b6b0c-2b29-423d-bb8d-c1cfe503a363 tempest-InstanceActionsNegativeTestJSON-35594143 tempest-InstanceActionsNegativeTestJSON-35594143-project-member] Task: {'id': task-2086955, 'name': PowerOnVM_Task} progress is 78%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1897.747249] env[63279]: DEBUG nova.scheduler.client.report [None req-60555cc8-e311-4c21-b52c-fa487660eaaf tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Inventory has not changed for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1897.967626] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2086956, 'name': CreateVM_Task, 'duration_secs': 0.385858} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1897.967810] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f20e8eb2-847b-4988-abf9-ed5f9f65c25c] Created VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1897.968696] env[63279]: DEBUG oslo_concurrency.lockutils [None req-517f7fbc-4234-4bef-826e-a666eac17f9f tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1897.968899] env[63279]: DEBUG oslo_concurrency.lockutils [None req-517f7fbc-4234-4bef-826e-a666eac17f9f tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1897.969231] env[63279]: DEBUG oslo_concurrency.lockutils [None req-517f7fbc-4234-4bef-826e-a666eac17f9f tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1897.969481] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3a8a206c-1fa2-481f-8536-46eed8a928a6 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1897.974319] env[63279]: DEBUG oslo_vmware.api [None req-517f7fbc-4234-4bef-826e-a666eac17f9f tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Waiting for the task: (returnval){ [ 1897.974319] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]526e1de4-0d7e-6b0d-c511-717d9ba790e7" [ 1897.974319] env[63279]: _type = "Task" [ 1897.974319] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1897.982398] env[63279]: DEBUG oslo_vmware.api [None req-517f7fbc-4234-4bef-826e-a666eac17f9f tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]526e1de4-0d7e-6b0d-c511-717d9ba790e7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1898.225426] env[63279]: DEBUG oslo_vmware.api [None req-018b6b0c-2b29-423d-bb8d-c1cfe503a363 tempest-InstanceActionsNegativeTestJSON-35594143 tempest-InstanceActionsNegativeTestJSON-35594143-project-member] Task: {'id': task-2086955, 'name': PowerOnVM_Task, 'duration_secs': 1.164687} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1898.227974] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-018b6b0c-2b29-423d-bb8d-c1cfe503a363 tempest-InstanceActionsNegativeTestJSON-35594143 tempest-InstanceActionsNegativeTestJSON-35594143-project-member] [instance: a8107fa5-9e8b-41dd-9679-8e106a3496a5] Powered on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1898.228209] env[63279]: INFO nova.compute.manager [None req-018b6b0c-2b29-423d-bb8d-c1cfe503a363 tempest-InstanceActionsNegativeTestJSON-35594143 tempest-InstanceActionsNegativeTestJSON-35594143-project-member] [instance: a8107fa5-9e8b-41dd-9679-8e106a3496a5] Took 9.45 seconds to spawn the instance on the hypervisor. [ 1898.228390] env[63279]: DEBUG nova.compute.manager [None req-018b6b0c-2b29-423d-bb8d-c1cfe503a363 tempest-InstanceActionsNegativeTestJSON-35594143 tempest-InstanceActionsNegativeTestJSON-35594143-project-member] [instance: a8107fa5-9e8b-41dd-9679-8e106a3496a5] Checking state {{(pid=63279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1898.229178] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31a2f9bb-d020-4eb7-86ae-485eabab457f {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1898.252787] env[63279]: DEBUG oslo_concurrency.lockutils [None req-60555cc8-e311-4c21-b52c-fa487660eaaf tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.646s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1898.253353] env[63279]: DEBUG nova.compute.manager [None req-60555cc8-e311-4c21-b52c-fa487660eaaf tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] [instance: c1ac4af5-b01e-4175-844f-7a67b2ef7526] Start building networks asynchronously for instance. {{(pid=63279) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1898.255889] env[63279]: DEBUG oslo_concurrency.lockutils [None req-280cd432-3b9b-4c5d-8833-1157015e8549 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 26.810s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1898.256032] env[63279]: DEBUG nova.objects.instance [None req-280cd432-3b9b-4c5d-8833-1157015e8549 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: 41654a82-af5e-463e-80f9-86ba13a5ad2e] Trying to apply a migration context that does not seem to be set for this instance {{(pid=63279) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1898.367355] env[63279]: DEBUG nova.network.neutron [req-a7151979-8f5e-4ea0-8769-5defcacad70a req-8ae52ef5-b988-4f28-b64a-52018678e069 service nova] [instance: f20e8eb2-847b-4988-abf9-ed5f9f65c25c] Updated VIF entry in instance network info cache for port 180a3189-be6a-456b-a89c-d5ab98c3e461. 
{{(pid=63279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1898.367754] env[63279]: DEBUG nova.network.neutron [req-a7151979-8f5e-4ea0-8769-5defcacad70a req-8ae52ef5-b988-4f28-b64a-52018678e069 service nova] [instance: f20e8eb2-847b-4988-abf9-ed5f9f65c25c] Updating instance_info_cache with network_info: [{"id": "180a3189-be6a-456b-a89c-d5ab98c3e461", "address": "fa:16:3e:59:0b:d8", "network": {"id": "6b1ead98-699d-4ad0-bb54-2a4fa09faf6d", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-861811231-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3ea345c4bc9b4781acb6f774ac88c690", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7ab8d568-adb0-4f3b-b6cc-68413e6546ae", "external-id": "nsx-vlan-transportzone-86", "segmentation_id": 86, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap180a3189-be", "ovs_interfaceid": "180a3189-be6a-456b-a89c-d5ab98c3e461", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1898.444470] env[63279]: DEBUG nova.network.neutron [None req-4432b190-b723-4c54-ac54-2b64412dd5dc tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] [instance: 5572bb1c-b378-4531-8459-10c2a2b7afdf] Successfully updated port: 61aa2656-cef9-4e23-b276-4328c1aeab6b {{(pid=63279) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1898.486191] env[63279]: DEBUG oslo_vmware.api [None req-517f7fbc-4234-4bef-826e-a666eac17f9f tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]526e1de4-0d7e-6b0d-c511-717d9ba790e7, 'name': SearchDatastore_Task, 'duration_secs': 0.009937} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1898.486538] env[63279]: DEBUG oslo_concurrency.lockutils [None req-517f7fbc-4234-4bef-826e-a666eac17f9f tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1898.486779] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-517f7fbc-4234-4bef-826e-a666eac17f9f tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] [instance: f20e8eb2-847b-4988-abf9-ed5f9f65c25c] Processing image 30887889-e45b-4f67-8b3c-16216e594a90 {{(pid=63279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1898.487027] env[63279]: DEBUG oslo_concurrency.lockutils [None req-517f7fbc-4234-4bef-826e-a666eac17f9f tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1898.487170] env[63279]: DEBUG oslo_concurrency.lockutils [None req-517f7fbc-4234-4bef-826e-a666eac17f9f tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1898.487341] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-517f7fbc-4234-4bef-826e-a666eac17f9f tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1898.489030] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-dd5f32a4-dbaf-488e-b8cd-45444c9bf6ce {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1898.496446] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-517f7fbc-4234-4bef-826e-a666eac17f9f tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1898.496688] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-517f7fbc-4234-4bef-826e-a666eac17f9f tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1898.497407] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-be33755d-852e-4a54-a865-e7b806db7fd8 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1898.502412] env[63279]: DEBUG oslo_vmware.api [None req-517f7fbc-4234-4bef-826e-a666eac17f9f tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Waiting for the task: (returnval){ [ 1898.502412] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]527a604f-0b5f-3d07-d403-3ff31f6f3dd8" [ 1898.502412] env[63279]: _type = "Task" [ 1898.502412] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1898.510463] env[63279]: DEBUG oslo_vmware.api [None req-517f7fbc-4234-4bef-826e-a666eac17f9f tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]527a604f-0b5f-3d07-d403-3ff31f6f3dd8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1898.745463] env[63279]: INFO nova.compute.manager [None req-018b6b0c-2b29-423d-bb8d-c1cfe503a363 tempest-InstanceActionsNegativeTestJSON-35594143 tempest-InstanceActionsNegativeTestJSON-35594143-project-member] [instance: a8107fa5-9e8b-41dd-9679-8e106a3496a5] Took 46.48 seconds to build instance. [ 1898.763398] env[63279]: DEBUG nova.compute.utils [None req-60555cc8-e311-4c21-b52c-fa487660eaaf tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Using /dev/sd instead of None {{(pid=63279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1898.764917] env[63279]: DEBUG nova.compute.manager [None req-60555cc8-e311-4c21-b52c-fa487660eaaf tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] [instance: c1ac4af5-b01e-4175-844f-7a67b2ef7526] Allocating IP information in the background. 
{{(pid=63279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1898.765129] env[63279]: DEBUG nova.network.neutron [None req-60555cc8-e311-4c21-b52c-fa487660eaaf tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] [instance: c1ac4af5-b01e-4175-844f-7a67b2ef7526] allocate_for_instance() {{(pid=63279) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1898.815357] env[63279]: DEBUG nova.policy [None req-60555cc8-e311-4c21-b52c-fa487660eaaf tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8c9e365ca4f941f19cd9e82676a52acc', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3f6c6f65521a440fb80278bbff2d0ed0', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63279) authorize /opt/stack/nova/nova/policy.py:192}} [ 1898.871196] env[63279]: DEBUG oslo_concurrency.lockutils [req-a7151979-8f5e-4ea0-8769-5defcacad70a req-8ae52ef5-b988-4f28-b64a-52018678e069 service nova] Releasing lock "refresh_cache-f20e8eb2-847b-4988-abf9-ed5f9f65c25c" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1898.947671] env[63279]: DEBUG oslo_concurrency.lockutils [None req-4432b190-b723-4c54-ac54-2b64412dd5dc tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Acquiring lock "refresh_cache-5572bb1c-b378-4531-8459-10c2a2b7afdf" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1898.947874] env[63279]: DEBUG oslo_concurrency.lockutils [None req-4432b190-b723-4c54-ac54-2b64412dd5dc tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Acquired lock "refresh_cache-5572bb1c-b378-4531-8459-10c2a2b7afdf" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1898.948035] env[63279]: DEBUG nova.network.neutron [None req-4432b190-b723-4c54-ac54-2b64412dd5dc tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] [instance: 5572bb1c-b378-4531-8459-10c2a2b7afdf] Building network info cache for instance {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1899.018020] env[63279]: DEBUG oslo_vmware.api [None req-517f7fbc-4234-4bef-826e-a666eac17f9f tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]527a604f-0b5f-3d07-d403-3ff31f6f3dd8, 'name': SearchDatastore_Task, 'duration_secs': 0.008017} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1899.018020] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-92024849-3c82-4b6a-8851-e615925b5a7a {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1899.022067] env[63279]: DEBUG oslo_vmware.api [None req-517f7fbc-4234-4bef-826e-a666eac17f9f tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Waiting for the task: (returnval){ [ 1899.022067] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]5283aea5-c02c-c8ea-67f5-f320a5679d09" [ 1899.022067] env[63279]: _type = "Task" [ 1899.022067] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1899.034295] env[63279]: DEBUG oslo_vmware.api [None req-517f7fbc-4234-4bef-826e-a666eac17f9f tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]5283aea5-c02c-c8ea-67f5-f320a5679d09, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1899.247998] env[63279]: DEBUG oslo_concurrency.lockutils [None req-018b6b0c-2b29-423d-bb8d-c1cfe503a363 tempest-InstanceActionsNegativeTestJSON-35594143 tempest-InstanceActionsNegativeTestJSON-35594143-project-member] Lock "a8107fa5-9e8b-41dd-9679-8e106a3496a5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 70.475s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1899.263779] env[63279]: DEBUG oslo_concurrency.lockutils [None req-280cd432-3b9b-4c5d-8833-1157015e8549 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.008s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1899.264823] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 26.109s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1899.266875] env[63279]: DEBUG nova.compute.manager [None req-60555cc8-e311-4c21-b52c-fa487660eaaf tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] [instance: c1ac4af5-b01e-4175-844f-7a67b2ef7526] Start building block device mappings for instance. 
{{(pid=63279) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1899.404423] env[63279]: DEBUG nova.network.neutron [None req-60555cc8-e311-4c21-b52c-fa487660eaaf tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] [instance: c1ac4af5-b01e-4175-844f-7a67b2ef7526] Successfully created port: d61892fc-4ba7-43db-8d82-c981cf8c7008 {{(pid=63279) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1899.423201] env[63279]: DEBUG oslo_concurrency.lockutils [None req-93bcea3c-40fd-4c56-b396-c1827de295f1 tempest-InstanceActionsNegativeTestJSON-35594143 tempest-InstanceActionsNegativeTestJSON-35594143-project-member] Acquiring lock "a8107fa5-9e8b-41dd-9679-8e106a3496a5" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1899.423474] env[63279]: DEBUG oslo_concurrency.lockutils [None req-93bcea3c-40fd-4c56-b396-c1827de295f1 tempest-InstanceActionsNegativeTestJSON-35594143 tempest-InstanceActionsNegativeTestJSON-35594143-project-member] Lock "a8107fa5-9e8b-41dd-9679-8e106a3496a5" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1899.423693] env[63279]: DEBUG oslo_concurrency.lockutils [None req-93bcea3c-40fd-4c56-b396-c1827de295f1 tempest-InstanceActionsNegativeTestJSON-35594143 tempest-InstanceActionsNegativeTestJSON-35594143-project-member] Acquiring lock "a8107fa5-9e8b-41dd-9679-8e106a3496a5-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1899.424171] env[63279]: DEBUG oslo_concurrency.lockutils [None req-93bcea3c-40fd-4c56-b396-c1827de295f1 tempest-InstanceActionsNegativeTestJSON-35594143 tempest-InstanceActionsNegativeTestJSON-35594143-project-member] Lock "a8107fa5-9e8b-41dd-9679-8e106a3496a5-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1899.424171] env[63279]: DEBUG oslo_concurrency.lockutils [None req-93bcea3c-40fd-4c56-b396-c1827de295f1 tempest-InstanceActionsNegativeTestJSON-35594143 tempest-InstanceActionsNegativeTestJSON-35594143-project-member] Lock "a8107fa5-9e8b-41dd-9679-8e106a3496a5-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1899.428675] env[63279]: INFO nova.compute.manager [None req-93bcea3c-40fd-4c56-b396-c1827de295f1 tempest-InstanceActionsNegativeTestJSON-35594143 tempest-InstanceActionsNegativeTestJSON-35594143-project-member] [instance: a8107fa5-9e8b-41dd-9679-8e106a3496a5] Terminating instance [ 1899.494559] env[63279]: DEBUG nova.network.neutron [None req-4432b190-b723-4c54-ac54-2b64412dd5dc tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] [instance: 5572bb1c-b378-4531-8459-10c2a2b7afdf] Instance cache missing network info. 
{{(pid=63279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1899.536887] env[63279]: DEBUG oslo_vmware.api [None req-517f7fbc-4234-4bef-826e-a666eac17f9f tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]5283aea5-c02c-c8ea-67f5-f320a5679d09, 'name': SearchDatastore_Task, 'duration_secs': 0.010824} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1899.537185] env[63279]: DEBUG oslo_concurrency.lockutils [None req-517f7fbc-4234-4bef-826e-a666eac17f9f tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1899.537693] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-517f7fbc-4234-4bef-826e-a666eac17f9f tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] f20e8eb2-847b-4988-abf9-ed5f9f65c25c/f20e8eb2-847b-4988-abf9-ed5f9f65c25c.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1899.537977] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-71718b15-ad95-4524-8bcd-ecd12d9ec73a {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1899.544952] env[63279]: DEBUG oslo_vmware.api [None req-517f7fbc-4234-4bef-826e-a666eac17f9f tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Waiting for the task: (returnval){ [ 1899.544952] env[63279]: value = "task-2086957" [ 1899.544952] env[63279]: _type = "Task" [ 1899.544952] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1899.556859] env[63279]: DEBUG oslo_vmware.api [None req-517f7fbc-4234-4bef-826e-a666eac17f9f tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Task: {'id': task-2086957, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1899.575130] env[63279]: DEBUG nova.compute.manager [req-9b5b3645-2079-427a-94e3-d6ba77f32d59 req-60159f0e-f2c1-4b91-994e-ede5a30e0d30 service nova] [instance: 5572bb1c-b378-4531-8459-10c2a2b7afdf] Received event network-vif-plugged-61aa2656-cef9-4e23-b276-4328c1aeab6b {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 1899.575360] env[63279]: DEBUG oslo_concurrency.lockutils [req-9b5b3645-2079-427a-94e3-d6ba77f32d59 req-60159f0e-f2c1-4b91-994e-ede5a30e0d30 service nova] Acquiring lock "5572bb1c-b378-4531-8459-10c2a2b7afdf-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1899.575643] env[63279]: DEBUG oslo_concurrency.lockutils [req-9b5b3645-2079-427a-94e3-d6ba77f32d59 req-60159f0e-f2c1-4b91-994e-ede5a30e0d30 service nova] Lock "5572bb1c-b378-4531-8459-10c2a2b7afdf-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1899.576487] env[63279]: DEBUG oslo_concurrency.lockutils [req-9b5b3645-2079-427a-94e3-d6ba77f32d59 req-60159f0e-f2c1-4b91-994e-ede5a30e0d30 service nova] Lock "5572bb1c-b378-4531-8459-10c2a2b7afdf-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1899.576697] env[63279]: DEBUG nova.compute.manager [req-9b5b3645-2079-427a-94e3-d6ba77f32d59 req-60159f0e-f2c1-4b91-994e-ede5a30e0d30 service nova] [instance: 5572bb1c-b378-4531-8459-10c2a2b7afdf] No waiting events found dispatching network-vif-plugged-61aa2656-cef9-4e23-b276-4328c1aeab6b {{(pid=63279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1899.576888] env[63279]: WARNING nova.compute.manager [req-9b5b3645-2079-427a-94e3-d6ba77f32d59 req-60159f0e-f2c1-4b91-994e-ede5a30e0d30 service nova] [instance: 5572bb1c-b378-4531-8459-10c2a2b7afdf] Received unexpected event network-vif-plugged-61aa2656-cef9-4e23-b276-4328c1aeab6b for instance with vm_state building and task_state spawning. [ 1899.577077] env[63279]: DEBUG nova.compute.manager [req-9b5b3645-2079-427a-94e3-d6ba77f32d59 req-60159f0e-f2c1-4b91-994e-ede5a30e0d30 service nova] [instance: 5572bb1c-b378-4531-8459-10c2a2b7afdf] Received event network-changed-61aa2656-cef9-4e23-b276-4328c1aeab6b {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 1899.577248] env[63279]: DEBUG nova.compute.manager [req-9b5b3645-2079-427a-94e3-d6ba77f32d59 req-60159f0e-f2c1-4b91-994e-ede5a30e0d30 service nova] [instance: 5572bb1c-b378-4531-8459-10c2a2b7afdf] Refreshing instance network info cache due to event network-changed-61aa2656-cef9-4e23-b276-4328c1aeab6b. 
{{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11655}} [ 1899.577416] env[63279]: DEBUG oslo_concurrency.lockutils [req-9b5b3645-2079-427a-94e3-d6ba77f32d59 req-60159f0e-f2c1-4b91-994e-ede5a30e0d30 service nova] Acquiring lock "refresh_cache-5572bb1c-b378-4531-8459-10c2a2b7afdf" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1899.751264] env[63279]: DEBUG nova.compute.manager [None req-4a8933e7-208a-443d-9249-2440f3ab60b6 tempest-ServerMetadataNegativeTestJSON-1632547526 tempest-ServerMetadataNegativeTestJSON-1632547526-project-member] [instance: e04f06de-da6a-4562-a50a-ff16bf3a006e] Starting instance... {{(pid=63279) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1899.816776] env[63279]: DEBUG nova.network.neutron [None req-4432b190-b723-4c54-ac54-2b64412dd5dc tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] [instance: 5572bb1c-b378-4531-8459-10c2a2b7afdf] Updating instance_info_cache with network_info: [{"id": "61aa2656-cef9-4e23-b276-4328c1aeab6b", "address": "fa:16:3e:c8:ff:4e", "network": {"id": "6d7c4c21-3389-4688-b373-329b2328f275", "bridge": "br-int", "label": "tempest-ImagesTestJSON-311425692-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "77ee2145dda94e2b85eeb7379ed98e26", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0df968ae-c1ef-4009-a0f4-6f2e799c2fda", "external-id": "nsx-vlan-transportzone-864", "segmentation_id": 864, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap61aa2656-ce", "ovs_interfaceid": "61aa2656-cef9-4e23-b276-4328c1aeab6b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1899.932763] env[63279]: DEBUG nova.compute.manager [None req-93bcea3c-40fd-4c56-b396-c1827de295f1 tempest-InstanceActionsNegativeTestJSON-35594143 tempest-InstanceActionsNegativeTestJSON-35594143-project-member] [instance: a8107fa5-9e8b-41dd-9679-8e106a3496a5] Start destroying the instance on the hypervisor. 
{{(pid=63279) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1899.933080] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-93bcea3c-40fd-4c56-b396-c1827de295f1 tempest-InstanceActionsNegativeTestJSON-35594143 tempest-InstanceActionsNegativeTestJSON-35594143-project-member] [instance: a8107fa5-9e8b-41dd-9679-8e106a3496a5] Destroying instance {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1899.934324] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13898910-42bb-41c4-917d-7ee2b87f0c48 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1899.942506] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-93bcea3c-40fd-4c56-b396-c1827de295f1 tempest-InstanceActionsNegativeTestJSON-35594143 tempest-InstanceActionsNegativeTestJSON-35594143-project-member] [instance: a8107fa5-9e8b-41dd-9679-8e106a3496a5] Powering off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1899.942787] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ef96e061-4415-4324-b386-93d1683448b9 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1899.953213] env[63279]: DEBUG oslo_vmware.api [None req-93bcea3c-40fd-4c56-b396-c1827de295f1 tempest-InstanceActionsNegativeTestJSON-35594143 tempest-InstanceActionsNegativeTestJSON-35594143-project-member] Waiting for the task: (returnval){ [ 1899.953213] env[63279]: value = "task-2086958" [ 1899.953213] env[63279]: _type = "Task" [ 1899.953213] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1899.965125] env[63279]: DEBUG oslo_vmware.api [None req-93bcea3c-40fd-4c56-b396-c1827de295f1 tempest-InstanceActionsNegativeTestJSON-35594143 tempest-InstanceActionsNegativeTestJSON-35594143-project-member] Task: {'id': task-2086958, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1900.057233] env[63279]: DEBUG oslo_vmware.api [None req-517f7fbc-4234-4bef-826e-a666eac17f9f tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Task: {'id': task-2086957, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1900.275785] env[63279]: DEBUG oslo_concurrency.lockutils [None req-4a8933e7-208a-443d-9249-2440f3ab60b6 tempest-ServerMetadataNegativeTestJSON-1632547526 tempest-ServerMetadataNegativeTestJSON-1632547526-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1900.287337] env[63279]: DEBUG nova.compute.manager [None req-60555cc8-e311-4c21-b52c-fa487660eaaf tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] [instance: c1ac4af5-b01e-4175-844f-7a67b2ef7526] Start spawning the instance on the hypervisor. 
{{(pid=63279) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1900.290167] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Applying migration context for instance 5656c853-ac83-47be-83c4-979a9e87ab91 as it has an incoming, in-progress migration 9d057922-d0fc-4486-a0ee-a4b999090b3b. Migration status is confirming {{(pid=63279) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1016}} [ 1900.292396] env[63279]: INFO nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] [instance: 5656c853-ac83-47be-83c4-979a9e87ab91] Updating resource usage from migration 9d057922-d0fc-4486-a0ee-a4b999090b3b [ 1900.315356] env[63279]: DEBUG nova.virt.hardware [None req-60555cc8-e311-4c21-b52c-fa487660eaaf tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-13T17:30:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-13T17:30:01Z,direct_url=,disk_format='vmdk',id=30887889-e45b-4f67-8b3c-16216e594a90,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a935e86eee0a4c38adfe0367d2097a61',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-13T17:30:02Z,virtual_size=,visibility=), allow threads: False {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1900.315356] env[63279]: DEBUG nova.virt.hardware [None req-60555cc8-e311-4c21-b52c-fa487660eaaf tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Flavor limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1900.315356] env[63279]: DEBUG nova.virt.hardware [None req-60555cc8-e311-4c21-b52c-fa487660eaaf tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Image limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1900.315578] env[63279]: DEBUG nova.virt.hardware [None req-60555cc8-e311-4c21-b52c-fa487660eaaf tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Flavor pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1900.315578] env[63279]: DEBUG nova.virt.hardware [None req-60555cc8-e311-4c21-b52c-fa487660eaaf tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Image pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1900.315687] env[63279]: DEBUG nova.virt.hardware [None req-60555cc8-e311-4c21-b52c-fa487660eaaf tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1900.315918] env[63279]: DEBUG nova.virt.hardware [None req-60555cc8-e311-4c21-b52c-fa487660eaaf tempest-ServersAdminTestJSON-1578757811 
tempest-ServersAdminTestJSON-1578757811-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1900.316411] env[63279]: DEBUG nova.virt.hardware [None req-60555cc8-e311-4c21-b52c-fa487660eaaf tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1900.316411] env[63279]: DEBUG nova.virt.hardware [None req-60555cc8-e311-4c21-b52c-fa487660eaaf tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Got 1 possible topologies {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1900.316562] env[63279]: DEBUG nova.virt.hardware [None req-60555cc8-e311-4c21-b52c-fa487660eaaf tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1900.316732] env[63279]: DEBUG nova.virt.hardware [None req-60555cc8-e311-4c21-b52c-fa487660eaaf tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1900.317629] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5dcebef0-bcdc-4eb9-bf87-a00d8c2cddb3 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1900.321426] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Instance 0224e4ea-c13c-4abd-9626-6038c0bbe4e9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1900.321569] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Instance 6388f912-ae70-4e8f-b8e4-ceb02e0f8a51 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1900.321695] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Instance 4a9088e0-2992-4b18-8be9-6bc70633369b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1900.321836] env[63279]: WARNING nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Instance 7cd673bb-3795-4c6f-a1e0-a8ea0da0a35f is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. 
[ 1900.321955] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Instance fcc5a636-554f-424e-a604-a8e7bd7cf574 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1900.322086] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Instance 5d4909ea-396c-45ba-9ff5-acb8576150b3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1900.322202] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Instance 1a55008a-4d8c-403d-a8f4-966aa7346f4c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1900.322328] env[63279]: WARNING nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Instance 41654a82-af5e-463e-80f9-86ba13a5ad2e is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1900.322467] env[63279]: WARNING nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Instance 00fb1bc8-2f70-4fa3-a6a2-54fd38ba89c7 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1900.322591] env[63279]: WARNING nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Instance 32e84715-0345-4171-abb7-c034a501347e is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1900.322708] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Instance 5d4be656-defe-4332-b97e-e88b107ca4a1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1900.322820] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Instance 357f08c9-4de9-4b84-8384-6bf130872f40 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1900.322930] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Instance ecedded1-7169-49a4-8a9e-2fe4086db986 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1900.323049] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Instance ff2f355a-9687-4491-b243-6133e4b7b866 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1900.323212] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Instance 32dbef6d-d314-4fa6-972a-e7b1f22eb11d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1900.323256] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Instance 2f5e22f6-ba70-4848-965b-eb1553115323 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1900.323366] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Instance a8107fa5-9e8b-41dd-9679-8e106a3496a5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1900.323472] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Instance f20e8eb2-847b-4988-abf9-ed5f9f65c25c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=63279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1900.323579] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Migration 9d057922-d0fc-4486-a0ee-a4b999090b3b is active on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1712}} [ 1900.323688] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Instance 5656c853-ac83-47be-83c4-979a9e87ab91 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=63279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1900.323794] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Instance 5572bb1c-b378-4531-8459-10c2a2b7afdf actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1900.323900] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Instance c1ac4af5-b01e-4175-844f-7a67b2ef7526 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1900.325812] env[63279]: DEBUG oslo_concurrency.lockutils [None req-4432b190-b723-4c54-ac54-2b64412dd5dc tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Releasing lock "refresh_cache-5572bb1c-b378-4531-8459-10c2a2b7afdf" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1900.326096] env[63279]: DEBUG nova.compute.manager [None req-4432b190-b723-4c54-ac54-2b64412dd5dc tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] [instance: 5572bb1c-b378-4531-8459-10c2a2b7afdf] Instance network_info: |[{"id": "61aa2656-cef9-4e23-b276-4328c1aeab6b", "address": "fa:16:3e:c8:ff:4e", "network": {"id": "6d7c4c21-3389-4688-b373-329b2328f275", "bridge": "br-int", "label": "tempest-ImagesTestJSON-311425692-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "77ee2145dda94e2b85eeb7379ed98e26", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0df968ae-c1ef-4009-a0f4-6f2e799c2fda", "external-id": "nsx-vlan-transportzone-864", "segmentation_id": 864, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap61aa2656-ce", "ovs_interfaceid": "61aa2656-cef9-4e23-b276-4328c1aeab6b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1900.326380] env[63279]: DEBUG oslo_concurrency.lockutils [req-9b5b3645-2079-427a-94e3-d6ba77f32d59 req-60159f0e-f2c1-4b91-994e-ede5a30e0d30 service nova] Acquired lock "refresh_cache-5572bb1c-b378-4531-8459-10c2a2b7afdf" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1900.326580] env[63279]: DEBUG nova.network.neutron [req-9b5b3645-2079-427a-94e3-d6ba77f32d59 req-60159f0e-f2c1-4b91-994e-ede5a30e0d30 service nova] [instance: 5572bb1c-b378-4531-8459-10c2a2b7afdf] Refreshing network info cache for port 61aa2656-cef9-4e23-b276-4328c1aeab6b {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1900.327610] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-4432b190-b723-4c54-ac54-2b64412dd5dc tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] [instance: 5572bb1c-b378-4531-8459-10c2a2b7afdf] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c8:ff:4e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '0df968ae-c1ef-4009-a0f4-6f2e799c2fda', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': 
True}, 'iface_id': '61aa2656-cef9-4e23-b276-4328c1aeab6b', 'vif_model': 'vmxnet3'}] {{(pid=63279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1900.335595] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-4432b190-b723-4c54-ac54-2b64412dd5dc tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Creating folder: Project (77ee2145dda94e2b85eeb7379ed98e26). Parent ref: group-v427491. {{(pid=63279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1900.337210] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-555263d4-9a14-4f8f-b1ef-f5cb13d23477 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1900.343137] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ac876de-c730-40ee-8703-a390a6e1ae4c {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1900.351017] env[63279]: INFO nova.virt.vmwareapi.vm_util [None req-4432b190-b723-4c54-ac54-2b64412dd5dc tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Created folder: Project (77ee2145dda94e2b85eeb7379ed98e26) in parent group-v427491. [ 1900.351231] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-4432b190-b723-4c54-ac54-2b64412dd5dc tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Creating folder: Instances. Parent ref: group-v427577. {{(pid=63279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1900.359347] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ff8d6290-bcd0-4efe-a35c-f9663f475506 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1900.368770] env[63279]: INFO nova.virt.vmwareapi.vm_util [None req-4432b190-b723-4c54-ac54-2b64412dd5dc tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Created folder: Instances in parent group-v427577. [ 1900.369210] env[63279]: DEBUG oslo.service.loopingcall [None req-4432b190-b723-4c54-ac54-2b64412dd5dc tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1900.369210] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5572bb1c-b378-4531-8459-10c2a2b7afdf] Creating VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1900.369807] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f538a633-06c3-4f5a-a6a1-4462612ead33 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1900.389126] env[63279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1900.389126] env[63279]: value = "task-2086961" [ 1900.389126] env[63279]: _type = "Task" [ 1900.389126] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1900.396292] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2086961, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1900.465731] env[63279]: DEBUG oslo_vmware.api [None req-93bcea3c-40fd-4c56-b396-c1827de295f1 tempest-InstanceActionsNegativeTestJSON-35594143 tempest-InstanceActionsNegativeTestJSON-35594143-project-member] Task: {'id': task-2086958, 'name': PowerOffVM_Task, 'duration_secs': 0.255647} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1900.466040] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-93bcea3c-40fd-4c56-b396-c1827de295f1 tempest-InstanceActionsNegativeTestJSON-35594143 tempest-InstanceActionsNegativeTestJSON-35594143-project-member] [instance: a8107fa5-9e8b-41dd-9679-8e106a3496a5] Powered off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1900.466225] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-93bcea3c-40fd-4c56-b396-c1827de295f1 tempest-InstanceActionsNegativeTestJSON-35594143 tempest-InstanceActionsNegativeTestJSON-35594143-project-member] [instance: a8107fa5-9e8b-41dd-9679-8e106a3496a5] Unregistering the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1900.466530] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c45ef47e-ad6a-4238-8b27-a75b34e744f0 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1900.556514] env[63279]: DEBUG oslo_vmware.api [None req-517f7fbc-4234-4bef-826e-a666eac17f9f tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Task: {'id': task-2086957, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.514852} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1900.557803] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-517f7fbc-4234-4bef-826e-a666eac17f9f tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] f20e8eb2-847b-4988-abf9-ed5f9f65c25c/f20e8eb2-847b-4988-abf9-ed5f9f65c25c.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1900.558056] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-517f7fbc-4234-4bef-826e-a666eac17f9f tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] [instance: f20e8eb2-847b-4988-abf9-ed5f9f65c25c] Extending root virtual disk to 1048576 {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1900.558333] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-93bcea3c-40fd-4c56-b396-c1827de295f1 tempest-InstanceActionsNegativeTestJSON-35594143 tempest-InstanceActionsNegativeTestJSON-35594143-project-member] [instance: a8107fa5-9e8b-41dd-9679-8e106a3496a5] Unregistered the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1900.558517] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-93bcea3c-40fd-4c56-b396-c1827de295f1 tempest-InstanceActionsNegativeTestJSON-35594143 tempest-InstanceActionsNegativeTestJSON-35594143-project-member] [instance: a8107fa5-9e8b-41dd-9679-8e106a3496a5] Deleting contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1900.558696] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-93bcea3c-40fd-4c56-b396-c1827de295f1 tempest-InstanceActionsNegativeTestJSON-35594143 tempest-InstanceActionsNegativeTestJSON-35594143-project-member] Deleting the datastore file [datastore1] a8107fa5-9e8b-41dd-9679-8e106a3496a5 {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1900.558982] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-bea1390a-edda-4d50-9265-d9a424d891d3 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1900.560892] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b5f1c116-8360-49a2-8cf1-f2cf122853ce {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1900.566420] env[63279]: DEBUG oslo_vmware.api [None req-517f7fbc-4234-4bef-826e-a666eac17f9f tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Waiting for the task: (returnval){ [ 1900.566420] env[63279]: value = "task-2086963" [ 1900.566420] env[63279]: _type = "Task" [ 1900.566420] env[63279]: } to complete. 
{{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1900.569928] env[63279]: DEBUG oslo_vmware.api [None req-93bcea3c-40fd-4c56-b396-c1827de295f1 tempest-InstanceActionsNegativeTestJSON-35594143 tempest-InstanceActionsNegativeTestJSON-35594143-project-member] Waiting for the task: (returnval){ [ 1900.569928] env[63279]: value = "task-2086964" [ 1900.569928] env[63279]: _type = "Task" [ 1900.569928] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1900.578555] env[63279]: DEBUG oslo_vmware.api [None req-517f7fbc-4234-4bef-826e-a666eac17f9f tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Task: {'id': task-2086963, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1900.582226] env[63279]: DEBUG oslo_vmware.api [None req-93bcea3c-40fd-4c56-b396-c1827de295f1 tempest-InstanceActionsNegativeTestJSON-35594143 tempest-InstanceActionsNegativeTestJSON-35594143-project-member] Task: {'id': task-2086964, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1900.840195] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Instance 5bb445d3-1b12-4a1b-ad2a-cbc929b13aee has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1900.899167] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2086961, 'name': CreateVM_Task, 'duration_secs': 0.439153} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1900.899421] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5572bb1c-b378-4531-8459-10c2a2b7afdf] Created VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1900.900073] env[63279]: DEBUG oslo_concurrency.lockutils [None req-4432b190-b723-4c54-ac54-2b64412dd5dc tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1900.900269] env[63279]: DEBUG oslo_concurrency.lockutils [None req-4432b190-b723-4c54-ac54-2b64412dd5dc tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1900.900529] env[63279]: DEBUG oslo_concurrency.lockutils [None req-4432b190-b723-4c54-ac54-2b64412dd5dc tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1900.900780] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-65fb5908-e03b-4ba8-be68-6700fe8c1bf0 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1900.905736] env[63279]: DEBUG oslo_vmware.api [None req-4432b190-b723-4c54-ac54-2b64412dd5dc tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Waiting for the task: (returnval){ [ 1900.905736] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]527c28ad-7953-b8f2-424e-3a869e7f5ef4" [ 1900.905736] env[63279]: _type = "Task" [ 1900.905736] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1900.913810] env[63279]: DEBUG oslo_vmware.api [None req-4432b190-b723-4c54-ac54-2b64412dd5dc tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]527c28ad-7953-b8f2-424e-3a869e7f5ef4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1901.080098] env[63279]: DEBUG oslo_vmware.api [None req-517f7fbc-4234-4bef-826e-a666eac17f9f tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Task: {'id': task-2086963, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.064845} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1901.083083] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-517f7fbc-4234-4bef-826e-a666eac17f9f tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] [instance: f20e8eb2-847b-4988-abf9-ed5f9f65c25c] Extended root virtual disk {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1901.083389] env[63279]: DEBUG oslo_vmware.api [None req-93bcea3c-40fd-4c56-b396-c1827de295f1 tempest-InstanceActionsNegativeTestJSON-35594143 tempest-InstanceActionsNegativeTestJSON-35594143-project-member] Task: {'id': task-2086964, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.168945} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1901.084057] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cdb12838-08fd-40f9-b0b5-62b0f849cf0e {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1901.086429] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-93bcea3c-40fd-4c56-b396-c1827de295f1 tempest-InstanceActionsNegativeTestJSON-35594143 tempest-InstanceActionsNegativeTestJSON-35594143-project-member] Deleted the datastore file {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1901.086679] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-93bcea3c-40fd-4c56-b396-c1827de295f1 tempest-InstanceActionsNegativeTestJSON-35594143 tempest-InstanceActionsNegativeTestJSON-35594143-project-member] [instance: a8107fa5-9e8b-41dd-9679-8e106a3496a5] Deleted contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1901.086884] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-93bcea3c-40fd-4c56-b396-c1827de295f1 tempest-InstanceActionsNegativeTestJSON-35594143 tempest-InstanceActionsNegativeTestJSON-35594143-project-member] [instance: a8107fa5-9e8b-41dd-9679-8e106a3496a5] Instance destroyed {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1901.087048] env[63279]: INFO nova.compute.manager [None req-93bcea3c-40fd-4c56-b396-c1827de295f1 tempest-InstanceActionsNegativeTestJSON-35594143 tempest-InstanceActionsNegativeTestJSON-35594143-project-member] [instance: a8107fa5-9e8b-41dd-9679-8e106a3496a5] Took 1.15 seconds to destroy the instance on the hypervisor. [ 1901.087283] env[63279]: DEBUG oslo.service.loopingcall [None req-93bcea3c-40fd-4c56-b396-c1827de295f1 tempest-InstanceActionsNegativeTestJSON-35594143 tempest-InstanceActionsNegativeTestJSON-35594143-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1901.087555] env[63279]: DEBUG nova.compute.manager [-] [instance: a8107fa5-9e8b-41dd-9679-8e106a3496a5] Deallocating network for instance {{(pid=63279) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1901.087658] env[63279]: DEBUG nova.network.neutron [-] [instance: a8107fa5-9e8b-41dd-9679-8e106a3496a5] deallocate_for_instance() {{(pid=63279) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1901.110285] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-517f7fbc-4234-4bef-826e-a666eac17f9f tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] [instance: f20e8eb2-847b-4988-abf9-ed5f9f65c25c] Reconfiguring VM instance instance-0000001c to attach disk [datastore1] f20e8eb2-847b-4988-abf9-ed5f9f65c25c/f20e8eb2-847b-4988-abf9-ed5f9f65c25c.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1901.110600] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6bb10dc8-7d2a-4c37-835f-1e1bd08ac7c1 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1901.130740] env[63279]: DEBUG oslo_vmware.api [None req-517f7fbc-4234-4bef-826e-a666eac17f9f tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Waiting for the task: (returnval){ [ 1901.130740] env[63279]: value = "task-2086965" [ 1901.130740] env[63279]: _type = "Task" [ 1901.130740] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1901.140987] env[63279]: DEBUG oslo_vmware.api [None req-517f7fbc-4234-4bef-826e-a666eac17f9f tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Task: {'id': task-2086965, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1901.214176] env[63279]: DEBUG nova.network.neutron [req-9b5b3645-2079-427a-94e3-d6ba77f32d59 req-60159f0e-f2c1-4b91-994e-ede5a30e0d30 service nova] [instance: 5572bb1c-b378-4531-8459-10c2a2b7afdf] Updated VIF entry in instance network info cache for port 61aa2656-cef9-4e23-b276-4328c1aeab6b. 
{{(pid=63279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1901.214176] env[63279]: DEBUG nova.network.neutron [req-9b5b3645-2079-427a-94e3-d6ba77f32d59 req-60159f0e-f2c1-4b91-994e-ede5a30e0d30 service nova] [instance: 5572bb1c-b378-4531-8459-10c2a2b7afdf] Updating instance_info_cache with network_info: [{"id": "61aa2656-cef9-4e23-b276-4328c1aeab6b", "address": "fa:16:3e:c8:ff:4e", "network": {"id": "6d7c4c21-3389-4688-b373-329b2328f275", "bridge": "br-int", "label": "tempest-ImagesTestJSON-311425692-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "77ee2145dda94e2b85eeb7379ed98e26", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0df968ae-c1ef-4009-a0f4-6f2e799c2fda", "external-id": "nsx-vlan-transportzone-864", "segmentation_id": 864, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap61aa2656-ce", "ovs_interfaceid": "61aa2656-cef9-4e23-b276-4328c1aeab6b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1901.345604] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Instance acf95fad-316c-4605-9008-24d4d7c05892 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1901.364931] env[63279]: DEBUG nova.network.neutron [None req-60555cc8-e311-4c21-b52c-fa487660eaaf tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] [instance: c1ac4af5-b01e-4175-844f-7a67b2ef7526] Successfully updated port: d61892fc-4ba7-43db-8d82-c981cf8c7008 {{(pid=63279) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1901.417886] env[63279]: DEBUG oslo_vmware.api [None req-4432b190-b723-4c54-ac54-2b64412dd5dc tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]527c28ad-7953-b8f2-424e-3a869e7f5ef4, 'name': SearchDatastore_Task, 'duration_secs': 0.011007} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1901.418239] env[63279]: DEBUG oslo_concurrency.lockutils [None req-4432b190-b723-4c54-ac54-2b64412dd5dc tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1901.418473] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-4432b190-b723-4c54-ac54-2b64412dd5dc tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] [instance: 5572bb1c-b378-4531-8459-10c2a2b7afdf] Processing image 30887889-e45b-4f67-8b3c-16216e594a90 {{(pid=63279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1901.418712] env[63279]: DEBUG oslo_concurrency.lockutils [None req-4432b190-b723-4c54-ac54-2b64412dd5dc tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1901.418871] env[63279]: DEBUG oslo_concurrency.lockutils [None req-4432b190-b723-4c54-ac54-2b64412dd5dc tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1901.419075] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-4432b190-b723-4c54-ac54-2b64412dd5dc tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1901.419366] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ae2697a6-23da-4e95-8e2c-2ac1350d7c61 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1901.427380] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-4432b190-b723-4c54-ac54-2b64412dd5dc tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1901.427581] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-4432b190-b723-4c54-ac54-2b64412dd5dc tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1901.428293] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-953f0081-2642-4641-9d15-1f05b065e33d {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1901.434530] env[63279]: DEBUG oslo_vmware.api [None req-4432b190-b723-4c54-ac54-2b64412dd5dc tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Waiting for the task: (returnval){ [ 1901.434530] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]5202c26f-4af6-4871-7c20-f3e10cb9bda5" [ 1901.434530] env[63279]: _type = "Task" [ 1901.434530] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1901.442053] env[63279]: DEBUG oslo_vmware.api [None req-4432b190-b723-4c54-ac54-2b64412dd5dc tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]5202c26f-4af6-4871-7c20-f3e10cb9bda5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1901.511185] env[63279]: DEBUG nova.compute.manager [req-e89e8cdc-1b6e-46b3-9818-aca1487e50bf req-072f6379-a652-4949-a465-368b6fbed75b service nova] [instance: a8107fa5-9e8b-41dd-9679-8e106a3496a5] Received event network-vif-deleted-6516d120-aae5-4b20-b18c-9d1fcfbb6f13 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 1901.511185] env[63279]: INFO nova.compute.manager [req-e89e8cdc-1b6e-46b3-9818-aca1487e50bf req-072f6379-a652-4949-a465-368b6fbed75b service nova] [instance: a8107fa5-9e8b-41dd-9679-8e106a3496a5] Neutron deleted interface 6516d120-aae5-4b20-b18c-9d1fcfbb6f13; detaching it from the instance and deleting it from the info cache [ 1901.511185] env[63279]: DEBUG nova.network.neutron [req-e89e8cdc-1b6e-46b3-9818-aca1487e50bf req-072f6379-a652-4949-a465-368b6fbed75b service nova] [instance: a8107fa5-9e8b-41dd-9679-8e106a3496a5] Updating instance_info_cache with network_info: [] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1901.621286] env[63279]: DEBUG nova.compute.manager [req-a37966ef-b74b-467b-aee1-a09a2e91e292 req-73bc58d7-2823-4f86-9ca4-16a862cec63d service nova] [instance: c1ac4af5-b01e-4175-844f-7a67b2ef7526] Received event network-vif-plugged-d61892fc-4ba7-43db-8d82-c981cf8c7008 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 1901.622032] env[63279]: DEBUG oslo_concurrency.lockutils [req-a37966ef-b74b-467b-aee1-a09a2e91e292 req-73bc58d7-2823-4f86-9ca4-16a862cec63d service nova] Acquiring lock "c1ac4af5-b01e-4175-844f-7a67b2ef7526-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1901.622032] env[63279]: DEBUG oslo_concurrency.lockutils [req-a37966ef-b74b-467b-aee1-a09a2e91e292 req-73bc58d7-2823-4f86-9ca4-16a862cec63d service nova] Lock "c1ac4af5-b01e-4175-844f-7a67b2ef7526-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1901.622032] env[63279]: DEBUG 
oslo_concurrency.lockutils [req-a37966ef-b74b-467b-aee1-a09a2e91e292 req-73bc58d7-2823-4f86-9ca4-16a862cec63d service nova] Lock "c1ac4af5-b01e-4175-844f-7a67b2ef7526-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1901.622032] env[63279]: DEBUG nova.compute.manager [req-a37966ef-b74b-467b-aee1-a09a2e91e292 req-73bc58d7-2823-4f86-9ca4-16a862cec63d service nova] [instance: c1ac4af5-b01e-4175-844f-7a67b2ef7526] No waiting events found dispatching network-vif-plugged-d61892fc-4ba7-43db-8d82-c981cf8c7008 {{(pid=63279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1901.622246] env[63279]: WARNING nova.compute.manager [req-a37966ef-b74b-467b-aee1-a09a2e91e292 req-73bc58d7-2823-4f86-9ca4-16a862cec63d service nova] [instance: c1ac4af5-b01e-4175-844f-7a67b2ef7526] Received unexpected event network-vif-plugged-d61892fc-4ba7-43db-8d82-c981cf8c7008 for instance with vm_state building and task_state spawning. [ 1901.622349] env[63279]: DEBUG nova.compute.manager [req-a37966ef-b74b-467b-aee1-a09a2e91e292 req-73bc58d7-2823-4f86-9ca4-16a862cec63d service nova] [instance: c1ac4af5-b01e-4175-844f-7a67b2ef7526] Received event network-changed-d61892fc-4ba7-43db-8d82-c981cf8c7008 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 1901.622505] env[63279]: DEBUG nova.compute.manager [req-a37966ef-b74b-467b-aee1-a09a2e91e292 req-73bc58d7-2823-4f86-9ca4-16a862cec63d service nova] [instance: c1ac4af5-b01e-4175-844f-7a67b2ef7526] Refreshing instance network info cache due to event network-changed-d61892fc-4ba7-43db-8d82-c981cf8c7008. {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11655}} [ 1901.622688] env[63279]: DEBUG oslo_concurrency.lockutils [req-a37966ef-b74b-467b-aee1-a09a2e91e292 req-73bc58d7-2823-4f86-9ca4-16a862cec63d service nova] Acquiring lock "refresh_cache-c1ac4af5-b01e-4175-844f-7a67b2ef7526" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1901.622822] env[63279]: DEBUG oslo_concurrency.lockutils [req-a37966ef-b74b-467b-aee1-a09a2e91e292 req-73bc58d7-2823-4f86-9ca4-16a862cec63d service nova] Acquired lock "refresh_cache-c1ac4af5-b01e-4175-844f-7a67b2ef7526" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1901.623615] env[63279]: DEBUG nova.network.neutron [req-a37966ef-b74b-467b-aee1-a09a2e91e292 req-73bc58d7-2823-4f86-9ca4-16a862cec63d service nova] [instance: c1ac4af5-b01e-4175-844f-7a67b2ef7526] Refreshing network info cache for port d61892fc-4ba7-43db-8d82-c981cf8c7008 {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1901.642394] env[63279]: DEBUG oslo_vmware.api [None req-517f7fbc-4234-4bef-826e-a666eac17f9f tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Task: {'id': task-2086965, 'name': ReconfigVM_Task, 'duration_secs': 0.254863} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1901.642663] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-517f7fbc-4234-4bef-826e-a666eac17f9f tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] [instance: f20e8eb2-847b-4988-abf9-ed5f9f65c25c] Reconfigured VM instance instance-0000001c to attach disk [datastore1] f20e8eb2-847b-4988-abf9-ed5f9f65c25c/f20e8eb2-847b-4988-abf9-ed5f9f65c25c.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1901.643296] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e20bf9e3-c5d6-4ced-9f57-5fb898b65165 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1901.649777] env[63279]: DEBUG oslo_vmware.api [None req-517f7fbc-4234-4bef-826e-a666eac17f9f tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Waiting for the task: (returnval){ [ 1901.649777] env[63279]: value = "task-2086966" [ 1901.649777] env[63279]: _type = "Task" [ 1901.649777] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1901.659773] env[63279]: DEBUG oslo_vmware.api [None req-517f7fbc-4234-4bef-826e-a666eac17f9f tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Task: {'id': task-2086966, 'name': Rename_Task} progress is 5%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1901.716690] env[63279]: DEBUG oslo_concurrency.lockutils [req-9b5b3645-2079-427a-94e3-d6ba77f32d59 req-60159f0e-f2c1-4b91-994e-ede5a30e0d30 service nova] Releasing lock "refresh_cache-5572bb1c-b378-4531-8459-10c2a2b7afdf" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1901.849069] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Instance 977fa519-2db3-4ee5-981d-c46820a8c72e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1901.871629] env[63279]: DEBUG oslo_concurrency.lockutils [None req-60555cc8-e311-4c21-b52c-fa487660eaaf tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Acquiring lock "refresh_cache-c1ac4af5-b01e-4175-844f-7a67b2ef7526" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1901.885433] env[63279]: DEBUG nova.network.neutron [-] [instance: a8107fa5-9e8b-41dd-9679-8e106a3496a5] Updating instance_info_cache with network_info: [] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1901.944394] env[63279]: DEBUG oslo_vmware.api [None req-4432b190-b723-4c54-ac54-2b64412dd5dc tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]5202c26f-4af6-4871-7c20-f3e10cb9bda5, 'name': SearchDatastore_Task, 'duration_secs': 0.008705} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1901.945230] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-570f9dd9-1f5a-4740-a159-b08f2dede4b0 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1901.950522] env[63279]: DEBUG oslo_vmware.api [None req-4432b190-b723-4c54-ac54-2b64412dd5dc tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Waiting for the task: (returnval){ [ 1901.950522] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52bae324-9276-0b59-27c5-e833686e7639" [ 1901.950522] env[63279]: _type = "Task" [ 1901.950522] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1901.960831] env[63279]: DEBUG oslo_vmware.api [None req-4432b190-b723-4c54-ac54-2b64412dd5dc tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52bae324-9276-0b59-27c5-e833686e7639, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1902.013508] env[63279]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-5bd5f492-1154-4bee-bc10-fa915d6d6d52 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1902.022694] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c6b710c-afc1-416b-91ad-bf0bb25033ed {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1902.056965] env[63279]: DEBUG nova.compute.manager [req-e89e8cdc-1b6e-46b3-9818-aca1487e50bf req-072f6379-a652-4949-a465-368b6fbed75b service nova] [instance: a8107fa5-9e8b-41dd-9679-8e106a3496a5] Detach interface failed, port_id=6516d120-aae5-4b20-b18c-9d1fcfbb6f13, reason: Instance a8107fa5-9e8b-41dd-9679-8e106a3496a5 could not be found. {{(pid=63279) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11484}} [ 1902.160213] env[63279]: DEBUG oslo_vmware.api [None req-517f7fbc-4234-4bef-826e-a666eac17f9f tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Task: {'id': task-2086966, 'name': Rename_Task} progress is 14%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1902.161016] env[63279]: DEBUG nova.network.neutron [req-a37966ef-b74b-467b-aee1-a09a2e91e292 req-73bc58d7-2823-4f86-9ca4-16a862cec63d service nova] [instance: c1ac4af5-b01e-4175-844f-7a67b2ef7526] Instance cache missing network info. 
{{(pid=63279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1902.231109] env[63279]: DEBUG nova.network.neutron [req-a37966ef-b74b-467b-aee1-a09a2e91e292 req-73bc58d7-2823-4f86-9ca4-16a862cec63d service nova] [instance: c1ac4af5-b01e-4175-844f-7a67b2ef7526] Updating instance_info_cache with network_info: [] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1902.352877] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Instance de543869-8ab1-40ed-8f6d-dc506c257843 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1902.389880] env[63279]: INFO nova.compute.manager [-] [instance: a8107fa5-9e8b-41dd-9679-8e106a3496a5] Took 1.30 seconds to deallocate network for instance. [ 1902.461054] env[63279]: DEBUG oslo_vmware.api [None req-4432b190-b723-4c54-ac54-2b64412dd5dc tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52bae324-9276-0b59-27c5-e833686e7639, 'name': SearchDatastore_Task, 'duration_secs': 0.018254} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1902.461331] env[63279]: DEBUG oslo_concurrency.lockutils [None req-4432b190-b723-4c54-ac54-2b64412dd5dc tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1902.461590] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-4432b190-b723-4c54-ac54-2b64412dd5dc tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] 5572bb1c-b378-4531-8459-10c2a2b7afdf/5572bb1c-b378-4531-8459-10c2a2b7afdf.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1902.461835] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-587707c0-fb3e-475d-8e5e-0c96837f5f3c {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1902.468104] env[63279]: DEBUG oslo_vmware.api [None req-4432b190-b723-4c54-ac54-2b64412dd5dc tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Waiting for the task: (returnval){ [ 1902.468104] env[63279]: value = "task-2086967" [ 1902.468104] env[63279]: _type = "Task" [ 1902.468104] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1902.475637] env[63279]: DEBUG oslo_vmware.api [None req-4432b190-b723-4c54-ac54-2b64412dd5dc tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Task: {'id': task-2086967, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1902.663573] env[63279]: DEBUG oslo_vmware.api [None req-517f7fbc-4234-4bef-826e-a666eac17f9f tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Task: {'id': task-2086966, 'name': Rename_Task, 'duration_secs': 0.816832} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1902.663971] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-517f7fbc-4234-4bef-826e-a666eac17f9f tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] [instance: f20e8eb2-847b-4988-abf9-ed5f9f65c25c] Powering on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1902.664269] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5b367b85-6510-4600-847c-0ad0e4c7dece {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1902.672768] env[63279]: DEBUG oslo_vmware.api [None req-517f7fbc-4234-4bef-826e-a666eac17f9f tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Waiting for the task: (returnval){ [ 1902.672768] env[63279]: value = "task-2086968" [ 1902.672768] env[63279]: _type = "Task" [ 1902.672768] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1902.680416] env[63279]: DEBUG oslo_vmware.api [None req-517f7fbc-4234-4bef-826e-a666eac17f9f tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Task: {'id': task-2086968, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1902.734055] env[63279]: DEBUG oslo_concurrency.lockutils [req-a37966ef-b74b-467b-aee1-a09a2e91e292 req-73bc58d7-2823-4f86-9ca4-16a862cec63d service nova] Releasing lock "refresh_cache-c1ac4af5-b01e-4175-844f-7a67b2ef7526" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1902.734284] env[63279]: DEBUG oslo_concurrency.lockutils [None req-60555cc8-e311-4c21-b52c-fa487660eaaf tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Acquired lock "refresh_cache-c1ac4af5-b01e-4175-844f-7a67b2ef7526" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1902.734470] env[63279]: DEBUG nova.network.neutron [None req-60555cc8-e311-4c21-b52c-fa487660eaaf tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] [instance: c1ac4af5-b01e-4175-844f-7a67b2ef7526] Building network info cache for instance {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1902.857335] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Instance 81103d53-99fe-4d1a-816f-7685c59c80ee has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1902.896916] env[63279]: DEBUG oslo_concurrency.lockutils [None req-93bcea3c-40fd-4c56-b396-c1827de295f1 tempest-InstanceActionsNegativeTestJSON-35594143 tempest-InstanceActionsNegativeTestJSON-35594143-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1902.981223] env[63279]: DEBUG oslo_vmware.api [None req-4432b190-b723-4c54-ac54-2b64412dd5dc tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Task: {'id': task-2086967, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.462138} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1902.981223] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-4432b190-b723-4c54-ac54-2b64412dd5dc tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] 5572bb1c-b378-4531-8459-10c2a2b7afdf/5572bb1c-b378-4531-8459-10c2a2b7afdf.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1902.981223] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-4432b190-b723-4c54-ac54-2b64412dd5dc tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] [instance: 5572bb1c-b378-4531-8459-10c2a2b7afdf] Extending root virtual disk to 1048576 {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1902.981223] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-19c34722-4767-4caf-bde9-ca434e0d7d53 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1902.989121] env[63279]: DEBUG oslo_vmware.api [None req-4432b190-b723-4c54-ac54-2b64412dd5dc tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Waiting for the task: (returnval){ [ 1902.989121] env[63279]: value = "task-2086969" [ 1902.989121] env[63279]: _type = "Task" [ 1902.989121] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1902.996726] env[63279]: DEBUG oslo_vmware.api [None req-4432b190-b723-4c54-ac54-2b64412dd5dc tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Task: {'id': task-2086969, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1903.181500] env[63279]: DEBUG oslo_vmware.api [None req-517f7fbc-4234-4bef-826e-a666eac17f9f tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Task: {'id': task-2086968, 'name': PowerOnVM_Task, 'duration_secs': 0.466233} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1903.181788] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-517f7fbc-4234-4bef-826e-a666eac17f9f tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] [instance: f20e8eb2-847b-4988-abf9-ed5f9f65c25c] Powered on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1903.182010] env[63279]: INFO nova.compute.manager [None req-517f7fbc-4234-4bef-826e-a666eac17f9f tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] [instance: f20e8eb2-847b-4988-abf9-ed5f9f65c25c] Took 8.29 seconds to spawn the instance on the hypervisor. [ 1903.182217] env[63279]: DEBUG nova.compute.manager [None req-517f7fbc-4234-4bef-826e-a666eac17f9f tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] [instance: f20e8eb2-847b-4988-abf9-ed5f9f65c25c] Checking state {{(pid=63279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1903.182960] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7891e393-8442-4ba6-9cf0-c8769b554654 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1903.264650] env[63279]: DEBUG nova.network.neutron [None req-60555cc8-e311-4c21-b52c-fa487660eaaf tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] [instance: c1ac4af5-b01e-4175-844f-7a67b2ef7526] Instance cache missing network info. {{(pid=63279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1903.360269] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Instance f375b54b-f9de-4529-b752-52c240aed532 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1903.387564] env[63279]: DEBUG nova.network.neutron [None req-60555cc8-e311-4c21-b52c-fa487660eaaf tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] [instance: c1ac4af5-b01e-4175-844f-7a67b2ef7526] Updating instance_info_cache with network_info: [{"id": "d61892fc-4ba7-43db-8d82-c981cf8c7008", "address": "fa:16:3e:c4:e1:36", "network": {"id": "548d80cd-fb6c-47fc-8c1d-036889987399", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-219167599-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3f6c6f65521a440fb80278bbff2d0ed0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "678ebbe4-4c53-4eaf-a689-93981310f37d", "external-id": "nsx-vlan-transportzone-443", "segmentation_id": 443, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd61892fc-4b", "ovs_interfaceid": "d61892fc-4ba7-43db-8d82-c981cf8c7008", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1903.496198] env[63279]: DEBUG oslo_vmware.api [None req-4432b190-b723-4c54-ac54-2b64412dd5dc tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Task: {'id': task-2086969, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.058653} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1903.496464] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-4432b190-b723-4c54-ac54-2b64412dd5dc tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] [instance: 5572bb1c-b378-4531-8459-10c2a2b7afdf] Extended root virtual disk {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1903.497296] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3369fab4-e246-4c69-8484-84135d904ffb {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1903.518789] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-4432b190-b723-4c54-ac54-2b64412dd5dc tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] [instance: 5572bb1c-b378-4531-8459-10c2a2b7afdf] Reconfiguring VM instance instance-0000001d to attach disk [datastore1] 5572bb1c-b378-4531-8459-10c2a2b7afdf/5572bb1c-b378-4531-8459-10c2a2b7afdf.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1903.519068] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1c92ff72-4266-4af5-a258-f1bb2f0a9da1 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1903.539071] env[63279]: DEBUG oslo_vmware.api [None req-4432b190-b723-4c54-ac54-2b64412dd5dc tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Waiting for the task: (returnval){ [ 1903.539071] env[63279]: value = "task-2086970" [ 1903.539071] env[63279]: _type = "Task" [ 1903.539071] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1903.546968] env[63279]: DEBUG oslo_vmware.api [None req-4432b190-b723-4c54-ac54-2b64412dd5dc tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Task: {'id': task-2086970, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1903.704219] env[63279]: INFO nova.compute.manager [None req-517f7fbc-4234-4bef-826e-a666eac17f9f tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] [instance: f20e8eb2-847b-4988-abf9-ed5f9f65c25c] Took 43.70 seconds to build instance. [ 1903.863329] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Instance e04f06de-da6a-4562-a50a-ff16bf3a006e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1903.889776] env[63279]: DEBUG oslo_concurrency.lockutils [None req-60555cc8-e311-4c21-b52c-fa487660eaaf tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Releasing lock "refresh_cache-c1ac4af5-b01e-4175-844f-7a67b2ef7526" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1903.890110] env[63279]: DEBUG nova.compute.manager [None req-60555cc8-e311-4c21-b52c-fa487660eaaf tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] [instance: c1ac4af5-b01e-4175-844f-7a67b2ef7526] Instance network_info: |[{"id": "d61892fc-4ba7-43db-8d82-c981cf8c7008", "address": "fa:16:3e:c4:e1:36", "network": {"id": "548d80cd-fb6c-47fc-8c1d-036889987399", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-219167599-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3f6c6f65521a440fb80278bbff2d0ed0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "678ebbe4-4c53-4eaf-a689-93981310f37d", "external-id": "nsx-vlan-transportzone-443", "segmentation_id": 443, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd61892fc-4b", "ovs_interfaceid": "d61892fc-4ba7-43db-8d82-c981cf8c7008", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1903.890755] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-60555cc8-e311-4c21-b52c-fa487660eaaf tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] [instance: c1ac4af5-b01e-4175-844f-7a67b2ef7526] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c4:e1:36', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '678ebbe4-4c53-4eaf-a689-93981310f37d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd61892fc-4ba7-43db-8d82-c981cf8c7008', 'vif_model': 'vmxnet3'}] {{(pid=63279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1903.898166] env[63279]: DEBUG oslo.service.loopingcall [None req-60555cc8-e311-4c21-b52c-fa487660eaaf tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1903.898380] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c1ac4af5-b01e-4175-844f-7a67b2ef7526] Creating VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1903.898601] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-86503b43-c1c6-4bc6-999e-f86b52d39c52 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1903.918251] env[63279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1903.918251] env[63279]: value = "task-2086971" [ 1903.918251] env[63279]: _type = "Task" [ 1903.918251] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1903.927849] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2086971, 'name': CreateVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1904.051351] env[63279]: DEBUG oslo_vmware.api [None req-4432b190-b723-4c54-ac54-2b64412dd5dc tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Task: {'id': task-2086970, 'name': ReconfigVM_Task, 'duration_secs': 0.271679} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1904.051659] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-4432b190-b723-4c54-ac54-2b64412dd5dc tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] [instance: 5572bb1c-b378-4531-8459-10c2a2b7afdf] Reconfigured VM instance instance-0000001d to attach disk [datastore1] 5572bb1c-b378-4531-8459-10c2a2b7afdf/5572bb1c-b378-4531-8459-10c2a2b7afdf.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1904.052428] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ade83b18-068f-460e-8718-9c4e31aaba4b {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1904.059562] env[63279]: DEBUG oslo_vmware.api [None req-4432b190-b723-4c54-ac54-2b64412dd5dc tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Waiting for the task: (returnval){ [ 1904.059562] env[63279]: value = "task-2086972" [ 1904.059562] env[63279]: _type = "Task" [ 1904.059562] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1904.068169] env[63279]: DEBUG oslo_vmware.api [None req-4432b190-b723-4c54-ac54-2b64412dd5dc tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Task: {'id': task-2086972, 'name': Rename_Task} progress is 5%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1904.206989] env[63279]: DEBUG oslo_concurrency.lockutils [None req-517f7fbc-4234-4bef-826e-a666eac17f9f tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Lock "f20e8eb2-847b-4988-abf9-ed5f9f65c25c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 74.950s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1904.366412] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Instance cf1b70af-335d-404b-bb4f-fe082dd6f450 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1904.427536] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2086971, 'name': CreateVM_Task} progress is 99%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1904.577043] env[63279]: DEBUG oslo_vmware.api [None req-4432b190-b723-4c54-ac54-2b64412dd5dc tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Task: {'id': task-2086972, 'name': Rename_Task, 'duration_secs': 0.137298} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1904.577381] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-4432b190-b723-4c54-ac54-2b64412dd5dc tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] [instance: 5572bb1c-b378-4531-8459-10c2a2b7afdf] Powering on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1904.577674] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6326f0f6-7177-435b-9ac5-02064c7c3f79 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1904.584504] env[63279]: DEBUG oslo_vmware.api [None req-4432b190-b723-4c54-ac54-2b64412dd5dc tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Waiting for the task: (returnval){ [ 1904.584504] env[63279]: value = "task-2086973" [ 1904.584504] env[63279]: _type = "Task" [ 1904.584504] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1904.592943] env[63279]: DEBUG oslo_vmware.api [None req-4432b190-b723-4c54-ac54-2b64412dd5dc tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Task: {'id': task-2086973, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1904.709196] env[63279]: DEBUG nova.compute.manager [None req-1e3daa27-53b9-4834-adeb-4cde61720229 tempest-ServersNegativeTestMultiTenantJSON-98060902 tempest-ServersNegativeTestMultiTenantJSON-98060902-project-member] [instance: cf1b70af-335d-404b-bb4f-fe082dd6f450] Starting instance... 
{{(pid=63279) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1904.869950] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Instance c287072d-0ce9-4075-8895-0f64326ac303 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1904.928979] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2086971, 'name': CreateVM_Task, 'duration_secs': 0.524822} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1904.929175] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c1ac4af5-b01e-4175-844f-7a67b2ef7526] Created VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1904.929902] env[63279]: DEBUG oslo_concurrency.lockutils [None req-60555cc8-e311-4c21-b52c-fa487660eaaf tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1904.930042] env[63279]: DEBUG oslo_concurrency.lockutils [None req-60555cc8-e311-4c21-b52c-fa487660eaaf tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1904.930362] env[63279]: DEBUG oslo_concurrency.lockutils [None req-60555cc8-e311-4c21-b52c-fa487660eaaf tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1904.930611] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-159e12cd-af81-4ea5-862d-810fecf35144 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1904.935445] env[63279]: DEBUG oslo_vmware.api [None req-60555cc8-e311-4c21-b52c-fa487660eaaf tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Waiting for the task: (returnval){ [ 1904.935445] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52b2c0a6-422e-9acc-8b6e-be5488eccc24" [ 1904.935445] env[63279]: _type = "Task" [ 1904.935445] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1904.943400] env[63279]: DEBUG oslo_vmware.api [None req-60555cc8-e311-4c21-b52c-fa487660eaaf tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52b2c0a6-422e-9acc-8b6e-be5488eccc24, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1905.095330] env[63279]: DEBUG oslo_vmware.api [None req-4432b190-b723-4c54-ac54-2b64412dd5dc tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Task: {'id': task-2086973, 'name': PowerOnVM_Task, 'duration_secs': 0.431364} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1905.095615] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-4432b190-b723-4c54-ac54-2b64412dd5dc tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] [instance: 5572bb1c-b378-4531-8459-10c2a2b7afdf] Powered on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1905.095828] env[63279]: INFO nova.compute.manager [None req-4432b190-b723-4c54-ac54-2b64412dd5dc tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] [instance: 5572bb1c-b378-4531-8459-10c2a2b7afdf] Took 7.46 seconds to spawn the instance on the hypervisor. [ 1905.096039] env[63279]: DEBUG nova.compute.manager [None req-4432b190-b723-4c54-ac54-2b64412dd5dc tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] [instance: 5572bb1c-b378-4531-8459-10c2a2b7afdf] Checking state {{(pid=63279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1905.096815] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2958f144-9f87-41fb-96c0-3f926a8366df {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1905.236875] env[63279]: DEBUG oslo_concurrency.lockutils [None req-1e3daa27-53b9-4834-adeb-4cde61720229 tempest-ServersNegativeTestMultiTenantJSON-98060902 tempest-ServersNegativeTestMultiTenantJSON-98060902-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1905.372660] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Instance 044335c7-ce3b-4b4a-b1dc-8b9acec538b4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1905.448019] env[63279]: DEBUG oslo_vmware.api [None req-60555cc8-e311-4c21-b52c-fa487660eaaf tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52b2c0a6-422e-9acc-8b6e-be5488eccc24, 'name': SearchDatastore_Task, 'duration_secs': 0.01189} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1905.448358] env[63279]: DEBUG oslo_concurrency.lockutils [None req-60555cc8-e311-4c21-b52c-fa487660eaaf tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1905.448636] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-60555cc8-e311-4c21-b52c-fa487660eaaf tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] [instance: c1ac4af5-b01e-4175-844f-7a67b2ef7526] Processing image 30887889-e45b-4f67-8b3c-16216e594a90 {{(pid=63279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1905.448887] env[63279]: DEBUG oslo_concurrency.lockutils [None req-60555cc8-e311-4c21-b52c-fa487660eaaf tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1905.449050] env[63279]: DEBUG oslo_concurrency.lockutils [None req-60555cc8-e311-4c21-b52c-fa487660eaaf tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1905.449235] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-60555cc8-e311-4c21-b52c-fa487660eaaf tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1905.449492] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2f47bb60-31f8-415e-a4fa-0008bbb8d554 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1905.458105] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-60555cc8-e311-4c21-b52c-fa487660eaaf tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1905.458294] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-60555cc8-e311-4c21-b52c-fa487660eaaf tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1905.458992] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5eda1d60-b78e-4863-b851-53eec2da8e35 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1905.465194] env[63279]: DEBUG oslo_vmware.api [None req-60555cc8-e311-4c21-b52c-fa487660eaaf tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Waiting for the task: (returnval){ [ 1905.465194] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52d80621-1a5a-dba2-bfe6-722471522223" [ 1905.465194] env[63279]: _type = "Task" [ 1905.465194] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1905.472386] env[63279]: DEBUG oslo_vmware.api [None req-60555cc8-e311-4c21-b52c-fa487660eaaf tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52d80621-1a5a-dba2-bfe6-722471522223, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1905.614840] env[63279]: INFO nova.compute.manager [None req-4432b190-b723-4c54-ac54-2b64412dd5dc tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] [instance: 5572bb1c-b378-4531-8459-10c2a2b7afdf] Took 42.69 seconds to build instance. [ 1905.878538] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Instance df410051-d551-4a90-81f7-5630f5521a10 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1905.976150] env[63279]: DEBUG oslo_vmware.api [None req-60555cc8-e311-4c21-b52c-fa487660eaaf tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52d80621-1a5a-dba2-bfe6-722471522223, 'name': SearchDatastore_Task, 'duration_secs': 0.008862} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1905.976782] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e7d9968a-1a12-415e-9bad-b2f36675d1f0 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1905.982157] env[63279]: DEBUG oslo_vmware.api [None req-60555cc8-e311-4c21-b52c-fa487660eaaf tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Waiting for the task: (returnval){ [ 1905.982157] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]526674fa-efc0-7e76-4971-6a8d5974d64e" [ 1905.982157] env[63279]: _type = "Task" [ 1905.982157] env[63279]: } to complete. 
{{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1905.989798] env[63279]: DEBUG oslo_vmware.api [None req-60555cc8-e311-4c21-b52c-fa487660eaaf tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]526674fa-efc0-7e76-4971-6a8d5974d64e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1906.030498] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b12a750c-c556-4f5b-a549-3af7f5f296cf tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Acquiring lock "5572bb1c-b378-4531-8459-10c2a2b7afdf" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1906.117275] env[63279]: DEBUG oslo_concurrency.lockutils [None req-4432b190-b723-4c54-ac54-2b64412dd5dc tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Lock "5572bb1c-b378-4531-8459-10c2a2b7afdf" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 70.775s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1906.118607] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b12a750c-c556-4f5b-a549-3af7f5f296cf tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Lock "5572bb1c-b378-4531-8459-10c2a2b7afdf" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.088s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1906.118813] env[63279]: DEBUG nova.compute.manager [None req-b12a750c-c556-4f5b-a549-3af7f5f296cf tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] [instance: 5572bb1c-b378-4531-8459-10c2a2b7afdf] Checking state {{(pid=63279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1906.119894] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cbfa6cfd-342f-4e75-adba-8ddc26de32f1 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1906.126404] env[63279]: DEBUG nova.compute.manager [None req-b12a750c-c556-4f5b-a549-3af7f5f296cf tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] [instance: 5572bb1c-b378-4531-8459-10c2a2b7afdf] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=63279) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 1906.126974] env[63279]: DEBUG nova.objects.instance [None req-b12a750c-c556-4f5b-a549-3af7f5f296cf tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Lazy-loading 'flavor' on Instance uuid 5572bb1c-b378-4531-8459-10c2a2b7afdf {{(pid=63279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1906.386282] env[63279]: DEBUG oslo_concurrency.lockutils [None req-ef03080d-dc17-46cd-b23c-c76fdb70363c tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Acquiring lock "5d4be656-defe-4332-b97e-e88b107ca4a1" by 
"nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1906.386633] env[63279]: DEBUG oslo_concurrency.lockutils [None req-ef03080d-dc17-46cd-b23c-c76fdb70363c tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Lock "5d4be656-defe-4332-b97e-e88b107ca4a1" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1906.386775] env[63279]: DEBUG nova.compute.manager [None req-ef03080d-dc17-46cd-b23c-c76fdb70363c tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] [instance: 5d4be656-defe-4332-b97e-e88b107ca4a1] Checking state {{(pid=63279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1906.387580] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Instance 3f888d81-e73f-4486-bb64-849c873449bf has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1906.389686] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e837480f-94a3-401a-bd81-434be196032e {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1906.398445] env[63279]: DEBUG nova.compute.manager [None req-ef03080d-dc17-46cd-b23c-c76fdb70363c tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] [instance: 5d4be656-defe-4332-b97e-e88b107ca4a1] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=63279) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 1906.398894] env[63279]: DEBUG nova.objects.instance [None req-ef03080d-dc17-46cd-b23c-c76fdb70363c tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Lazy-loading 'flavor' on Instance uuid 5d4be656-defe-4332-b97e-e88b107ca4a1 {{(pid=63279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1906.492764] env[63279]: DEBUG oslo_vmware.api [None req-60555cc8-e311-4c21-b52c-fa487660eaaf tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]526674fa-efc0-7e76-4971-6a8d5974d64e, 'name': SearchDatastore_Task, 'duration_secs': 0.009625} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1906.493038] env[63279]: DEBUG oslo_concurrency.lockutils [None req-60555cc8-e311-4c21-b52c-fa487660eaaf tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1906.493308] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-60555cc8-e311-4c21-b52c-fa487660eaaf tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] c1ac4af5-b01e-4175-844f-7a67b2ef7526/c1ac4af5-b01e-4175-844f-7a67b2ef7526.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1906.493559] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1e3052f7-9072-40d3-9abf-8fe5d67d2f58 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1906.499470] env[63279]: DEBUG oslo_vmware.api [None req-60555cc8-e311-4c21-b52c-fa487660eaaf tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Waiting for the task: (returnval){ [ 1906.499470] env[63279]: value = "task-2086974" [ 1906.499470] env[63279]: _type = "Task" [ 1906.499470] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1906.507656] env[63279]: DEBUG oslo_vmware.api [None req-60555cc8-e311-4c21-b52c-fa487660eaaf tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Task: {'id': task-2086974, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1906.624188] env[63279]: DEBUG nova.compute.manager [None req-a527ddef-d877-4b24-a0b1-c60958f9b4c7 tempest-ServerAddressesNegativeTestJSON-377386487 tempest-ServerAddressesNegativeTestJSON-377386487-project-member] [instance: c287072d-0ce9-4075-8895-0f64326ac303] Starting instance... {{(pid=63279) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1906.894282] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Instance ba2d6111-d93d-4216-b641-864b542ea253 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1907.009892] env[63279]: DEBUG oslo_vmware.api [None req-60555cc8-e311-4c21-b52c-fa487660eaaf tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Task: {'id': task-2086974, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.495124} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1907.010172] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-60555cc8-e311-4c21-b52c-fa487660eaaf tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] c1ac4af5-b01e-4175-844f-7a67b2ef7526/c1ac4af5-b01e-4175-844f-7a67b2ef7526.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1907.010383] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-60555cc8-e311-4c21-b52c-fa487660eaaf tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] [instance: c1ac4af5-b01e-4175-844f-7a67b2ef7526] Extending root virtual disk to 1048576 {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1907.010634] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e0a910f4-535b-4245-b2da-2d469b922fdd {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1907.016501] env[63279]: DEBUG oslo_vmware.api [None req-60555cc8-e311-4c21-b52c-fa487660eaaf tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Waiting for the task: (returnval){ [ 1907.016501] env[63279]: value = "task-2086975" [ 1907.016501] env[63279]: _type = "Task" [ 1907.016501] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1907.023726] env[63279]: DEBUG oslo_vmware.api [None req-60555cc8-e311-4c21-b52c-fa487660eaaf tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Task: {'id': task-2086975, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1907.136075] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-b12a750c-c556-4f5b-a549-3af7f5f296cf tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] [instance: 5572bb1c-b378-4531-8459-10c2a2b7afdf] Powering off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1907.136746] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a60ac77e-bc22-4285-a543-208aa1e571ec {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1907.144680] env[63279]: DEBUG oslo_vmware.api [None req-b12a750c-c556-4f5b-a549-3af7f5f296cf tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Waiting for the task: (returnval){ [ 1907.144680] env[63279]: value = "task-2086976" [ 1907.144680] env[63279]: _type = "Task" [ 1907.144680] env[63279]: } to complete. 
{{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1907.150058] env[63279]: DEBUG oslo_concurrency.lockutils [None req-a527ddef-d877-4b24-a0b1-c60958f9b4c7 tempest-ServerAddressesNegativeTestJSON-377386487 tempest-ServerAddressesNegativeTestJSON-377386487-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1907.156860] env[63279]: DEBUG oslo_vmware.api [None req-b12a750c-c556-4f5b-a549-3af7f5f296cf tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Task: {'id': task-2086976, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1907.397840] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Instance ad435281-55a0-418a-8400-5c461a5c15ef has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1907.406550] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-ef03080d-dc17-46cd-b23c-c76fdb70363c tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] [instance: 5d4be656-defe-4332-b97e-e88b107ca4a1] Powering off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1907.406968] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6c2071e3-95c3-4303-9f4b-e8c498d1ff09 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1907.414459] env[63279]: DEBUG oslo_vmware.api [None req-ef03080d-dc17-46cd-b23c-c76fdb70363c tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Waiting for the task: (returnval){ [ 1907.414459] env[63279]: value = "task-2086977" [ 1907.414459] env[63279]: _type = "Task" [ 1907.414459] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1907.423196] env[63279]: DEBUG oslo_vmware.api [None req-ef03080d-dc17-46cd-b23c-c76fdb70363c tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Task: {'id': task-2086977, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1907.525946] env[63279]: DEBUG oslo_vmware.api [None req-60555cc8-e311-4c21-b52c-fa487660eaaf tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Task: {'id': task-2086975, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.062563} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1907.526177] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-60555cc8-e311-4c21-b52c-fa487660eaaf tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] [instance: c1ac4af5-b01e-4175-844f-7a67b2ef7526] Extended root virtual disk {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1907.526995] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-494246ac-0360-4118-908e-9e471b8559e6 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1907.548946] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-60555cc8-e311-4c21-b52c-fa487660eaaf tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] [instance: c1ac4af5-b01e-4175-844f-7a67b2ef7526] Reconfiguring VM instance instance-0000001e to attach disk [datastore1] c1ac4af5-b01e-4175-844f-7a67b2ef7526/c1ac4af5-b01e-4175-844f-7a67b2ef7526.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1907.549327] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0bde0381-f2ce-4e15-8d91-ef2919f1534d {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1907.568753] env[63279]: DEBUG oslo_vmware.api [None req-60555cc8-e311-4c21-b52c-fa487660eaaf tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Waiting for the task: (returnval){ [ 1907.568753] env[63279]: value = "task-2086978" [ 1907.568753] env[63279]: _type = "Task" [ 1907.568753] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1907.576839] env[63279]: DEBUG oslo_vmware.api [None req-60555cc8-e311-4c21-b52c-fa487660eaaf tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Task: {'id': task-2086978, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1907.653951] env[63279]: DEBUG oslo_vmware.api [None req-b12a750c-c556-4f5b-a549-3af7f5f296cf tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Task: {'id': task-2086976, 'name': PowerOffVM_Task, 'duration_secs': 0.182264} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1907.654247] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-b12a750c-c556-4f5b-a549-3af7f5f296cf tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] [instance: 5572bb1c-b378-4531-8459-10c2a2b7afdf] Powered off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1907.654449] env[63279]: DEBUG nova.compute.manager [None req-b12a750c-c556-4f5b-a549-3af7f5f296cf tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] [instance: 5572bb1c-b378-4531-8459-10c2a2b7afdf] Checking state {{(pid=63279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1907.655209] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c87c0e2-ebfd-466f-8111-6c2cbb5acdf7 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1907.900024] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Instance 79032b2a-74f7-4c6d-8f71-f848fe372ba2 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1907.926975] env[63279]: DEBUG oslo_vmware.api [None req-ef03080d-dc17-46cd-b23c-c76fdb70363c tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Task: {'id': task-2086977, 'name': PowerOffVM_Task} progress is 100%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1908.079469] env[63279]: DEBUG oslo_vmware.api [None req-60555cc8-e311-4c21-b52c-fa487660eaaf tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Task: {'id': task-2086978, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1908.166933] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b12a750c-c556-4f5b-a549-3af7f5f296cf tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Lock "5572bb1c-b378-4531-8459-10c2a2b7afdf" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.048s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1908.404272] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Instance eca98392-98be-405b-b799-463ef9ee3dc8 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1908.425223] env[63279]: DEBUG oslo_vmware.api [None req-ef03080d-dc17-46cd-b23c-c76fdb70363c tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Task: {'id': task-2086977, 'name': PowerOffVM_Task, 'duration_secs': 0.56079} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1908.425487] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-ef03080d-dc17-46cd-b23c-c76fdb70363c tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] [instance: 5d4be656-defe-4332-b97e-e88b107ca4a1] Powered off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1908.425683] env[63279]: DEBUG nova.compute.manager [None req-ef03080d-dc17-46cd-b23c-c76fdb70363c tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] [instance: 5d4be656-defe-4332-b97e-e88b107ca4a1] Checking state {{(pid=63279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1908.426464] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab8e0530-b910-4922-a4a4-a02b583ea7e7 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1908.580318] env[63279]: DEBUG oslo_vmware.api [None req-60555cc8-e311-4c21-b52c-fa487660eaaf tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Task: {'id': task-2086978, 'name': ReconfigVM_Task, 'duration_secs': 0.841197} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1908.580457] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-60555cc8-e311-4c21-b52c-fa487660eaaf tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] [instance: c1ac4af5-b01e-4175-844f-7a67b2ef7526] Reconfigured VM instance instance-0000001e to attach disk [datastore1] c1ac4af5-b01e-4175-844f-7a67b2ef7526/c1ac4af5-b01e-4175-844f-7a67b2ef7526.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1908.581017] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b9d9a00f-9cff-46ec-944f-b7dd72c974ff {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1908.588493] env[63279]: DEBUG oslo_vmware.api [None req-60555cc8-e311-4c21-b52c-fa487660eaaf tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Waiting for the task: (returnval){ [ 1908.588493] env[63279]: value = "task-2086979" [ 1908.588493] env[63279]: _type = "Task" [ 1908.588493] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1908.598017] env[63279]: DEBUG oslo_vmware.api [None req-60555cc8-e311-4c21-b52c-fa487660eaaf tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Task: {'id': task-2086979, 'name': Rename_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1908.911098] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Instance 32789822-cb54-43e7-beae-b5ed3002f4ad has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1908.937478] env[63279]: DEBUG oslo_concurrency.lockutils [None req-ef03080d-dc17-46cd-b23c-c76fdb70363c tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Lock "5d4be656-defe-4332-b97e-e88b107ca4a1" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.551s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1909.098176] env[63279]: DEBUG oslo_vmware.api [None req-60555cc8-e311-4c21-b52c-fa487660eaaf tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Task: {'id': task-2086979, 'name': Rename_Task, 'duration_secs': 0.133839} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1909.098443] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-60555cc8-e311-4c21-b52c-fa487660eaaf tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] [instance: c1ac4af5-b01e-4175-844f-7a67b2ef7526] Powering on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1909.098690] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2f3b0f91-455f-40bf-b318-efe33a08b0e1 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1909.104730] env[63279]: DEBUG oslo_vmware.api [None req-60555cc8-e311-4c21-b52c-fa487660eaaf tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Waiting for the task: (returnval){ [ 1909.104730] env[63279]: value = "task-2086980" [ 1909.104730] env[63279]: _type = "Task" [ 1909.104730] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1909.112381] env[63279]: DEBUG oslo_vmware.api [None req-60555cc8-e311-4c21-b52c-fa487660eaaf tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Task: {'id': task-2086980, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1909.413214] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Instance 1b2ca21b-feea-4fc1-9ddc-99f144e4241a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1909.502724] env[63279]: DEBUG nova.compute.manager [None req-aa2a98de-449c-4cf6-b67c-ceed2ff46565 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] [instance: 5572bb1c-b378-4531-8459-10c2a2b7afdf] Checking state {{(pid=63279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1909.503672] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ffe78c3-4bd7-4b00-8d6c-975f19cac467 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1909.616491] env[63279]: DEBUG oslo_vmware.api [None req-60555cc8-e311-4c21-b52c-fa487660eaaf tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Task: {'id': task-2086980, 'name': PowerOnVM_Task, 'duration_secs': 0.4228} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1909.616824] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-60555cc8-e311-4c21-b52c-fa487660eaaf tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] [instance: c1ac4af5-b01e-4175-844f-7a67b2ef7526] Powered on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1909.617037] env[63279]: INFO nova.compute.manager [None req-60555cc8-e311-4c21-b52c-fa487660eaaf tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] [instance: c1ac4af5-b01e-4175-844f-7a67b2ef7526] Took 9.33 seconds to spawn the instance on the hypervisor. [ 1909.617222] env[63279]: DEBUG nova.compute.manager [None req-60555cc8-e311-4c21-b52c-fa487660eaaf tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] [instance: c1ac4af5-b01e-4175-844f-7a67b2ef7526] Checking state {{(pid=63279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1909.618049] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-940e1166-65a0-4aad-b4d5-115652e325fb {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1909.916312] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Instance fb124cfa-24b4-4712-b8cc-c87df5d6231b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1909.916638] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Total usable vcpus: 48, total allocated vcpus: 18 {{(pid=63279) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1909.916805] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=4096MB phys_disk=200GB used_disk=18GB total_vcpus=48 used_vcpus=18 pci_stats=[] {{(pid=63279) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1910.016081] env[63279]: INFO nova.compute.manager [None req-aa2a98de-449c-4cf6-b67c-ceed2ff46565 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] [instance: 5572bb1c-b378-4531-8459-10c2a2b7afdf] instance snapshotting [ 1910.016081] env[63279]: WARNING nova.compute.manager [None req-aa2a98de-449c-4cf6-b67c-ceed2ff46565 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] [instance: 5572bb1c-b378-4531-8459-10c2a2b7afdf] trying to snapshot a non-running instance: (state: 4 expected: 1) [ 1910.017966] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46dd3c56-ec6c-4594-b607-668727fec5a8 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1910.039042] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53b874bb-5919-4914-bd4e-539d77392824 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1910.136112] env[63279]: INFO nova.compute.manager [None req-60555cc8-e311-4c21-b52c-fa487660eaaf tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] [instance: c1ac4af5-b01e-4175-844f-7a67b2ef7526] Took 46.11 seconds to build instance. 
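Note on the task-polling pattern visible in the records above: every vCenter operation in this trace (SearchDatastore_Task, CopyVirtualDisk_Task, ExtendVirtualDisk_Task, ReconfigVM_Task, Rename_Task, PowerOnVM_Task) returns a task reference, and oslo_vmware.api waits on it by repeatedly polling its state (the "progress is N%" DEBUG lines) until it reports success or error. A minimal sketch of that loop, assuming a hypothetical poll_task_state() helper in place of the PropertyCollector round-trips oslo.vmware performs internally:

    import time

    class TaskFailedError(Exception):
        """Raised when the hypervisor reports the task as errored."""

    def wait_for_task(poll_task_state, task_ref, interval=0.5, timeout=300.0):
        # poll_task_state(task_ref) is a hypothetical callable returning
        # (state, progress, error); it stands in for the property-collector
        # queries the real session issues against vCenter.
        deadline = time.monotonic() + timeout
        while time.monotonic() < deadline:
            state, progress, error = poll_task_state(task_ref)
            # Mirrors the "Task: {...} progress is N%" DEBUG lines above.
            print(f"Task {task_ref}: state={state}, progress={progress}%")
            if state == "success":
                return
            if state == "error":
                raise TaskFailedError(f"Task {task_ref} failed: {error}")
            time.sleep(interval)
        raise TimeoutError(f"Task {task_ref} did not complete within {timeout}s")

This is only an illustration of the success/error/progress handling behind the "completed successfully" records in this log; the actual polling is done by the _poll_task and wait_for_task methods in oslo_vmware/api.py referenced in the source locations above, driven on a configured poll interval rather than a bare sleep loop.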
[ 1910.230694] env[63279]: DEBUG nova.objects.instance [None req-2142016b-b962-4e79-b728-ee6f5bdb718c tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Lazy-loading 'flavor' on Instance uuid 5d4be656-defe-4332-b97e-e88b107ca4a1 {{(pid=63279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1910.465994] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a245b2a7-3946-497f-a9cc-cc7990322cfa {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1910.473469] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47a5485b-93b5-479a-9111-771e80626bc5 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1910.505157] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97f3563f-a185-4b30-9e07-7694eb6335e5 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1910.512764] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2e96449-4d7e-4c4f-9ad4-e35bae849a1f {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1910.526202] env[63279]: DEBUG nova.compute.provider_tree [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Updating inventory in ProviderTree for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1910.549444] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-aa2a98de-449c-4cf6-b67c-ceed2ff46565 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] [instance: 5572bb1c-b378-4531-8459-10c2a2b7afdf] Creating Snapshot of the VM instance {{(pid=63279) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1910.549746] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-d7566c64-9e6d-40e2-a123-baf9d6698285 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1910.556877] env[63279]: DEBUG oslo_vmware.api [None req-aa2a98de-449c-4cf6-b67c-ceed2ff46565 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Waiting for the task: (returnval){ [ 1910.556877] env[63279]: value = "task-2086981" [ 1910.556877] env[63279]: _type = "Task" [ 1910.556877] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1910.565728] env[63279]: DEBUG oslo_vmware.api [None req-aa2a98de-449c-4cf6-b67c-ceed2ff46565 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Task: {'id': task-2086981, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1910.638640] env[63279]: DEBUG oslo_concurrency.lockutils [None req-60555cc8-e311-4c21-b52c-fa487660eaaf tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Lock "c1ac4af5-b01e-4175-844f-7a67b2ef7526" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 73.470s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1910.736842] env[63279]: DEBUG oslo_concurrency.lockutils [None req-2142016b-b962-4e79-b728-ee6f5bdb718c tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Acquiring lock "refresh_cache-5d4be656-defe-4332-b97e-e88b107ca4a1" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1910.737092] env[63279]: DEBUG oslo_concurrency.lockutils [None req-2142016b-b962-4e79-b728-ee6f5bdb718c tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Acquired lock "refresh_cache-5d4be656-defe-4332-b97e-e88b107ca4a1" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1910.737319] env[63279]: DEBUG nova.network.neutron [None req-2142016b-b962-4e79-b728-ee6f5bdb718c tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] [instance: 5d4be656-defe-4332-b97e-e88b107ca4a1] Building network info cache for instance {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1910.737537] env[63279]: DEBUG nova.objects.instance [None req-2142016b-b962-4e79-b728-ee6f5bdb718c tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Lazy-loading 'info_cache' on Instance uuid 5d4be656-defe-4332-b97e-e88b107ca4a1 {{(pid=63279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1911.049955] env[63279]: ERROR nova.scheduler.client.report [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] [req-83d1e6a4-c963-41d1-81e1-df6eee3c7080] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 0ba7c625-a0fc-4d3c-b804-196d00f00137. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-83d1e6a4-c963-41d1-81e1-df6eee3c7080"}]} [ 1911.068105] env[63279]: DEBUG oslo_vmware.api [None req-aa2a98de-449c-4cf6-b67c-ceed2ff46565 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Task: {'id': task-2086981, 'name': CreateSnapshot_Task, 'duration_secs': 0.422162} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1911.069029] env[63279]: DEBUG nova.scheduler.client.report [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Refreshing inventories for resource provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1911.070827] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-aa2a98de-449c-4cf6-b67c-ceed2ff46565 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] [instance: 5572bb1c-b378-4531-8459-10c2a2b7afdf] Created Snapshot of the VM instance {{(pid=63279) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1911.071749] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4bf55dc2-51e0-4a36-b82d-c04ff4a3f0ae {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1911.083718] env[63279]: DEBUG nova.scheduler.client.report [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Updating ProviderTree inventory for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1911.083892] env[63279]: DEBUG nova.compute.provider_tree [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Updating inventory in ProviderTree for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1911.096588] env[63279]: DEBUG nova.scheduler.client.report [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Refreshing aggregate associations for resource provider 0ba7c625-a0fc-4d3c-b804-196d00f00137, aggregates: None {{(pid=63279) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1911.115403] env[63279]: DEBUG nova.scheduler.client.report [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Refreshing trait associations for resource provider 0ba7c625-a0fc-4d3c-b804-196d00f00137, traits: COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK {{(pid=63279) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1911.141846] env[63279]: DEBUG nova.compute.manager [None req-f3600cbc-9fef-45fa-b2de-6e5d0ca54cdb tempest-ServerRescueTestJSONUnderV235-1989684258 tempest-ServerRescueTestJSONUnderV235-1989684258-project-member] [instance: 044335c7-ce3b-4b4a-b1dc-8b9acec538b4] Starting instance... 
{{(pid=63279) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1911.243338] env[63279]: DEBUG nova.objects.base [None req-2142016b-b962-4e79-b728-ee6f5bdb718c tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Object Instance<5d4be656-defe-4332-b97e-e88b107ca4a1> lazy-loaded attributes: flavor,info_cache {{(pid=63279) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1911.588434] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-aa2a98de-449c-4cf6-b67c-ceed2ff46565 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] [instance: 5572bb1c-b378-4531-8459-10c2a2b7afdf] Creating linked-clone VM from snapshot {{(pid=63279) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1911.590687] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-33929977-09a3-400d-8154-d7e345b84759 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1911.598669] env[63279]: DEBUG oslo_vmware.api [None req-aa2a98de-449c-4cf6-b67c-ceed2ff46565 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Waiting for the task: (returnval){ [ 1911.598669] env[63279]: value = "task-2086982" [ 1911.598669] env[63279]: _type = "Task" [ 1911.598669] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1911.608547] env[63279]: DEBUG oslo_vmware.api [None req-aa2a98de-449c-4cf6-b67c-ceed2ff46565 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Task: {'id': task-2086982, 'name': CloneVM_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1911.657856] env[63279]: DEBUG oslo_concurrency.lockutils [None req-f3600cbc-9fef-45fa-b2de-6e5d0ca54cdb tempest-ServerRescueTestJSONUnderV235-1989684258 tempest-ServerRescueTestJSONUnderV235-1989684258-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1911.660900] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25015174-143f-434c-b89a-39da395451d8 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1911.667958] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-edcbbb0d-d37d-467b-b43b-4de137a242e7 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1911.701288] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73564437-248f-4ce7-9e2d-e763f5bc6007 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1911.708985] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1b87490-74f7-4c5a-8f5d-d3b2fde0f51d {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1911.724017] env[63279]: DEBUG nova.compute.provider_tree [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Updating inventory in ProviderTree for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1911.804307] env[63279]: DEBUG oslo_concurrency.lockutils [None req-71c39255-559f-4f75-b101-e68493303b43 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Acquiring lock "58392790-b297-4894-8d81-e5cbda69872b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1911.804592] env[63279]: DEBUG oslo_concurrency.lockutils [None req-71c39255-559f-4f75-b101-e68493303b43 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Lock "58392790-b297-4894-8d81-e5cbda69872b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1911.978451] env[63279]: DEBUG nova.network.neutron [None req-2142016b-b962-4e79-b728-ee6f5bdb718c tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] [instance: 5d4be656-defe-4332-b97e-e88b107ca4a1] Updating instance_info_cache with network_info: [{"id": 
"ec7f4196-e7c7-425d-aa69-d8a766edef7f", "address": "fa:16:3e:d8:61:e1", "network": {"id": "6b1ead98-699d-4ad0-bb54-2a4fa09faf6d", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-861811231-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3ea345c4bc9b4781acb6f774ac88c690", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7ab8d568-adb0-4f3b-b6cc-68413e6546ae", "external-id": "nsx-vlan-transportzone-86", "segmentation_id": 86, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapec7f4196-e7", "ovs_interfaceid": "ec7f4196-e7c7-425d-aa69-d8a766edef7f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1912.109483] env[63279]: DEBUG oslo_vmware.api [None req-aa2a98de-449c-4cf6-b67c-ceed2ff46565 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Task: {'id': task-2086982, 'name': CloneVM_Task} progress is 94%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1912.248047] env[63279]: ERROR nova.scheduler.client.report [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] [req-cee829d7-edda-45f1-b9d7-db0fa302de02] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 0ba7c625-a0fc-4d3c-b804-196d00f00137. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-cee829d7-edda-45f1-b9d7-db0fa302de02"}]} [ 1912.265976] env[63279]: DEBUG nova.scheduler.client.report [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Refreshing inventories for resource provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1912.283480] env[63279]: DEBUG nova.scheduler.client.report [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Updating ProviderTree inventory for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1912.284171] env[63279]: DEBUG nova.compute.provider_tree [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Updating inventory in ProviderTree for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1912.297133] env[63279]: DEBUG nova.scheduler.client.report [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Refreshing aggregate associations for resource provider 0ba7c625-a0fc-4d3c-b804-196d00f00137, aggregates: None {{(pid=63279) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1912.318721] env[63279]: DEBUG nova.scheduler.client.report [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Refreshing trait associations for resource provider 0ba7c625-a0fc-4d3c-b804-196d00f00137, traits: COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK {{(pid=63279) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1912.482551] env[63279]: DEBUG oslo_concurrency.lockutils [None req-2142016b-b962-4e79-b728-ee6f5bdb718c tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Releasing lock "refresh_cache-5d4be656-defe-4332-b97e-e88b107ca4a1" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1912.609125] env[63279]: DEBUG oslo_vmware.api [None req-aa2a98de-449c-4cf6-b67c-ceed2ff46565 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Task: {'id': task-2086982, 'name': CloneVM_Task} progress is 95%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1912.821648] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-379dfb8d-9ed6-4535-b6a5-99e3cededfd1 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1912.831567] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2eb8ff2d-8add-4717-98e2-99688d40a5c5 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1912.863878] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6af22bc8-85c3-4308-9bd8-043a31e7cc72 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1912.871747] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4912dc7-78b2-4d68-8590-61cea5ce92b2 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1912.885681] env[63279]: DEBUG nova.compute.provider_tree [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Updating inventory in ProviderTree for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1913.109972] env[63279]: DEBUG oslo_vmware.api [None req-aa2a98de-449c-4cf6-b67c-ceed2ff46565 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Task: {'id': task-2086982, 'name': CloneVM_Task, 'duration_secs': 1.308314} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1913.111023] env[63279]: INFO nova.virt.vmwareapi.vmops [None req-aa2a98de-449c-4cf6-b67c-ceed2ff46565 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] [instance: 5572bb1c-b378-4531-8459-10c2a2b7afdf] Created linked-clone VM from snapshot [ 1913.111423] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c813692-7bfd-4a7b-ad28-76dbd268406b {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1913.118713] env[63279]: DEBUG nova.virt.vmwareapi.images [None req-aa2a98de-449c-4cf6-b67c-ceed2ff46565 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] [instance: 5572bb1c-b378-4531-8459-10c2a2b7afdf] Uploading image 3d806ff9-66cb-4ec0-b428-16365c81abec {{(pid=63279) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1913.143616] env[63279]: DEBUG oslo_vmware.rw_handles [None req-aa2a98de-449c-4cf6-b67c-ceed2ff46565 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1913.143616] env[63279]: value = "vm-427582" [ 1913.143616] env[63279]: _type = "VirtualMachine" [ 1913.143616] env[63279]: }. 
{{(pid=63279) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1913.143893] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-1a61cb1b-f272-4538-a629-0de8afd7763f {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1913.150613] env[63279]: DEBUG oslo_vmware.rw_handles [None req-aa2a98de-449c-4cf6-b67c-ceed2ff46565 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Lease: (returnval){ [ 1913.150613] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]522c6ba8-f613-8fc7-fc16-7311824321b8" [ 1913.150613] env[63279]: _type = "HttpNfcLease" [ 1913.150613] env[63279]: } obtained for exporting VM: (result){ [ 1913.150613] env[63279]: value = "vm-427582" [ 1913.150613] env[63279]: _type = "VirtualMachine" [ 1913.150613] env[63279]: }. {{(pid=63279) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1913.150901] env[63279]: DEBUG oslo_vmware.api [None req-aa2a98de-449c-4cf6-b67c-ceed2ff46565 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Waiting for the lease: (returnval){ [ 1913.150901] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]522c6ba8-f613-8fc7-fc16-7311824321b8" [ 1913.150901] env[63279]: _type = "HttpNfcLease" [ 1913.150901] env[63279]: } to be ready. {{(pid=63279) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1913.157188] env[63279]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1913.157188] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]522c6ba8-f613-8fc7-fc16-7311824321b8" [ 1913.157188] env[63279]: _type = "HttpNfcLease" [ 1913.157188] env[63279]: } is initializing. 
{{(pid=63279) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1913.420543] env[63279]: DEBUG nova.scheduler.client.report [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Updated inventory for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 with generation 62 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1913.420778] env[63279]: DEBUG nova.compute.provider_tree [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Updating resource provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 generation from 62 to 63 during operation: update_inventory {{(pid=63279) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1913.420936] env[63279]: DEBUG nova.compute.provider_tree [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Updating inventory in ProviderTree for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1913.487881] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-2142016b-b962-4e79-b728-ee6f5bdb718c tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] [instance: 5d4be656-defe-4332-b97e-e88b107ca4a1] Powering on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1913.488232] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-27b7426d-3800-40ea-a4cf-31e9efe2280d {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1913.496144] env[63279]: DEBUG oslo_vmware.api [None req-2142016b-b962-4e79-b728-ee6f5bdb718c tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Waiting for the task: (returnval){ [ 1913.496144] env[63279]: value = "task-2086984" [ 1913.496144] env[63279]: _type = "Task" [ 1913.496144] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1913.504633] env[63279]: DEBUG oslo_vmware.api [None req-2142016b-b962-4e79-b728-ee6f5bdb718c tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Task: {'id': task-2086984, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1913.660905] env[63279]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1913.660905] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]522c6ba8-f613-8fc7-fc16-7311824321b8" [ 1913.660905] env[63279]: _type = "HttpNfcLease" [ 1913.660905] env[63279]: } is ready. 
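
The linked-clone upload above follows oslo.vmware's export flow: ExportVm hands back an HttpNfcLease, the lease is polled until it reports "ready" (the "is initializing" / "is ready" lines), and the lease info then yields the NFC URL of disk-0.vmdk to stream out. A minimal sketch of that sequence, assuming an already-established oslo_vmware.api.VMwareAPISession named `session` and a VirtualMachine moref `vm_ref` (placeholder names, not Nova's actual helpers):

    from oslo_vmware import vim_util

    def export_vmdk_url(session, vm_ref):
        # VirtualMachine.ExportVm returns an HttpNfcLease moref.
        lease = session.invoke_api(session.vim, 'ExportVm', vm_ref)
        # Block until the lease state becomes "ready".
        session.wait_for_lease_ready(lease)
        # Read HttpNfcLease.info; deviceUrl[0] assumes a single-disk VM.
        lease_info = session.invoke_api(vim_util, 'get_object_property',
                                        session.vim, lease, 'info')
        return lease_info.deviceUrl[0].url   # e.g. .../nfc/<id>/disk-0.vmdk

The URL is then opened for reading and the stream-optimized VMDK is pushed to Glance as image 3d806ff9-66cb-4ec0-b428-16365c81abec, with periodic HttpNfcLeaseProgress calls keeping the lease alive during the transfer.
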
{{(pid=63279) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1913.661194] env[63279]: DEBUG oslo_vmware.rw_handles [None req-aa2a98de-449c-4cf6-b67c-ceed2ff46565 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1913.661194] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]522c6ba8-f613-8fc7-fc16-7311824321b8" [ 1913.661194] env[63279]: _type = "HttpNfcLease" [ 1913.661194] env[63279]: }. {{(pid=63279) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1913.661812] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8be83a9-10c0-43f9-91b1-0e48950f57b5 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1913.668765] env[63279]: DEBUG oslo_vmware.rw_handles [None req-aa2a98de-449c-4cf6-b67c-ceed2ff46565 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/526a02b2-101a-d8e7-ab59-519ae6ede300/disk-0.vmdk from lease info. {{(pid=63279) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1913.668940] env[63279]: DEBUG oslo_vmware.rw_handles [None req-aa2a98de-449c-4cf6-b67c-ceed2ff46565 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/526a02b2-101a-d8e7-ab59-519ae6ede300/disk-0.vmdk for reading. {{(pid=63279) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1913.755475] env[63279]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-4282ec4b-f397-461a-a76a-5097fc8f7998 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1913.927856] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=63279) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1913.927856] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 14.661s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1913.927856] env[63279]: DEBUG oslo_concurrency.lockutils [None req-8d990ec3-fde0-427d-b132-b45041bfcd25 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 38.895s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1913.927856] env[63279]: INFO nova.compute.claims [None req-8d990ec3-fde0-427d-b132-b45041bfcd25 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] [instance: 5bb445d3-1b12-4a1b-ad2a-cbc929b13aee] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1914.010391] env[63279]: DEBUG oslo_vmware.api [None 
req-2142016b-b962-4e79-b728-ee6f5bdb718c tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Task: {'id': task-2086984, 'name': PowerOnVM_Task, 'duration_secs': 0.467402} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1914.011376] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-2142016b-b962-4e79-b728-ee6f5bdb718c tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] [instance: 5d4be656-defe-4332-b97e-e88b107ca4a1] Powered on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1914.011591] env[63279]: DEBUG nova.compute.manager [None req-2142016b-b962-4e79-b728-ee6f5bdb718c tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] [instance: 5d4be656-defe-4332-b97e-e88b107ca4a1] Checking state {{(pid=63279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1914.012439] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed4827f7-0260-4ccc-a0d4-547ce8473b3b {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1914.661953] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1914.661953] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1915.171357] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1915.171357] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Starting heal instance info cache {{(pid=63279) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10447}} [ 1915.555360] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad8130fe-769d-44a4-81bc-430ebee99f53 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1915.563875] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2002bc1a-8bda-4604-885c-a5287ffd0df0 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1915.602669] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e286fee-28b0-4bbc-a20a-ff371dcde9cb {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1915.611379] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb9cb3f6-1792-450b-bbcb-4b886d74c4e4 {{(pid=63279) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1915.626051] env[63279]: DEBUG nova.compute.provider_tree [None req-8d990ec3-fde0-427d-b132-b45041bfcd25 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] Inventory has not changed in ProviderTree for provider: 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1916.130810] env[63279]: DEBUG nova.scheduler.client.report [None req-8d990ec3-fde0-427d-b132-b45041bfcd25 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] Inventory has not changed for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1916.635798] env[63279]: DEBUG oslo_concurrency.lockutils [None req-8d990ec3-fde0-427d-b132-b45041bfcd25 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.710s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1916.636487] env[63279]: DEBUG nova.compute.manager [None req-8d990ec3-fde0-427d-b132-b45041bfcd25 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] [instance: 5bb445d3-1b12-4a1b-ad2a-cbc929b13aee] Start building networks asynchronously for instance. 
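
The 409 "placement.concurrent_update" earlier, the inventory refresh that followed it, and the generation bump from 62 to 63 are Placement's optimistic-concurrency protocol: every inventory PUT must echo the provider generation the client last read, and a stale generation is rejected so the report client re-fetches and retries. A rough illustration of that contract against the Placement REST API using plain requests (endpoint, token handling and microversion are placeholders, not the nova report client):

    import requests

    PLACEMENT = 'http://placement.example/placement'    # placeholder endpoint
    RP_UUID = '0ba7c625-a0fc-4d3c-b804-196d00f00137'

    def put_inventories(token, inventories, generation):
        """PUT the full inventory set, echoing the generation we last read."""
        resp = requests.put(
            f'{PLACEMENT}/resource_providers/{RP_UUID}/inventories',
            headers={'X-Auth-Token': token,
                     # illustrative microversion; any that supports this PUT works
                     'OpenStack-API-Version': 'placement 1.26'},
            json={'resource_provider_generation': generation,
                  'inventories': inventories})
        if resp.status_code == 409:
            # "resource provider generation conflict": someone else updated the
            # provider first -- re-read inventory + generation and retry.
            return None
        resp.raise_for_status()
        return resp.json()['resource_provider_generation']   # new generation

The DISK_GB max_unit shifting from 170 to 169 between refreshes shows the inventory really does change underneath concurrent updaters, which is exactly what the generation check protects against.
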
{{(pid=63279) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1916.639967] env[63279]: DEBUG oslo_concurrency.lockutils [None req-1d7a8d6f-e8c1-43ff-8d0d-10d4ddd940b9 tempest-ServersAdminNegativeTestJSON-968304097 tempest-ServersAdminNegativeTestJSON-968304097-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 41.581s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1916.639967] env[63279]: DEBUG oslo_concurrency.lockutils [None req-1d7a8d6f-e8c1-43ff-8d0d-10d4ddd940b9 tempest-ServersAdminNegativeTestJSON-968304097 tempest-ServersAdminNegativeTestJSON-968304097-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1916.641488] env[63279]: DEBUG oslo_concurrency.lockutils [None req-a1db1d2b-13f2-45f2-897f-ab8f0397bf37 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 40.141s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1916.641684] env[63279]: DEBUG oslo_concurrency.lockutils [None req-a1db1d2b-13f2-45f2-897f-ab8f0397bf37 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1916.644169] env[63279]: DEBUG oslo_concurrency.lockutils [None req-96273fea-e321-4566-9745-1792b9a7cc32 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 38.155s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1916.645948] env[63279]: INFO nova.compute.claims [None req-96273fea-e321-4566-9745-1792b9a7cc32 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] [instance: acf95fad-316c-4605-9008-24d4d7c05892] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1916.670447] env[63279]: INFO nova.scheduler.client.report [None req-1d7a8d6f-e8c1-43ff-8d0d-10d4ddd940b9 tempest-ServersAdminNegativeTestJSON-968304097 tempest-ServersAdminNegativeTestJSON-968304097-project-member] Deleted allocations for instance 32e84715-0345-4171-abb7-c034a501347e [ 1916.680365] env[63279]: INFO nova.scheduler.client.report [None req-a1db1d2b-13f2-45f2-897f-ab8f0397bf37 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Deleted allocations for instance 41654a82-af5e-463e-80f9-86ba13a5ad2e [ 1916.974093] env[63279]: DEBUG oslo_concurrency.lockutils [None req-25a71d01-b823-4a97-991b-a30e1ca6a359 tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Acquiring lock "f20e8eb2-847b-4988-abf9-ed5f9f65c25c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63279) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1916.974771] env[63279]: DEBUG oslo_concurrency.lockutils [None req-25a71d01-b823-4a97-991b-a30e1ca6a359 tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Lock "f20e8eb2-847b-4988-abf9-ed5f9f65c25c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1916.974771] env[63279]: DEBUG oslo_concurrency.lockutils [None req-25a71d01-b823-4a97-991b-a30e1ca6a359 tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Acquiring lock "f20e8eb2-847b-4988-abf9-ed5f9f65c25c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1916.974902] env[63279]: DEBUG oslo_concurrency.lockutils [None req-25a71d01-b823-4a97-991b-a30e1ca6a359 tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Lock "f20e8eb2-847b-4988-abf9-ed5f9f65c25c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1916.975042] env[63279]: DEBUG oslo_concurrency.lockutils [None req-25a71d01-b823-4a97-991b-a30e1ca6a359 tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Lock "f20e8eb2-847b-4988-abf9-ed5f9f65c25c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1916.977186] env[63279]: INFO nova.compute.manager [None req-25a71d01-b823-4a97-991b-a30e1ca6a359 tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] [instance: f20e8eb2-847b-4988-abf9-ed5f9f65c25c] Terminating instance [ 1917.151320] env[63279]: DEBUG nova.compute.utils [None req-8d990ec3-fde0-427d-b132-b45041bfcd25 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] Using /dev/sd instead of None {{(pid=63279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1917.155013] env[63279]: DEBUG nova.compute.manager [None req-8d990ec3-fde0-427d-b132-b45041bfcd25 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] [instance: 5bb445d3-1b12-4a1b-ad2a-cbc929b13aee] Allocating IP information in the background. 
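
The recurring "acquired by … waited Ns" / "'released' by … held Ns" pairs are emitted by oslo.concurrency's lock wrapper; "compute_resources", the instance UUIDs and the "<uuid>-events" strings above are simply lock names serialized through lockutils. The generic usage pattern (plain oslo.concurrency, not the resource tracker or compute manager code itself) is:

    from oslo_concurrency import lockutils

    def claim_resources():
        pass   # placeholder for the work done while holding the lock

    # Context-manager form; the acquire/release DEBUG lines above come from
    # the wrapper around blocks like this.
    with lockutils.lock('compute_resources'):
        claim_resources()

    # Decorator form, serializing callers on a per-instance event lock.
    @lockutils.synchronized('f20e8eb2-847b-4988-abf9-ed5f9f65c25c-events')
    def clear_events():
        pass

The long "waited 38.895s" / "waited 41.581s" figures show how much real contention there is on the single compute_resources lock while the resource tracker's periodic update holds it for 14.661s.
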
{{(pid=63279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1917.156270] env[63279]: DEBUG nova.network.neutron [None req-8d990ec3-fde0-427d-b132-b45041bfcd25 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] [instance: 5bb445d3-1b12-4a1b-ad2a-cbc929b13aee] allocate_for_instance() {{(pid=63279) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1917.181091] env[63279]: DEBUG oslo_concurrency.lockutils [None req-1d7a8d6f-e8c1-43ff-8d0d-10d4ddd940b9 tempest-ServersAdminNegativeTestJSON-968304097 tempest-ServersAdminNegativeTestJSON-968304097-project-member] Lock "32e84715-0345-4171-abb7-c034a501347e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 45.230s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1917.196642] env[63279]: DEBUG oslo_concurrency.lockutils [None req-a1db1d2b-13f2-45f2-897f-ab8f0397bf37 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Lock "41654a82-af5e-463e-80f9-86ba13a5ad2e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 44.349s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1917.227316] env[63279]: DEBUG nova.policy [None req-8d990ec3-fde0-427d-b132-b45041bfcd25 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6cdc5629362f4db797563d1046a5e03e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'fd1ec10b968e4ef090672074f14b9287', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63279) authorize /opt/stack/nova/nova/policy.py:192}} [ 1917.487028] env[63279]: DEBUG nova.compute.manager [None req-25a71d01-b823-4a97-991b-a30e1ca6a359 tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] [instance: f20e8eb2-847b-4988-abf9-ed5f9f65c25c] Start destroying the instance on the hypervisor. 
{{(pid=63279) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1917.487028] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-25a71d01-b823-4a97-991b-a30e1ca6a359 tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] [instance: f20e8eb2-847b-4988-abf9-ed5f9f65c25c] Destroying instance {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1917.487028] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc4c98fe-75ba-4598-99e0-52df2ef10a71 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1917.496157] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-25a71d01-b823-4a97-991b-a30e1ca6a359 tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] [instance: f20e8eb2-847b-4988-abf9-ed5f9f65c25c] Powering off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1917.496490] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f03401f6-9f09-44cf-a16b-c0dd95ce3969 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1917.503846] env[63279]: DEBUG oslo_vmware.api [None req-25a71d01-b823-4a97-991b-a30e1ca6a359 tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Waiting for the task: (returnval){ [ 1917.503846] env[63279]: value = "task-2086985" [ 1917.503846] env[63279]: _type = "Task" [ 1917.503846] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1917.513620] env[63279]: DEBUG oslo_vmware.api [None req-25a71d01-b823-4a97-991b-a30e1ca6a359 tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Task: {'id': task-2086985, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1917.649370] env[63279]: DEBUG nova.network.neutron [None req-8d990ec3-fde0-427d-b132-b45041bfcd25 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] [instance: 5bb445d3-1b12-4a1b-ad2a-cbc929b13aee] Successfully created port: 8bca335c-50ae-4ba0-9cde-a8d640c633e1 {{(pid=63279) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1917.658453] env[63279]: DEBUG nova.compute.manager [None req-8d990ec3-fde0-427d-b132-b45041bfcd25 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] [instance: 5bb445d3-1b12-4a1b-ad2a-cbc929b13aee] Start building block device mappings for instance. 
{{(pid=63279) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1917.733777] env[63279]: DEBUG oslo_concurrency.lockutils [None req-8ff9a3bb-99fe-487d-a326-1ba0f47e3992 tempest-ServersAdminNegativeTestJSON-968304097 tempest-ServersAdminNegativeTestJSON-968304097-project-member] Acquiring lock "0224e4ea-c13c-4abd-9626-6038c0bbe4e9" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1917.734021] env[63279]: DEBUG oslo_concurrency.lockutils [None req-8ff9a3bb-99fe-487d-a326-1ba0f47e3992 tempest-ServersAdminNegativeTestJSON-968304097 tempest-ServersAdminNegativeTestJSON-968304097-project-member] Lock "0224e4ea-c13c-4abd-9626-6038c0bbe4e9" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1917.734281] env[63279]: DEBUG oslo_concurrency.lockutils [None req-8ff9a3bb-99fe-487d-a326-1ba0f47e3992 tempest-ServersAdminNegativeTestJSON-968304097 tempest-ServersAdminNegativeTestJSON-968304097-project-member] Acquiring lock "0224e4ea-c13c-4abd-9626-6038c0bbe4e9-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1917.734504] env[63279]: DEBUG oslo_concurrency.lockutils [None req-8ff9a3bb-99fe-487d-a326-1ba0f47e3992 tempest-ServersAdminNegativeTestJSON-968304097 tempest-ServersAdminNegativeTestJSON-968304097-project-member] Lock "0224e4ea-c13c-4abd-9626-6038c0bbe4e9-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1917.734685] env[63279]: DEBUG oslo_concurrency.lockutils [None req-8ff9a3bb-99fe-487d-a326-1ba0f47e3992 tempest-ServersAdminNegativeTestJSON-968304097 tempest-ServersAdminNegativeTestJSON-968304097-project-member] Lock "0224e4ea-c13c-4abd-9626-6038c0bbe4e9-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1917.737993] env[63279]: INFO nova.compute.manager [None req-8ff9a3bb-99fe-487d-a326-1ba0f47e3992 tempest-ServersAdminNegativeTestJSON-968304097 tempest-ServersAdminNegativeTestJSON-968304097-project-member] [instance: 0224e4ea-c13c-4abd-9626-6038c0bbe4e9] Terminating instance [ 1917.762657] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Acquiring lock "refresh_cache-6388f912-ae70-4e8f-b8e4-ceb02e0f8a51" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1917.762795] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Acquired lock "refresh_cache-6388f912-ae70-4e8f-b8e4-ceb02e0f8a51" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1917.763013] env[63279]: DEBUG nova.network.neutron [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] [instance: 6388f912-ae70-4e8f-b8e4-ceb02e0f8a51] Forcefully refreshing network info cache for instance {{(pid=63279) 
_get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}} [ 1918.015581] env[63279]: DEBUG oslo_vmware.api [None req-25a71d01-b823-4a97-991b-a30e1ca6a359 tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Task: {'id': task-2086985, 'name': PowerOffVM_Task, 'duration_secs': 0.295098} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1918.018374] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-25a71d01-b823-4a97-991b-a30e1ca6a359 tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] [instance: f20e8eb2-847b-4988-abf9-ed5f9f65c25c] Powered off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1918.018557] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-25a71d01-b823-4a97-991b-a30e1ca6a359 tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] [instance: f20e8eb2-847b-4988-abf9-ed5f9f65c25c] Unregistering the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1918.018995] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d63157c2-eeac-40f3-83fd-eefe08072f14 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1918.103476] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-25a71d01-b823-4a97-991b-a30e1ca6a359 tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] [instance: f20e8eb2-847b-4988-abf9-ed5f9f65c25c] Unregistered the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1918.103796] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-25a71d01-b823-4a97-991b-a30e1ca6a359 tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] [instance: f20e8eb2-847b-4988-abf9-ed5f9f65c25c] Deleting contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1918.103872] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-25a71d01-b823-4a97-991b-a30e1ca6a359 tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Deleting the datastore file [datastore1] f20e8eb2-847b-4988-abf9-ed5f9f65c25c {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1918.104143] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-eca66558-a2c6-419b-94eb-e23430ca7a38 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1918.109849] env[63279]: DEBUG oslo_vmware.api [None req-25a71d01-b823-4a97-991b-a30e1ca6a359 tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Waiting for the task: (returnval){ [ 1918.109849] env[63279]: value = "task-2086987" [ 1918.109849] env[63279]: _type = "Task" [ 1918.109849] env[63279]: } to complete. 
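
Every "Waiting for the task … to complete" / "progress is N%" / "completed successfully" triple above is oslo.vmware's task polling: the driver invokes a vCenter *_Task method and then blocks in wait_for_task, which polls the task info until it reaches success or raises on error. The destroy sequence just logged for instance f20e8eb2 (power off, unregister, delete the datastore directory) reduces to roughly the following, reusing the placeholder `session` from earlier and with `dc_ref`/`ds_path` standing in for the datacenter moref and the "[datastore1] f20e8eb2-…" path:

    def destroy_vm(session, vm_ref, dc_ref, ds_path):
        # PowerOffVM_Task: wait_for_task polls until "success" or raises.
        task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
        session.wait_for_task(task)
        # UnregisterVM drops the VM from inventory but leaves its files.
        session.invoke_api(session.vim, 'UnregisterVM', vm_ref)
        # Deleting the leftover instance directory is again an async task.
        file_manager = session.vim.service_content.fileManager
        task = session.invoke_api(session.vim, 'DeleteDatastoreFile_Task',
                                  file_manager, name=ds_path,
                                  datacenter=dc_ref)
        session.wait_for_task(task)
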
{{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1918.120409] env[63279]: DEBUG oslo_vmware.api [None req-25a71d01-b823-4a97-991b-a30e1ca6a359 tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Task: {'id': task-2086987, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1918.230899] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6b1a4ac-8bad-4e3c-ae4a-78eb44429b32 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1918.239345] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca9b50a3-d1ce-4422-b56f-848e98394c71 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1918.271227] env[63279]: DEBUG nova.compute.manager [None req-8ff9a3bb-99fe-487d-a326-1ba0f47e3992 tempest-ServersAdminNegativeTestJSON-968304097 tempest-ServersAdminNegativeTestJSON-968304097-project-member] [instance: 0224e4ea-c13c-4abd-9626-6038c0bbe4e9] Start destroying the instance on the hypervisor. {{(pid=63279) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1918.271536] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-8ff9a3bb-99fe-487d-a326-1ba0f47e3992 tempest-ServersAdminNegativeTestJSON-968304097 tempest-ServersAdminNegativeTestJSON-968304097-project-member] [instance: 0224e4ea-c13c-4abd-9626-6038c0bbe4e9] Destroying instance {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1918.275245] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64fe5e30-01fe-4daf-a060-b0666a879c8b {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1918.278562] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9b16630-b52b-49ca-91f3-cd49270ca1d4 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1918.286372] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-8ff9a3bb-99fe-487d-a326-1ba0f47e3992 tempest-ServersAdminNegativeTestJSON-968304097 tempest-ServersAdminNegativeTestJSON-968304097-project-member] [instance: 0224e4ea-c13c-4abd-9626-6038c0bbe4e9] Powering off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1918.288898] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5913daf0-fdd3-4683-9493-bcf17f7d1f79 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1918.291721] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c707742f-0ed9-4ed6-826f-08f505c150d2 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1918.307254] env[63279]: DEBUG nova.compute.provider_tree [None req-96273fea-e321-4566-9745-1792b9a7cc32 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] Inventory has not changed in ProviderTree for provider: 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) 
update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1918.310484] env[63279]: DEBUG oslo_vmware.api [None req-8ff9a3bb-99fe-487d-a326-1ba0f47e3992 tempest-ServersAdminNegativeTestJSON-968304097 tempest-ServersAdminNegativeTestJSON-968304097-project-member] Waiting for the task: (returnval){ [ 1918.310484] env[63279]: value = "task-2086988" [ 1918.310484] env[63279]: _type = "Task" [ 1918.310484] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1918.318799] env[63279]: DEBUG oslo_vmware.api [None req-8ff9a3bb-99fe-487d-a326-1ba0f47e3992 tempest-ServersAdminNegativeTestJSON-968304097 tempest-ServersAdminNegativeTestJSON-968304097-project-member] Task: {'id': task-2086988, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1918.619820] env[63279]: DEBUG oslo_vmware.api [None req-25a71d01-b823-4a97-991b-a30e1ca6a359 tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Task: {'id': task-2086987, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.161177} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1918.620119] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-25a71d01-b823-4a97-991b-a30e1ca6a359 tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Deleted the datastore file {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1918.620309] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-25a71d01-b823-4a97-991b-a30e1ca6a359 tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] [instance: f20e8eb2-847b-4988-abf9-ed5f9f65c25c] Deleted contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1918.620488] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-25a71d01-b823-4a97-991b-a30e1ca6a359 tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] [instance: f20e8eb2-847b-4988-abf9-ed5f9f65c25c] Instance destroyed {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1918.620664] env[63279]: INFO nova.compute.manager [None req-25a71d01-b823-4a97-991b-a30e1ca6a359 tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] [instance: f20e8eb2-847b-4988-abf9-ed5f9f65c25c] Took 1.14 seconds to destroy the instance on the hypervisor. [ 1918.620921] env[63279]: DEBUG oslo.service.loopingcall [None req-25a71d01-b823-4a97-991b-a30e1ca6a359 tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
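
The "Waiting for function … _deallocate_network_with_retries to return" line comes from oslo.service's looping-call machinery, which nova uses to retry network deallocation until it succeeds or gives up. The generic looping-call pattern (illustrative only; the exact wrapper nova picks for this retry may differ) looks like:

    from oslo_service import loopingcall

    def network_gone():
        return True   # placeholder predicate

    def _poll():
        # Returning normally keeps the loop going; raising LoopingCallDone
        # (optionally with a value) ends it.
        if network_gone():
            raise loopingcall.LoopingCallDone()

    timer = loopingcall.FixedIntervalLoopingCall(_poll)
    timer.start(interval=0.5).wait()   # blocks until LoopingCallDone
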
{{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1918.621150] env[63279]: DEBUG nova.compute.manager [-] [instance: f20e8eb2-847b-4988-abf9-ed5f9f65c25c] Deallocating network for instance {{(pid=63279) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1918.621249] env[63279]: DEBUG nova.network.neutron [-] [instance: f20e8eb2-847b-4988-abf9-ed5f9f65c25c] deallocate_for_instance() {{(pid=63279) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1918.670652] env[63279]: DEBUG nova.compute.manager [None req-8d990ec3-fde0-427d-b132-b45041bfcd25 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] [instance: 5bb445d3-1b12-4a1b-ad2a-cbc929b13aee] Start spawning the instance on the hypervisor. {{(pid=63279) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1918.692700] env[63279]: DEBUG nova.virt.hardware [None req-8d990ec3-fde0-427d-b132-b45041bfcd25 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-13T17:30:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-13T17:30:01Z,direct_url=,disk_format='vmdk',id=30887889-e45b-4f67-8b3c-16216e594a90,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a935e86eee0a4c38adfe0367d2097a61',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-13T17:30:02Z,virtual_size=,visibility=), allow threads: False {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1918.692984] env[63279]: DEBUG nova.virt.hardware [None req-8d990ec3-fde0-427d-b132-b45041bfcd25 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] Flavor limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1918.693175] env[63279]: DEBUG nova.virt.hardware [None req-8d990ec3-fde0-427d-b132-b45041bfcd25 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] Image limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1918.693370] env[63279]: DEBUG nova.virt.hardware [None req-8d990ec3-fde0-427d-b132-b45041bfcd25 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] Flavor pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1918.693519] env[63279]: DEBUG nova.virt.hardware [None req-8d990ec3-fde0-427d-b132-b45041bfcd25 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] Image pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1918.693667] env[63279]: DEBUG nova.virt.hardware [None req-8d990ec3-fde0-427d-b132-b45041bfcd25 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63279) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1918.693877] env[63279]: DEBUG nova.virt.hardware [None req-8d990ec3-fde0-427d-b132-b45041bfcd25 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1918.694050] env[63279]: DEBUG nova.virt.hardware [None req-8d990ec3-fde0-427d-b132-b45041bfcd25 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1918.694225] env[63279]: DEBUG nova.virt.hardware [None req-8d990ec3-fde0-427d-b132-b45041bfcd25 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] Got 1 possible topologies {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1918.694390] env[63279]: DEBUG nova.virt.hardware [None req-8d990ec3-fde0-427d-b132-b45041bfcd25 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1918.694564] env[63279]: DEBUG nova.virt.hardware [None req-8d990ec3-fde0-427d-b132-b45041bfcd25 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1918.695617] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4609aaf1-4cf6-4281-a74e-44b2e2c49570 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1918.703977] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73c0fad8-af39-4e42-869a-3a9c4c87db2d {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1918.815906] env[63279]: DEBUG nova.scheduler.client.report [None req-96273fea-e321-4566-9745-1792b9a7cc32 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] Inventory has not changed for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1918.825418] env[63279]: DEBUG oslo_vmware.api [None req-8ff9a3bb-99fe-487d-a326-1ba0f47e3992 tempest-ServersAdminNegativeTestJSON-968304097 tempest-ServersAdminNegativeTestJSON-968304097-project-member] Task: {'id': task-2086988, 'name': PowerOffVM_Task, 'duration_secs': 0.226154} completed successfully. 
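
The nova.virt.hardware lines above fit a CPU topology for the 1-vCPU m1.nano flavor: with no flavor or image preference (0:0:0) and the default 65536/65536/65536 limits, the only factorization of 1 vCPU is sockets=1, cores=1, threads=1, so it is both the single possible topology and the first (and only) entry after sorting. A toy enumeration of the same idea (not nova.virt.hardware's actual implementation, which also weighs preferred values):

    import itertools

    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536,
                            max_threads=65536):
        """Yield (sockets, cores, threads) triples whose product is vcpus."""
        for sockets, cores, threads in itertools.product(
                range(1, vcpus + 1), repeat=3):
            if (sockets * cores * threads == vcpus
                    and sockets <= max_sockets
                    and cores <= max_cores
                    and threads <= max_threads):
                yield sockets, cores, threads

    print(list(possible_topologies(1)))   # [(1, 1, 1)], matching the log
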
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1918.828201] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-8ff9a3bb-99fe-487d-a326-1ba0f47e3992 tempest-ServersAdminNegativeTestJSON-968304097 tempest-ServersAdminNegativeTestJSON-968304097-project-member] [instance: 0224e4ea-c13c-4abd-9626-6038c0bbe4e9] Powered off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1918.828201] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-8ff9a3bb-99fe-487d-a326-1ba0f47e3992 tempest-ServersAdminNegativeTestJSON-968304097 tempest-ServersAdminNegativeTestJSON-968304097-project-member] [instance: 0224e4ea-c13c-4abd-9626-6038c0bbe4e9] Unregistering the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1918.828201] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-06d0c5a1-2793-4510-b622-88a2416909b4 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1918.912150] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-8ff9a3bb-99fe-487d-a326-1ba0f47e3992 tempest-ServersAdminNegativeTestJSON-968304097 tempest-ServersAdminNegativeTestJSON-968304097-project-member] [instance: 0224e4ea-c13c-4abd-9626-6038c0bbe4e9] Unregistered the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1918.912150] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-8ff9a3bb-99fe-487d-a326-1ba0f47e3992 tempest-ServersAdminNegativeTestJSON-968304097 tempest-ServersAdminNegativeTestJSON-968304097-project-member] [instance: 0224e4ea-c13c-4abd-9626-6038c0bbe4e9] Deleting contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1918.912150] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-8ff9a3bb-99fe-487d-a326-1ba0f47e3992 tempest-ServersAdminNegativeTestJSON-968304097 tempest-ServersAdminNegativeTestJSON-968304097-project-member] Deleting the datastore file [datastore1] 0224e4ea-c13c-4abd-9626-6038c0bbe4e9 {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1918.912150] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d1019eb9-52c5-410e-80d8-c768b089d36c {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1918.917875] env[63279]: DEBUG oslo_vmware.api [None req-8ff9a3bb-99fe-487d-a326-1ba0f47e3992 tempest-ServersAdminNegativeTestJSON-968304097 tempest-ServersAdminNegativeTestJSON-968304097-project-member] Waiting for the task: (returnval){ [ 1918.917875] env[63279]: value = "task-2086990" [ 1918.917875] env[63279]: _type = "Task" [ 1918.917875] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1918.926641] env[63279]: DEBUG oslo_vmware.api [None req-8ff9a3bb-99fe-487d-a326-1ba0f47e3992 tempest-ServersAdminNegativeTestJSON-968304097 tempest-ServersAdminNegativeTestJSON-968304097-project-member] Task: {'id': task-2086990, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1918.940101] env[63279]: DEBUG nova.compute.manager [req-0d341b25-1580-4e8c-81a5-06a316968369 req-d03138b7-7371-427a-8ac4-8128ac50e3b6 service nova] [instance: f20e8eb2-847b-4988-abf9-ed5f9f65c25c] Received event network-vif-deleted-180a3189-be6a-456b-a89c-d5ab98c3e461 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 1918.940101] env[63279]: INFO nova.compute.manager [req-0d341b25-1580-4e8c-81a5-06a316968369 req-d03138b7-7371-427a-8ac4-8128ac50e3b6 service nova] [instance: f20e8eb2-847b-4988-abf9-ed5f9f65c25c] Neutron deleted interface 180a3189-be6a-456b-a89c-d5ab98c3e461; detaching it from the instance and deleting it from the info cache [ 1918.940101] env[63279]: DEBUG nova.network.neutron [req-0d341b25-1580-4e8c-81a5-06a316968369 req-d03138b7-7371-427a-8ac4-8128ac50e3b6 service nova] [instance: f20e8eb2-847b-4988-abf9-ed5f9f65c25c] Updating instance_info_cache with network_info: [] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1919.200648] env[63279]: DEBUG nova.network.neutron [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] [instance: 6388f912-ae70-4e8f-b8e4-ceb02e0f8a51] Updating instance_info_cache with network_info: [{"id": "bc9f3899-95c1-4e79-b121-03c9a2c0bc44", "address": "fa:16:3e:9a:65:be", "network": {"id": "548d80cd-fb6c-47fc-8c1d-036889987399", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-219167599-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3f6c6f65521a440fb80278bbff2d0ed0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "678ebbe4-4c53-4eaf-a689-93981310f37d", "external-id": "nsx-vlan-transportzone-443", "segmentation_id": 443, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbc9f3899-95", "ovs_interfaceid": "bc9f3899-95c1-4e79-b121-03c9a2c0bc44", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1919.217617] env[63279]: DEBUG nova.compute.manager [req-5186155c-a9c1-4868-adaa-735128d280ea req-f3531b74-77be-4cfa-b202-ac2fdf334edf service nova] [instance: 5bb445d3-1b12-4a1b-ad2a-cbc929b13aee] Received event network-vif-plugged-8bca335c-50ae-4ba0-9cde-a8d640c633e1 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 1919.217972] env[63279]: DEBUG oslo_concurrency.lockutils [req-5186155c-a9c1-4868-adaa-735128d280ea req-f3531b74-77be-4cfa-b202-ac2fdf334edf service nova] Acquiring lock "5bb445d3-1b12-4a1b-ad2a-cbc929b13aee-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1919.218277] env[63279]: DEBUG oslo_concurrency.lockutils [req-5186155c-a9c1-4868-adaa-735128d280ea req-f3531b74-77be-4cfa-b202-ac2fdf334edf service nova] Lock 
"5bb445d3-1b12-4a1b-ad2a-cbc929b13aee-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1919.218461] env[63279]: DEBUG oslo_concurrency.lockutils [req-5186155c-a9c1-4868-adaa-735128d280ea req-f3531b74-77be-4cfa-b202-ac2fdf334edf service nova] Lock "5bb445d3-1b12-4a1b-ad2a-cbc929b13aee-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1919.218633] env[63279]: DEBUG nova.compute.manager [req-5186155c-a9c1-4868-adaa-735128d280ea req-f3531b74-77be-4cfa-b202-ac2fdf334edf service nova] [instance: 5bb445d3-1b12-4a1b-ad2a-cbc929b13aee] No waiting events found dispatching network-vif-plugged-8bca335c-50ae-4ba0-9cde-a8d640c633e1 {{(pid=63279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1919.218804] env[63279]: WARNING nova.compute.manager [req-5186155c-a9c1-4868-adaa-735128d280ea req-f3531b74-77be-4cfa-b202-ac2fdf334edf service nova] [instance: 5bb445d3-1b12-4a1b-ad2a-cbc929b13aee] Received unexpected event network-vif-plugged-8bca335c-50ae-4ba0-9cde-a8d640c633e1 for instance with vm_state building and task_state spawning. [ 1919.321208] env[63279]: DEBUG oslo_concurrency.lockutils [None req-96273fea-e321-4566-9745-1792b9a7cc32 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.677s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1919.321780] env[63279]: DEBUG nova.compute.manager [None req-96273fea-e321-4566-9745-1792b9a7cc32 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] [instance: acf95fad-316c-4605-9008-24d4d7c05892] Start building networks asynchronously for instance. 
{{(pid=63279) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1919.326758] env[63279]: DEBUG nova.network.neutron [None req-8d990ec3-fde0-427d-b132-b45041bfcd25 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] [instance: 5bb445d3-1b12-4a1b-ad2a-cbc929b13aee] Successfully updated port: 8bca335c-50ae-4ba0-9cde-a8d640c633e1 {{(pid=63279) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1919.327322] env[63279]: DEBUG oslo_concurrency.lockutils [None req-30faeabd-f04e-4cfe-bb97-6e2391a53b66 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 37.965s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1919.327358] env[63279]: DEBUG oslo_concurrency.lockutils [None req-30faeabd-f04e-4cfe-bb97-6e2391a53b66 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1919.332020] env[63279]: DEBUG oslo_concurrency.lockutils [None req-1085f46f-382d-442a-8127-97a2236452f2 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 37.348s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1919.332349] env[63279]: INFO nova.compute.claims [None req-1085f46f-382d-442a-8127-97a2236452f2 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: 977fa519-2db3-4ee5-981d-c46820a8c72e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1919.359874] env[63279]: INFO nova.scheduler.client.report [None req-30faeabd-f04e-4cfe-bb97-6e2391a53b66 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Deleted allocations for instance 00fb1bc8-2f70-4fa3-a6a2-54fd38ba89c7 [ 1919.397722] env[63279]: DEBUG nova.network.neutron [-] [instance: f20e8eb2-847b-4988-abf9-ed5f9f65c25c] Updating instance_info_cache with network_info: [] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1919.428176] env[63279]: DEBUG oslo_vmware.api [None req-8ff9a3bb-99fe-487d-a326-1ba0f47e3992 tempest-ServersAdminNegativeTestJSON-968304097 tempest-ServersAdminNegativeTestJSON-968304097-project-member] Task: {'id': task-2086990, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.155303} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1919.428457] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-8ff9a3bb-99fe-487d-a326-1ba0f47e3992 tempest-ServersAdminNegativeTestJSON-968304097 tempest-ServersAdminNegativeTestJSON-968304097-project-member] Deleted the datastore file {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1919.428649] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-8ff9a3bb-99fe-487d-a326-1ba0f47e3992 tempest-ServersAdminNegativeTestJSON-968304097 tempest-ServersAdminNegativeTestJSON-968304097-project-member] [instance: 0224e4ea-c13c-4abd-9626-6038c0bbe4e9] Deleted contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1919.428814] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-8ff9a3bb-99fe-487d-a326-1ba0f47e3992 tempest-ServersAdminNegativeTestJSON-968304097 tempest-ServersAdminNegativeTestJSON-968304097-project-member] [instance: 0224e4ea-c13c-4abd-9626-6038c0bbe4e9] Instance destroyed {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1919.428997] env[63279]: INFO nova.compute.manager [None req-8ff9a3bb-99fe-487d-a326-1ba0f47e3992 tempest-ServersAdminNegativeTestJSON-968304097 tempest-ServersAdminNegativeTestJSON-968304097-project-member] [instance: 0224e4ea-c13c-4abd-9626-6038c0bbe4e9] Took 1.16 seconds to destroy the instance on the hypervisor. [ 1919.429244] env[63279]: DEBUG oslo.service.loopingcall [None req-8ff9a3bb-99fe-487d-a326-1ba0f47e3992 tempest-ServersAdminNegativeTestJSON-968304097 tempest-ServersAdminNegativeTestJSON-968304097-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1919.429431] env[63279]: DEBUG nova.compute.manager [-] [instance: 0224e4ea-c13c-4abd-9626-6038c0bbe4e9] Deallocating network for instance {{(pid=63279) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1919.429525] env[63279]: DEBUG nova.network.neutron [-] [instance: 0224e4ea-c13c-4abd-9626-6038c0bbe4e9] deallocate_for_instance() {{(pid=63279) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1919.443396] env[63279]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-0a38919d-c990-458a-a62d-33d2419c7aad {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1919.452116] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ab5d130-6995-4f7d-a497-668eaf248757 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1919.486603] env[63279]: DEBUG nova.compute.manager [req-0d341b25-1580-4e8c-81a5-06a316968369 req-d03138b7-7371-427a-8ac4-8128ac50e3b6 service nova] [instance: f20e8eb2-847b-4988-abf9-ed5f9f65c25c] Detach interface failed, port_id=180a3189-be6a-456b-a89c-d5ab98c3e461, reason: Instance f20e8eb2-847b-4988-abf9-ed5f9f65c25c could not be found. 
{{(pid=63279) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11484}} [ 1919.703974] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Releasing lock "refresh_cache-6388f912-ae70-4e8f-b8e4-ceb02e0f8a51" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1919.704211] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] [instance: 6388f912-ae70-4e8f-b8e4-ceb02e0f8a51] Updated the network info_cache for instance {{(pid=63279) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10518}} [ 1919.704426] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1919.705226] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1919.705226] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1919.705226] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1919.705379] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1919.705471] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1919.705887] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=63279) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11066}} [ 1919.705887] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager.update_available_resource {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1919.837027] env[63279]: DEBUG oslo_concurrency.lockutils [None req-8d990ec3-fde0-427d-b132-b45041bfcd25 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] Acquiring lock "refresh_cache-5bb445d3-1b12-4a1b-ad2a-cbc929b13aee" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1919.837132] env[63279]: DEBUG oslo_concurrency.lockutils [None req-8d990ec3-fde0-427d-b132-b45041bfcd25 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] Acquired lock "refresh_cache-5bb445d3-1b12-4a1b-ad2a-cbc929b13aee" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1919.837286] env[63279]: DEBUG nova.network.neutron [None req-8d990ec3-fde0-427d-b132-b45041bfcd25 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] [instance: 5bb445d3-1b12-4a1b-ad2a-cbc929b13aee] Building network info cache for instance {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1919.839384] env[63279]: DEBUG nova.compute.utils [None req-96273fea-e321-4566-9745-1792b9a7cc32 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] Using /dev/sd instead of None {{(pid=63279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1919.842445] env[63279]: DEBUG nova.compute.manager [None req-96273fea-e321-4566-9745-1792b9a7cc32 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] [instance: acf95fad-316c-4605-9008-24d4d7c05892] Allocating IP information in the background. {{(pid=63279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1919.842609] env[63279]: DEBUG nova.network.neutron [None req-96273fea-e321-4566-9745-1792b9a7cc32 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] [instance: acf95fad-316c-4605-9008-24d4d7c05892] allocate_for_instance() {{(pid=63279) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1919.868043] env[63279]: DEBUG oslo_concurrency.lockutils [None req-30faeabd-f04e-4cfe-bb97-6e2391a53b66 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Lock "00fb1bc8-2f70-4fa3-a6a2-54fd38ba89c7" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 41.997s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1919.900936] env[63279]: INFO nova.compute.manager [-] [instance: f20e8eb2-847b-4988-abf9-ed5f9f65c25c] Took 1.28 seconds to deallocate network for instance. 
[ 1919.912752] env[63279]: DEBUG nova.policy [None req-96273fea-e321-4566-9745-1792b9a7cc32 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6cdc5629362f4db797563d1046a5e03e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'fd1ec10b968e4ef090672074f14b9287', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63279) authorize /opt/stack/nova/nova/policy.py:192}} [ 1920.210175] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1920.219935] env[63279]: DEBUG nova.network.neutron [-] [instance: 0224e4ea-c13c-4abd-9626-6038c0bbe4e9] Updating instance_info_cache with network_info: [] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1920.244417] env[63279]: DEBUG nova.network.neutron [None req-96273fea-e321-4566-9745-1792b9a7cc32 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] [instance: acf95fad-316c-4605-9008-24d4d7c05892] Successfully created port: efb38616-854e-4529-ac78-98f777f045f6 {{(pid=63279) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1920.343551] env[63279]: DEBUG nova.compute.manager [None req-96273fea-e321-4566-9745-1792b9a7cc32 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] [instance: acf95fad-316c-4605-9008-24d4d7c05892] Start building block device mappings for instance. {{(pid=63279) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1920.398277] env[63279]: DEBUG nova.network.neutron [None req-8d990ec3-fde0-427d-b132-b45041bfcd25 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] [instance: 5bb445d3-1b12-4a1b-ad2a-cbc929b13aee] Instance cache missing network info. 
{{(pid=63279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1920.409715] env[63279]: DEBUG oslo_concurrency.lockutils [None req-25a71d01-b823-4a97-991b-a30e1ca6a359 tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1920.696162] env[63279]: DEBUG nova.network.neutron [None req-8d990ec3-fde0-427d-b132-b45041bfcd25 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] [instance: 5bb445d3-1b12-4a1b-ad2a-cbc929b13aee] Updating instance_info_cache with network_info: [{"id": "8bca335c-50ae-4ba0-9cde-a8d640c633e1", "address": "fa:16:3e:13:61:9d", "network": {"id": "1e875730-8e0e-4907-a6d2-776025ed7ab9", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.151", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "a935e86eee0a4c38adfe0367d2097a61", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "32028d02-abaa-4071-bc65-1460f5c772a8", "external-id": "nsx-vlan-transportzone-558", "segmentation_id": 558, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8bca335c-50", "ovs_interfaceid": "8bca335c-50ae-4ba0-9cde-a8d640c633e1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1920.728082] env[63279]: INFO nova.compute.manager [-] [instance: 0224e4ea-c13c-4abd-9626-6038c0bbe4e9] Took 1.30 seconds to deallocate network for instance. 
[ 1921.019016] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01f4e02a-58a1-4bd6-b362-0f31c474ec1d {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1921.027153] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50a606e9-05dc-4170-8e65-8cd777456301 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1921.058060] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8f699dd-740b-42f8-9fa8-fc24431c58d1 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1921.066068] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5d1d864-61aa-40b4-a41f-9c3c0748f46f {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1921.081546] env[63279]: DEBUG nova.compute.provider_tree [None req-1085f46f-382d-442a-8127-97a2236452f2 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Updating inventory in ProviderTree for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1921.198317] env[63279]: DEBUG oslo_concurrency.lockutils [None req-8d990ec3-fde0-427d-b132-b45041bfcd25 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] Releasing lock "refresh_cache-5bb445d3-1b12-4a1b-ad2a-cbc929b13aee" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1921.198610] env[63279]: DEBUG nova.compute.manager [None req-8d990ec3-fde0-427d-b132-b45041bfcd25 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] [instance: 5bb445d3-1b12-4a1b-ad2a-cbc929b13aee] Instance network_info: |[{"id": "8bca335c-50ae-4ba0-9cde-a8d640c633e1", "address": "fa:16:3e:13:61:9d", "network": {"id": "1e875730-8e0e-4907-a6d2-776025ed7ab9", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.151", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "a935e86eee0a4c38adfe0367d2097a61", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "32028d02-abaa-4071-bc65-1460f5c772a8", "external-id": "nsx-vlan-transportzone-558", "segmentation_id": 558, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8bca335c-50", "ovs_interfaceid": "8bca335c-50ae-4ba0-9cde-a8d640c633e1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", 
"profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1921.199564] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-8d990ec3-fde0-427d-b132-b45041bfcd25 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] [instance: 5bb445d3-1b12-4a1b-ad2a-cbc929b13aee] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:13:61:9d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '32028d02-abaa-4071-bc65-1460f5c772a8', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '8bca335c-50ae-4ba0-9cde-a8d640c633e1', 'vif_model': 'vmxnet3'}] {{(pid=63279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1921.206553] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-8d990ec3-fde0-427d-b132-b45041bfcd25 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] Creating folder: Project (fd1ec10b968e4ef090672074f14b9287). Parent ref: group-v427491. {{(pid=63279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1921.206824] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7b8e6eda-ef61-473f-a730-8fa04947dcc2 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1921.219415] env[63279]: INFO nova.virt.vmwareapi.vm_util [None req-8d990ec3-fde0-427d-b132-b45041bfcd25 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] Created folder: Project (fd1ec10b968e4ef090672074f14b9287) in parent group-v427491. [ 1921.219637] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-8d990ec3-fde0-427d-b132-b45041bfcd25 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] Creating folder: Instances. Parent ref: group-v427583. {{(pid=63279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1921.220298] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-87ad712e-87b7-4c6b-a9ac-687f457e1dea {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1921.231392] env[63279]: INFO nova.virt.vmwareapi.vm_util [None req-8d990ec3-fde0-427d-b132-b45041bfcd25 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] Created folder: Instances in parent group-v427583. [ 1921.231632] env[63279]: DEBUG oslo.service.loopingcall [None req-8d990ec3-fde0-427d-b132-b45041bfcd25 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1921.231824] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5bb445d3-1b12-4a1b-ad2a-cbc929b13aee] Creating VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1921.232099] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5845f5a3-087e-40f8-ae3f-acdf64a278ec {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1921.250060] env[63279]: DEBUG oslo_concurrency.lockutils [None req-8ff9a3bb-99fe-487d-a326-1ba0f47e3992 tempest-ServersAdminNegativeTestJSON-968304097 tempest-ServersAdminNegativeTestJSON-968304097-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1921.256042] env[63279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1921.256042] env[63279]: value = "task-2086993" [ 1921.256042] env[63279]: _type = "Task" [ 1921.256042] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1921.263812] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2086993, 'name': CreateVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1921.269966] env[63279]: DEBUG nova.compute.manager [req-6a7ddb39-ac20-438e-b3bf-07efec1945a3 req-356c417f-7fd2-49b3-b1cd-5a5dffb2642c service nova] [instance: 5bb445d3-1b12-4a1b-ad2a-cbc929b13aee] Received event network-changed-8bca335c-50ae-4ba0-9cde-a8d640c633e1 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 1921.270274] env[63279]: DEBUG nova.compute.manager [req-6a7ddb39-ac20-438e-b3bf-07efec1945a3 req-356c417f-7fd2-49b3-b1cd-5a5dffb2642c service nova] [instance: 5bb445d3-1b12-4a1b-ad2a-cbc929b13aee] Refreshing instance network info cache due to event network-changed-8bca335c-50ae-4ba0-9cde-a8d640c633e1. 
{{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11655}} [ 1921.270490] env[63279]: DEBUG oslo_concurrency.lockutils [req-6a7ddb39-ac20-438e-b3bf-07efec1945a3 req-356c417f-7fd2-49b3-b1cd-5a5dffb2642c service nova] Acquiring lock "refresh_cache-5bb445d3-1b12-4a1b-ad2a-cbc929b13aee" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1921.270799] env[63279]: DEBUG oslo_concurrency.lockutils [req-6a7ddb39-ac20-438e-b3bf-07efec1945a3 req-356c417f-7fd2-49b3-b1cd-5a5dffb2642c service nova] Acquired lock "refresh_cache-5bb445d3-1b12-4a1b-ad2a-cbc929b13aee" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1921.270799] env[63279]: DEBUG nova.network.neutron [req-6a7ddb39-ac20-438e-b3bf-07efec1945a3 req-356c417f-7fd2-49b3-b1cd-5a5dffb2642c service nova] [instance: 5bb445d3-1b12-4a1b-ad2a-cbc929b13aee] Refreshing network info cache for port 8bca335c-50ae-4ba0-9cde-a8d640c633e1 {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1921.357735] env[63279]: DEBUG nova.compute.manager [None req-96273fea-e321-4566-9745-1792b9a7cc32 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] [instance: acf95fad-316c-4605-9008-24d4d7c05892] Start spawning the instance on the hypervisor. {{(pid=63279) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1921.387800] env[63279]: DEBUG nova.virt.hardware [None req-96273fea-e321-4566-9745-1792b9a7cc32 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-13T17:30:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-13T17:30:01Z,direct_url=,disk_format='vmdk',id=30887889-e45b-4f67-8b3c-16216e594a90,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a935e86eee0a4c38adfe0367d2097a61',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-13T17:30:02Z,virtual_size=,visibility=), allow threads: False {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1921.387998] env[63279]: DEBUG nova.virt.hardware [None req-96273fea-e321-4566-9745-1792b9a7cc32 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] Flavor limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1921.388231] env[63279]: DEBUG nova.virt.hardware [None req-96273fea-e321-4566-9745-1792b9a7cc32 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] Image limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1921.388445] env[63279]: DEBUG nova.virt.hardware [None req-96273fea-e321-4566-9745-1792b9a7cc32 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] Flavor pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1921.388600] env[63279]: DEBUG nova.virt.hardware [None 
req-96273fea-e321-4566-9745-1792b9a7cc32 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] Image pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1921.388736] env[63279]: DEBUG nova.virt.hardware [None req-96273fea-e321-4566-9745-1792b9a7cc32 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1921.388945] env[63279]: DEBUG nova.virt.hardware [None req-96273fea-e321-4566-9745-1792b9a7cc32 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1921.389151] env[63279]: DEBUG nova.virt.hardware [None req-96273fea-e321-4566-9745-1792b9a7cc32 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1921.389349] env[63279]: DEBUG nova.virt.hardware [None req-96273fea-e321-4566-9745-1792b9a7cc32 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] Got 1 possible topologies {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1921.390166] env[63279]: DEBUG nova.virt.hardware [None req-96273fea-e321-4566-9745-1792b9a7cc32 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1921.390410] env[63279]: DEBUG nova.virt.hardware [None req-96273fea-e321-4566-9745-1792b9a7cc32 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1921.391485] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f60bf99-7f46-4ef3-a7de-1ffa21453361 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1921.401266] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c517290b-bce3-43fd-a30c-ca614ed25e1a {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1921.424333] env[63279]: DEBUG oslo_vmware.rw_handles [None req-aa2a98de-449c-4cf6-b67c-ceed2ff46565 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/526a02b2-101a-d8e7-ab59-519ae6ede300/disk-0.vmdk. 
{{(pid=63279) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1921.425170] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5342d1cf-80e7-482f-8657-11de87711d57 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1921.431073] env[63279]: DEBUG oslo_vmware.rw_handles [None req-aa2a98de-449c-4cf6-b67c-ceed2ff46565 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/526a02b2-101a-d8e7-ab59-519ae6ede300/disk-0.vmdk is in state: ready. {{(pid=63279) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1921.431250] env[63279]: ERROR oslo_vmware.rw_handles [None req-aa2a98de-449c-4cf6-b67c-ceed2ff46565 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/526a02b2-101a-d8e7-ab59-519ae6ede300/disk-0.vmdk due to incomplete transfer. [ 1921.431465] env[63279]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-c61fd988-b287-4d07-879a-c57a23ba731c {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1921.438621] env[63279]: DEBUG oslo_vmware.rw_handles [None req-aa2a98de-449c-4cf6-b67c-ceed2ff46565 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/526a02b2-101a-d8e7-ab59-519ae6ede300/disk-0.vmdk. {{(pid=63279) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1921.438822] env[63279]: DEBUG nova.virt.vmwareapi.images [None req-aa2a98de-449c-4cf6-b67c-ceed2ff46565 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] [instance: 5572bb1c-b378-4531-8459-10c2a2b7afdf] Uploaded image 3d806ff9-66cb-4ec0-b428-16365c81abec to the Glance image server {{(pid=63279) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1921.441172] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-aa2a98de-449c-4cf6-b67c-ceed2ff46565 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] [instance: 5572bb1c-b378-4531-8459-10c2a2b7afdf] Destroying the VM {{(pid=63279) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1921.441410] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-09520681-e2ac-4786-95e3-d53cdbdbd9d7 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1921.446665] env[63279]: DEBUG oslo_vmware.api [None req-aa2a98de-449c-4cf6-b67c-ceed2ff46565 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Waiting for the task: (returnval){ [ 1921.446665] env[63279]: value = "task-2086994" [ 1921.446665] env[63279]: _type = "Task" [ 1921.446665] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1921.454314] env[63279]: DEBUG oslo_vmware.api [None req-aa2a98de-449c-4cf6-b67c-ceed2ff46565 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Task: {'id': task-2086994, 'name': Destroy_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1921.619687] env[63279]: DEBUG nova.scheduler.client.report [None req-1085f46f-382d-442a-8127-97a2236452f2 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Updated inventory for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 with generation 63 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1921.619955] env[63279]: DEBUG nova.compute.provider_tree [None req-1085f46f-382d-442a-8127-97a2236452f2 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Updating resource provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 generation from 63 to 64 during operation: update_inventory {{(pid=63279) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1921.620160] env[63279]: DEBUG nova.compute.provider_tree [None req-1085f46f-382d-442a-8127-97a2236452f2 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Updating inventory in ProviderTree for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1921.767402] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2086993, 'name': CreateVM_Task} progress is 99%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1921.915131] env[63279]: DEBUG nova.network.neutron [None req-96273fea-e321-4566-9745-1792b9a7cc32 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] [instance: acf95fad-316c-4605-9008-24d4d7c05892] Successfully updated port: efb38616-854e-4529-ac78-98f777f045f6 {{(pid=63279) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1921.957300] env[63279]: DEBUG oslo_vmware.api [None req-aa2a98de-449c-4cf6-b67c-ceed2ff46565 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Task: {'id': task-2086994, 'name': Destroy_Task} progress is 100%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1922.083698] env[63279]: DEBUG nova.network.neutron [req-6a7ddb39-ac20-438e-b3bf-07efec1945a3 req-356c417f-7fd2-49b3-b1cd-5a5dffb2642c service nova] [instance: 5bb445d3-1b12-4a1b-ad2a-cbc929b13aee] Updated VIF entry in instance network info cache for port 8bca335c-50ae-4ba0-9cde-a8d640c633e1. 
{{(pid=63279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1922.084239] env[63279]: DEBUG nova.network.neutron [req-6a7ddb39-ac20-438e-b3bf-07efec1945a3 req-356c417f-7fd2-49b3-b1cd-5a5dffb2642c service nova] [instance: 5bb445d3-1b12-4a1b-ad2a-cbc929b13aee] Updating instance_info_cache with network_info: [{"id": "8bca335c-50ae-4ba0-9cde-a8d640c633e1", "address": "fa:16:3e:13:61:9d", "network": {"id": "1e875730-8e0e-4907-a6d2-776025ed7ab9", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.151", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "a935e86eee0a4c38adfe0367d2097a61", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "32028d02-abaa-4071-bc65-1460f5c772a8", "external-id": "nsx-vlan-transportzone-558", "segmentation_id": 558, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8bca335c-50", "ovs_interfaceid": "8bca335c-50ae-4ba0-9cde-a8d640c633e1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1922.125703] env[63279]: DEBUG oslo_concurrency.lockutils [None req-1085f46f-382d-442a-8127-97a2236452f2 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.795s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1922.126197] env[63279]: DEBUG nova.compute.manager [None req-1085f46f-382d-442a-8127-97a2236452f2 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: 977fa519-2db3-4ee5-981d-c46820a8c72e] Start building networks asynchronously for instance. {{(pid=63279) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1922.128751] env[63279]: DEBUG oslo_concurrency.lockutils [None req-f091a69a-b91d-4949-b27c-36c5e3e3f986 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 38.093s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1922.266901] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2086993, 'name': CreateVM_Task, 'duration_secs': 0.536771} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1922.267182] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5bb445d3-1b12-4a1b-ad2a-cbc929b13aee] Created VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1922.268016] env[63279]: DEBUG oslo_concurrency.lockutils [None req-8d990ec3-fde0-427d-b132-b45041bfcd25 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1922.268288] env[63279]: DEBUG oslo_concurrency.lockutils [None req-8d990ec3-fde0-427d-b132-b45041bfcd25 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1922.268665] env[63279]: DEBUG oslo_concurrency.lockutils [None req-8d990ec3-fde0-427d-b132-b45041bfcd25 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1922.268970] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e5624b3b-59c3-4e14-ad3b-fb7e6c8f8eb0 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1922.275023] env[63279]: DEBUG oslo_vmware.api [None req-8d990ec3-fde0-427d-b132-b45041bfcd25 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] Waiting for the task: (returnval){ [ 1922.275023] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52fd15a9-ca8a-f068-899b-fb8d3a7995d6" [ 1922.275023] env[63279]: _type = "Task" [ 1922.275023] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1922.284238] env[63279]: DEBUG oslo_vmware.api [None req-8d990ec3-fde0-427d-b132-b45041bfcd25 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52fd15a9-ca8a-f068-899b-fb8d3a7995d6, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1922.417758] env[63279]: DEBUG oslo_concurrency.lockutils [None req-96273fea-e321-4566-9745-1792b9a7cc32 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] Acquiring lock "refresh_cache-acf95fad-316c-4605-9008-24d4d7c05892" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1922.418047] env[63279]: DEBUG oslo_concurrency.lockutils [None req-96273fea-e321-4566-9745-1792b9a7cc32 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] Acquired lock "refresh_cache-acf95fad-316c-4605-9008-24d4d7c05892" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1922.418517] env[63279]: DEBUG nova.network.neutron [None req-96273fea-e321-4566-9745-1792b9a7cc32 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] [instance: acf95fad-316c-4605-9008-24d4d7c05892] Building network info cache for instance {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1922.461119] env[63279]: DEBUG oslo_vmware.api [None req-aa2a98de-449c-4cf6-b67c-ceed2ff46565 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Task: {'id': task-2086994, 'name': Destroy_Task, 'duration_secs': 0.512465} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1922.461119] env[63279]: INFO nova.virt.vmwareapi.vm_util [None req-aa2a98de-449c-4cf6-b67c-ceed2ff46565 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] [instance: 5572bb1c-b378-4531-8459-10c2a2b7afdf] Destroyed the VM [ 1922.461489] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-aa2a98de-449c-4cf6-b67c-ceed2ff46565 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] [instance: 5572bb1c-b378-4531-8459-10c2a2b7afdf] Deleting Snapshot of the VM instance {{(pid=63279) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1922.462216] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-bbefeec6-aaf1-4e95-9c4a-3b1a725acc9c {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1922.471092] env[63279]: DEBUG oslo_concurrency.lockutils [None req-3b1863be-7a23-490d-9c3e-8eb295bf6202 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Acquiring lock "8ccb4293-927a-45ba-82e9-9f1b4d5985cc" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1922.471811] env[63279]: DEBUG oslo_concurrency.lockutils [None req-3b1863be-7a23-490d-9c3e-8eb295bf6202 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Lock "8ccb4293-927a-45ba-82e9-9f1b4d5985cc" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1922.472579] env[63279]: DEBUG oslo_vmware.api [None 
req-aa2a98de-449c-4cf6-b67c-ceed2ff46565 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Waiting for the task: (returnval){ [ 1922.472579] env[63279]: value = "task-2086995" [ 1922.472579] env[63279]: _type = "Task" [ 1922.472579] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1922.483676] env[63279]: DEBUG oslo_vmware.api [None req-aa2a98de-449c-4cf6-b67c-ceed2ff46565 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Task: {'id': task-2086995, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1922.586758] env[63279]: DEBUG oslo_concurrency.lockutils [req-6a7ddb39-ac20-438e-b3bf-07efec1945a3 req-356c417f-7fd2-49b3-b1cd-5a5dffb2642c service nova] Releasing lock "refresh_cache-5bb445d3-1b12-4a1b-ad2a-cbc929b13aee" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1922.587196] env[63279]: DEBUG nova.compute.manager [req-6a7ddb39-ac20-438e-b3bf-07efec1945a3 req-356c417f-7fd2-49b3-b1cd-5a5dffb2642c service nova] [instance: 0224e4ea-c13c-4abd-9626-6038c0bbe4e9] Received event network-vif-deleted-ff3873e4-61b9-4b2d-80eb-2ace560fe858 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 1922.632671] env[63279]: DEBUG nova.compute.utils [None req-1085f46f-382d-442a-8127-97a2236452f2 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Using /dev/sd instead of None {{(pid=63279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1922.636725] env[63279]: DEBUG nova.compute.manager [None req-1085f46f-382d-442a-8127-97a2236452f2 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: 977fa519-2db3-4ee5-981d-c46820a8c72e] Allocating IP information in the background. 
{{(pid=63279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1922.636910] env[63279]: DEBUG nova.network.neutron [None req-1085f46f-382d-442a-8127-97a2236452f2 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: 977fa519-2db3-4ee5-981d-c46820a8c72e] allocate_for_instance() {{(pid=63279) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1922.689845] env[63279]: DEBUG nova.policy [None req-1085f46f-382d-442a-8127-97a2236452f2 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '655d692da88947b89104e1f14f7d71f5', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a5a719a21fe248c49d0d0151d218866b', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63279) authorize /opt/stack/nova/nova/policy.py:192}} [ 1922.787190] env[63279]: DEBUG oslo_vmware.api [None req-8d990ec3-fde0-427d-b132-b45041bfcd25 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52fd15a9-ca8a-f068-899b-fb8d3a7995d6, 'name': SearchDatastore_Task, 'duration_secs': 0.010874} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1922.787529] env[63279]: DEBUG oslo_concurrency.lockutils [None req-8d990ec3-fde0-427d-b132-b45041bfcd25 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1922.787721] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-8d990ec3-fde0-427d-b132-b45041bfcd25 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] [instance: 5bb445d3-1b12-4a1b-ad2a-cbc929b13aee] Processing image 30887889-e45b-4f67-8b3c-16216e594a90 {{(pid=63279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1922.787998] env[63279]: DEBUG oslo_concurrency.lockutils [None req-8d990ec3-fde0-427d-b132-b45041bfcd25 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1922.788187] env[63279]: DEBUG oslo_concurrency.lockutils [None req-8d990ec3-fde0-427d-b132-b45041bfcd25 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1922.788369] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-8d990ec3-fde0-427d-b132-b45041bfcd25 tempest-ListImageFiltersTestJSON-386698447 
tempest-ListImageFiltersTestJSON-386698447-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1922.788625] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-92efb1c0-9c4e-4b47-b09e-339de8af01e8 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1922.796761] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-8d990ec3-fde0-427d-b132-b45041bfcd25 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1922.796971] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-8d990ec3-fde0-427d-b132-b45041bfcd25 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1922.800009] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8606e5a2-86a8-48bb-a4ec-6ef8caa7d9fe {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1922.806378] env[63279]: DEBUG oslo_vmware.api [None req-8d990ec3-fde0-427d-b132-b45041bfcd25 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] Waiting for the task: (returnval){ [ 1922.806378] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52ffd5f3-0448-d4ff-3fae-b8aeffaf9615" [ 1922.806378] env[63279]: _type = "Task" [ 1922.806378] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1922.815142] env[63279]: DEBUG oslo_vmware.api [None req-8d990ec3-fde0-427d-b132-b45041bfcd25 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52ffd5f3-0448-d4ff-3fae-b8aeffaf9615, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1922.979040] env[63279]: DEBUG nova.network.neutron [None req-96273fea-e321-4566-9745-1792b9a7cc32 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] [instance: acf95fad-316c-4605-9008-24d4d7c05892] Instance cache missing network info. {{(pid=63279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1922.989321] env[63279]: DEBUG oslo_vmware.api [None req-aa2a98de-449c-4cf6-b67c-ceed2ff46565 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Task: {'id': task-2086995, 'name': RemoveSnapshot_Task, 'duration_secs': 0.357678} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1922.989585] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-aa2a98de-449c-4cf6-b67c-ceed2ff46565 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] [instance: 5572bb1c-b378-4531-8459-10c2a2b7afdf] Deleted Snapshot of the VM instance {{(pid=63279) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1922.989812] env[63279]: INFO nova.compute.manager [None req-aa2a98de-449c-4cf6-b67c-ceed2ff46565 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] [instance: 5572bb1c-b378-4531-8459-10c2a2b7afdf] Took 12.97 seconds to snapshot the instance on the hypervisor. [ 1923.073515] env[63279]: DEBUG nova.network.neutron [None req-1085f46f-382d-442a-8127-97a2236452f2 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: 977fa519-2db3-4ee5-981d-c46820a8c72e] Successfully created port: a243284f-0df1-41a1-b8e4-a8d486dd934e {{(pid=63279) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1923.083112] env[63279]: DEBUG oslo_concurrency.lockutils [None req-c91533fe-264f-4fd8-930f-c4d6558a1a1d tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Acquiring lock "5572bb1c-b378-4531-8459-10c2a2b7afdf" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1923.083450] env[63279]: DEBUG oslo_concurrency.lockutils [None req-c91533fe-264f-4fd8-930f-c4d6558a1a1d tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Lock "5572bb1c-b378-4531-8459-10c2a2b7afdf" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1923.083591] env[63279]: DEBUG oslo_concurrency.lockutils [None req-c91533fe-264f-4fd8-930f-c4d6558a1a1d tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Acquiring lock "5572bb1c-b378-4531-8459-10c2a2b7afdf-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1923.083937] env[63279]: DEBUG oslo_concurrency.lockutils [None req-c91533fe-264f-4fd8-930f-c4d6558a1a1d tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Lock "5572bb1c-b378-4531-8459-10c2a2b7afdf-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1923.084216] env[63279]: DEBUG oslo_concurrency.lockutils [None req-c91533fe-264f-4fd8-930f-c4d6558a1a1d tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Lock "5572bb1c-b378-4531-8459-10c2a2b7afdf-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1923.088770] env[63279]: INFO nova.compute.manager [None req-c91533fe-264f-4fd8-930f-c4d6558a1a1d tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] 
[instance: 5572bb1c-b378-4531-8459-10c2a2b7afdf] Terminating instance [ 1923.139051] env[63279]: DEBUG nova.compute.manager [None req-1085f46f-382d-442a-8127-97a2236452f2 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: 977fa519-2db3-4ee5-981d-c46820a8c72e] Start building block device mappings for instance. {{(pid=63279) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1923.233582] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af1b9770-c053-4af0-bb04-01d9d2f98993 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1923.241534] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe223bab-f984-465a-b481-5477754d5426 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1923.275129] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07e84c95-29b0-435e-9329-af3ba14a6fba {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1923.283595] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b121e864-8cb8-4d16-bc8d-68d15bf6ac74 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1923.297948] env[63279]: DEBUG nova.compute.provider_tree [None req-f091a69a-b91d-4949-b27c-36c5e3e3f986 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Inventory has not changed in ProviderTree for provider: 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1923.317590] env[63279]: DEBUG oslo_vmware.api [None req-8d990ec3-fde0-427d-b132-b45041bfcd25 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52ffd5f3-0448-d4ff-3fae-b8aeffaf9615, 'name': SearchDatastore_Task, 'duration_secs': 0.008396} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1923.318553] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-337ab34d-9c9b-485e-a99c-84a37cf27e9f {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1923.324956] env[63279]: DEBUG oslo_vmware.api [None req-8d990ec3-fde0-427d-b132-b45041bfcd25 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] Waiting for the task: (returnval){ [ 1923.324956] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]526efe91-5b62-d2fe-539c-654ad53afffa" [ 1923.324956] env[63279]: _type = "Task" [ 1923.324956] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1923.333067] env[63279]: DEBUG oslo_vmware.api [None req-8d990ec3-fde0-427d-b132-b45041bfcd25 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]526efe91-5b62-d2fe-539c-654ad53afffa, 'name': SearchDatastore_Task} progress is 0%. 
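[editor's note] The terminate_instance entries above show the usual oslo.concurrency pattern: one lock named after the instance UUID held for the whole operation, plus a short-lived "<uuid>-events" lock while queued events are cleared (hence the acquire/release pairs with "waited 0.000s" / "held 0.000s"). A minimal sketch of both forms, with the lock names copied from the log and a placeholder body:

    from oslo_concurrency import lockutils

    @lockutils.synchronized('5572bb1c-b378-4531-8459-10c2a2b7afdf')
    def do_terminate_instance():
        # held for the whole terminate path, mirroring the outer lock above
        with lockutils.lock('5572bb1c-b378-4531-8459-10c2a2b7afdf-events'):
            pass  # clear queued external events for the instance

    do_terminate_instance()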
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1923.381945] env[63279]: DEBUG nova.network.neutron [None req-96273fea-e321-4566-9745-1792b9a7cc32 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] [instance: acf95fad-316c-4605-9008-24d4d7c05892] Updating instance_info_cache with network_info: [{"id": "efb38616-854e-4529-ac78-98f777f045f6", "address": "fa:16:3e:76:d5:a4", "network": {"id": "1e875730-8e0e-4907-a6d2-776025ed7ab9", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.87", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "a935e86eee0a4c38adfe0367d2097a61", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "32028d02-abaa-4071-bc65-1460f5c772a8", "external-id": "nsx-vlan-transportzone-558", "segmentation_id": 558, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapefb38616-85", "ovs_interfaceid": "efb38616-854e-4529-ac78-98f777f045f6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1923.448393] env[63279]: DEBUG nova.compute.manager [req-3b469544-10f2-414d-aa9e-676f55c01609 req-94cd61a0-3699-43e3-b859-4662b4f296a4 service nova] [instance: acf95fad-316c-4605-9008-24d4d7c05892] Received event network-vif-plugged-efb38616-854e-4529-ac78-98f777f045f6 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 1923.448709] env[63279]: DEBUG oslo_concurrency.lockutils [req-3b469544-10f2-414d-aa9e-676f55c01609 req-94cd61a0-3699-43e3-b859-4662b4f296a4 service nova] Acquiring lock "acf95fad-316c-4605-9008-24d4d7c05892-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1923.448740] env[63279]: DEBUG oslo_concurrency.lockutils [req-3b469544-10f2-414d-aa9e-676f55c01609 req-94cd61a0-3699-43e3-b859-4662b4f296a4 service nova] Lock "acf95fad-316c-4605-9008-24d4d7c05892-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1923.448888] env[63279]: DEBUG oslo_concurrency.lockutils [req-3b469544-10f2-414d-aa9e-676f55c01609 req-94cd61a0-3699-43e3-b859-4662b4f296a4 service nova] Lock "acf95fad-316c-4605-9008-24d4d7c05892-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1923.449245] env[63279]: DEBUG nova.compute.manager [req-3b469544-10f2-414d-aa9e-676f55c01609 req-94cd61a0-3699-43e3-b859-4662b4f296a4 service nova] [instance: acf95fad-316c-4605-9008-24d4d7c05892] No waiting events found dispatching network-vif-plugged-efb38616-854e-4529-ac78-98f777f045f6 {{(pid=63279) pop_instance_event 
/opt/stack/nova/nova/compute/manager.py:322}} [ 1923.449245] env[63279]: WARNING nova.compute.manager [req-3b469544-10f2-414d-aa9e-676f55c01609 req-94cd61a0-3699-43e3-b859-4662b4f296a4 service nova] [instance: acf95fad-316c-4605-9008-24d4d7c05892] Received unexpected event network-vif-plugged-efb38616-854e-4529-ac78-98f777f045f6 for instance with vm_state building and task_state spawning. [ 1923.449411] env[63279]: DEBUG nova.compute.manager [req-3b469544-10f2-414d-aa9e-676f55c01609 req-94cd61a0-3699-43e3-b859-4662b4f296a4 service nova] [instance: acf95fad-316c-4605-9008-24d4d7c05892] Received event network-changed-efb38616-854e-4529-ac78-98f777f045f6 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 1923.449646] env[63279]: DEBUG nova.compute.manager [req-3b469544-10f2-414d-aa9e-676f55c01609 req-94cd61a0-3699-43e3-b859-4662b4f296a4 service nova] [instance: acf95fad-316c-4605-9008-24d4d7c05892] Refreshing instance network info cache due to event network-changed-efb38616-854e-4529-ac78-98f777f045f6. {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11655}} [ 1923.449825] env[63279]: DEBUG oslo_concurrency.lockutils [req-3b469544-10f2-414d-aa9e-676f55c01609 req-94cd61a0-3699-43e3-b859-4662b4f296a4 service nova] Acquiring lock "refresh_cache-acf95fad-316c-4605-9008-24d4d7c05892" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1923.494611] env[63279]: DEBUG nova.compute.manager [None req-aa2a98de-449c-4cf6-b67c-ceed2ff46565 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] [instance: 5572bb1c-b378-4531-8459-10c2a2b7afdf] Instance disappeared during snapshot {{(pid=63279) _snapshot_instance /opt/stack/nova/nova/compute/manager.py:4595}} [ 1923.505991] env[63279]: DEBUG nova.compute.manager [None req-aa2a98de-449c-4cf6-b67c-ceed2ff46565 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Image not found during clean up 3d806ff9-66cb-4ec0-b428-16365c81abec {{(pid=63279) _snapshot_instance /opt/stack/nova/nova/compute/manager.py:4601}} [ 1923.592230] env[63279]: DEBUG nova.compute.manager [None req-c91533fe-264f-4fd8-930f-c4d6558a1a1d tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] [instance: 5572bb1c-b378-4531-8459-10c2a2b7afdf] Start destroying the instance on the hypervisor. 
{{(pid=63279) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1923.592511] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-c91533fe-264f-4fd8-930f-c4d6558a1a1d tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] [instance: 5572bb1c-b378-4531-8459-10c2a2b7afdf] Destroying instance {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1923.593373] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3cafc8e-6a33-4874-9a54-d71e54b183a7 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1923.608090] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-c91533fe-264f-4fd8-930f-c4d6558a1a1d tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] [instance: 5572bb1c-b378-4531-8459-10c2a2b7afdf] Unregistering the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1923.608090] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-857de7ca-d293-432e-8a90-d4fa97e1b19a {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1923.685449] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-c91533fe-264f-4fd8-930f-c4d6558a1a1d tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] [instance: 5572bb1c-b378-4531-8459-10c2a2b7afdf] Unregistered the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1923.685762] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-c91533fe-264f-4fd8-930f-c4d6558a1a1d tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] [instance: 5572bb1c-b378-4531-8459-10c2a2b7afdf] Deleting contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1923.685998] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-c91533fe-264f-4fd8-930f-c4d6558a1a1d tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Deleting the datastore file [datastore1] 5572bb1c-b378-4531-8459-10c2a2b7afdf {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1923.687083] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-daa1ab13-1ea5-472a-9347-ca6e9ba1f920 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1923.693576] env[63279]: DEBUG oslo_vmware.api [None req-c91533fe-264f-4fd8-930f-c4d6558a1a1d tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Waiting for the task: (returnval){ [ 1923.693576] env[63279]: value = "task-2086997" [ 1923.693576] env[63279]: _type = "Task" [ 1923.693576] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1923.701405] env[63279]: DEBUG oslo_vmware.api [None req-c91533fe-264f-4fd8-930f-c4d6558a1a1d tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Task: {'id': task-2086997, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1923.716463] env[63279]: DEBUG oslo_concurrency.lockutils [None req-0248352a-b141-4db1-ac2b-5125f47ee5eb tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Acquiring lock "64e92bfc-c0d0-4918-9ba2-45ffedbf7e39" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1923.716624] env[63279]: DEBUG oslo_concurrency.lockutils [None req-0248352a-b141-4db1-ac2b-5125f47ee5eb tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Lock "64e92bfc-c0d0-4918-9ba2-45ffedbf7e39" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1923.803173] env[63279]: DEBUG nova.scheduler.client.report [None req-f091a69a-b91d-4949-b27c-36c5e3e3f986 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Inventory has not changed for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1923.834265] env[63279]: DEBUG oslo_vmware.api [None req-8d990ec3-fde0-427d-b132-b45041bfcd25 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]526efe91-5b62-d2fe-539c-654ad53afffa, 'name': SearchDatastore_Task, 'duration_secs': 0.009298} completed successfully. 
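[editor's note] Every "Waiting for the task" / "progress is N%" pair above comes from oslo.vmware's task poller: invoke_api returns a Task moref and wait_for_task polls it until it succeeds or raises. A minimal sketch assuming the SearchDatastore_Task case from this log; the session constructor argument names and the moref/spec parameters are assumptions from memory, not values taken from Nova:

    from oslo_vmware import api

    def search_image_cache(host, user, password, browser_ref, search_spec):
        # browser_ref: HostDatastoreBrowser moref; search_spec: HostDatastoreBrowserSearchSpec
        # (both assumed to be built elsewhere); host/user/password are placeholders.
        session = api.VMwareAPISession(host, user, password,
                                       api_retry_count=10, task_poll_interval=0.5)
        task_ref = session.invoke_api(session.vim, 'SearchDatastore_Task', browser_ref,
                                      datastorePath='[datastore1] devstack-image-cache_base',
                                      searchSpec=search_spec)
        return session.wait_for_task(task_ref)  # blocks, polling every task_poll_interval seconds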
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1923.834524] env[63279]: DEBUG oslo_concurrency.lockutils [None req-8d990ec3-fde0-427d-b132-b45041bfcd25 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1923.834795] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-8d990ec3-fde0-427d-b132-b45041bfcd25 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] 5bb445d3-1b12-4a1b-ad2a-cbc929b13aee/5bb445d3-1b12-4a1b-ad2a-cbc929b13aee.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1923.835073] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b2f2ff67-0cc1-459d-a602-07f38a0c840c {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1923.841086] env[63279]: DEBUG oslo_vmware.api [None req-8d990ec3-fde0-427d-b132-b45041bfcd25 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] Waiting for the task: (returnval){ [ 1923.841086] env[63279]: value = "task-2086998" [ 1923.841086] env[63279]: _type = "Task" [ 1923.841086] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1923.848430] env[63279]: DEBUG oslo_vmware.api [None req-8d990ec3-fde0-427d-b132-b45041bfcd25 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] Task: {'id': task-2086998, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1923.886027] env[63279]: DEBUG oslo_concurrency.lockutils [None req-96273fea-e321-4566-9745-1792b9a7cc32 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] Releasing lock "refresh_cache-acf95fad-316c-4605-9008-24d4d7c05892" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1923.886027] env[63279]: DEBUG nova.compute.manager [None req-96273fea-e321-4566-9745-1792b9a7cc32 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] [instance: acf95fad-316c-4605-9008-24d4d7c05892] Instance network_info: |[{"id": "efb38616-854e-4529-ac78-98f777f045f6", "address": "fa:16:3e:76:d5:a4", "network": {"id": "1e875730-8e0e-4907-a6d2-776025ed7ab9", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.87", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "a935e86eee0a4c38adfe0367d2097a61", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "32028d02-abaa-4071-bc65-1460f5c772a8", "external-id": "nsx-vlan-transportzone-558", "segmentation_id": 558, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapefb38616-85", "ovs_interfaceid": "efb38616-854e-4529-ac78-98f777f045f6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1923.886222] env[63279]: DEBUG oslo_concurrency.lockutils [req-3b469544-10f2-414d-aa9e-676f55c01609 req-94cd61a0-3699-43e3-b859-4662b4f296a4 service nova] Acquired lock "refresh_cache-acf95fad-316c-4605-9008-24d4d7c05892" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1923.886222] env[63279]: DEBUG nova.network.neutron [req-3b469544-10f2-414d-aa9e-676f55c01609 req-94cd61a0-3699-43e3-b859-4662b4f296a4 service nova] [instance: acf95fad-316c-4605-9008-24d4d7c05892] Refreshing network info cache for port efb38616-854e-4529-ac78-98f777f045f6 {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1923.888092] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-96273fea-e321-4566-9745-1792b9a7cc32 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] [instance: acf95fad-316c-4605-9008-24d4d7c05892] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:76:d5:a4', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '32028d02-abaa-4071-bc65-1460f5c772a8', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'efb38616-854e-4529-ac78-98f777f045f6', 'vif_model': 'vmxnet3'}] {{(pid=63279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1923.897050] env[63279]: DEBUG oslo.service.loopingcall [None req-96273fea-e321-4566-9745-1792b9a7cc32 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] Waiting for 
function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1923.898182] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: acf95fad-316c-4605-9008-24d4d7c05892] Creating VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1923.898182] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a28d3aee-c626-4b7b-b07d-0be4505c8711 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1923.921538] env[63279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1923.921538] env[63279]: value = "task-2086999" [ 1923.921538] env[63279]: _type = "Task" [ 1923.921538] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1923.932458] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2086999, 'name': CreateVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1924.151072] env[63279]: DEBUG nova.compute.manager [None req-1085f46f-382d-442a-8127-97a2236452f2 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: 977fa519-2db3-4ee5-981d-c46820a8c72e] Start spawning the instance on the hypervisor. {{(pid=63279) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1924.178189] env[63279]: DEBUG nova.virt.hardware [None req-1085f46f-382d-442a-8127-97a2236452f2 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-13T17:30:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-13T17:30:01Z,direct_url=,disk_format='vmdk',id=30887889-e45b-4f67-8b3c-16216e594a90,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a935e86eee0a4c38adfe0367d2097a61',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-13T17:30:02Z,virtual_size=,visibility=), allow threads: False {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1924.178838] env[63279]: DEBUG nova.virt.hardware [None req-1085f46f-382d-442a-8127-97a2236452f2 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Flavor limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1924.179258] env[63279]: DEBUG nova.virt.hardware [None req-1085f46f-382d-442a-8127-97a2236452f2 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Image limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1924.179720] env[63279]: DEBUG nova.virt.hardware [None req-1085f46f-382d-442a-8127-97a2236452f2 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Flavor pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 
1924.180022] env[63279]: DEBUG nova.virt.hardware [None req-1085f46f-382d-442a-8127-97a2236452f2 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Image pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1924.180425] env[63279]: DEBUG nova.virt.hardware [None req-1085f46f-382d-442a-8127-97a2236452f2 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1924.180797] env[63279]: DEBUG nova.virt.hardware [None req-1085f46f-382d-442a-8127-97a2236452f2 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1924.181246] env[63279]: DEBUG nova.virt.hardware [None req-1085f46f-382d-442a-8127-97a2236452f2 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1924.181675] env[63279]: DEBUG nova.virt.hardware [None req-1085f46f-382d-442a-8127-97a2236452f2 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Got 1 possible topologies {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1924.182105] env[63279]: DEBUG nova.virt.hardware [None req-1085f46f-382d-442a-8127-97a2236452f2 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1924.182510] env[63279]: DEBUG nova.virt.hardware [None req-1085f46f-382d-442a-8127-97a2236452f2 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1924.184293] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad0f625c-dd64-4215-9f09-8dd5f5007c3e {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1924.198776] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-611189a3-e643-4ffb-bdef-8c58085e6f67 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1924.221068] env[63279]: DEBUG oslo_vmware.api [None req-c91533fe-264f-4fd8-930f-c4d6558a1a1d tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Task: {'id': task-2086997, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.123128} completed successfully. 
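[editor's note] The CPU-topology lines above (flavor/image limits 0:0:0, maxima 65536, one vCPU, exactly one possible topology 1:1:1) boil down to enumerating (sockets, cores, threads) factorizations of the vCPU count under the limits. A toy illustration of that idea only, not Nova's actual implementation:

    from itertools import product

    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
        # Enumerate (sockets, cores, threads) triples whose product equals vcpus.
        topos = []
        for s, c, t in product(range(1, min(vcpus, max_sockets) + 1),
                               range(1, min(vcpus, max_cores) + 1),
                               range(1, min(vcpus, max_threads) + 1)):
            if s * c * t == vcpus:
                topos.append((s, c, t))
        return topos

    print(possible_topologies(1))  # [(1, 1, 1)] -- matches the single topology logged above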
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1924.221602] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-c91533fe-264f-4fd8-930f-c4d6558a1a1d tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Deleted the datastore file {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1924.221915] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-c91533fe-264f-4fd8-930f-c4d6558a1a1d tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] [instance: 5572bb1c-b378-4531-8459-10c2a2b7afdf] Deleted contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1924.225018] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-c91533fe-264f-4fd8-930f-c4d6558a1a1d tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] [instance: 5572bb1c-b378-4531-8459-10c2a2b7afdf] Instance destroyed {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1924.225018] env[63279]: INFO nova.compute.manager [None req-c91533fe-264f-4fd8-930f-c4d6558a1a1d tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] [instance: 5572bb1c-b378-4531-8459-10c2a2b7afdf] Took 0.63 seconds to destroy the instance on the hypervisor. [ 1924.225018] env[63279]: DEBUG oslo.service.loopingcall [None req-c91533fe-264f-4fd8-930f-c4d6558a1a1d tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1924.225018] env[63279]: DEBUG nova.compute.manager [-] [instance: 5572bb1c-b378-4531-8459-10c2a2b7afdf] Deallocating network for instance {{(pid=63279) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1924.225018] env[63279]: DEBUG nova.network.neutron [-] [instance: 5572bb1c-b378-4531-8459-10c2a2b7afdf] deallocate_for_instance() {{(pid=63279) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1924.350810] env[63279]: DEBUG oslo_vmware.api [None req-8d990ec3-fde0-427d-b132-b45041bfcd25 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] Task: {'id': task-2086998, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.463548} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1924.351168] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-8d990ec3-fde0-427d-b132-b45041bfcd25 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] 5bb445d3-1b12-4a1b-ad2a-cbc929b13aee/5bb445d3-1b12-4a1b-ad2a-cbc929b13aee.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1924.351438] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-8d990ec3-fde0-427d-b132-b45041bfcd25 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] [instance: 5bb445d3-1b12-4a1b-ad2a-cbc929b13aee] Extending root virtual disk to 1048576 {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1924.351728] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-4bf3120b-9af8-436d-9060-200e916d633c {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1924.357949] env[63279]: DEBUG oslo_vmware.api [None req-8d990ec3-fde0-427d-b132-b45041bfcd25 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] Waiting for the task: (returnval){ [ 1924.357949] env[63279]: value = "task-2087000" [ 1924.357949] env[63279]: _type = "Task" [ 1924.357949] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1924.365869] env[63279]: DEBUG oslo_vmware.api [None req-8d990ec3-fde0-427d-b132-b45041bfcd25 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] Task: {'id': task-2087000, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1924.431080] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2086999, 'name': CreateVM_Task} progress is 99%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1924.800908] env[63279]: DEBUG nova.network.neutron [req-3b469544-10f2-414d-aa9e-676f55c01609 req-94cd61a0-3699-43e3-b859-4662b4f296a4 service nova] [instance: acf95fad-316c-4605-9008-24d4d7c05892] Updated VIF entry in instance network info cache for port efb38616-854e-4529-ac78-98f777f045f6. 
{{(pid=63279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1924.801316] env[63279]: DEBUG nova.network.neutron [req-3b469544-10f2-414d-aa9e-676f55c01609 req-94cd61a0-3699-43e3-b859-4662b4f296a4 service nova] [instance: acf95fad-316c-4605-9008-24d4d7c05892] Updating instance_info_cache with network_info: [{"id": "efb38616-854e-4529-ac78-98f777f045f6", "address": "fa:16:3e:76:d5:a4", "network": {"id": "1e875730-8e0e-4907-a6d2-776025ed7ab9", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.87", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "a935e86eee0a4c38adfe0367d2097a61", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "32028d02-abaa-4071-bc65-1460f5c772a8", "external-id": "nsx-vlan-transportzone-558", "segmentation_id": 558, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapefb38616-85", "ovs_interfaceid": "efb38616-854e-4529-ac78-98f777f045f6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1924.818408] env[63279]: DEBUG oslo_concurrency.lockutils [None req-f091a69a-b91d-4949-b27c-36c5e3e3f986 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.689s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1924.821389] env[63279]: DEBUG oslo_concurrency.lockutils [None req-e6033c0c-bf3a-4f54-ac44-9ee1b876d8b8 tempest-InstanceActionsTestJSON-1583999850 tempest-InstanceActionsTestJSON-1583999850-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 37.687s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1924.822805] env[63279]: INFO nova.compute.claims [None req-e6033c0c-bf3a-4f54-ac44-9ee1b876d8b8 tempest-InstanceActionsTestJSON-1583999850 tempest-InstanceActionsTestJSON-1583999850-project-member] [instance: de543869-8ab1-40ed-8f6d-dc506c257843] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1924.869678] env[63279]: DEBUG oslo_vmware.api [None req-8d990ec3-fde0-427d-b132-b45041bfcd25 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] Task: {'id': task-2087000, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.077545} completed successfully. 
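[editor's note] The instance_info_cache payloads above are plain JSON, so the fields of interest (port id, MAC, fixed IP, tap device) can be read out directly. A small self-contained example using a trimmed-down copy of the first VIF from the cache entry above:

    # Minimal stand-in for one entry of the network_info list logged above.
    vif = {
        "id": "efb38616-854e-4529-ac78-98f777f045f6",
        "address": "fa:16:3e:76:d5:a4",
        "devname": "tapefb38616-85",
        "network": {"subnets": [{"cidr": "192.168.233.0/24",
                                 "ips": [{"address": "192.168.233.87"}]}]},
    }
    fixed_ip = vif["network"]["subnets"][0]["ips"][0]["address"]  # "192.168.233.87"
    print(vif["id"], vif["address"], fixed_ip, vif["devname"])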
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1924.869955] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-8d990ec3-fde0-427d-b132-b45041bfcd25 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] [instance: 5bb445d3-1b12-4a1b-ad2a-cbc929b13aee] Extended root virtual disk {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1924.875041] env[63279]: DEBUG nova.network.neutron [None req-1085f46f-382d-442a-8127-97a2236452f2 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: 977fa519-2db3-4ee5-981d-c46820a8c72e] Successfully updated port: a243284f-0df1-41a1-b8e4-a8d486dd934e {{(pid=63279) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1924.876808] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1c2e046-c53b-462f-812e-fb060773979b {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1924.904210] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-8d990ec3-fde0-427d-b132-b45041bfcd25 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] [instance: 5bb445d3-1b12-4a1b-ad2a-cbc929b13aee] Reconfiguring VM instance instance-0000001f to attach disk [datastore1] 5bb445d3-1b12-4a1b-ad2a-cbc929b13aee/5bb445d3-1b12-4a1b-ad2a-cbc929b13aee.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1924.904790] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-34d58a77-09be-46e9-9119-2bf427de4cd0 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1924.928219] env[63279]: DEBUG oslo_vmware.api [None req-8d990ec3-fde0-427d-b132-b45041bfcd25 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] Waiting for the task: (returnval){ [ 1924.928219] env[63279]: value = "task-2087001" [ 1924.928219] env[63279]: _type = "Task" [ 1924.928219] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1924.930372] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2086999, 'name': CreateVM_Task, 'duration_secs': 0.561347} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1924.933029] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: acf95fad-316c-4605-9008-24d4d7c05892] Created VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1924.934022] env[63279]: DEBUG oslo_concurrency.lockutils [None req-96273fea-e321-4566-9745-1792b9a7cc32 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1924.934190] env[63279]: DEBUG oslo_concurrency.lockutils [None req-96273fea-e321-4566-9745-1792b9a7cc32 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1924.934492] env[63279]: DEBUG oslo_concurrency.lockutils [None req-96273fea-e321-4566-9745-1792b9a7cc32 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1924.935500] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5eb1a89b-e295-4c84-83b1-05d9a40caade {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1924.940417] env[63279]: DEBUG oslo_vmware.api [None req-8d990ec3-fde0-427d-b132-b45041bfcd25 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] Task: {'id': task-2087001, 'name': ReconfigVM_Task} progress is 10%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1924.941689] env[63279]: DEBUG oslo_vmware.api [None req-96273fea-e321-4566-9745-1792b9a7cc32 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] Waiting for the task: (returnval){ [ 1924.941689] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]5278aae9-566b-933a-460f-ed8e593a9cc4" [ 1924.941689] env[63279]: _type = "Task" [ 1924.941689] env[63279]: } to complete. 
{{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1924.947244] env[63279]: DEBUG nova.compute.manager [req-a0e41775-00fd-4728-a022-e57ad35be7a3 req-98398c15-3463-4361-9b7b-886078fc2acf service nova] [instance: 977fa519-2db3-4ee5-981d-c46820a8c72e] Received event network-vif-plugged-a243284f-0df1-41a1-b8e4-a8d486dd934e {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 1924.947344] env[63279]: DEBUG oslo_concurrency.lockutils [req-a0e41775-00fd-4728-a022-e57ad35be7a3 req-98398c15-3463-4361-9b7b-886078fc2acf service nova] Acquiring lock "977fa519-2db3-4ee5-981d-c46820a8c72e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1924.947690] env[63279]: DEBUG oslo_concurrency.lockutils [req-a0e41775-00fd-4728-a022-e57ad35be7a3 req-98398c15-3463-4361-9b7b-886078fc2acf service nova] Lock "977fa519-2db3-4ee5-981d-c46820a8c72e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1924.947728] env[63279]: DEBUG oslo_concurrency.lockutils [req-a0e41775-00fd-4728-a022-e57ad35be7a3 req-98398c15-3463-4361-9b7b-886078fc2acf service nova] Lock "977fa519-2db3-4ee5-981d-c46820a8c72e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1924.947875] env[63279]: DEBUG nova.compute.manager [req-a0e41775-00fd-4728-a022-e57ad35be7a3 req-98398c15-3463-4361-9b7b-886078fc2acf service nova] [instance: 977fa519-2db3-4ee5-981d-c46820a8c72e] No waiting events found dispatching network-vif-plugged-a243284f-0df1-41a1-b8e4-a8d486dd934e {{(pid=63279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1924.948052] env[63279]: WARNING nova.compute.manager [req-a0e41775-00fd-4728-a022-e57ad35be7a3 req-98398c15-3463-4361-9b7b-886078fc2acf service nova] [instance: 977fa519-2db3-4ee5-981d-c46820a8c72e] Received unexpected event network-vif-plugged-a243284f-0df1-41a1-b8e4-a8d486dd934e for instance with vm_state building and task_state spawning. [ 1924.957484] env[63279]: DEBUG oslo_vmware.api [None req-96273fea-e321-4566-9745-1792b9a7cc32 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]5278aae9-566b-933a-460f-ed8e593a9cc4, 'name': SearchDatastore_Task, 'duration_secs': 0.010493} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1924.957835] env[63279]: DEBUG oslo_concurrency.lockutils [None req-96273fea-e321-4566-9745-1792b9a7cc32 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1924.958232] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-96273fea-e321-4566-9745-1792b9a7cc32 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] [instance: acf95fad-316c-4605-9008-24d4d7c05892] Processing image 30887889-e45b-4f67-8b3c-16216e594a90 {{(pid=63279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1924.958464] env[63279]: DEBUG oslo_concurrency.lockutils [None req-96273fea-e321-4566-9745-1792b9a7cc32 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1924.958670] env[63279]: DEBUG oslo_concurrency.lockutils [None req-96273fea-e321-4566-9745-1792b9a7cc32 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1924.958904] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-96273fea-e321-4566-9745-1792b9a7cc32 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1924.959288] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1ac10bad-fb76-46d2-937f-2e417a1ad421 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1924.968259] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-96273fea-e321-4566-9745-1792b9a7cc32 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1924.968433] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-96273fea-e321-4566-9745-1792b9a7cc32 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1924.969128] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-95714bb1-aad3-4f85-821d-22899e2714d7 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1924.976522] env[63279]: DEBUG oslo_vmware.api [None req-96273fea-e321-4566-9745-1792b9a7cc32 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] Waiting for the task: (returnval){ [ 1924.976522] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]5246f1a2-a1b9-5405-f054-7320429e4902" [ 1924.976522] env[63279]: _type = "Task" [ 1924.976522] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1924.984163] env[63279]: DEBUG oslo_vmware.api [None req-96273fea-e321-4566-9745-1792b9a7cc32 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]5246f1a2-a1b9-5405-f054-7320429e4902, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1925.072487] env[63279]: DEBUG nova.network.neutron [-] [instance: 5572bb1c-b378-4531-8459-10c2a2b7afdf] Updating instance_info_cache with network_info: [] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1925.304731] env[63279]: DEBUG oslo_concurrency.lockutils [req-3b469544-10f2-414d-aa9e-676f55c01609 req-94cd61a0-3699-43e3-b859-4662b4f296a4 service nova] Releasing lock "refresh_cache-acf95fad-316c-4605-9008-24d4d7c05892" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1925.382110] env[63279]: DEBUG oslo_concurrency.lockutils [None req-1085f46f-382d-442a-8127-97a2236452f2 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Acquiring lock "refresh_cache-977fa519-2db3-4ee5-981d-c46820a8c72e" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1925.382110] env[63279]: DEBUG oslo_concurrency.lockutils [None req-1085f46f-382d-442a-8127-97a2236452f2 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Acquired lock "refresh_cache-977fa519-2db3-4ee5-981d-c46820a8c72e" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1925.382110] env[63279]: DEBUG nova.network.neutron [None req-1085f46f-382d-442a-8127-97a2236452f2 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: 977fa519-2db3-4ee5-981d-c46820a8c72e] Building network info cache for instance {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1925.386230] env[63279]: INFO nova.scheduler.client.report [None req-f091a69a-b91d-4949-b27c-36c5e3e3f986 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Deleted allocation for migration 9d057922-d0fc-4486-a0ee-a4b999090b3b [ 1925.441838] env[63279]: DEBUG oslo_vmware.api [None req-8d990ec3-fde0-427d-b132-b45041bfcd25 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] Task: {'id': task-2087001, 
'name': ReconfigVM_Task, 'duration_secs': 0.315814} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1925.442177] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-8d990ec3-fde0-427d-b132-b45041bfcd25 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] [instance: 5bb445d3-1b12-4a1b-ad2a-cbc929b13aee] Reconfigured VM instance instance-0000001f to attach disk [datastore1] 5bb445d3-1b12-4a1b-ad2a-cbc929b13aee/5bb445d3-1b12-4a1b-ad2a-cbc929b13aee.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1925.442808] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-66e23169-072d-4703-99f8-aff46254a75c {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1925.449511] env[63279]: DEBUG oslo_vmware.api [None req-8d990ec3-fde0-427d-b132-b45041bfcd25 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] Waiting for the task: (returnval){ [ 1925.449511] env[63279]: value = "task-2087002" [ 1925.449511] env[63279]: _type = "Task" [ 1925.449511] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1925.458081] env[63279]: DEBUG oslo_vmware.api [None req-8d990ec3-fde0-427d-b132-b45041bfcd25 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] Task: {'id': task-2087002, 'name': Rename_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1925.487142] env[63279]: DEBUG oslo_vmware.api [None req-96273fea-e321-4566-9745-1792b9a7cc32 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]5246f1a2-a1b9-5405-f054-7320429e4902, 'name': SearchDatastore_Task, 'duration_secs': 0.009087} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1925.488042] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-719a612a-deb9-4dce-ba85-61c8298133fc {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1925.493808] env[63279]: DEBUG oslo_vmware.api [None req-96273fea-e321-4566-9745-1792b9a7cc32 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] Waiting for the task: (returnval){ [ 1925.493808] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]529046dd-c69b-a720-5ef6-1c5a4021500b" [ 1925.493808] env[63279]: _type = "Task" [ 1925.493808] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1925.502424] env[63279]: DEBUG oslo_vmware.api [None req-96273fea-e321-4566-9745-1792b9a7cc32 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]529046dd-c69b-a720-5ef6-1c5a4021500b, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1925.568809] env[63279]: DEBUG nova.compute.manager [req-ed7123bc-2f4e-4e3f-b310-54ad85c6ee2d req-a57ad50c-812f-4aea-b518-46ae08fa6f67 service nova] [instance: 5572bb1c-b378-4531-8459-10c2a2b7afdf] Received event network-vif-deleted-61aa2656-cef9-4e23-b276-4328c1aeab6b {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 1925.574893] env[63279]: INFO nova.compute.manager [-] [instance: 5572bb1c-b378-4531-8459-10c2a2b7afdf] Took 1.35 seconds to deallocate network for instance. [ 1925.902343] env[63279]: DEBUG oslo_concurrency.lockutils [None req-f091a69a-b91d-4949-b27c-36c5e3e3f986 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Lock "5656c853-ac83-47be-83c4-979a9e87ab91" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 45.401s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1925.966963] env[63279]: DEBUG oslo_vmware.api [None req-8d990ec3-fde0-427d-b132-b45041bfcd25 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] Task: {'id': task-2087002, 'name': Rename_Task, 'duration_secs': 0.189651} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1925.967283] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-8d990ec3-fde0-427d-b132-b45041bfcd25 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] [instance: 5bb445d3-1b12-4a1b-ad2a-cbc929b13aee] Powering on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1925.967654] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-557df1f1-2995-43a1-8e12-1f46951730f5 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1925.975388] env[63279]: DEBUG oslo_vmware.api [None req-8d990ec3-fde0-427d-b132-b45041bfcd25 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] Waiting for the task: (returnval){ [ 1925.975388] env[63279]: value = "task-2087003" [ 1925.975388] env[63279]: _type = "Task" [ 1925.975388] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1925.988242] env[63279]: DEBUG nova.network.neutron [None req-1085f46f-382d-442a-8127-97a2236452f2 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: 977fa519-2db3-4ee5-981d-c46820a8c72e] Instance cache missing network info. {{(pid=63279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1925.992246] env[63279]: DEBUG oslo_vmware.api [None req-8d990ec3-fde0-427d-b132-b45041bfcd25 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] Task: {'id': task-2087003, 'name': PowerOnVM_Task} progress is 0%. 
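[editor's note] Taken together, the entries for instance 5bb445d3-1b12-4a1b-ad2a-cbc929b13aee trace the VMware spawn path: copy the cached image vmdk, extend the root disk, reconfigure the VM to attach it, rename it, then power it on, waiting on a vCenter task at each step. A compact summary of that observed order (illustrative only; the real calls live in nova.virt.vmwareapi.vm_util/vmops and take many more arguments):

    # Order of vCenter operations observed above for instance 5bb445d3-...
    steps = [
        ('VirtualDiskManager', 'CopyVirtualDisk_Task'),    # cached image vmdk -> instance vmdk
        ('VirtualDiskManager', 'ExtendVirtualDisk_Task'),  # grow the root disk to the flavor size
        ('VirtualMachine',     'ReconfigVM_Task'),         # attach the copied vmdk to the VM
        ('VirtualMachine',     'Rename_Task'),             # rename to the instance UUID
        ('VirtualMachine',     'PowerOnVM_Task'),          # power on; Nova then waits for the task
    ]
    for managed_object, method in steps:
        print(f'{managed_object}.{method}')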
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1926.008465] env[63279]: DEBUG oslo_vmware.api [None req-96273fea-e321-4566-9745-1792b9a7cc32 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]529046dd-c69b-a720-5ef6-1c5a4021500b, 'name': SearchDatastore_Task, 'duration_secs': 0.025028} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1926.011435] env[63279]: DEBUG oslo_concurrency.lockutils [None req-96273fea-e321-4566-9745-1792b9a7cc32 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1926.011781] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-96273fea-e321-4566-9745-1792b9a7cc32 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] acf95fad-316c-4605-9008-24d4d7c05892/acf95fad-316c-4605-9008-24d4d7c05892.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1926.013197] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-bfdf3c81-f724-4c17-bf5e-636bb65afbe2 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1926.021177] env[63279]: DEBUG oslo_vmware.api [None req-96273fea-e321-4566-9745-1792b9a7cc32 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] Waiting for the task: (returnval){ [ 1926.021177] env[63279]: value = "task-2087004" [ 1926.021177] env[63279]: _type = "Task" [ 1926.021177] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1926.034863] env[63279]: DEBUG oslo_vmware.api [None req-96273fea-e321-4566-9745-1792b9a7cc32 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] Task: {'id': task-2087004, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1926.083223] env[63279]: DEBUG oslo_concurrency.lockutils [None req-c91533fe-264f-4fd8-930f-c4d6558a1a1d tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1926.260924] env[63279]: DEBUG nova.network.neutron [None req-1085f46f-382d-442a-8127-97a2236452f2 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: 977fa519-2db3-4ee5-981d-c46820a8c72e] Updating instance_info_cache with network_info: [{"id": "a243284f-0df1-41a1-b8e4-a8d486dd934e", "address": "fa:16:3e:c0:97:08", "network": {"id": "e2728625-1c28-407c-946b-97923d57b409", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1735124510-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a5a719a21fe248c49d0d0151d218866b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a9abd00f-2cea-40f8-9804-a56b6431192d", "external-id": "nsx-vlan-transportzone-639", "segmentation_id": 639, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa243284f-0d", "ovs_interfaceid": "a243284f-0df1-41a1-b8e4-a8d486dd934e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1926.490454] env[63279]: DEBUG oslo_vmware.api [None req-8d990ec3-fde0-427d-b132-b45041bfcd25 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] Task: {'id': task-2087003, 'name': PowerOnVM_Task} progress is 89%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1926.533654] env[63279]: DEBUG oslo_vmware.api [None req-96273fea-e321-4566-9745-1792b9a7cc32 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] Task: {'id': task-2087004, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1926.586182] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16416a73-a86b-4a74-8551-e2c32abedbd9 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1926.594863] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cfc4f2bf-6d7d-4366-9ba2-a7cce823a90b {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1926.626305] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad0876ed-96ad-4b6c-aadf-d579e52cea9e {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1926.635197] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-152853f6-cfe7-40be-8069-e8c936524533 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1926.650267] env[63279]: DEBUG nova.compute.provider_tree [None req-e6033c0c-bf3a-4f54-ac44-9ee1b876d8b8 tempest-InstanceActionsTestJSON-1583999850 tempest-InstanceActionsTestJSON-1583999850-project-member] Inventory has not changed in ProviderTree for provider: 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1926.766634] env[63279]: DEBUG oslo_concurrency.lockutils [None req-1085f46f-382d-442a-8127-97a2236452f2 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Releasing lock "refresh_cache-977fa519-2db3-4ee5-981d-c46820a8c72e" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1926.767360] env[63279]: DEBUG nova.compute.manager [None req-1085f46f-382d-442a-8127-97a2236452f2 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: 977fa519-2db3-4ee5-981d-c46820a8c72e] Instance network_info: |[{"id": "a243284f-0df1-41a1-b8e4-a8d486dd934e", "address": "fa:16:3e:c0:97:08", "network": {"id": "e2728625-1c28-407c-946b-97923d57b409", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1735124510-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a5a719a21fe248c49d0d0151d218866b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a9abd00f-2cea-40f8-9804-a56b6431192d", "external-id": "nsx-vlan-transportzone-639", "segmentation_id": 639, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa243284f-0d", "ovs_interfaceid": "a243284f-0df1-41a1-b8e4-a8d486dd934e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1926.767874] env[63279]: DEBUG nova.virt.vmwareapi.vmops 
[None req-1085f46f-382d-442a-8127-97a2236452f2 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: 977fa519-2db3-4ee5-981d-c46820a8c72e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c0:97:08', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a9abd00f-2cea-40f8-9804-a56b6431192d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a243284f-0df1-41a1-b8e4-a8d486dd934e', 'vif_model': 'vmxnet3'}] {{(pid=63279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1926.776267] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-1085f46f-382d-442a-8127-97a2236452f2 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Creating folder: Project (a5a719a21fe248c49d0d0151d218866b). Parent ref: group-v427491. {{(pid=63279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1926.776982] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-281dfa5d-f2e3-498f-9d8a-11cd9784717d {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1926.789138] env[63279]: INFO nova.virt.vmwareapi.vm_util [None req-1085f46f-382d-442a-8127-97a2236452f2 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Created folder: Project (a5a719a21fe248c49d0d0151d218866b) in parent group-v427491. [ 1926.789759] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-1085f46f-382d-442a-8127-97a2236452f2 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Creating folder: Instances. Parent ref: group-v427587. {{(pid=63279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1926.789759] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-1e7b586e-1804-4239-9ef6-5ce270050609 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1926.799674] env[63279]: INFO nova.virt.vmwareapi.vm_util [None req-1085f46f-382d-442a-8127-97a2236452f2 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Created folder: Instances in parent group-v427587. [ 1926.799923] env[63279]: DEBUG oslo.service.loopingcall [None req-1085f46f-382d-442a-8127-97a2236452f2 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1926.800152] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 977fa519-2db3-4ee5-981d-c46820a8c72e] Creating VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1926.800374] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1b6cff38-2e6d-4320-b27c-2107c82e4a04 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1926.823009] env[63279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1926.823009] env[63279]: value = "task-2087007" [ 1926.823009] env[63279]: _type = "Task" [ 1926.823009] env[63279]: } to complete. 
{{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1926.833863] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087007, 'name': CreateVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1926.986242] env[63279]: DEBUG oslo_vmware.api [None req-8d990ec3-fde0-427d-b132-b45041bfcd25 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] Task: {'id': task-2087003, 'name': PowerOnVM_Task, 'duration_secs': 0.661875} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1926.987836] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-8d990ec3-fde0-427d-b132-b45041bfcd25 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] [instance: 5bb445d3-1b12-4a1b-ad2a-cbc929b13aee] Powered on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1926.987836] env[63279]: INFO nova.compute.manager [None req-8d990ec3-fde0-427d-b132-b45041bfcd25 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] [instance: 5bb445d3-1b12-4a1b-ad2a-cbc929b13aee] Took 8.32 seconds to spawn the instance on the hypervisor. [ 1926.987925] env[63279]: DEBUG nova.compute.manager [None req-8d990ec3-fde0-427d-b132-b45041bfcd25 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] [instance: 5bb445d3-1b12-4a1b-ad2a-cbc929b13aee] Checking state {{(pid=63279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1926.989081] env[63279]: DEBUG nova.compute.manager [req-845fe910-e0ec-4f4b-85ab-ca7bbcc25336 req-52d387cd-16ec-4cbf-a41d-1b7ffa037939 service nova] [instance: 977fa519-2db3-4ee5-981d-c46820a8c72e] Received event network-changed-a243284f-0df1-41a1-b8e4-a8d486dd934e {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 1926.989280] env[63279]: DEBUG nova.compute.manager [req-845fe910-e0ec-4f4b-85ab-ca7bbcc25336 req-52d387cd-16ec-4cbf-a41d-1b7ffa037939 service nova] [instance: 977fa519-2db3-4ee5-981d-c46820a8c72e] Refreshing instance network info cache due to event network-changed-a243284f-0df1-41a1-b8e4-a8d486dd934e. 
{{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11655}} [ 1926.989505] env[63279]: DEBUG oslo_concurrency.lockutils [req-845fe910-e0ec-4f4b-85ab-ca7bbcc25336 req-52d387cd-16ec-4cbf-a41d-1b7ffa037939 service nova] Acquiring lock "refresh_cache-977fa519-2db3-4ee5-981d-c46820a8c72e" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1926.989654] env[63279]: DEBUG oslo_concurrency.lockutils [req-845fe910-e0ec-4f4b-85ab-ca7bbcc25336 req-52d387cd-16ec-4cbf-a41d-1b7ffa037939 service nova] Acquired lock "refresh_cache-977fa519-2db3-4ee5-981d-c46820a8c72e" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1926.990038] env[63279]: DEBUG nova.network.neutron [req-845fe910-e0ec-4f4b-85ab-ca7bbcc25336 req-52d387cd-16ec-4cbf-a41d-1b7ffa037939 service nova] [instance: 977fa519-2db3-4ee5-981d-c46820a8c72e] Refreshing network info cache for port a243284f-0df1-41a1-b8e4-a8d486dd934e {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1926.991687] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45ae4619-a6ee-44a9-956c-5c504785751b {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1927.030452] env[63279]: DEBUG oslo_vmware.api [None req-96273fea-e321-4566-9745-1792b9a7cc32 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] Task: {'id': task-2087004, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1927.153534] env[63279]: DEBUG nova.scheduler.client.report [None req-e6033c0c-bf3a-4f54-ac44-9ee1b876d8b8 tempest-InstanceActionsTestJSON-1583999850 tempest-InstanceActionsTestJSON-1583999850-project-member] Inventory has not changed for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1927.333830] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087007, 'name': CreateVM_Task} progress is 99%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1927.511786] env[63279]: INFO nova.compute.manager [None req-8d990ec3-fde0-427d-b132-b45041bfcd25 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] [instance: 5bb445d3-1b12-4a1b-ad2a-cbc929b13aee] Took 52.50 seconds to build instance. [ 1927.531788] env[63279]: DEBUG oslo_vmware.api [None req-96273fea-e321-4566-9745-1792b9a7cc32 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] Task: {'id': task-2087004, 'name': CopyVirtualDisk_Task} progress is 100%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1927.661026] env[63279]: DEBUG oslo_concurrency.lockutils [None req-e6033c0c-bf3a-4f54-ac44-9ee1b876d8b8 tempest-InstanceActionsTestJSON-1583999850 tempest-InstanceActionsTestJSON-1583999850-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.839s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1927.661497] env[63279]: DEBUG nova.compute.manager [None req-e6033c0c-bf3a-4f54-ac44-9ee1b876d8b8 tempest-InstanceActionsTestJSON-1583999850 tempest-InstanceActionsTestJSON-1583999850-project-member] [instance: de543869-8ab1-40ed-8f6d-dc506c257843] Start building networks asynchronously for instance. {{(pid=63279) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1927.666337] env[63279]: DEBUG oslo_concurrency.lockutils [None req-0a937d49-2bb5-4c2d-b540-fcaea2c0fe97 tempest-DeleteServersAdminTestJSON-778262859 tempest-DeleteServersAdminTestJSON-778262859-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 37.071s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1927.668159] env[63279]: INFO nova.compute.claims [None req-0a937d49-2bb5-4c2d-b540-fcaea2c0fe97 tempest-DeleteServersAdminTestJSON-778262859 tempest-DeleteServersAdminTestJSON-778262859-project-member] [instance: 81103d53-99fe-4d1a-816f-7685c59c80ee] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1927.739575] env[63279]: DEBUG nova.network.neutron [req-845fe910-e0ec-4f4b-85ab-ca7bbcc25336 req-52d387cd-16ec-4cbf-a41d-1b7ffa037939 service nova] [instance: 977fa519-2db3-4ee5-981d-c46820a8c72e] Updated VIF entry in instance network info cache for port a243284f-0df1-41a1-b8e4-a8d486dd934e. 
{{(pid=63279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1927.739575] env[63279]: DEBUG nova.network.neutron [req-845fe910-e0ec-4f4b-85ab-ca7bbcc25336 req-52d387cd-16ec-4cbf-a41d-1b7ffa037939 service nova] [instance: 977fa519-2db3-4ee5-981d-c46820a8c72e] Updating instance_info_cache with network_info: [{"id": "a243284f-0df1-41a1-b8e4-a8d486dd934e", "address": "fa:16:3e:c0:97:08", "network": {"id": "e2728625-1c28-407c-946b-97923d57b409", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1735124510-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a5a719a21fe248c49d0d0151d218866b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a9abd00f-2cea-40f8-9804-a56b6431192d", "external-id": "nsx-vlan-transportzone-639", "segmentation_id": 639, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa243284f-0d", "ovs_interfaceid": "a243284f-0df1-41a1-b8e4-a8d486dd934e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1927.836981] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087007, 'name': CreateVM_Task, 'duration_secs': 0.544683} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1927.840685] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 977fa519-2db3-4ee5-981d-c46820a8c72e] Created VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1927.841473] env[63279]: DEBUG oslo_concurrency.lockutils [None req-1085f46f-382d-442a-8127-97a2236452f2 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1927.841646] env[63279]: DEBUG oslo_concurrency.lockutils [None req-1085f46f-382d-442a-8127-97a2236452f2 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1927.841971] env[63279]: DEBUG oslo_concurrency.lockutils [None req-1085f46f-382d-442a-8127-97a2236452f2 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1927.842261] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3d093677-8e2c-4e8e-b652-64712f521b80 {{(pid=63279) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1927.849454] env[63279]: DEBUG oslo_concurrency.lockutils [None req-efbb3008-c956-4d6e-b972-b10e6dc25335 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Acquiring lock "f6a5d157-d58c-4c7e-b6e8-f2dc7aaebd9b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1927.849685] env[63279]: DEBUG oslo_concurrency.lockutils [None req-efbb3008-c956-4d6e-b972-b10e6dc25335 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Lock "f6a5d157-d58c-4c7e-b6e8-f2dc7aaebd9b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1927.849949] env[63279]: DEBUG oslo_vmware.api [None req-1085f46f-382d-442a-8127-97a2236452f2 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Waiting for the task: (returnval){ [ 1927.849949] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52a654d3-7c8a-b78e-979f-944ea7704967" [ 1927.849949] env[63279]: _type = "Task" [ 1927.849949] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1927.858284] env[63279]: DEBUG oslo_vmware.api [None req-1085f46f-382d-442a-8127-97a2236452f2 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52a654d3-7c8a-b78e-979f-944ea7704967, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1928.014667] env[63279]: DEBUG oslo_concurrency.lockutils [None req-8d990ec3-fde0-427d-b132-b45041bfcd25 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] Lock "5bb445d3-1b12-4a1b-ad2a-cbc929b13aee" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 87.533s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1928.032277] env[63279]: DEBUG oslo_vmware.api [None req-96273fea-e321-4566-9745-1792b9a7cc32 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] Task: {'id': task-2087004, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.557802} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1928.034188] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-96273fea-e321-4566-9745-1792b9a7cc32 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] acf95fad-316c-4605-9008-24d4d7c05892/acf95fad-316c-4605-9008-24d4d7c05892.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1928.034188] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-96273fea-e321-4566-9745-1792b9a7cc32 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] [instance: acf95fad-316c-4605-9008-24d4d7c05892] Extending root virtual disk to 1048576 {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1928.034188] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-fd0e327f-6e44-4234-9616-0c18000ef35e {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1928.041374] env[63279]: DEBUG oslo_vmware.api [None req-96273fea-e321-4566-9745-1792b9a7cc32 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] Waiting for the task: (returnval){ [ 1928.041374] env[63279]: value = "task-2087008" [ 1928.041374] env[63279]: _type = "Task" [ 1928.041374] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1928.050867] env[63279]: DEBUG oslo_vmware.api [None req-96273fea-e321-4566-9745-1792b9a7cc32 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] Task: {'id': task-2087008, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1928.172824] env[63279]: DEBUG nova.compute.utils [None req-e6033c0c-bf3a-4f54-ac44-9ee1b876d8b8 tempest-InstanceActionsTestJSON-1583999850 tempest-InstanceActionsTestJSON-1583999850-project-member] Using /dev/sd instead of None {{(pid=63279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1928.175974] env[63279]: DEBUG nova.compute.manager [None req-e6033c0c-bf3a-4f54-ac44-9ee1b876d8b8 tempest-InstanceActionsTestJSON-1583999850 tempest-InstanceActionsTestJSON-1583999850-project-member] [instance: de543869-8ab1-40ed-8f6d-dc506c257843] Allocating IP information in the background. 
{{(pid=63279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1928.176266] env[63279]: DEBUG nova.network.neutron [None req-e6033c0c-bf3a-4f54-ac44-9ee1b876d8b8 tempest-InstanceActionsTestJSON-1583999850 tempest-InstanceActionsTestJSON-1583999850-project-member] [instance: de543869-8ab1-40ed-8f6d-dc506c257843] allocate_for_instance() {{(pid=63279) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1928.236767] env[63279]: DEBUG nova.policy [None req-e6033c0c-bf3a-4f54-ac44-9ee1b876d8b8 tempest-InstanceActionsTestJSON-1583999850 tempest-InstanceActionsTestJSON-1583999850-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'bc59cb27caf64e7a9d916dbe60778314', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '685b5deb728448eb9eb023905d680288', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63279) authorize /opt/stack/nova/nova/policy.py:192}} [ 1928.241178] env[63279]: DEBUG oslo_concurrency.lockutils [req-845fe910-e0ec-4f4b-85ab-ca7bbcc25336 req-52d387cd-16ec-4cbf-a41d-1b7ffa037939 service nova] Releasing lock "refresh_cache-977fa519-2db3-4ee5-981d-c46820a8c72e" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1928.360649] env[63279]: DEBUG oslo_vmware.api [None req-1085f46f-382d-442a-8127-97a2236452f2 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52a654d3-7c8a-b78e-979f-944ea7704967, 'name': SearchDatastore_Task, 'duration_secs': 0.010108} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1928.360965] env[63279]: DEBUG oslo_concurrency.lockutils [None req-1085f46f-382d-442a-8127-97a2236452f2 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1928.361482] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-1085f46f-382d-442a-8127-97a2236452f2 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: 977fa519-2db3-4ee5-981d-c46820a8c72e] Processing image 30887889-e45b-4f67-8b3c-16216e594a90 {{(pid=63279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1928.361482] env[63279]: DEBUG oslo_concurrency.lockutils [None req-1085f46f-382d-442a-8127-97a2236452f2 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1928.361595] env[63279]: DEBUG oslo_concurrency.lockutils [None req-1085f46f-382d-442a-8127-97a2236452f2 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1928.361762] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-1085f46f-382d-442a-8127-97a2236452f2 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1928.362022] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0b8dd20e-f1c7-4b78-af8b-e06eeeb1d015 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1928.370325] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-1085f46f-382d-442a-8127-97a2236452f2 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1928.370511] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-1085f46f-382d-442a-8127-97a2236452f2 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1928.371204] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8bd16216-502e-4662-a5e4-17e07a844d16 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1928.376225] env[63279]: DEBUG oslo_vmware.api [None req-1085f46f-382d-442a-8127-97a2236452f2 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Waiting for the task: (returnval){ [ 1928.376225] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]524d7323-f953-7e81-94e2-b9dc12ddd692" [ 1928.376225] env[63279]: _type = "Task" [ 1928.376225] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1928.385741] env[63279]: DEBUG oslo_vmware.api [None req-1085f46f-382d-442a-8127-97a2236452f2 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]524d7323-f953-7e81-94e2-b9dc12ddd692, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1928.517568] env[63279]: DEBUG nova.compute.manager [None req-9711dcd5-32f1-47ed-9935-39b9be9d1117 tempest-ServerTagsTestJSON-1345092865 tempest-ServerTagsTestJSON-1345092865-project-member] [instance: df410051-d551-4a90-81f7-5630f5521a10] Starting instance... {{(pid=63279) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1928.545824] env[63279]: DEBUG nova.network.neutron [None req-e6033c0c-bf3a-4f54-ac44-9ee1b876d8b8 tempest-InstanceActionsTestJSON-1583999850 tempest-InstanceActionsTestJSON-1583999850-project-member] [instance: de543869-8ab1-40ed-8f6d-dc506c257843] Successfully created port: c8a557ed-47ef-46c1-a803-10a8150cb66a {{(pid=63279) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1928.553710] env[63279]: DEBUG oslo_vmware.api [None req-96273fea-e321-4566-9745-1792b9a7cc32 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] Task: {'id': task-2087008, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.068454} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1928.553808] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-96273fea-e321-4566-9745-1792b9a7cc32 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] [instance: acf95fad-316c-4605-9008-24d4d7c05892] Extended root virtual disk {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1928.554628] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3de7b22b-c0d3-45e0-aa56-e2af3e89b313 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1928.577766] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-96273fea-e321-4566-9745-1792b9a7cc32 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] [instance: acf95fad-316c-4605-9008-24d4d7c05892] Reconfiguring VM instance instance-00000020 to attach disk [datastore1] acf95fad-316c-4605-9008-24d4d7c05892/acf95fad-316c-4605-9008-24d4d7c05892.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1928.578870] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1f6bd527-1927-42d0-8607-d3d033048822 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1928.599259] env[63279]: DEBUG oslo_vmware.api [None req-96273fea-e321-4566-9745-1792b9a7cc32 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] Waiting for the task: (returnval){ [ 1928.599259] env[63279]: value = "task-2087009" [ 1928.599259] env[63279]: _type = "Task" [ 1928.599259] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1928.611030] env[63279]: DEBUG oslo_vmware.api [None req-96273fea-e321-4566-9745-1792b9a7cc32 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] Task: {'id': task-2087009, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1928.676828] env[63279]: DEBUG nova.compute.manager [None req-e6033c0c-bf3a-4f54-ac44-9ee1b876d8b8 tempest-InstanceActionsTestJSON-1583999850 tempest-InstanceActionsTestJSON-1583999850-project-member] [instance: de543869-8ab1-40ed-8f6d-dc506c257843] Start building block device mappings for instance. {{(pid=63279) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1928.885995] env[63279]: DEBUG oslo_vmware.api [None req-1085f46f-382d-442a-8127-97a2236452f2 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]524d7323-f953-7e81-94e2-b9dc12ddd692, 'name': SearchDatastore_Task, 'duration_secs': 0.008876} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1928.889057] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-66be7269-d22a-4f8c-ad30-50f42bc6220e {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1928.894089] env[63279]: DEBUG oslo_vmware.api [None req-1085f46f-382d-442a-8127-97a2236452f2 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Waiting for the task: (returnval){ [ 1928.894089] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52639fcc-34b6-1669-e9e8-5294bee7e49c" [ 1928.894089] env[63279]: _type = "Task" [ 1928.894089] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1928.905401] env[63279]: DEBUG oslo_vmware.api [None req-1085f46f-382d-442a-8127-97a2236452f2 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52639fcc-34b6-1669-e9e8-5294bee7e49c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1929.037025] env[63279]: DEBUG oslo_concurrency.lockutils [None req-9711dcd5-32f1-47ed-9935-39b9be9d1117 tempest-ServerTagsTestJSON-1345092865 tempest-ServerTagsTestJSON-1345092865-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1929.108521] env[63279]: DEBUG oslo_vmware.api [None req-96273fea-e321-4566-9745-1792b9a7cc32 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] Task: {'id': task-2087009, 'name': ReconfigVM_Task, 'duration_secs': 0.269867} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1929.108823] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-96273fea-e321-4566-9745-1792b9a7cc32 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] [instance: acf95fad-316c-4605-9008-24d4d7c05892] Reconfigured VM instance instance-00000020 to attach disk [datastore1] acf95fad-316c-4605-9008-24d4d7c05892/acf95fad-316c-4605-9008-24d4d7c05892.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1929.109570] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-08ce59e2-6060-411c-8f98-d88b110dde81 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1929.116311] env[63279]: DEBUG oslo_vmware.api [None req-96273fea-e321-4566-9745-1792b9a7cc32 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] Waiting for the task: (returnval){ [ 1929.116311] env[63279]: value = "task-2087010" [ 1929.116311] env[63279]: _type = "Task" [ 1929.116311] env[63279]: } to complete. 
{{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1929.124619] env[63279]: DEBUG oslo_vmware.api [None req-96273fea-e321-4566-9745-1792b9a7cc32 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] Task: {'id': task-2087010, 'name': Rename_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1929.174544] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a73b74a0-bd5a-43ea-bc7d-dddf5c34b83b {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1929.182077] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c426cba-202d-4e3d-bf49-ae554c7fc0b2 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1929.221130] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93bd40ee-5612-4bb3-b681-612f202487b5 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1929.230057] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c782deeb-afb2-43f7-a6e8-3e99790a5303 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1929.245848] env[63279]: DEBUG nova.compute.provider_tree [None req-0a937d49-2bb5-4c2d-b540-fcaea2c0fe97 tempest-DeleteServersAdminTestJSON-778262859 tempest-DeleteServersAdminTestJSON-778262859-project-member] Updating inventory in ProviderTree for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1929.404871] env[63279]: DEBUG oslo_vmware.api [None req-1085f46f-382d-442a-8127-97a2236452f2 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52639fcc-34b6-1669-e9e8-5294bee7e49c, 'name': SearchDatastore_Task, 'duration_secs': 0.012972} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1929.405254] env[63279]: DEBUG oslo_concurrency.lockutils [None req-1085f46f-382d-442a-8127-97a2236452f2 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1929.405566] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-1085f46f-382d-442a-8127-97a2236452f2 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] 977fa519-2db3-4ee5-981d-c46820a8c72e/977fa519-2db3-4ee5-981d-c46820a8c72e.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1929.405847] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-40f83ad5-a61c-42d8-9374-30b10892b48a {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1929.412996] env[63279]: DEBUG oslo_vmware.api [None req-1085f46f-382d-442a-8127-97a2236452f2 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Waiting for the task: (returnval){ [ 1929.412996] env[63279]: value = "task-2087011" [ 1929.412996] env[63279]: _type = "Task" [ 1929.412996] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1929.420762] env[63279]: DEBUG oslo_vmware.api [None req-1085f46f-382d-442a-8127-97a2236452f2 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Task: {'id': task-2087011, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1929.630480] env[63279]: DEBUG oslo_vmware.api [None req-96273fea-e321-4566-9745-1792b9a7cc32 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] Task: {'id': task-2087010, 'name': Rename_Task, 'duration_secs': 0.157334} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1929.630776] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-96273fea-e321-4566-9745-1792b9a7cc32 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] [instance: acf95fad-316c-4605-9008-24d4d7c05892] Powering on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1929.631047] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-aa9e78d3-2a47-4261-bd05-10742c55223c {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1929.638695] env[63279]: DEBUG oslo_vmware.api [None req-96273fea-e321-4566-9745-1792b9a7cc32 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] Waiting for the task: (returnval){ [ 1929.638695] env[63279]: value = "task-2087012" [ 1929.638695] env[63279]: _type = "Task" [ 1929.638695] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1929.651861] env[63279]: DEBUG oslo_vmware.api [None req-96273fea-e321-4566-9745-1792b9a7cc32 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] Task: {'id': task-2087012, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1929.691608] env[63279]: DEBUG nova.compute.manager [None req-e6033c0c-bf3a-4f54-ac44-9ee1b876d8b8 tempest-InstanceActionsTestJSON-1583999850 tempest-InstanceActionsTestJSON-1583999850-project-member] [instance: de543869-8ab1-40ed-8f6d-dc506c257843] Start spawning the instance on the hypervisor. 
{{(pid=63279) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1929.725374] env[63279]: DEBUG nova.virt.hardware [None req-e6033c0c-bf3a-4f54-ac44-9ee1b876d8b8 tempest-InstanceActionsTestJSON-1583999850 tempest-InstanceActionsTestJSON-1583999850-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-13T17:30:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-13T17:30:01Z,direct_url=,disk_format='vmdk',id=30887889-e45b-4f67-8b3c-16216e594a90,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a935e86eee0a4c38adfe0367d2097a61',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-13T17:30:02Z,virtual_size=,visibility=), allow threads: False {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1929.725640] env[63279]: DEBUG nova.virt.hardware [None req-e6033c0c-bf3a-4f54-ac44-9ee1b876d8b8 tempest-InstanceActionsTestJSON-1583999850 tempest-InstanceActionsTestJSON-1583999850-project-member] Flavor limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1929.725811] env[63279]: DEBUG nova.virt.hardware [None req-e6033c0c-bf3a-4f54-ac44-9ee1b876d8b8 tempest-InstanceActionsTestJSON-1583999850 tempest-InstanceActionsTestJSON-1583999850-project-member] Image limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1929.726013] env[63279]: DEBUG nova.virt.hardware [None req-e6033c0c-bf3a-4f54-ac44-9ee1b876d8b8 tempest-InstanceActionsTestJSON-1583999850 tempest-InstanceActionsTestJSON-1583999850-project-member] Flavor pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1929.726331] env[63279]: DEBUG nova.virt.hardware [None req-e6033c0c-bf3a-4f54-ac44-9ee1b876d8b8 tempest-InstanceActionsTestJSON-1583999850 tempest-InstanceActionsTestJSON-1583999850-project-member] Image pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1929.726495] env[63279]: DEBUG nova.virt.hardware [None req-e6033c0c-bf3a-4f54-ac44-9ee1b876d8b8 tempest-InstanceActionsTestJSON-1583999850 tempest-InstanceActionsTestJSON-1583999850-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1929.726740] env[63279]: DEBUG nova.virt.hardware [None req-e6033c0c-bf3a-4f54-ac44-9ee1b876d8b8 tempest-InstanceActionsTestJSON-1583999850 tempest-InstanceActionsTestJSON-1583999850-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1929.726902] env[63279]: DEBUG nova.virt.hardware [None req-e6033c0c-bf3a-4f54-ac44-9ee1b876d8b8 tempest-InstanceActionsTestJSON-1583999850 tempest-InstanceActionsTestJSON-1583999850-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1929.727104] env[63279]: DEBUG 
nova.virt.hardware [None req-e6033c0c-bf3a-4f54-ac44-9ee1b876d8b8 tempest-InstanceActionsTestJSON-1583999850 tempest-InstanceActionsTestJSON-1583999850-project-member] Got 1 possible topologies {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1929.727294] env[63279]: DEBUG nova.virt.hardware [None req-e6033c0c-bf3a-4f54-ac44-9ee1b876d8b8 tempest-InstanceActionsTestJSON-1583999850 tempest-InstanceActionsTestJSON-1583999850-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1929.727474] env[63279]: DEBUG nova.virt.hardware [None req-e6033c0c-bf3a-4f54-ac44-9ee1b876d8b8 tempest-InstanceActionsTestJSON-1583999850 tempest-InstanceActionsTestJSON-1583999850-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1929.728416] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6c38d8a-d8b0-4108-a0b7-bb69ab20d89c {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1929.737893] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba3675c5-72cc-4ea0-abd8-c9d766f08b29 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1929.775065] env[63279]: ERROR nova.scheduler.client.report [None req-0a937d49-2bb5-4c2d-b540-fcaea2c0fe97 tempest-DeleteServersAdminTestJSON-778262859 tempest-DeleteServersAdminTestJSON-778262859-project-member] [req-27123c28-de3b-407c-8cfe-9ac75c1a4537] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 0ba7c625-a0fc-4d3c-b804-196d00f00137. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-27123c28-de3b-407c-8cfe-9ac75c1a4537"}]} [ 1929.792351] env[63279]: DEBUG nova.scheduler.client.report [None req-0a937d49-2bb5-4c2d-b540-fcaea2c0fe97 tempest-DeleteServersAdminTestJSON-778262859 tempest-DeleteServersAdminTestJSON-778262859-project-member] Refreshing inventories for resource provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1929.809595] env[63279]: DEBUG nova.scheduler.client.report [None req-0a937d49-2bb5-4c2d-b540-fcaea2c0fe97 tempest-DeleteServersAdminTestJSON-778262859 tempest-DeleteServersAdminTestJSON-778262859-project-member] Updating ProviderTree inventory for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1929.809943] env[63279]: DEBUG nova.compute.provider_tree [None req-0a937d49-2bb5-4c2d-b540-fcaea2c0fe97 tempest-DeleteServersAdminTestJSON-778262859 tempest-DeleteServersAdminTestJSON-778262859-project-member] Updating inventory in ProviderTree for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1929.823822] env[63279]: DEBUG nova.scheduler.client.report [None req-0a937d49-2bb5-4c2d-b540-fcaea2c0fe97 tempest-DeleteServersAdminTestJSON-778262859 tempest-DeleteServersAdminTestJSON-778262859-project-member] Refreshing aggregate associations for resource provider 0ba7c625-a0fc-4d3c-b804-196d00f00137, aggregates: None {{(pid=63279) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1929.843922] env[63279]: DEBUG nova.scheduler.client.report [None req-0a937d49-2bb5-4c2d-b540-fcaea2c0fe97 tempest-DeleteServersAdminTestJSON-778262859 tempest-DeleteServersAdminTestJSON-778262859-project-member] Refreshing trait associations for resource provider 0ba7c625-a0fc-4d3c-b804-196d00f00137, traits: COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK {{(pid=63279) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1929.929415] env[63279]: DEBUG oslo_vmware.api [None req-1085f46f-382d-442a-8127-97a2236452f2 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Task: {'id': task-2087011, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.50505} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1929.930237] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-1085f46f-382d-442a-8127-97a2236452f2 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] 977fa519-2db3-4ee5-981d-c46820a8c72e/977fa519-2db3-4ee5-981d-c46820a8c72e.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1929.930485] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-1085f46f-382d-442a-8127-97a2236452f2 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: 977fa519-2db3-4ee5-981d-c46820a8c72e] Extending root virtual disk to 1048576 {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1929.930848] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-7fb978fa-1709-4c85-97bc-bdb285a9e147 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1929.939788] env[63279]: DEBUG oslo_vmware.api [None req-1085f46f-382d-442a-8127-97a2236452f2 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Waiting for the task: (returnval){ [ 1929.939788] env[63279]: value = "task-2087013" [ 1929.939788] env[63279]: _type = "Task" [ 1929.939788] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1929.956354] env[63279]: DEBUG oslo_vmware.api [None req-1085f46f-382d-442a-8127-97a2236452f2 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Task: {'id': task-2087013, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1930.035600] env[63279]: DEBUG nova.compute.manager [req-c7b68c05-a660-4b09-9b3a-9cb726578934 req-ef08eefe-9f51-4517-872f-095301b1e7d0 service nova] [instance: de543869-8ab1-40ed-8f6d-dc506c257843] Received event network-vif-plugged-c8a557ed-47ef-46c1-a803-10a8150cb66a {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 1930.035823] env[63279]: DEBUG oslo_concurrency.lockutils [req-c7b68c05-a660-4b09-9b3a-9cb726578934 req-ef08eefe-9f51-4517-872f-095301b1e7d0 service nova] Acquiring lock "de543869-8ab1-40ed-8f6d-dc506c257843-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1930.036089] env[63279]: DEBUG oslo_concurrency.lockutils [req-c7b68c05-a660-4b09-9b3a-9cb726578934 req-ef08eefe-9f51-4517-872f-095301b1e7d0 service nova] Lock "de543869-8ab1-40ed-8f6d-dc506c257843-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1930.036226] env[63279]: DEBUG oslo_concurrency.lockutils [req-c7b68c05-a660-4b09-9b3a-9cb726578934 req-ef08eefe-9f51-4517-872f-095301b1e7d0 service nova] Lock "de543869-8ab1-40ed-8f6d-dc506c257843-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1930.036378] env[63279]: DEBUG nova.compute.manager [req-c7b68c05-a660-4b09-9b3a-9cb726578934 req-ef08eefe-9f51-4517-872f-095301b1e7d0 service nova] [instance: de543869-8ab1-40ed-8f6d-dc506c257843] No waiting events found dispatching network-vif-plugged-c8a557ed-47ef-46c1-a803-10a8150cb66a {{(pid=63279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1930.036549] env[63279]: WARNING nova.compute.manager [req-c7b68c05-a660-4b09-9b3a-9cb726578934 req-ef08eefe-9f51-4517-872f-095301b1e7d0 service nova] [instance: de543869-8ab1-40ed-8f6d-dc506c257843] Received unexpected event network-vif-plugged-c8a557ed-47ef-46c1-a803-10a8150cb66a for instance with vm_state building and task_state spawning. [ 1930.138358] env[63279]: DEBUG nova.network.neutron [None req-e6033c0c-bf3a-4f54-ac44-9ee1b876d8b8 tempest-InstanceActionsTestJSON-1583999850 tempest-InstanceActionsTestJSON-1583999850-project-member] [instance: de543869-8ab1-40ed-8f6d-dc506c257843] Successfully updated port: c8a557ed-47ef-46c1-a803-10a8150cb66a {{(pid=63279) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1930.151410] env[63279]: DEBUG oslo_vmware.api [None req-96273fea-e321-4566-9745-1792b9a7cc32 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] Task: {'id': task-2087012, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1930.416187] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd208564-1cdf-420c-83dd-d2d3784d4082 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1930.425389] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cfb6096e-4284-4d46-8345-3697809463cd {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1930.467854] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d476f43-7e9a-4b5d-a287-4760c9d6052b {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1930.477778] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b1c22e2-a439-4716-9051-ce521324593c {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1930.481539] env[63279]: DEBUG oslo_vmware.api [None req-1085f46f-382d-442a-8127-97a2236452f2 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Task: {'id': task-2087013, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.093136} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1930.483034] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-1085f46f-382d-442a-8127-97a2236452f2 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: 977fa519-2db3-4ee5-981d-c46820a8c72e] Extended root virtual disk {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1930.483034] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f45656a5-ccab-42d9-aed4-6e189935560a {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1930.493348] env[63279]: DEBUG nova.compute.provider_tree [None req-0a937d49-2bb5-4c2d-b540-fcaea2c0fe97 tempest-DeleteServersAdminTestJSON-778262859 tempest-DeleteServersAdminTestJSON-778262859-project-member] Updating inventory in ProviderTree for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1930.515952] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-1085f46f-382d-442a-8127-97a2236452f2 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: 977fa519-2db3-4ee5-981d-c46820a8c72e] Reconfiguring VM instance instance-00000021 to attach disk [datastore1] 977fa519-2db3-4ee5-981d-c46820a8c72e/977fa519-2db3-4ee5-981d-c46820a8c72e.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1930.516955] env[63279]: DEBUG 
oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ebc04108-f47c-4169-81a2-e86dcb42570b {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1930.537840] env[63279]: DEBUG oslo_vmware.api [None req-1085f46f-382d-442a-8127-97a2236452f2 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Waiting for the task: (returnval){ [ 1930.537840] env[63279]: value = "task-2087014" [ 1930.537840] env[63279]: _type = "Task" [ 1930.537840] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1930.546129] env[63279]: DEBUG oslo_vmware.api [None req-1085f46f-382d-442a-8127-97a2236452f2 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Task: {'id': task-2087014, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1930.643947] env[63279]: DEBUG oslo_concurrency.lockutils [None req-e6033c0c-bf3a-4f54-ac44-9ee1b876d8b8 tempest-InstanceActionsTestJSON-1583999850 tempest-InstanceActionsTestJSON-1583999850-project-member] Acquiring lock "refresh_cache-de543869-8ab1-40ed-8f6d-dc506c257843" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1930.644134] env[63279]: DEBUG oslo_concurrency.lockutils [None req-e6033c0c-bf3a-4f54-ac44-9ee1b876d8b8 tempest-InstanceActionsTestJSON-1583999850 tempest-InstanceActionsTestJSON-1583999850-project-member] Acquired lock "refresh_cache-de543869-8ab1-40ed-8f6d-dc506c257843" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1930.644290] env[63279]: DEBUG nova.network.neutron [None req-e6033c0c-bf3a-4f54-ac44-9ee1b876d8b8 tempest-InstanceActionsTestJSON-1583999850 tempest-InstanceActionsTestJSON-1583999850-project-member] [instance: de543869-8ab1-40ed-8f6d-dc506c257843] Building network info cache for instance {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1930.648634] env[63279]: DEBUG oslo_vmware.api [None req-96273fea-e321-4566-9745-1792b9a7cc32 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] Task: {'id': task-2087012, 'name': PowerOnVM_Task, 'duration_secs': 0.810013} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1930.649165] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-96273fea-e321-4566-9745-1792b9a7cc32 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] [instance: acf95fad-316c-4605-9008-24d4d7c05892] Powered on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1930.649367] env[63279]: INFO nova.compute.manager [None req-96273fea-e321-4566-9745-1792b9a7cc32 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] [instance: acf95fad-316c-4605-9008-24d4d7c05892] Took 9.29 seconds to spawn the instance on the hypervisor. 
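The 409 from Placement a few entries above ("placement.concurrent_update") is its optimistic-concurrency check: the inventory update was sent with a stale resource provider generation, so the report client re-reads the provider's inventories, aggregates and traits and tries again; the retry just below succeeds and bumps the generation from 68 to 69. A minimal sketch of that read-and-retry loop, using hypothetical get_inventory/put_inventory helpers rather than Nova's real SchedulerReportClient:

    import time

    def set_inventory_with_retry(client, rp_uuid, inventories, max_attempts=4):
        """Retry an inventory update until the provider generation matches."""
        for attempt in range(max_attempts):
            # Re-read so the write carries the provider's current generation.
            generation, _current = client.get_inventory(rp_uuid)
            status = client.put_inventory(rp_uuid, generation, inventories)
            if status == 200:
                return True          # accepted; Placement bumps the generation
            if status == 409:
                # placement.concurrent_update: another writer won the race;
                # back off briefly, then refresh and retry.
                time.sleep(0.1 * (attempt + 1))
                continue
            raise RuntimeError("unexpected status %s from placement" % status)
        return False
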
[ 1930.649566] env[63279]: DEBUG nova.compute.manager [None req-96273fea-e321-4566-9745-1792b9a7cc32 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] [instance: acf95fad-316c-4605-9008-24d4d7c05892] Checking state {{(pid=63279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1930.650386] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6044aa7d-9b90-4d7f-8c42-082bd1432b08 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1931.025717] env[63279]: DEBUG nova.scheduler.client.report [None req-0a937d49-2bb5-4c2d-b540-fcaea2c0fe97 tempest-DeleteServersAdminTestJSON-778262859 tempest-DeleteServersAdminTestJSON-778262859-project-member] Updated inventory for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 with generation 68 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1931.026248] env[63279]: DEBUG nova.compute.provider_tree [None req-0a937d49-2bb5-4c2d-b540-fcaea2c0fe97 tempest-DeleteServersAdminTestJSON-778262859 tempest-DeleteServersAdminTestJSON-778262859-project-member] Updating resource provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 generation from 68 to 69 during operation: update_inventory {{(pid=63279) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1931.026574] env[63279]: DEBUG nova.compute.provider_tree [None req-0a937d49-2bb5-4c2d-b540-fcaea2c0fe97 tempest-DeleteServersAdminTestJSON-778262859 tempest-DeleteServersAdminTestJSON-778262859-project-member] Updating inventory in ProviderTree for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1931.048303] env[63279]: DEBUG oslo_vmware.api [None req-1085f46f-382d-442a-8127-97a2236452f2 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Task: {'id': task-2087014, 'name': ReconfigVM_Task, 'duration_secs': 0.304736} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1931.048608] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-1085f46f-382d-442a-8127-97a2236452f2 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: 977fa519-2db3-4ee5-981d-c46820a8c72e] Reconfigured VM instance instance-00000021 to attach disk [datastore1] 977fa519-2db3-4ee5-981d-c46820a8c72e/977fa519-2db3-4ee5-981d-c46820a8c72e.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1931.049427] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-fd11f737-bfc7-479b-bba1-ec95fa7eb213 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1931.057164] env[63279]: DEBUG oslo_vmware.api [None req-1085f46f-382d-442a-8127-97a2236452f2 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Waiting for the task: (returnval){ [ 1931.057164] env[63279]: value = "task-2087015" [ 1931.057164] env[63279]: _type = "Task" [ 1931.057164] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1931.065620] env[63279]: DEBUG oslo_vmware.api [None req-1085f46f-382d-442a-8127-97a2236452f2 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Task: {'id': task-2087015, 'name': Rename_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1931.169349] env[63279]: INFO nova.compute.manager [None req-96273fea-e321-4566-9745-1792b9a7cc32 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] [instance: acf95fad-316c-4605-9008-24d4d7c05892] Took 52.71 seconds to build instance. [ 1931.187571] env[63279]: DEBUG nova.network.neutron [None req-e6033c0c-bf3a-4f54-ac44-9ee1b876d8b8 tempest-InstanceActionsTestJSON-1583999850 tempest-InstanceActionsTestJSON-1583999850-project-member] [instance: de543869-8ab1-40ed-8f6d-dc506c257843] Instance cache missing network info. 
{{(pid=63279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1931.365688] env[63279]: DEBUG nova.network.neutron [None req-e6033c0c-bf3a-4f54-ac44-9ee1b876d8b8 tempest-InstanceActionsTestJSON-1583999850 tempest-InstanceActionsTestJSON-1583999850-project-member] [instance: de543869-8ab1-40ed-8f6d-dc506c257843] Updating instance_info_cache with network_info: [{"id": "c8a557ed-47ef-46c1-a803-10a8150cb66a", "address": "fa:16:3e:80:45:7c", "network": {"id": "b0988280-189d-4f04-95fc-fd0a9b112434", "bridge": "br-int", "label": "tempest-InstanceActionsTestJSON-1478095758-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "685b5deb728448eb9eb023905d680288", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ac7039c0-3374-4c08-87fc-af2449b48b02", "external-id": "nsx-vlan-transportzone-592", "segmentation_id": 592, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc8a557ed-47", "ovs_interfaceid": "c8a557ed-47ef-46c1-a803-10a8150cb66a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1931.534148] env[63279]: DEBUG oslo_concurrency.lockutils [None req-0a937d49-2bb5-4c2d-b540-fcaea2c0fe97 tempest-DeleteServersAdminTestJSON-778262859 tempest-DeleteServersAdminTestJSON-778262859-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.866s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1931.534148] env[63279]: DEBUG nova.compute.manager [None req-0a937d49-2bb5-4c2d-b540-fcaea2c0fe97 tempest-DeleteServersAdminTestJSON-778262859 tempest-DeleteServersAdminTestJSON-778262859-project-member] [instance: 81103d53-99fe-4d1a-816f-7685c59c80ee] Start building networks asynchronously for instance. 
{{(pid=63279) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1931.540022] env[63279]: DEBUG oslo_concurrency.lockutils [None req-5b1a09f8-d41f-4fc7-9c4b-8c6edcf3ec86 tempest-ImagesOneServerTestJSON-859902932 tempest-ImagesOneServerTestJSON-859902932-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 40.296s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1931.540022] env[63279]: DEBUG oslo_concurrency.lockutils [None req-5b1a09f8-d41f-4fc7-9c4b-8c6edcf3ec86 tempest-ImagesOneServerTestJSON-859902932 tempest-ImagesOneServerTestJSON-859902932-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1931.540022] env[63279]: DEBUG oslo_concurrency.lockutils [None req-42b39ba1-997a-4f62-aa19-97a6fd24e99a tempest-ServerActionsV293TestJSON-360186716 tempest-ServerActionsV293TestJSON-360186716-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 38.558s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1931.540230] env[63279]: INFO nova.compute.claims [None req-42b39ba1-997a-4f62-aa19-97a6fd24e99a tempest-ServerActionsV293TestJSON-360186716 tempest-ServerActionsV293TestJSON-360186716-project-member] [instance: f375b54b-f9de-4529-b752-52c240aed532] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1931.567932] env[63279]: DEBUG oslo_vmware.api [None req-1085f46f-382d-442a-8127-97a2236452f2 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Task: {'id': task-2087015, 'name': Rename_Task, 'duration_secs': 0.140884} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1931.568665] env[63279]: INFO nova.scheduler.client.report [None req-5b1a09f8-d41f-4fc7-9c4b-8c6edcf3ec86 tempest-ImagesOneServerTestJSON-859902932 tempest-ImagesOneServerTestJSON-859902932-project-member] Deleted allocations for instance 7cd673bb-3795-4c6f-a1e0-a8ea0da0a35f [ 1931.572632] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-1085f46f-382d-442a-8127-97a2236452f2 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: 977fa519-2db3-4ee5-981d-c46820a8c72e] Powering on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1931.576295] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-643b1b64-bd27-4323-b05a-a2c05ae16c49 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1931.583062] env[63279]: DEBUG oslo_vmware.api [None req-1085f46f-382d-442a-8127-97a2236452f2 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Waiting for the task: (returnval){ [ 1931.583062] env[63279]: value = "task-2087016" [ 1931.583062] env[63279]: _type = "Task" [ 1931.583062] env[63279]: } to complete. 
{{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1931.595089] env[63279]: DEBUG oslo_vmware.api [None req-1085f46f-382d-442a-8127-97a2236452f2 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Task: {'id': task-2087016, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1931.672982] env[63279]: DEBUG oslo_concurrency.lockutils [None req-96273fea-e321-4566-9745-1792b9a7cc32 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] Lock "acf95fad-316c-4605-9008-24d4d7c05892" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 89.959s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1931.868668] env[63279]: DEBUG oslo_concurrency.lockutils [None req-e6033c0c-bf3a-4f54-ac44-9ee1b876d8b8 tempest-InstanceActionsTestJSON-1583999850 tempest-InstanceActionsTestJSON-1583999850-project-member] Releasing lock "refresh_cache-de543869-8ab1-40ed-8f6d-dc506c257843" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1931.869028] env[63279]: DEBUG nova.compute.manager [None req-e6033c0c-bf3a-4f54-ac44-9ee1b876d8b8 tempest-InstanceActionsTestJSON-1583999850 tempest-InstanceActionsTestJSON-1583999850-project-member] [instance: de543869-8ab1-40ed-8f6d-dc506c257843] Instance network_info: |[{"id": "c8a557ed-47ef-46c1-a803-10a8150cb66a", "address": "fa:16:3e:80:45:7c", "network": {"id": "b0988280-189d-4f04-95fc-fd0a9b112434", "bridge": "br-int", "label": "tempest-InstanceActionsTestJSON-1478095758-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "685b5deb728448eb9eb023905d680288", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ac7039c0-3374-4c08-87fc-af2449b48b02", "external-id": "nsx-vlan-transportzone-592", "segmentation_id": 592, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc8a557ed-47", "ovs_interfaceid": "c8a557ed-47ef-46c1-a803-10a8150cb66a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1931.869479] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-e6033c0c-bf3a-4f54-ac44-9ee1b876d8b8 tempest-InstanceActionsTestJSON-1583999850 tempest-InstanceActionsTestJSON-1583999850-project-member] [instance: de543869-8ab1-40ed-8f6d-dc506c257843] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:80:45:7c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ac7039c0-3374-4c08-87fc-af2449b48b02', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c8a557ed-47ef-46c1-a803-10a8150cb66a', 'vif_model': 'vmxnet3'}] {{(pid=63279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1931.877918] 
env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-e6033c0c-bf3a-4f54-ac44-9ee1b876d8b8 tempest-InstanceActionsTestJSON-1583999850 tempest-InstanceActionsTestJSON-1583999850-project-member] Creating folder: Project (685b5deb728448eb9eb023905d680288). Parent ref: group-v427491. {{(pid=63279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1931.878242] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7e57be58-8cae-4757-9b7f-077c4bdd0c81 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1931.889114] env[63279]: INFO nova.virt.vmwareapi.vm_util [None req-e6033c0c-bf3a-4f54-ac44-9ee1b876d8b8 tempest-InstanceActionsTestJSON-1583999850 tempest-InstanceActionsTestJSON-1583999850-project-member] Created folder: Project (685b5deb728448eb9eb023905d680288) in parent group-v427491. [ 1931.889329] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-e6033c0c-bf3a-4f54-ac44-9ee1b876d8b8 tempest-InstanceActionsTestJSON-1583999850 tempest-InstanceActionsTestJSON-1583999850-project-member] Creating folder: Instances. Parent ref: group-v427590. {{(pid=63279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1931.889576] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8113bfe4-87d5-49a7-ac42-04816321cebb {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1931.898641] env[63279]: INFO nova.virt.vmwareapi.vm_util [None req-e6033c0c-bf3a-4f54-ac44-9ee1b876d8b8 tempest-InstanceActionsTestJSON-1583999850 tempest-InstanceActionsTestJSON-1583999850-project-member] Created folder: Instances in parent group-v427590. [ 1931.898891] env[63279]: DEBUG oslo.service.loopingcall [None req-e6033c0c-bf3a-4f54-ac44-9ee1b876d8b8 tempest-InstanceActionsTestJSON-1583999850 tempest-InstanceActionsTestJSON-1583999850-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1931.899098] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: de543869-8ab1-40ed-8f6d-dc506c257843] Creating VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1931.899373] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9a5ca92c-ae14-4efd-b0c4-ec0e210e8f32 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1931.919823] env[63279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1931.919823] env[63279]: value = "task-2087019" [ 1931.919823] env[63279]: _type = "Task" [ 1931.919823] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1931.935184] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087019, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1932.047316] env[63279]: DEBUG nova.compute.utils [None req-0a937d49-2bb5-4c2d-b540-fcaea2c0fe97 tempest-DeleteServersAdminTestJSON-778262859 tempest-DeleteServersAdminTestJSON-778262859-project-member] Using /dev/sd instead of None {{(pid=63279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1932.050176] env[63279]: DEBUG nova.compute.manager [None req-0a937d49-2bb5-4c2d-b540-fcaea2c0fe97 tempest-DeleteServersAdminTestJSON-778262859 tempest-DeleteServersAdminTestJSON-778262859-project-member] [instance: 81103d53-99fe-4d1a-816f-7685c59c80ee] Allocating IP information in the background. {{(pid=63279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1932.050256] env[63279]: DEBUG nova.network.neutron [None req-0a937d49-2bb5-4c2d-b540-fcaea2c0fe97 tempest-DeleteServersAdminTestJSON-778262859 tempest-DeleteServersAdminTestJSON-778262859-project-member] [instance: 81103d53-99fe-4d1a-816f-7685c59c80ee] allocate_for_instance() {{(pid=63279) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1932.082123] env[63279]: DEBUG oslo_concurrency.lockutils [None req-5b1a09f8-d41f-4fc7-9c4b-8c6edcf3ec86 tempest-ImagesOneServerTestJSON-859902932 tempest-ImagesOneServerTestJSON-859902932-project-member] Lock "7cd673bb-3795-4c6f-a1e0-a8ea0da0a35f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 45.294s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1932.095076] env[63279]: DEBUG oslo_vmware.api [None req-1085f46f-382d-442a-8127-97a2236452f2 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Task: {'id': task-2087016, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1932.174459] env[63279]: DEBUG nova.policy [None req-0a937d49-2bb5-4c2d-b540-fcaea2c0fe97 tempest-DeleteServersAdminTestJSON-778262859 tempest-DeleteServersAdminTestJSON-778262859-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '87bda619a8044645b38e1468a7e5b760', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '930a294f790e4ba58a1295ce3240f10a', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63279) authorize /opt/stack/nova/nova/policy.py:192}} [ 1932.176965] env[63279]: DEBUG nova.compute.manager [None req-3fabae08-e926-4119-bd9e-b65ca32a47fe tempest-FloatingIPsAssociationNegativeTestJSON-1305161251 tempest-FloatingIPsAssociationNegativeTestJSON-1305161251-project-member] [instance: 3f888d81-e73f-4486-bb64-849c873449bf] Starting instance... 
{{(pid=63279) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1932.208142] env[63279]: DEBUG nova.compute.manager [req-75e07a6a-b7eb-492f-beb0-0ff089ab4c89 req-88ac170c-c50d-4fd8-9211-dc4502bea0ca service nova] [instance: de543869-8ab1-40ed-8f6d-dc506c257843] Received event network-changed-c8a557ed-47ef-46c1-a803-10a8150cb66a {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 1932.208358] env[63279]: DEBUG nova.compute.manager [req-75e07a6a-b7eb-492f-beb0-0ff089ab4c89 req-88ac170c-c50d-4fd8-9211-dc4502bea0ca service nova] [instance: de543869-8ab1-40ed-8f6d-dc506c257843] Refreshing instance network info cache due to event network-changed-c8a557ed-47ef-46c1-a803-10a8150cb66a. {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11655}} [ 1932.208578] env[63279]: DEBUG oslo_concurrency.lockutils [req-75e07a6a-b7eb-492f-beb0-0ff089ab4c89 req-88ac170c-c50d-4fd8-9211-dc4502bea0ca service nova] Acquiring lock "refresh_cache-de543869-8ab1-40ed-8f6d-dc506c257843" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1932.208805] env[63279]: DEBUG oslo_concurrency.lockutils [req-75e07a6a-b7eb-492f-beb0-0ff089ab4c89 req-88ac170c-c50d-4fd8-9211-dc4502bea0ca service nova] Acquired lock "refresh_cache-de543869-8ab1-40ed-8f6d-dc506c257843" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1932.208872] env[63279]: DEBUG nova.network.neutron [req-75e07a6a-b7eb-492f-beb0-0ff089ab4c89 req-88ac170c-c50d-4fd8-9211-dc4502bea0ca service nova] [instance: de543869-8ab1-40ed-8f6d-dc506c257843] Refreshing network info cache for port c8a557ed-47ef-46c1-a803-10a8150cb66a {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1932.432459] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087019, 'name': CreateVM_Task, 'duration_secs': 0.421433} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1932.432645] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: de543869-8ab1-40ed-8f6d-dc506c257843] Created VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1932.433369] env[63279]: DEBUG oslo_concurrency.lockutils [None req-e6033c0c-bf3a-4f54-ac44-9ee1b876d8b8 tempest-InstanceActionsTestJSON-1583999850 tempest-InstanceActionsTestJSON-1583999850-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1932.433557] env[63279]: DEBUG oslo_concurrency.lockutils [None req-e6033c0c-bf3a-4f54-ac44-9ee1b876d8b8 tempest-InstanceActionsTestJSON-1583999850 tempest-InstanceActionsTestJSON-1583999850-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1932.433929] env[63279]: DEBUG oslo_concurrency.lockutils [None req-e6033c0c-bf3a-4f54-ac44-9ee1b876d8b8 tempest-InstanceActionsTestJSON-1583999850 tempest-InstanceActionsTestJSON-1583999850-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1932.434359] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5842afda-07aa-4488-bb43-68db9181348c {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1932.441967] env[63279]: DEBUG oslo_vmware.api [None req-e6033c0c-bf3a-4f54-ac44-9ee1b876d8b8 tempest-InstanceActionsTestJSON-1583999850 tempest-InstanceActionsTestJSON-1583999850-project-member] Waiting for the task: (returnval){ [ 1932.441967] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52316fb3-7530-85ed-e273-93712bd6ce08" [ 1932.441967] env[63279]: _type = "Task" [ 1932.441967] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1932.454726] env[63279]: DEBUG oslo_vmware.api [None req-e6033c0c-bf3a-4f54-ac44-9ee1b876d8b8 tempest-InstanceActionsTestJSON-1583999850 tempest-InstanceActionsTestJSON-1583999850-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52316fb3-7530-85ed-e273-93712bd6ce08, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1932.550589] env[63279]: DEBUG nova.compute.manager [None req-0a937d49-2bb5-4c2d-b540-fcaea2c0fe97 tempest-DeleteServersAdminTestJSON-778262859 tempest-DeleteServersAdminTestJSON-778262859-project-member] [instance: 81103d53-99fe-4d1a-816f-7685c59c80ee] Start building block device mappings for instance. {{(pid=63279) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1932.595833] env[63279]: DEBUG oslo_vmware.api [None req-1085f46f-382d-442a-8127-97a2236452f2 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Task: {'id': task-2087016, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1932.708074] env[63279]: DEBUG oslo_concurrency.lockutils [None req-3fabae08-e926-4119-bd9e-b65ca32a47fe tempest-FloatingIPsAssociationNegativeTestJSON-1305161251 tempest-FloatingIPsAssociationNegativeTestJSON-1305161251-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1932.868411] env[63279]: DEBUG nova.network.neutron [None req-0a937d49-2bb5-4c2d-b540-fcaea2c0fe97 tempest-DeleteServersAdminTestJSON-778262859 tempest-DeleteServersAdminTestJSON-778262859-project-member] [instance: 81103d53-99fe-4d1a-816f-7685c59c80ee] Successfully created port: b059c76d-75a5-432c-b648-12c287223d83 {{(pid=63279) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1932.953702] env[63279]: DEBUG nova.compute.manager [None req-e9eb7f67-3e31-4b74-ba7f-946bf6f2c529 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] [instance: 5bb445d3-1b12-4a1b-ad2a-cbc929b13aee] Checking state {{(pid=63279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1932.953702] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-feceae34-9749-4793-8609-18ca6abe49eb {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1932.965351] env[63279]: DEBUG oslo_vmware.api [None req-e6033c0c-bf3a-4f54-ac44-9ee1b876d8b8 tempest-InstanceActionsTestJSON-1583999850 tempest-InstanceActionsTestJSON-1583999850-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52316fb3-7530-85ed-e273-93712bd6ce08, 'name': SearchDatastore_Task, 'duration_secs': 0.011589} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1932.968217] env[63279]: DEBUG oslo_concurrency.lockutils [None req-e6033c0c-bf3a-4f54-ac44-9ee1b876d8b8 tempest-InstanceActionsTestJSON-1583999850 tempest-InstanceActionsTestJSON-1583999850-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1932.968484] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-e6033c0c-bf3a-4f54-ac44-9ee1b876d8b8 tempest-InstanceActionsTestJSON-1583999850 tempest-InstanceActionsTestJSON-1583999850-project-member] [instance: de543869-8ab1-40ed-8f6d-dc506c257843] Processing image 30887889-e45b-4f67-8b3c-16216e594a90 {{(pid=63279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1932.968743] env[63279]: DEBUG oslo_concurrency.lockutils [None req-e6033c0c-bf3a-4f54-ac44-9ee1b876d8b8 tempest-InstanceActionsTestJSON-1583999850 tempest-InstanceActionsTestJSON-1583999850-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1932.968898] env[63279]: DEBUG oslo_concurrency.lockutils [None req-e6033c0c-bf3a-4f54-ac44-9ee1b876d8b8 tempest-InstanceActionsTestJSON-1583999850 tempest-InstanceActionsTestJSON-1583999850-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1932.969152] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-e6033c0c-bf3a-4f54-ac44-9ee1b876d8b8 tempest-InstanceActionsTestJSON-1583999850 tempest-InstanceActionsTestJSON-1583999850-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1932.969465] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1f55aa6a-618e-411f-9106-12fe5299637d {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1932.981693] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-e6033c0c-bf3a-4f54-ac44-9ee1b876d8b8 tempest-InstanceActionsTestJSON-1583999850 tempest-InstanceActionsTestJSON-1583999850-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1932.981933] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-e6033c0c-bf3a-4f54-ac44-9ee1b876d8b8 tempest-InstanceActionsTestJSON-1583999850 tempest-InstanceActionsTestJSON-1583999850-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1932.985497] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3e50fed4-4720-457f-bbc5-8283bfe40fcb {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1932.991999] env[63279]: DEBUG oslo_vmware.api [None req-e6033c0c-bf3a-4f54-ac44-9ee1b876d8b8 tempest-InstanceActionsTestJSON-1583999850 tempest-InstanceActionsTestJSON-1583999850-project-member] Waiting for the task: (returnval){ [ 1932.991999] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52382a97-8f0e-cdad-c0c3-76e26d1afcaa" [ 1932.991999] env[63279]: _type = "Task" [ 1932.991999] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1933.009232] env[63279]: DEBUG oslo_vmware.api [None req-e6033c0c-bf3a-4f54-ac44-9ee1b876d8b8 tempest-InstanceActionsTestJSON-1583999850 tempest-InstanceActionsTestJSON-1583999850-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52382a97-8f0e-cdad-c0c3-76e26d1afcaa, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1933.101839] env[63279]: DEBUG oslo_vmware.api [None req-1085f46f-382d-442a-8127-97a2236452f2 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Task: {'id': task-2087016, 'name': PowerOnVM_Task, 'duration_secs': 1.052089} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1933.102848] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-1085f46f-382d-442a-8127-97a2236452f2 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: 977fa519-2db3-4ee5-981d-c46820a8c72e] Powered on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1933.103275] env[63279]: INFO nova.compute.manager [None req-1085f46f-382d-442a-8127-97a2236452f2 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: 977fa519-2db3-4ee5-981d-c46820a8c72e] Took 8.95 seconds to spawn the instance on the hypervisor. 
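The lock/search/copy sequence around "[datastore1] devstack-image-cache_base/30887889-..." in the surrounding entries is the per-image cache pattern: each spawn serializes on the cached VMDK, checks with SearchDatastore_Task whether it is already present, fetches it once if not, and only then copies it out to the instance directory. A simplified sketch of that pattern under those assumptions (hypothetical cache_exists/fetch_to_cache/copy_to_instance callables and plain threading locks, not nova.virt.vmwareapi.vmops._fetch_image_if_missing):

    import threading
    from collections import defaultdict

    # One lock per cached image, analogous to the
    # "[datastore1] devstack-image-cache_base/<image-id>" locks in the log.
    _image_locks = defaultdict(threading.Lock)

    def ensure_image_cached(image_id, cache_exists, fetch_to_cache, copy_to_instance):
        with _image_locks[image_id]:
            if not cache_exists(image_id):    # analogous to SearchDatastore_Task
                fetch_to_cache(image_id)      # download once into the shared cache
        copy_to_instance(image_id)            # analogous to CopyVirtualDisk_Task
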
[ 1933.103472] env[63279]: DEBUG nova.compute.manager [None req-1085f46f-382d-442a-8127-97a2236452f2 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: 977fa519-2db3-4ee5-981d-c46820a8c72e] Checking state {{(pid=63279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1933.104524] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9bd79b1e-1444-464f-90ae-670d0417b465 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1933.254698] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2666757-90cd-469a-9d0c-2e301f41a810 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1933.263343] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e2a5a2c-0ee6-41b5-b691-dc32ce07a8bf {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1933.270043] env[63279]: DEBUG nova.network.neutron [req-75e07a6a-b7eb-492f-beb0-0ff089ab4c89 req-88ac170c-c50d-4fd8-9211-dc4502bea0ca service nova] [instance: de543869-8ab1-40ed-8f6d-dc506c257843] Updated VIF entry in instance network info cache for port c8a557ed-47ef-46c1-a803-10a8150cb66a. {{(pid=63279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1933.270486] env[63279]: DEBUG nova.network.neutron [req-75e07a6a-b7eb-492f-beb0-0ff089ab4c89 req-88ac170c-c50d-4fd8-9211-dc4502bea0ca service nova] [instance: de543869-8ab1-40ed-8f6d-dc506c257843] Updating instance_info_cache with network_info: [{"id": "c8a557ed-47ef-46c1-a803-10a8150cb66a", "address": "fa:16:3e:80:45:7c", "network": {"id": "b0988280-189d-4f04-95fc-fd0a9b112434", "bridge": "br-int", "label": "tempest-InstanceActionsTestJSON-1478095758-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "685b5deb728448eb9eb023905d680288", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ac7039c0-3374-4c08-87fc-af2449b48b02", "external-id": "nsx-vlan-transportzone-592", "segmentation_id": 592, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc8a557ed-47", "ovs_interfaceid": "c8a557ed-47ef-46c1-a803-10a8150cb66a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1933.303618] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7fee2f20-59d9-4a87-ab9e-a763236d93f6 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1933.313315] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b7e4173-e5d0-4bd0-93e9-b13bd27c6976 {{(pid=63279) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1933.329469] env[63279]: DEBUG nova.compute.provider_tree [None req-42b39ba1-997a-4f62-aa19-97a6fd24e99a tempest-ServerActionsV293TestJSON-360186716 tempest-ServerActionsV293TestJSON-360186716-project-member] Inventory has not changed in ProviderTree for provider: 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1933.467671] env[63279]: INFO nova.compute.manager [None req-e9eb7f67-3e31-4b74-ba7f-946bf6f2c529 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] [instance: 5bb445d3-1b12-4a1b-ad2a-cbc929b13aee] instance snapshotting [ 1933.471974] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b39a9f91-c289-4e50-8f79-1cae95503694 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1933.492746] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-762447ba-9af9-49f5-b704-738f1469b310 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1933.510835] env[63279]: DEBUG oslo_vmware.api [None req-e6033c0c-bf3a-4f54-ac44-9ee1b876d8b8 tempest-InstanceActionsTestJSON-1583999850 tempest-InstanceActionsTestJSON-1583999850-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52382a97-8f0e-cdad-c0c3-76e26d1afcaa, 'name': SearchDatastore_Task, 'duration_secs': 0.013507} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1933.514020] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1c87b82d-dc34-4cff-ae9b-eb9f32728d84 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1933.518199] env[63279]: DEBUG oslo_vmware.api [None req-e6033c0c-bf3a-4f54-ac44-9ee1b876d8b8 tempest-InstanceActionsTestJSON-1583999850 tempest-InstanceActionsTestJSON-1583999850-project-member] Waiting for the task: (returnval){ [ 1933.518199] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]5229ec34-a745-60c5-5673-d340f82848b3" [ 1933.518199] env[63279]: _type = "Task" [ 1933.518199] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1933.528941] env[63279]: DEBUG oslo_vmware.api [None req-e6033c0c-bf3a-4f54-ac44-9ee1b876d8b8 tempest-InstanceActionsTestJSON-1583999850 tempest-InstanceActionsTestJSON-1583999850-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]5229ec34-a745-60c5-5673-d340f82848b3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1933.563569] env[63279]: DEBUG nova.compute.manager [None req-0a937d49-2bb5-4c2d-b540-fcaea2c0fe97 tempest-DeleteServersAdminTestJSON-778262859 tempest-DeleteServersAdminTestJSON-778262859-project-member] [instance: 81103d53-99fe-4d1a-816f-7685c59c80ee] Start spawning the instance on the hypervisor. 
{{(pid=63279) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1933.592968] env[63279]: DEBUG nova.virt.hardware [None req-0a937d49-2bb5-4c2d-b540-fcaea2c0fe97 tempest-DeleteServersAdminTestJSON-778262859 tempest-DeleteServersAdminTestJSON-778262859-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-13T17:30:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-13T17:30:01Z,direct_url=,disk_format='vmdk',id=30887889-e45b-4f67-8b3c-16216e594a90,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a935e86eee0a4c38adfe0367d2097a61',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-13T17:30:02Z,virtual_size=,visibility=), allow threads: False {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1933.593227] env[63279]: DEBUG nova.virt.hardware [None req-0a937d49-2bb5-4c2d-b540-fcaea2c0fe97 tempest-DeleteServersAdminTestJSON-778262859 tempest-DeleteServersAdminTestJSON-778262859-project-member] Flavor limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1933.593384] env[63279]: DEBUG nova.virt.hardware [None req-0a937d49-2bb5-4c2d-b540-fcaea2c0fe97 tempest-DeleteServersAdminTestJSON-778262859 tempest-DeleteServersAdminTestJSON-778262859-project-member] Image limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1933.593560] env[63279]: DEBUG nova.virt.hardware [None req-0a937d49-2bb5-4c2d-b540-fcaea2c0fe97 tempest-DeleteServersAdminTestJSON-778262859 tempest-DeleteServersAdminTestJSON-778262859-project-member] Flavor pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1933.593703] env[63279]: DEBUG nova.virt.hardware [None req-0a937d49-2bb5-4c2d-b540-fcaea2c0fe97 tempest-DeleteServersAdminTestJSON-778262859 tempest-DeleteServersAdminTestJSON-778262859-project-member] Image pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1933.593848] env[63279]: DEBUG nova.virt.hardware [None req-0a937d49-2bb5-4c2d-b540-fcaea2c0fe97 tempest-DeleteServersAdminTestJSON-778262859 tempest-DeleteServersAdminTestJSON-778262859-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1933.594290] env[63279]: DEBUG nova.virt.hardware [None req-0a937d49-2bb5-4c2d-b540-fcaea2c0fe97 tempest-DeleteServersAdminTestJSON-778262859 tempest-DeleteServersAdminTestJSON-778262859-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1933.594545] env[63279]: DEBUG nova.virt.hardware [None req-0a937d49-2bb5-4c2d-b540-fcaea2c0fe97 tempest-DeleteServersAdminTestJSON-778262859 tempest-DeleteServersAdminTestJSON-778262859-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} 
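The topology lines directly above and below follow from the flavor/image pair logged here: m1.nano has 1 vCPU and neither the flavor nor the image constrains sockets, cores or threads, so the limits default to 65536 each and the only factorisation of 1 vCPU is 1x1x1, hence "Got 1 possible topologies". A toy version of that enumeration, shown only to illustrate the arithmetic and not Nova's actual _get_possible_cpu_topologies:

    def possible_cpu_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
        # Every (sockets, cores, threads) whose product equals the vCPU count
        # and that stays within the limits is a "possible topology".
        topologies = []
        for sockets in range(1, min(vcpus, max_sockets) + 1):
            for cores in range(1, min(vcpus, max_cores) + 1):
                for threads in range(1, min(vcpus, max_threads) + 1):
                    if sockets * cores * threads == vcpus:
                        topologies.append((sockets, cores, threads))
        return topologies

    print(possible_cpu_topologies(1))   # [(1, 1, 1)], matching "Got 1 possible topologies"
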
[ 1933.594753] env[63279]: DEBUG nova.virt.hardware [None req-0a937d49-2bb5-4c2d-b540-fcaea2c0fe97 tempest-DeleteServersAdminTestJSON-778262859 tempest-DeleteServersAdminTestJSON-778262859-project-member] Got 1 possible topologies {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1933.594981] env[63279]: DEBUG nova.virt.hardware [None req-0a937d49-2bb5-4c2d-b540-fcaea2c0fe97 tempest-DeleteServersAdminTestJSON-778262859 tempest-DeleteServersAdminTestJSON-778262859-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1933.595231] env[63279]: DEBUG nova.virt.hardware [None req-0a937d49-2bb5-4c2d-b540-fcaea2c0fe97 tempest-DeleteServersAdminTestJSON-778262859 tempest-DeleteServersAdminTestJSON-778262859-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1933.596158] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-354fc704-cb1c-4e3e-a1ad-04b194b43bf9 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1933.605036] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-829968ab-8327-48fd-8b43-6cdcd1355fb3 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1933.634426] env[63279]: INFO nova.compute.manager [None req-1085f46f-382d-442a-8127-97a2236452f2 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: 977fa519-2db3-4ee5-981d-c46820a8c72e] Took 51.67 seconds to build instance. 
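Almost every vCenter interaction in this log (CopyVirtualDisk_Task, ExtendVirtualDisk_Task, ReconfigVM_Task, PowerOnVM_Task, CreateSnapshot_Task) is an asynchronous vSphere task that the driver simply polls, which is why the same "progress is N%" / "completed successfully" pair keeps recurring. A generic sketch of that polling loop, with a hypothetical get_task_info callable standing in for the real session call (this is not oslo_vmware.api's wait_for_task):

    import time

    def wait_for_task(get_task_info, task_id, interval=0.5, timeout=300.0):
        """Poll an async task until it succeeds, fails, or times out."""
        deadline = time.monotonic() + timeout
        while time.monotonic() < deadline:
            state, progress, error = get_task_info(task_id)
            if state == "success":
                return True                               # "completed successfully"
            if state == "error":
                raise RuntimeError("task %s failed: %s" % (task_id, error))
            print("Task %s progress is %s%%" % (task_id, progress))
            time.sleep(interval)                          # still running: poll again
        raise TimeoutError("task %s did not complete in %ss" % (task_id, timeout))
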
[ 1933.773213] env[63279]: DEBUG oslo_concurrency.lockutils [req-75e07a6a-b7eb-492f-beb0-0ff089ab4c89 req-88ac170c-c50d-4fd8-9211-dc4502bea0ca service nova] Releasing lock "refresh_cache-de543869-8ab1-40ed-8f6d-dc506c257843" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1933.834059] env[63279]: DEBUG nova.scheduler.client.report [None req-42b39ba1-997a-4f62-aa19-97a6fd24e99a tempest-ServerActionsV293TestJSON-360186716 tempest-ServerActionsV293TestJSON-360186716-project-member] Inventory has not changed for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1934.012820] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-e9eb7f67-3e31-4b74-ba7f-946bf6f2c529 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] [instance: 5bb445d3-1b12-4a1b-ad2a-cbc929b13aee] Creating Snapshot of the VM instance {{(pid=63279) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1934.013177] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-2612435d-48d8-4b6e-ba59-626771168114 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1934.024160] env[63279]: DEBUG oslo_vmware.api [None req-e9eb7f67-3e31-4b74-ba7f-946bf6f2c529 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] Waiting for the task: (returnval){ [ 1934.024160] env[63279]: value = "task-2087020" [ 1934.024160] env[63279]: _type = "Task" [ 1934.024160] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1934.032049] env[63279]: DEBUG oslo_vmware.api [None req-e6033c0c-bf3a-4f54-ac44-9ee1b876d8b8 tempest-InstanceActionsTestJSON-1583999850 tempest-InstanceActionsTestJSON-1583999850-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]5229ec34-a745-60c5-5673-d340f82848b3, 'name': SearchDatastore_Task, 'duration_secs': 0.010761} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1934.035014] env[63279]: DEBUG oslo_concurrency.lockutils [None req-e6033c0c-bf3a-4f54-ac44-9ee1b876d8b8 tempest-InstanceActionsTestJSON-1583999850 tempest-InstanceActionsTestJSON-1583999850-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1934.035014] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-e6033c0c-bf3a-4f54-ac44-9ee1b876d8b8 tempest-InstanceActionsTestJSON-1583999850 tempest-InstanceActionsTestJSON-1583999850-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] de543869-8ab1-40ed-8f6d-dc506c257843/de543869-8ab1-40ed-8f6d-dc506c257843.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1934.035014] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-14f0c937-d447-4dfe-8c32-4d54cc2a90ef {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1934.043577] env[63279]: DEBUG oslo_vmware.api [None req-e9eb7f67-3e31-4b74-ba7f-946bf6f2c529 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] Task: {'id': task-2087020, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1934.049031] env[63279]: DEBUG oslo_vmware.api [None req-e6033c0c-bf3a-4f54-ac44-9ee1b876d8b8 tempest-InstanceActionsTestJSON-1583999850 tempest-InstanceActionsTestJSON-1583999850-project-member] Waiting for the task: (returnval){ [ 1934.049031] env[63279]: value = "task-2087021" [ 1934.049031] env[63279]: _type = "Task" [ 1934.049031] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1934.057206] env[63279]: DEBUG oslo_vmware.api [None req-e6033c0c-bf3a-4f54-ac44-9ee1b876d8b8 tempest-InstanceActionsTestJSON-1583999850 tempest-InstanceActionsTestJSON-1583999850-project-member] Task: {'id': task-2087021, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1934.139170] env[63279]: DEBUG oslo_concurrency.lockutils [None req-1085f46f-382d-442a-8127-97a2236452f2 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Lock "977fa519-2db3-4ee5-981d-c46820a8c72e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 89.302s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1934.340049] env[63279]: DEBUG oslo_concurrency.lockutils [None req-42b39ba1-997a-4f62-aa19-97a6fd24e99a tempest-ServerActionsV293TestJSON-360186716 tempest-ServerActionsV293TestJSON-360186716-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.801s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1934.340500] env[63279]: DEBUG nova.compute.manager [None req-42b39ba1-997a-4f62-aa19-97a6fd24e99a tempest-ServerActionsV293TestJSON-360186716 tempest-ServerActionsV293TestJSON-360186716-project-member] [instance: f375b54b-f9de-4529-b752-52c240aed532] Start building networks asynchronously for instance. {{(pid=63279) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1934.344815] env[63279]: DEBUG oslo_concurrency.lockutils [None req-4a8933e7-208a-443d-9249-2440f3ab60b6 tempest-ServerMetadataNegativeTestJSON-1632547526 tempest-ServerMetadataNegativeTestJSON-1632547526-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 34.069s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1934.346758] env[63279]: INFO nova.compute.claims [None req-4a8933e7-208a-443d-9249-2440f3ab60b6 tempest-ServerMetadataNegativeTestJSON-1632547526 tempest-ServerMetadataNegativeTestJSON-1632547526-project-member] [instance: e04f06de-da6a-4562-a50a-ff16bf3a006e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1934.536123] env[63279]: DEBUG oslo_vmware.api [None req-e9eb7f67-3e31-4b74-ba7f-946bf6f2c529 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] Task: {'id': task-2087020, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1934.559750] env[63279]: DEBUG oslo_vmware.api [None req-e6033c0c-bf3a-4f54-ac44-9ee1b876d8b8 tempest-InstanceActionsTestJSON-1583999850 tempest-InstanceActionsTestJSON-1583999850-project-member] Task: {'id': task-2087021, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.489242} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1934.559750] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-e6033c0c-bf3a-4f54-ac44-9ee1b876d8b8 tempest-InstanceActionsTestJSON-1583999850 tempest-InstanceActionsTestJSON-1583999850-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] de543869-8ab1-40ed-8f6d-dc506c257843/de543869-8ab1-40ed-8f6d-dc506c257843.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1934.559750] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-e6033c0c-bf3a-4f54-ac44-9ee1b876d8b8 tempest-InstanceActionsTestJSON-1583999850 tempest-InstanceActionsTestJSON-1583999850-project-member] [instance: de543869-8ab1-40ed-8f6d-dc506c257843] Extending root virtual disk to 1048576 {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1934.559750] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-730df878-2016-46c6-b14a-d9c6bd68076a {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1934.568403] env[63279]: DEBUG oslo_vmware.api [None req-e6033c0c-bf3a-4f54-ac44-9ee1b876d8b8 tempest-InstanceActionsTestJSON-1583999850 tempest-InstanceActionsTestJSON-1583999850-project-member] Waiting for the task: (returnval){ [ 1934.568403] env[63279]: value = "task-2087022" [ 1934.568403] env[63279]: _type = "Task" [ 1934.568403] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1934.577227] env[63279]: DEBUG oslo_vmware.api [None req-e6033c0c-bf3a-4f54-ac44-9ee1b876d8b8 tempest-InstanceActionsTestJSON-1583999850 tempest-InstanceActionsTestJSON-1583999850-project-member] Task: {'id': task-2087022, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1934.599010] env[63279]: DEBUG oslo_concurrency.lockutils [None req-bd29b348-af39-4f7a-bdf6-c730291d8081 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Acquiring lock "977fa519-2db3-4ee5-981d-c46820a8c72e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1934.599291] env[63279]: DEBUG oslo_concurrency.lockutils [None req-bd29b348-af39-4f7a-bdf6-c730291d8081 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Lock "977fa519-2db3-4ee5-981d-c46820a8c72e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1934.600133] env[63279]: DEBUG oslo_concurrency.lockutils [None req-bd29b348-af39-4f7a-bdf6-c730291d8081 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Acquiring lock "977fa519-2db3-4ee5-981d-c46820a8c72e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1934.600133] env[63279]: DEBUG oslo_concurrency.lockutils [None req-bd29b348-af39-4f7a-bdf6-c730291d8081 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Lock "977fa519-2db3-4ee5-981d-c46820a8c72e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1934.600133] env[63279]: DEBUG oslo_concurrency.lockutils [None req-bd29b348-af39-4f7a-bdf6-c730291d8081 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Lock "977fa519-2db3-4ee5-981d-c46820a8c72e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1934.602269] env[63279]: INFO nova.compute.manager [None req-bd29b348-af39-4f7a-bdf6-c730291d8081 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: 977fa519-2db3-4ee5-981d-c46820a8c72e] Terminating instance [ 1934.640287] env[63279]: DEBUG nova.compute.manager [None req-db66a17d-ea76-4394-a0de-e69c5b562193 tempest-AttachInterfacesV270Test-1602135614 tempest-AttachInterfacesV270Test-1602135614-project-member] [instance: ba2d6111-d93d-4216-b641-864b542ea253] Starting instance... 
{{(pid=63279) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1934.846838] env[63279]: DEBUG nova.compute.utils [None req-42b39ba1-997a-4f62-aa19-97a6fd24e99a tempest-ServerActionsV293TestJSON-360186716 tempest-ServerActionsV293TestJSON-360186716-project-member] Using /dev/sd instead of None {{(pid=63279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1934.849325] env[63279]: DEBUG nova.compute.manager [None req-42b39ba1-997a-4f62-aa19-97a6fd24e99a tempest-ServerActionsV293TestJSON-360186716 tempest-ServerActionsV293TestJSON-360186716-project-member] [instance: f375b54b-f9de-4529-b752-52c240aed532] Allocating IP information in the background. {{(pid=63279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1934.853029] env[63279]: DEBUG nova.network.neutron [None req-42b39ba1-997a-4f62-aa19-97a6fd24e99a tempest-ServerActionsV293TestJSON-360186716 tempest-ServerActionsV293TestJSON-360186716-project-member] [instance: f375b54b-f9de-4529-b752-52c240aed532] allocate_for_instance() {{(pid=63279) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1934.926576] env[63279]: DEBUG nova.policy [None req-42b39ba1-997a-4f62-aa19-97a6fd24e99a tempest-ServerActionsV293TestJSON-360186716 tempest-ServerActionsV293TestJSON-360186716-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a80f519ef19849988b2f61a85e4a8582', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '691f1159e7af42429cca85900fac343d', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63279) authorize /opt/stack/nova/nova/policy.py:192}} [ 1935.038039] env[63279]: DEBUG oslo_vmware.api [None req-e9eb7f67-3e31-4b74-ba7f-946bf6f2c529 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] Task: {'id': task-2087020, 'name': CreateSnapshot_Task, 'duration_secs': 0.908919} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1935.038301] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-e9eb7f67-3e31-4b74-ba7f-946bf6f2c529 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] [instance: 5bb445d3-1b12-4a1b-ad2a-cbc929b13aee] Created Snapshot of the VM instance {{(pid=63279) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1935.039147] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d72ef932-f5a8-422d-acf9-03f10cbd69a0 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1935.082636] env[63279]: DEBUG oslo_vmware.api [None req-e6033c0c-bf3a-4f54-ac44-9ee1b876d8b8 tempest-InstanceActionsTestJSON-1583999850 tempest-InstanceActionsTestJSON-1583999850-project-member] Task: {'id': task-2087022, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.123316} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1935.082636] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-e6033c0c-bf3a-4f54-ac44-9ee1b876d8b8 tempest-InstanceActionsTestJSON-1583999850 tempest-InstanceActionsTestJSON-1583999850-project-member] [instance: de543869-8ab1-40ed-8f6d-dc506c257843] Extended root virtual disk {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1935.082744] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e23081f1-2340-485c-8f24-b5202eabe548 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1935.088211] env[63279]: DEBUG nova.compute.manager [req-e1f01fa4-0d8a-4bd0-8f6d-b19261483517 req-b9ba544a-0c6a-4d12-83bb-8e072346b41c service nova] [instance: 81103d53-99fe-4d1a-816f-7685c59c80ee] Received event network-vif-plugged-b059c76d-75a5-432c-b648-12c287223d83 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 1935.088211] env[63279]: DEBUG oslo_concurrency.lockutils [req-e1f01fa4-0d8a-4bd0-8f6d-b19261483517 req-b9ba544a-0c6a-4d12-83bb-8e072346b41c service nova] Acquiring lock "81103d53-99fe-4d1a-816f-7685c59c80ee-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1935.088211] env[63279]: DEBUG oslo_concurrency.lockutils [req-e1f01fa4-0d8a-4bd0-8f6d-b19261483517 req-b9ba544a-0c6a-4d12-83bb-8e072346b41c service nova] Lock "81103d53-99fe-4d1a-816f-7685c59c80ee-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1935.088211] env[63279]: DEBUG oslo_concurrency.lockutils [req-e1f01fa4-0d8a-4bd0-8f6d-b19261483517 req-b9ba544a-0c6a-4d12-83bb-8e072346b41c service nova] Lock "81103d53-99fe-4d1a-816f-7685c59c80ee-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1935.088401] env[63279]: DEBUG nova.compute.manager [req-e1f01fa4-0d8a-4bd0-8f6d-b19261483517 req-b9ba544a-0c6a-4d12-83bb-8e072346b41c service nova] [instance: 81103d53-99fe-4d1a-816f-7685c59c80ee] No waiting events found dispatching network-vif-plugged-b059c76d-75a5-432c-b648-12c287223d83 {{(pid=63279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1935.088432] env[63279]: WARNING nova.compute.manager [req-e1f01fa4-0d8a-4bd0-8f6d-b19261483517 req-b9ba544a-0c6a-4d12-83bb-8e072346b41c service nova] [instance: 81103d53-99fe-4d1a-816f-7685c59c80ee] Received unexpected event network-vif-plugged-b059c76d-75a5-432c-b648-12c287223d83 for instance with vm_state building and task_state spawning. 
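The vCenter operations in the surrounding records (CreateSnapshot_Task, CopyVirtualDisk_Task, ExtendVirtualDisk_Task, and later CloneVM_Task) all follow the same shape: the driver invokes the method through the oslo.vmware session, receives a Task managed object back, and then waits on it while the poller logs "progress is N%" until the task completes successfully. The sketch below only illustrates that wait/poll cycle; poll_task_info is a hypothetical callable standing in for the PropertyCollector round-trips that oslo.vmware performs, and the real driver uses oslo_vmware.api.VMwareAPISession.wait_for_task rather than this code.

# Hedged sketch of the wait/poll cycle visible in the log, not oslo.vmware code.
# poll_task_info is a hypothetical callable returning (state, progress); the
# 0.5s interval is illustrative (the real poll interval is configurable).
import time

def wait_for_task(task_ref, poll_task_info, poll_interval=0.5):
    while True:
        state, progress = poll_task_info(task_ref)   # e.g. ('running', 94)
        print("Task: %s progress is %s%%." % (task_ref, progress))
        if state == 'success':
            print("Task: %s completed successfully." % task_ref)
            return
        if state == 'error':
            raise RuntimeError("Task %s failed" % task_ref)
        time.sleep(poll_interval)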
[ 1935.110983] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-e6033c0c-bf3a-4f54-ac44-9ee1b876d8b8 tempest-InstanceActionsTestJSON-1583999850 tempest-InstanceActionsTestJSON-1583999850-project-member] [instance: de543869-8ab1-40ed-8f6d-dc506c257843] Reconfiguring VM instance instance-00000022 to attach disk [datastore1] de543869-8ab1-40ed-8f6d-dc506c257843/de543869-8ab1-40ed-8f6d-dc506c257843.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1935.112413] env[63279]: DEBUG nova.compute.manager [None req-bd29b348-af39-4f7a-bdf6-c730291d8081 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: 977fa519-2db3-4ee5-981d-c46820a8c72e] Start destroying the instance on the hypervisor. {{(pid=63279) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1935.112624] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-bd29b348-af39-4f7a-bdf6-c730291d8081 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: 977fa519-2db3-4ee5-981d-c46820a8c72e] Destroying instance {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1935.113130] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5825ee5c-672d-43c0-9676-be89760dae51 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1935.129189] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa2b5ff3-3dc7-4297-ab45-f421f4dc5e2f {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1935.139997] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-bd29b348-af39-4f7a-bdf6-c730291d8081 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: 977fa519-2db3-4ee5-981d-c46820a8c72e] Powering off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1935.141482] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e39d1435-3e80-4fc6-9065-74c19fb1a077 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1935.143598] env[63279]: DEBUG oslo_vmware.api [None req-e6033c0c-bf3a-4f54-ac44-9ee1b876d8b8 tempest-InstanceActionsTestJSON-1583999850 tempest-InstanceActionsTestJSON-1583999850-project-member] Waiting for the task: (returnval){ [ 1935.143598] env[63279]: value = "task-2087023" [ 1935.143598] env[63279]: _type = "Task" [ 1935.143598] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1935.153206] env[63279]: DEBUG oslo_vmware.api [None req-bd29b348-af39-4f7a-bdf6-c730291d8081 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Waiting for the task: (returnval){ [ 1935.153206] env[63279]: value = "task-2087024" [ 1935.153206] env[63279]: _type = "Task" [ 1935.153206] env[63279]: } to complete. 
{{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1935.163082] env[63279]: DEBUG oslo_vmware.api [None req-e6033c0c-bf3a-4f54-ac44-9ee1b876d8b8 tempest-InstanceActionsTestJSON-1583999850 tempest-InstanceActionsTestJSON-1583999850-project-member] Task: {'id': task-2087023, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1935.169409] env[63279]: DEBUG oslo_vmware.api [None req-bd29b348-af39-4f7a-bdf6-c730291d8081 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Task: {'id': task-2087024, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1935.179323] env[63279]: DEBUG oslo_concurrency.lockutils [None req-db66a17d-ea76-4394-a0de-e69c5b562193 tempest-AttachInterfacesV270Test-1602135614 tempest-AttachInterfacesV270Test-1602135614-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1935.297759] env[63279]: DEBUG nova.network.neutron [None req-0a937d49-2bb5-4c2d-b540-fcaea2c0fe97 tempest-DeleteServersAdminTestJSON-778262859 tempest-DeleteServersAdminTestJSON-778262859-project-member] [instance: 81103d53-99fe-4d1a-816f-7685c59c80ee] Successfully updated port: b059c76d-75a5-432c-b648-12c287223d83 {{(pid=63279) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1935.353266] env[63279]: DEBUG nova.compute.manager [None req-42b39ba1-997a-4f62-aa19-97a6fd24e99a tempest-ServerActionsV293TestJSON-360186716 tempest-ServerActionsV293TestJSON-360186716-project-member] [instance: f375b54b-f9de-4529-b752-52c240aed532] Start building block device mappings for instance. {{(pid=63279) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1935.557708] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-e9eb7f67-3e31-4b74-ba7f-946bf6f2c529 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] [instance: 5bb445d3-1b12-4a1b-ad2a-cbc929b13aee] Creating linked-clone VM from snapshot {{(pid=63279) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1935.558031] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-a1ad3c2f-fec1-4ac6-bbdf-0681ca46e9e2 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1935.564351] env[63279]: DEBUG nova.network.neutron [None req-42b39ba1-997a-4f62-aa19-97a6fd24e99a tempest-ServerActionsV293TestJSON-360186716 tempest-ServerActionsV293TestJSON-360186716-project-member] [instance: f375b54b-f9de-4529-b752-52c240aed532] Successfully created port: 7dc16370-0621-49ff-9730-abdfd18ff164 {{(pid=63279) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1935.571128] env[63279]: DEBUG oslo_vmware.api [None req-e9eb7f67-3e31-4b74-ba7f-946bf6f2c529 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] Waiting for the task: (returnval){ [ 1935.571128] env[63279]: value = "task-2087025" [ 1935.571128] env[63279]: _type = "Task" [ 1935.571128] env[63279]: } to complete. 
{{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1935.579399] env[63279]: DEBUG oslo_vmware.api [None req-e9eb7f67-3e31-4b74-ba7f-946bf6f2c529 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] Task: {'id': task-2087025, 'name': CloneVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1935.653528] env[63279]: DEBUG oslo_vmware.api [None req-e6033c0c-bf3a-4f54-ac44-9ee1b876d8b8 tempest-InstanceActionsTestJSON-1583999850 tempest-InstanceActionsTestJSON-1583999850-project-member] Task: {'id': task-2087023, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1935.664875] env[63279]: DEBUG oslo_vmware.api [None req-bd29b348-af39-4f7a-bdf6-c730291d8081 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Task: {'id': task-2087024, 'name': PowerOffVM_Task, 'duration_secs': 0.182207} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1935.665168] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-bd29b348-af39-4f7a-bdf6-c730291d8081 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: 977fa519-2db3-4ee5-981d-c46820a8c72e] Powered off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1935.665913] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-bd29b348-af39-4f7a-bdf6-c730291d8081 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: 977fa519-2db3-4ee5-981d-c46820a8c72e] Unregistering the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1935.665913] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-19fde29e-19f5-43e3-9fd4-1ded9c691e3f {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1935.786985] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-bd29b348-af39-4f7a-bdf6-c730291d8081 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: 977fa519-2db3-4ee5-981d-c46820a8c72e] Unregistered the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1935.791022] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-bd29b348-af39-4f7a-bdf6-c730291d8081 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: 977fa519-2db3-4ee5-981d-c46820a8c72e] Deleting contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1935.791022] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-bd29b348-af39-4f7a-bdf6-c730291d8081 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Deleting the datastore file [datastore1] 977fa519-2db3-4ee5-981d-c46820a8c72e {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1935.791022] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ff6058d9-0a82-4b16-af0e-25571057f87f {{(pid=63279) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1935.801425] env[63279]: DEBUG oslo_vmware.api [None req-bd29b348-af39-4f7a-bdf6-c730291d8081 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Waiting for the task: (returnval){ [ 1935.801425] env[63279]: value = "task-2087027" [ 1935.801425] env[63279]: _type = "Task" [ 1935.801425] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1935.802108] env[63279]: DEBUG oslo_concurrency.lockutils [None req-0a937d49-2bb5-4c2d-b540-fcaea2c0fe97 tempest-DeleteServersAdminTestJSON-778262859 tempest-DeleteServersAdminTestJSON-778262859-project-member] Acquiring lock "refresh_cache-81103d53-99fe-4d1a-816f-7685c59c80ee" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1935.802273] env[63279]: DEBUG oslo_concurrency.lockutils [None req-0a937d49-2bb5-4c2d-b540-fcaea2c0fe97 tempest-DeleteServersAdminTestJSON-778262859 tempest-DeleteServersAdminTestJSON-778262859-project-member] Acquired lock "refresh_cache-81103d53-99fe-4d1a-816f-7685c59c80ee" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1935.802411] env[63279]: DEBUG nova.network.neutron [None req-0a937d49-2bb5-4c2d-b540-fcaea2c0fe97 tempest-DeleteServersAdminTestJSON-778262859 tempest-DeleteServersAdminTestJSON-778262859-project-member] [instance: 81103d53-99fe-4d1a-816f-7685c59c80ee] Building network info cache for instance {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1935.810815] env[63279]: DEBUG oslo_vmware.api [None req-bd29b348-af39-4f7a-bdf6-c730291d8081 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Task: {'id': task-2087027, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1935.861018] env[63279]: INFO nova.virt.block_device [None req-42b39ba1-997a-4f62-aa19-97a6fd24e99a tempest-ServerActionsV293TestJSON-360186716 tempest-ServerActionsV293TestJSON-360186716-project-member] [instance: f375b54b-f9de-4529-b752-52c240aed532] Booting with volume 29ec2591-5e66-4668-ba47-c425dc65074c at /dev/sda [ 1935.927231] env[63279]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-1fe2c15c-b05c-4e21-935b-d7a8c38eafba {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1935.935704] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-641d19d9-2f51-434b-9c55-245d54784602 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1935.975591] env[63279]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-3c65d19d-d2fc-42c8-89f9-728521751090 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1935.985157] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe021a5f-468e-4d6e-a94c-d281a5076c6b {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1935.997683] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe01fd7b-a5e9-49b9-9035-271b9c1397e3 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1936.005208] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb9e0bf2-55e1-4165-8f29-eee9235ff19b {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1936.048143] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53176885-a295-401f-9a34-aec9fbd59e7d {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1936.051522] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7b9967e-9b52-40dd-9c5d-b5a42a73e765 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1936.059772] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59c656d5-d8e0-4610-9ebe-f264e4af11e8 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1936.063018] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8907c4e-344c-400c-88ca-da2e63e10aeb {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1936.077268] env[63279]: DEBUG nova.compute.provider_tree [None req-4a8933e7-208a-443d-9249-2440f3ab60b6 tempest-ServerMetadataNegativeTestJSON-1632547526 tempest-ServerMetadataNegativeTestJSON-1632547526-project-member] Inventory has not changed in ProviderTree for provider: 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1936.085303] env[63279]: 
DEBUG nova.virt.block_device [None req-42b39ba1-997a-4f62-aa19-97a6fd24e99a tempest-ServerActionsV293TestJSON-360186716 tempest-ServerActionsV293TestJSON-360186716-project-member] [instance: f375b54b-f9de-4529-b752-52c240aed532] Updating existing volume attachment record: 73ec3629-9586-4baa-a690-5a1fe1201d4b {{(pid=63279) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1936.092914] env[63279]: DEBUG oslo_vmware.api [None req-e9eb7f67-3e31-4b74-ba7f-946bf6f2c529 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] Task: {'id': task-2087025, 'name': CloneVM_Task} progress is 94%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1936.155144] env[63279]: DEBUG oslo_vmware.api [None req-e6033c0c-bf3a-4f54-ac44-9ee1b876d8b8 tempest-InstanceActionsTestJSON-1583999850 tempest-InstanceActionsTestJSON-1583999850-project-member] Task: {'id': task-2087023, 'name': ReconfigVM_Task, 'duration_secs': 0.8682} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1936.155420] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-e6033c0c-bf3a-4f54-ac44-9ee1b876d8b8 tempest-InstanceActionsTestJSON-1583999850 tempest-InstanceActionsTestJSON-1583999850-project-member] [instance: de543869-8ab1-40ed-8f6d-dc506c257843] Reconfigured VM instance instance-00000022 to attach disk [datastore1] de543869-8ab1-40ed-8f6d-dc506c257843/de543869-8ab1-40ed-8f6d-dc506c257843.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1936.156077] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-1a964cac-1346-48a8-8371-cbd520f1a46d {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1936.163091] env[63279]: DEBUG oslo_vmware.api [None req-e6033c0c-bf3a-4f54-ac44-9ee1b876d8b8 tempest-InstanceActionsTestJSON-1583999850 tempest-InstanceActionsTestJSON-1583999850-project-member] Waiting for the task: (returnval){ [ 1936.163091] env[63279]: value = "task-2087028" [ 1936.163091] env[63279]: _type = "Task" [ 1936.163091] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1936.171052] env[63279]: DEBUG oslo_vmware.api [None req-e6033c0c-bf3a-4f54-ac44-9ee1b876d8b8 tempest-InstanceActionsTestJSON-1583999850 tempest-InstanceActionsTestJSON-1583999850-project-member] Task: {'id': task-2087028, 'name': Rename_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1936.315063] env[63279]: DEBUG oslo_vmware.api [None req-bd29b348-af39-4f7a-bdf6-c730291d8081 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Task: {'id': task-2087027, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.157675} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1936.315826] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-bd29b348-af39-4f7a-bdf6-c730291d8081 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Deleted the datastore file {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1936.316184] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-bd29b348-af39-4f7a-bdf6-c730291d8081 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: 977fa519-2db3-4ee5-981d-c46820a8c72e] Deleted contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1936.316291] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-bd29b348-af39-4f7a-bdf6-c730291d8081 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: 977fa519-2db3-4ee5-981d-c46820a8c72e] Instance destroyed {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1936.316473] env[63279]: INFO nova.compute.manager [None req-bd29b348-af39-4f7a-bdf6-c730291d8081 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: 977fa519-2db3-4ee5-981d-c46820a8c72e] Took 1.20 seconds to destroy the instance on the hypervisor. [ 1936.316832] env[63279]: DEBUG oslo.service.loopingcall [None req-bd29b348-af39-4f7a-bdf6-c730291d8081 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1936.316953] env[63279]: DEBUG nova.compute.manager [-] [instance: 977fa519-2db3-4ee5-981d-c46820a8c72e] Deallocating network for instance {{(pid=63279) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1936.317724] env[63279]: DEBUG nova.network.neutron [-] [instance: 977fa519-2db3-4ee5-981d-c46820a8c72e] deallocate_for_instance() {{(pid=63279) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1936.372635] env[63279]: DEBUG nova.network.neutron [None req-0a937d49-2bb5-4c2d-b540-fcaea2c0fe97 tempest-DeleteServersAdminTestJSON-778262859 tempest-DeleteServersAdminTestJSON-778262859-project-member] [instance: 81103d53-99fe-4d1a-816f-7685c59c80ee] Instance cache missing network info. 
{{(pid=63279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1936.588429] env[63279]: DEBUG nova.scheduler.client.report [None req-4a8933e7-208a-443d-9249-2440f3ab60b6 tempest-ServerMetadataNegativeTestJSON-1632547526 tempest-ServerMetadataNegativeTestJSON-1632547526-project-member] Inventory has not changed for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1936.592691] env[63279]: DEBUG oslo_vmware.api [None req-e9eb7f67-3e31-4b74-ba7f-946bf6f2c529 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] Task: {'id': task-2087025, 'name': CloneVM_Task} progress is 94%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1936.593864] env[63279]: DEBUG nova.network.neutron [None req-0a937d49-2bb5-4c2d-b540-fcaea2c0fe97 tempest-DeleteServersAdminTestJSON-778262859 tempest-DeleteServersAdminTestJSON-778262859-project-member] [instance: 81103d53-99fe-4d1a-816f-7685c59c80ee] Updating instance_info_cache with network_info: [{"id": "b059c76d-75a5-432c-b648-12c287223d83", "address": "fa:16:3e:bf:a3:83", "network": {"id": "1e875730-8e0e-4907-a6d2-776025ed7ab9", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.63", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "a935e86eee0a4c38adfe0367d2097a61", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "32028d02-abaa-4071-bc65-1460f5c772a8", "external-id": "nsx-vlan-transportzone-558", "segmentation_id": 558, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb059c76d-75", "ovs_interfaceid": "b059c76d-75a5-432c-b648-12c287223d83", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1936.673924] env[63279]: DEBUG oslo_vmware.api [None req-e6033c0c-bf3a-4f54-ac44-9ee1b876d8b8 tempest-InstanceActionsTestJSON-1583999850 tempest-InstanceActionsTestJSON-1583999850-project-member] Task: {'id': task-2087028, 'name': Rename_Task, 'duration_secs': 0.275754} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1936.674282] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-e6033c0c-bf3a-4f54-ac44-9ee1b876d8b8 tempest-InstanceActionsTestJSON-1583999850 tempest-InstanceActionsTestJSON-1583999850-project-member] [instance: de543869-8ab1-40ed-8f6d-dc506c257843] Powering on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1936.674528] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-bcb26fb7-86b2-4796-aa15-1b102c956a0e {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1936.681965] env[63279]: DEBUG oslo_vmware.api [None req-e6033c0c-bf3a-4f54-ac44-9ee1b876d8b8 tempest-InstanceActionsTestJSON-1583999850 tempest-InstanceActionsTestJSON-1583999850-project-member] Waiting for the task: (returnval){ [ 1936.681965] env[63279]: value = "task-2087029" [ 1936.681965] env[63279]: _type = "Task" [ 1936.681965] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1936.690749] env[63279]: DEBUG oslo_vmware.api [None req-e6033c0c-bf3a-4f54-ac44-9ee1b876d8b8 tempest-InstanceActionsTestJSON-1583999850 tempest-InstanceActionsTestJSON-1583999850-project-member] Task: {'id': task-2087029, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1936.920028] env[63279]: DEBUG nova.compute.manager [req-4f914b41-ab63-494a-95b9-36ccead7abf7 req-268fe330-12a9-4eef-8844-a44ff4c08b36 service nova] [instance: 977fa519-2db3-4ee5-981d-c46820a8c72e] Received event network-vif-deleted-a243284f-0df1-41a1-b8e4-a8d486dd934e {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 1936.920755] env[63279]: INFO nova.compute.manager [req-4f914b41-ab63-494a-95b9-36ccead7abf7 req-268fe330-12a9-4eef-8844-a44ff4c08b36 service nova] [instance: 977fa519-2db3-4ee5-981d-c46820a8c72e] Neutron deleted interface a243284f-0df1-41a1-b8e4-a8d486dd934e; detaching it from the instance and deleting it from the info cache [ 1936.920755] env[63279]: DEBUG nova.network.neutron [req-4f914b41-ab63-494a-95b9-36ccead7abf7 req-268fe330-12a9-4eef-8844-a44ff4c08b36 service nova] [instance: 977fa519-2db3-4ee5-981d-c46820a8c72e] Updating instance_info_cache with network_info: [] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1937.088590] env[63279]: DEBUG oslo_vmware.api [None req-e9eb7f67-3e31-4b74-ba7f-946bf6f2c529 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] Task: {'id': task-2087025, 'name': CloneVM_Task} progress is 94%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1937.099021] env[63279]: DEBUG oslo_concurrency.lockutils [None req-4a8933e7-208a-443d-9249-2440f3ab60b6 tempest-ServerMetadataNegativeTestJSON-1632547526 tempest-ServerMetadataNegativeTestJSON-1632547526-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.752s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1937.099021] env[63279]: DEBUG nova.compute.manager [None req-4a8933e7-208a-443d-9249-2440f3ab60b6 tempest-ServerMetadataNegativeTestJSON-1632547526 tempest-ServerMetadataNegativeTestJSON-1632547526-project-member] [instance: e04f06de-da6a-4562-a50a-ff16bf3a006e] Start building networks asynchronously for instance. {{(pid=63279) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1937.101291] env[63279]: DEBUG oslo_concurrency.lockutils [None req-0a937d49-2bb5-4c2d-b540-fcaea2c0fe97 tempest-DeleteServersAdminTestJSON-778262859 tempest-DeleteServersAdminTestJSON-778262859-project-member] Releasing lock "refresh_cache-81103d53-99fe-4d1a-816f-7685c59c80ee" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1937.103093] env[63279]: DEBUG nova.compute.manager [None req-0a937d49-2bb5-4c2d-b540-fcaea2c0fe97 tempest-DeleteServersAdminTestJSON-778262859 tempest-DeleteServersAdminTestJSON-778262859-project-member] [instance: 81103d53-99fe-4d1a-816f-7685c59c80ee] Instance network_info: |[{"id": "b059c76d-75a5-432c-b648-12c287223d83", "address": "fa:16:3e:bf:a3:83", "network": {"id": "1e875730-8e0e-4907-a6d2-776025ed7ab9", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.63", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "a935e86eee0a4c38adfe0367d2097a61", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "32028d02-abaa-4071-bc65-1460f5c772a8", "external-id": "nsx-vlan-transportzone-558", "segmentation_id": 558, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb059c76d-75", "ovs_interfaceid": "b059c76d-75a5-432c-b648-12c287223d83", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1937.103093] env[63279]: DEBUG oslo_concurrency.lockutils [None req-93bcea3c-40fd-4c56-b396-c1827de295f1 tempest-InstanceActionsNegativeTestJSON-35594143 tempest-InstanceActionsNegativeTestJSON-35594143-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 34.206s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1937.103680] env[63279]: DEBUG nova.objects.instance [None req-93bcea3c-40fd-4c56-b396-c1827de295f1 tempest-InstanceActionsNegativeTestJSON-35594143 tempest-InstanceActionsNegativeTestJSON-35594143-project-member] Lazy-loading 'resources' on Instance uuid 
a8107fa5-9e8b-41dd-9679-8e106a3496a5 {{(pid=63279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1937.108018] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-0a937d49-2bb5-4c2d-b540-fcaea2c0fe97 tempest-DeleteServersAdminTestJSON-778262859 tempest-DeleteServersAdminTestJSON-778262859-project-member] [instance: 81103d53-99fe-4d1a-816f-7685c59c80ee] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:bf:a3:83', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '32028d02-abaa-4071-bc65-1460f5c772a8', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b059c76d-75a5-432c-b648-12c287223d83', 'vif_model': 'vmxnet3'}] {{(pid=63279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1937.115013] env[63279]: DEBUG oslo.service.loopingcall [None req-0a937d49-2bb5-4c2d-b540-fcaea2c0fe97 tempest-DeleteServersAdminTestJSON-778262859 tempest-DeleteServersAdminTestJSON-778262859-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1937.115953] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 81103d53-99fe-4d1a-816f-7685c59c80ee] Creating VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1937.116314] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-af9e89c9-905c-47f0-80f2-4ecf8d19d833 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1937.140181] env[63279]: DEBUG nova.compute.manager [req-c994dd36-950f-4ded-aa86-bcdc7f6c4aed req-64e65a95-1622-4687-a97d-ed1053f197fa service nova] [instance: 81103d53-99fe-4d1a-816f-7685c59c80ee] Received event network-changed-b059c76d-75a5-432c-b648-12c287223d83 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 1937.140832] env[63279]: DEBUG nova.compute.manager [req-c994dd36-950f-4ded-aa86-bcdc7f6c4aed req-64e65a95-1622-4687-a97d-ed1053f197fa service nova] [instance: 81103d53-99fe-4d1a-816f-7685c59c80ee] Refreshing instance network info cache due to event network-changed-b059c76d-75a5-432c-b648-12c287223d83. 
{{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11655}} [ 1937.141274] env[63279]: DEBUG oslo_concurrency.lockutils [req-c994dd36-950f-4ded-aa86-bcdc7f6c4aed req-64e65a95-1622-4687-a97d-ed1053f197fa service nova] Acquiring lock "refresh_cache-81103d53-99fe-4d1a-816f-7685c59c80ee" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1937.141841] env[63279]: DEBUG oslo_concurrency.lockutils [req-c994dd36-950f-4ded-aa86-bcdc7f6c4aed req-64e65a95-1622-4687-a97d-ed1053f197fa service nova] Acquired lock "refresh_cache-81103d53-99fe-4d1a-816f-7685c59c80ee" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1937.141841] env[63279]: DEBUG nova.network.neutron [req-c994dd36-950f-4ded-aa86-bcdc7f6c4aed req-64e65a95-1622-4687-a97d-ed1053f197fa service nova] [instance: 81103d53-99fe-4d1a-816f-7685c59c80ee] Refreshing network info cache for port b059c76d-75a5-432c-b648-12c287223d83 {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1937.152074] env[63279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1937.152074] env[63279]: value = "task-2087030" [ 1937.152074] env[63279]: _type = "Task" [ 1937.152074] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1937.165813] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087030, 'name': CreateVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1937.196209] env[63279]: DEBUG oslo_vmware.api [None req-e6033c0c-bf3a-4f54-ac44-9ee1b876d8b8 tempest-InstanceActionsTestJSON-1583999850 tempest-InstanceActionsTestJSON-1583999850-project-member] Task: {'id': task-2087029, 'name': PowerOnVM_Task} progress is 87%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1937.326467] env[63279]: DEBUG nova.network.neutron [-] [instance: 977fa519-2db3-4ee5-981d-c46820a8c72e] Updating instance_info_cache with network_info: [] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1937.427904] env[63279]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-468c0943-57d6-41ec-9379-1da0122eb069 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1937.437527] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27b22fe1-a765-4804-a87f-f9d85e91f5a1 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1937.478420] env[63279]: DEBUG nova.compute.manager [req-4f914b41-ab63-494a-95b9-36ccead7abf7 req-268fe330-12a9-4eef-8844-a44ff4c08b36 service nova] [instance: 977fa519-2db3-4ee5-981d-c46820a8c72e] Detach interface failed, port_id=a243284f-0df1-41a1-b8e4-a8d486dd934e, reason: Instance 977fa519-2db3-4ee5-981d-c46820a8c72e could not be found. 
{{(pid=63279) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11484}} [ 1937.550041] env[63279]: DEBUG nova.network.neutron [None req-42b39ba1-997a-4f62-aa19-97a6fd24e99a tempest-ServerActionsV293TestJSON-360186716 tempest-ServerActionsV293TestJSON-360186716-project-member] [instance: f375b54b-f9de-4529-b752-52c240aed532] Successfully updated port: 7dc16370-0621-49ff-9730-abdfd18ff164 {{(pid=63279) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1937.589537] env[63279]: DEBUG oslo_vmware.api [None req-e9eb7f67-3e31-4b74-ba7f-946bf6f2c529 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] Task: {'id': task-2087025, 'name': CloneVM_Task} progress is 100%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1937.611563] env[63279]: DEBUG nova.compute.utils [None req-4a8933e7-208a-443d-9249-2440f3ab60b6 tempest-ServerMetadataNegativeTestJSON-1632547526 tempest-ServerMetadataNegativeTestJSON-1632547526-project-member] Using /dev/sd instead of None {{(pid=63279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1937.613295] env[63279]: DEBUG nova.compute.manager [None req-4a8933e7-208a-443d-9249-2440f3ab60b6 tempest-ServerMetadataNegativeTestJSON-1632547526 tempest-ServerMetadataNegativeTestJSON-1632547526-project-member] [instance: e04f06de-da6a-4562-a50a-ff16bf3a006e] Allocating IP information in the background. {{(pid=63279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1937.613533] env[63279]: DEBUG nova.network.neutron [None req-4a8933e7-208a-443d-9249-2440f3ab60b6 tempest-ServerMetadataNegativeTestJSON-1632547526 tempest-ServerMetadataNegativeTestJSON-1632547526-project-member] [instance: e04f06de-da6a-4562-a50a-ff16bf3a006e] allocate_for_instance() {{(pid=63279) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1937.663340] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087030, 'name': CreateVM_Task} progress is 99%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1937.671020] env[63279]: DEBUG nova.policy [None req-4a8933e7-208a-443d-9249-2440f3ab60b6 tempest-ServerMetadataNegativeTestJSON-1632547526 tempest-ServerMetadataNegativeTestJSON-1632547526-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1174834987d2495981d95919dbc33604', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '899d2a5422ed4fdcae11793be5f03f04', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63279) authorize /opt/stack/nova/nova/policy.py:192}} [ 1937.695425] env[63279]: DEBUG oslo_vmware.api [None req-e6033c0c-bf3a-4f54-ac44-9ee1b876d8b8 tempest-InstanceActionsTestJSON-1583999850 tempest-InstanceActionsTestJSON-1583999850-project-member] Task: {'id': task-2087029, 'name': PowerOnVM_Task, 'duration_secs': 0.624834} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1937.701718] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-e6033c0c-bf3a-4f54-ac44-9ee1b876d8b8 tempest-InstanceActionsTestJSON-1583999850 tempest-InstanceActionsTestJSON-1583999850-project-member] [instance: de543869-8ab1-40ed-8f6d-dc506c257843] Powered on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1937.701957] env[63279]: INFO nova.compute.manager [None req-e6033c0c-bf3a-4f54-ac44-9ee1b876d8b8 tempest-InstanceActionsTestJSON-1583999850 tempest-InstanceActionsTestJSON-1583999850-project-member] [instance: de543869-8ab1-40ed-8f6d-dc506c257843] Took 8.01 seconds to spawn the instance on the hypervisor. [ 1937.702160] env[63279]: DEBUG nova.compute.manager [None req-e6033c0c-bf3a-4f54-ac44-9ee1b876d8b8 tempest-InstanceActionsTestJSON-1583999850 tempest-InstanceActionsTestJSON-1583999850-project-member] [instance: de543869-8ab1-40ed-8f6d-dc506c257843] Checking state {{(pid=63279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1937.703550] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58042537-5b76-4713-a4ef-b2c5bc9f44bb {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1937.833321] env[63279]: INFO nova.compute.manager [-] [instance: 977fa519-2db3-4ee5-981d-c46820a8c72e] Took 1.51 seconds to deallocate network for instance. [ 1938.016819] env[63279]: DEBUG nova.network.neutron [None req-4a8933e7-208a-443d-9249-2440f3ab60b6 tempest-ServerMetadataNegativeTestJSON-1632547526 tempest-ServerMetadataNegativeTestJSON-1632547526-project-member] [instance: e04f06de-da6a-4562-a50a-ff16bf3a006e] Successfully created port: 79f1cbee-348d-4f65-807f-291c2a96aa88 {{(pid=63279) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1938.053179] env[63279]: DEBUG oslo_concurrency.lockutils [None req-42b39ba1-997a-4f62-aa19-97a6fd24e99a tempest-ServerActionsV293TestJSON-360186716 tempest-ServerActionsV293TestJSON-360186716-project-member] Acquiring lock "refresh_cache-f375b54b-f9de-4529-b752-52c240aed532" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1938.053179] env[63279]: DEBUG oslo_concurrency.lockutils [None req-42b39ba1-997a-4f62-aa19-97a6fd24e99a tempest-ServerActionsV293TestJSON-360186716 tempest-ServerActionsV293TestJSON-360186716-project-member] Acquired lock "refresh_cache-f375b54b-f9de-4529-b752-52c240aed532" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1938.053179] env[63279]: DEBUG nova.network.neutron [None req-42b39ba1-997a-4f62-aa19-97a6fd24e99a tempest-ServerActionsV293TestJSON-360186716 tempest-ServerActionsV293TestJSON-360186716-project-member] [instance: f375b54b-f9de-4529-b752-52c240aed532] Building network info cache for instance {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1938.085761] env[63279]: DEBUG nova.network.neutron [req-c994dd36-950f-4ded-aa86-bcdc7f6c4aed req-64e65a95-1622-4687-a97d-ed1053f197fa service nova] [instance: 81103d53-99fe-4d1a-816f-7685c59c80ee] Updated VIF entry in instance network info cache for port b059c76d-75a5-432c-b648-12c287223d83. 
{{(pid=63279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1938.086111] env[63279]: DEBUG nova.network.neutron [req-c994dd36-950f-4ded-aa86-bcdc7f6c4aed req-64e65a95-1622-4687-a97d-ed1053f197fa service nova] [instance: 81103d53-99fe-4d1a-816f-7685c59c80ee] Updating instance_info_cache with network_info: [{"id": "b059c76d-75a5-432c-b648-12c287223d83", "address": "fa:16:3e:bf:a3:83", "network": {"id": "1e875730-8e0e-4907-a6d2-776025ed7ab9", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.63", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "a935e86eee0a4c38adfe0367d2097a61", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "32028d02-abaa-4071-bc65-1460f5c772a8", "external-id": "nsx-vlan-transportzone-558", "segmentation_id": 558, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb059c76d-75", "ovs_interfaceid": "b059c76d-75a5-432c-b648-12c287223d83", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1938.094601] env[63279]: DEBUG oslo_vmware.api [None req-e9eb7f67-3e31-4b74-ba7f-946bf6f2c529 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] Task: {'id': task-2087025, 'name': CloneVM_Task, 'duration_secs': 2.039658} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1938.095237] env[63279]: INFO nova.virt.vmwareapi.vmops [None req-e9eb7f67-3e31-4b74-ba7f-946bf6f2c529 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] [instance: 5bb445d3-1b12-4a1b-ad2a-cbc929b13aee] Created linked-clone VM from snapshot [ 1938.096435] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d106e358-d5d0-41f7-b6be-602818258929 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1938.112980] env[63279]: DEBUG nova.virt.vmwareapi.images [None req-e9eb7f67-3e31-4b74-ba7f-946bf6f2c529 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] [instance: 5bb445d3-1b12-4a1b-ad2a-cbc929b13aee] Uploading image f5563867-31c5-4766-b374-f1fdf29bdd84 {{(pid=63279) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1938.117790] env[63279]: DEBUG nova.compute.manager [None req-4a8933e7-208a-443d-9249-2440f3ab60b6 tempest-ServerMetadataNegativeTestJSON-1632547526 tempest-ServerMetadataNegativeTestJSON-1632547526-project-member] [instance: e04f06de-da6a-4562-a50a-ff16bf3a006e] Start building block device mappings for instance. 
{{(pid=63279) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1938.140289] env[63279]: DEBUG oslo_vmware.rw_handles [None req-e9eb7f67-3e31-4b74-ba7f-946bf6f2c529 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1938.140289] env[63279]: value = "vm-427594" [ 1938.140289] env[63279]: _type = "VirtualMachine" [ 1938.140289] env[63279]: }. {{(pid=63279) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1938.141044] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-68d1a936-3f22-4619-8cc5-eeb30a745515 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1938.150346] env[63279]: DEBUG oslo_vmware.rw_handles [None req-e9eb7f67-3e31-4b74-ba7f-946bf6f2c529 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] Lease: (returnval){ [ 1938.150346] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]523ccd4d-d6cb-2e83-c2ef-e257edf82669" [ 1938.150346] env[63279]: _type = "HttpNfcLease" [ 1938.150346] env[63279]: } obtained for exporting VM: (result){ [ 1938.150346] env[63279]: value = "vm-427594" [ 1938.150346] env[63279]: _type = "VirtualMachine" [ 1938.150346] env[63279]: }. {{(pid=63279) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1938.151254] env[63279]: DEBUG oslo_vmware.api [None req-e9eb7f67-3e31-4b74-ba7f-946bf6f2c529 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] Waiting for the lease: (returnval){ [ 1938.151254] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]523ccd4d-d6cb-2e83-c2ef-e257edf82669" [ 1938.151254] env[63279]: _type = "HttpNfcLease" [ 1938.151254] env[63279]: } to be ready. {{(pid=63279) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1938.167993] env[63279]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1938.167993] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]523ccd4d-d6cb-2e83-c2ef-e257edf82669" [ 1938.167993] env[63279]: _type = "HttpNfcLease" [ 1938.167993] env[63279]: } is initializing. {{(pid=63279) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1938.168515] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087030, 'name': CreateVM_Task, 'duration_secs': 0.586119} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1938.168642] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 81103d53-99fe-4d1a-816f-7685c59c80ee] Created VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1938.169437] env[63279]: DEBUG oslo_concurrency.lockutils [None req-0a937d49-2bb5-4c2d-b540-fcaea2c0fe97 tempest-DeleteServersAdminTestJSON-778262859 tempest-DeleteServersAdminTestJSON-778262859-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1938.169626] env[63279]: DEBUG oslo_concurrency.lockutils [None req-0a937d49-2bb5-4c2d-b540-fcaea2c0fe97 tempest-DeleteServersAdminTestJSON-778262859 tempest-DeleteServersAdminTestJSON-778262859-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1938.169947] env[63279]: DEBUG oslo_concurrency.lockutils [None req-0a937d49-2bb5-4c2d-b540-fcaea2c0fe97 tempest-DeleteServersAdminTestJSON-778262859 tempest-DeleteServersAdminTestJSON-778262859-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1938.170460] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3760208b-01b8-418a-ad18-41e230291ab5 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1938.175894] env[63279]: DEBUG oslo_vmware.api [None req-0a937d49-2bb5-4c2d-b540-fcaea2c0fe97 tempest-DeleteServersAdminTestJSON-778262859 tempest-DeleteServersAdminTestJSON-778262859-project-member] Waiting for the task: (returnval){ [ 1938.175894] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52a0ebdf-5901-864a-4b81-2bc02ca56253" [ 1938.175894] env[63279]: _type = "Task" [ 1938.175894] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1938.186878] env[63279]: DEBUG oslo_vmware.api [None req-0a937d49-2bb5-4c2d-b540-fcaea2c0fe97 tempest-DeleteServersAdminTestJSON-778262859 tempest-DeleteServersAdminTestJSON-778262859-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52a0ebdf-5901-864a-4b81-2bc02ca56253, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1938.224803] env[63279]: DEBUG nova.compute.manager [None req-42b39ba1-997a-4f62-aa19-97a6fd24e99a tempest-ServerActionsV293TestJSON-360186716 tempest-ServerActionsV293TestJSON-360186716-project-member] [instance: f375b54b-f9de-4529-b752-52c240aed532] Start spawning the instance on the hypervisor. 
{{(pid=63279) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1938.224803] env[63279]: DEBUG nova.virt.hardware [None req-42b39ba1-997a-4f62-aa19-97a6fd24e99a tempest-ServerActionsV293TestJSON-360186716 tempest-ServerActionsV293TestJSON-360186716-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-13T17:30:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=0,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=1073741824,status='active',tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1938.224803] env[63279]: DEBUG nova.virt.hardware [None req-42b39ba1-997a-4f62-aa19-97a6fd24e99a tempest-ServerActionsV293TestJSON-360186716 tempest-ServerActionsV293TestJSON-360186716-project-member] Flavor limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1938.224803] env[63279]: DEBUG nova.virt.hardware [None req-42b39ba1-997a-4f62-aa19-97a6fd24e99a tempest-ServerActionsV293TestJSON-360186716 tempest-ServerActionsV293TestJSON-360186716-project-member] Image limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1938.224986] env[63279]: DEBUG nova.virt.hardware [None req-42b39ba1-997a-4f62-aa19-97a6fd24e99a tempest-ServerActionsV293TestJSON-360186716 tempest-ServerActionsV293TestJSON-360186716-project-member] Flavor pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1938.224986] env[63279]: DEBUG nova.virt.hardware [None req-42b39ba1-997a-4f62-aa19-97a6fd24e99a tempest-ServerActionsV293TestJSON-360186716 tempest-ServerActionsV293TestJSON-360186716-project-member] Image pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1938.224986] env[63279]: DEBUG nova.virt.hardware [None req-42b39ba1-997a-4f62-aa19-97a6fd24e99a tempest-ServerActionsV293TestJSON-360186716 tempest-ServerActionsV293TestJSON-360186716-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1938.224986] env[63279]: DEBUG nova.virt.hardware [None req-42b39ba1-997a-4f62-aa19-97a6fd24e99a tempest-ServerActionsV293TestJSON-360186716 tempest-ServerActionsV293TestJSON-360186716-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1938.225164] env[63279]: DEBUG nova.virt.hardware [None req-42b39ba1-997a-4f62-aa19-97a6fd24e99a tempest-ServerActionsV293TestJSON-360186716 tempest-ServerActionsV293TestJSON-360186716-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1938.225261] env[63279]: DEBUG nova.virt.hardware [None req-42b39ba1-997a-4f62-aa19-97a6fd24e99a tempest-ServerActionsV293TestJSON-360186716 tempest-ServerActionsV293TestJSON-360186716-project-member] Got 1 
possible topologies {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1938.225424] env[63279]: DEBUG nova.virt.hardware [None req-42b39ba1-997a-4f62-aa19-97a6fd24e99a tempest-ServerActionsV293TestJSON-360186716 tempest-ServerActionsV293TestJSON-360186716-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1938.225730] env[63279]: DEBUG nova.virt.hardware [None req-42b39ba1-997a-4f62-aa19-97a6fd24e99a tempest-ServerActionsV293TestJSON-360186716 tempest-ServerActionsV293TestJSON-360186716-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1938.230693] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b22faec-5182-4cb3-a7d0-6e93773c912d {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1938.233558] env[63279]: INFO nova.compute.manager [None req-e6033c0c-bf3a-4f54-ac44-9ee1b876d8b8 tempest-InstanceActionsTestJSON-1583999850 tempest-InstanceActionsTestJSON-1583999850-project-member] [instance: de543869-8ab1-40ed-8f6d-dc506c257843] Took 51.12 seconds to build instance. [ 1938.241531] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8763f9c5-f559-40e8-b7a6-b4c63815ee96 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1938.306397] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33c25579-0b8a-4b89-9678-c610995daf39 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1938.312684] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f444fa4-d311-4818-a3fe-be489dff548a {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1938.355651] env[63279]: DEBUG oslo_concurrency.lockutils [None req-bd29b348-af39-4f7a-bdf6-c730291d8081 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1938.356730] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b57a8567-8aff-4194-90b0-b1ad79e6ad17 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1938.364951] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-747b96f7-1c33-427b-8646-8f9c34831ad6 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1938.380699] env[63279]: DEBUG nova.compute.provider_tree [None req-93bcea3c-40fd-4c56-b396-c1827de295f1 tempest-InstanceActionsNegativeTestJSON-35594143 tempest-InstanceActionsNegativeTestJSON-35594143-project-member] Inventory has not changed in ProviderTree for provider: 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) update_inventory 
/opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1938.590207] env[63279]: DEBUG oslo_concurrency.lockutils [req-c994dd36-950f-4ded-aa86-bcdc7f6c4aed req-64e65a95-1622-4687-a97d-ed1053f197fa service nova] Releasing lock "refresh_cache-81103d53-99fe-4d1a-816f-7685c59c80ee" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1938.626397] env[63279]: DEBUG nova.network.neutron [None req-42b39ba1-997a-4f62-aa19-97a6fd24e99a tempest-ServerActionsV293TestJSON-360186716 tempest-ServerActionsV293TestJSON-360186716-project-member] [instance: f375b54b-f9de-4529-b752-52c240aed532] Instance cache missing network info. {{(pid=63279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1938.660947] env[63279]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1938.660947] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]523ccd4d-d6cb-2e83-c2ef-e257edf82669" [ 1938.660947] env[63279]: _type = "HttpNfcLease" [ 1938.660947] env[63279]: } is ready. {{(pid=63279) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1938.661960] env[63279]: DEBUG oslo_vmware.rw_handles [None req-e9eb7f67-3e31-4b74-ba7f-946bf6f2c529 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1938.661960] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]523ccd4d-d6cb-2e83-c2ef-e257edf82669" [ 1938.661960] env[63279]: _type = "HttpNfcLease" [ 1938.661960] env[63279]: }. {{(pid=63279) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1938.661960] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4a64415-0a54-4f20-8f18-d488d1b7d052 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1938.670397] env[63279]: DEBUG oslo_vmware.rw_handles [None req-e9eb7f67-3e31-4b74-ba7f-946bf6f2c529 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/520a359c-b81b-0031-5fef-ff578add4e25/disk-0.vmdk from lease info. {{(pid=63279) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1938.670617] env[63279]: DEBUG oslo_vmware.rw_handles [None req-e9eb7f67-3e31-4b74-ba7f-946bf6f2c529 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/520a359c-b81b-0031-5fef-ff578add4e25/disk-0.vmdk for reading. 
{{(pid=63279) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1938.738716] env[63279]: DEBUG oslo_concurrency.lockutils [None req-e6033c0c-bf3a-4f54-ac44-9ee1b876d8b8 tempest-InstanceActionsTestJSON-1583999850 tempest-InstanceActionsTestJSON-1583999850-project-member] Lock "de543869-8ab1-40ed-8f6d-dc506c257843" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 91.669s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1938.749046] env[63279]: DEBUG oslo_vmware.api [None req-0a937d49-2bb5-4c2d-b540-fcaea2c0fe97 tempest-DeleteServersAdminTestJSON-778262859 tempest-DeleteServersAdminTestJSON-778262859-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52a0ebdf-5901-864a-4b81-2bc02ca56253, 'name': SearchDatastore_Task, 'duration_secs': 0.012692} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1938.749363] env[63279]: DEBUG oslo_concurrency.lockutils [None req-0a937d49-2bb5-4c2d-b540-fcaea2c0fe97 tempest-DeleteServersAdminTestJSON-778262859 tempest-DeleteServersAdminTestJSON-778262859-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1938.749601] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-0a937d49-2bb5-4c2d-b540-fcaea2c0fe97 tempest-DeleteServersAdminTestJSON-778262859 tempest-DeleteServersAdminTestJSON-778262859-project-member] [instance: 81103d53-99fe-4d1a-816f-7685c59c80ee] Processing image 30887889-e45b-4f67-8b3c-16216e594a90 {{(pid=63279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1938.749835] env[63279]: DEBUG oslo_concurrency.lockutils [None req-0a937d49-2bb5-4c2d-b540-fcaea2c0fe97 tempest-DeleteServersAdminTestJSON-778262859 tempest-DeleteServersAdminTestJSON-778262859-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1938.749985] env[63279]: DEBUG oslo_concurrency.lockutils [None req-0a937d49-2bb5-4c2d-b540-fcaea2c0fe97 tempest-DeleteServersAdminTestJSON-778262859 tempest-DeleteServersAdminTestJSON-778262859-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1938.754065] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-0a937d49-2bb5-4c2d-b540-fcaea2c0fe97 tempest-DeleteServersAdminTestJSON-778262859 tempest-DeleteServersAdminTestJSON-778262859-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1938.754065] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9f0c2d56-01c6-479b-b958-6a1d504bb36c {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1938.760124] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-0a937d49-2bb5-4c2d-b540-fcaea2c0fe97
tempest-DeleteServersAdminTestJSON-778262859 tempest-DeleteServersAdminTestJSON-778262859-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1938.760312] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-0a937d49-2bb5-4c2d-b540-fcaea2c0fe97 tempest-DeleteServersAdminTestJSON-778262859 tempest-DeleteServersAdminTestJSON-778262859-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1938.761323] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b273eb40-33a1-40e7-8c3e-5b656530c244 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1938.766953] env[63279]: DEBUG oslo_vmware.api [None req-0a937d49-2bb5-4c2d-b540-fcaea2c0fe97 tempest-DeleteServersAdminTestJSON-778262859 tempest-DeleteServersAdminTestJSON-778262859-project-member] Waiting for the task: (returnval){ [ 1938.766953] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52783da5-faa9-c279-2097-b807d2910fc5" [ 1938.766953] env[63279]: _type = "Task" [ 1938.766953] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1938.779154] env[63279]: DEBUG oslo_vmware.api [None req-0a937d49-2bb5-4c2d-b540-fcaea2c0fe97 tempest-DeleteServersAdminTestJSON-778262859 tempest-DeleteServersAdminTestJSON-778262859-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52783da5-faa9-c279-2097-b807d2910fc5, 'name': SearchDatastore_Task, 'duration_secs': 0.009439} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1938.782097] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4e8fca88-8405-4be3-a0f1-c83f4664f800 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1938.784434] env[63279]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-8b572f73-9f15-4dc4-8cb1-9c7e3e85256e {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1938.789296] env[63279]: DEBUG oslo_vmware.api [None req-0a937d49-2bb5-4c2d-b540-fcaea2c0fe97 tempest-DeleteServersAdminTestJSON-778262859 tempest-DeleteServersAdminTestJSON-778262859-project-member] Waiting for the task: (returnval){ [ 1938.789296] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]5276eac2-b4c4-7836-514e-05e47548af2f" [ 1938.789296] env[63279]: _type = "Task" [ 1938.789296] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1938.798460] env[63279]: DEBUG oslo_vmware.api [None req-0a937d49-2bb5-4c2d-b540-fcaea2c0fe97 tempest-DeleteServersAdminTestJSON-778262859 tempest-DeleteServersAdminTestJSON-778262859-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]5276eac2-b4c4-7836-514e-05e47548af2f, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1938.884388] env[63279]: DEBUG nova.scheduler.client.report [None req-93bcea3c-40fd-4c56-b396-c1827de295f1 tempest-InstanceActionsNegativeTestJSON-35594143 tempest-InstanceActionsNegativeTestJSON-35594143-project-member] Inventory has not changed for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1938.901105] env[63279]: DEBUG nova.network.neutron [None req-42b39ba1-997a-4f62-aa19-97a6fd24e99a tempest-ServerActionsV293TestJSON-360186716 tempest-ServerActionsV293TestJSON-360186716-project-member] [instance: f375b54b-f9de-4529-b752-52c240aed532] Updating instance_info_cache with network_info: [{"id": "7dc16370-0621-49ff-9730-abdfd18ff164", "address": "fa:16:3e:a3:62:97", "network": {"id": "6ac200ff-e42c-439c-8fa3-3b5e08a5d5e2", "bridge": "br-int", "label": "tempest-ServerActionsV293TestJSON-1487576264-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "691f1159e7af42429cca85900fac343d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69f65356-c85e-4b7f-ad28-7c7b5e8cf50c", "external-id": "nsx-vlan-transportzone-281", "segmentation_id": 281, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7dc16370-06", "ovs_interfaceid": "7dc16370-0621-49ff-9730-abdfd18ff164", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1939.051764] env[63279]: DEBUG oslo_concurrency.lockutils [None req-23343be8-6e4e-4eb3-878f-52f6187e35e7 tempest-InstanceActionsTestJSON-1583999850 tempest-InstanceActionsTestJSON-1583999850-project-member] Acquiring lock "de543869-8ab1-40ed-8f6d-dc506c257843" by "nova.compute.manager.ComputeManager.reboot_instance.<locals>.do_reboot_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1939.052092] env[63279]: DEBUG oslo_concurrency.lockutils [None req-23343be8-6e4e-4eb3-878f-52f6187e35e7 tempest-InstanceActionsTestJSON-1583999850 tempest-InstanceActionsTestJSON-1583999850-project-member] Lock "de543869-8ab1-40ed-8f6d-dc506c257843" acquired by "nova.compute.manager.ComputeManager.reboot_instance.<locals>.do_reboot_instance" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1939.052400] env[63279]: INFO nova.compute.manager [None req-23343be8-6e4e-4eb3-878f-52f6187e35e7 tempest-InstanceActionsTestJSON-1583999850
tempest-InstanceActionsTestJSON-1583999850-project-member] [instance: de543869-8ab1-40ed-8f6d-dc506c257843] Rebooting instance [ 1939.131387] env[63279]: DEBUG nova.compute.manager [None req-4a8933e7-208a-443d-9249-2440f3ab60b6 tempest-ServerMetadataNegativeTestJSON-1632547526 tempest-ServerMetadataNegativeTestJSON-1632547526-project-member] [instance: e04f06de-da6a-4562-a50a-ff16bf3a006e] Start spawning the instance on the hypervisor. {{(pid=63279) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1939.163685] env[63279]: DEBUG nova.virt.hardware [None req-4a8933e7-208a-443d-9249-2440f3ab60b6 tempest-ServerMetadataNegativeTestJSON-1632547526 tempest-ServerMetadataNegativeTestJSON-1632547526-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-13T17:30:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-13T17:30:01Z,direct_url=,disk_format='vmdk',id=30887889-e45b-4f67-8b3c-16216e594a90,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a935e86eee0a4c38adfe0367d2097a61',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-13T17:30:02Z,virtual_size=,visibility=), allow threads: False {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1939.165286] env[63279]: DEBUG nova.virt.hardware [None req-4a8933e7-208a-443d-9249-2440f3ab60b6 tempest-ServerMetadataNegativeTestJSON-1632547526 tempest-ServerMetadataNegativeTestJSON-1632547526-project-member] Flavor limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1939.166240] env[63279]: DEBUG nova.virt.hardware [None req-4a8933e7-208a-443d-9249-2440f3ab60b6 tempest-ServerMetadataNegativeTestJSON-1632547526 tempest-ServerMetadataNegativeTestJSON-1632547526-project-member] Image limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1939.166240] env[63279]: DEBUG nova.virt.hardware [None req-4a8933e7-208a-443d-9249-2440f3ab60b6 tempest-ServerMetadataNegativeTestJSON-1632547526 tempest-ServerMetadataNegativeTestJSON-1632547526-project-member] Flavor pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1939.166240] env[63279]: DEBUG nova.virt.hardware [None req-4a8933e7-208a-443d-9249-2440f3ab60b6 tempest-ServerMetadataNegativeTestJSON-1632547526 tempest-ServerMetadataNegativeTestJSON-1632547526-project-member] Image pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1939.166240] env[63279]: DEBUG nova.virt.hardware [None req-4a8933e7-208a-443d-9249-2440f3ab60b6 tempest-ServerMetadataNegativeTestJSON-1632547526 tempest-ServerMetadataNegativeTestJSON-1632547526-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1939.166240] env[63279]: DEBUG nova.virt.hardware [None req-4a8933e7-208a-443d-9249-2440f3ab60b6 tempest-ServerMetadataNegativeTestJSON-1632547526 tempest-ServerMetadataNegativeTestJSON-1632547526-project-member] Topology preferred 
VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1939.166461] env[63279]: DEBUG nova.virt.hardware [None req-4a8933e7-208a-443d-9249-2440f3ab60b6 tempest-ServerMetadataNegativeTestJSON-1632547526 tempest-ServerMetadataNegativeTestJSON-1632547526-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1939.166596] env[63279]: DEBUG nova.virt.hardware [None req-4a8933e7-208a-443d-9249-2440f3ab60b6 tempest-ServerMetadataNegativeTestJSON-1632547526 tempest-ServerMetadataNegativeTestJSON-1632547526-project-member] Got 1 possible topologies {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1939.166819] env[63279]: DEBUG nova.virt.hardware [None req-4a8933e7-208a-443d-9249-2440f3ab60b6 tempest-ServerMetadataNegativeTestJSON-1632547526 tempest-ServerMetadataNegativeTestJSON-1632547526-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1939.166987] env[63279]: DEBUG nova.virt.hardware [None req-4a8933e7-208a-443d-9249-2440f3ab60b6 tempest-ServerMetadataNegativeTestJSON-1632547526 tempest-ServerMetadataNegativeTestJSON-1632547526-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1939.168133] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb8a8cd8-62b0-4ee7-82e4-de78ac041f56 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1939.177250] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8194a335-8add-464d-982f-41deb5ec6986 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1939.246397] env[63279]: DEBUG nova.compute.manager [None req-930e8b72-e313-47d3-971d-85e95e45c359 tempest-ServersListShow296Test-756193091 tempest-ServersListShow296Test-756193091-project-member] [instance: d7eea629-0c82-4f56-8a6c-86d18a70814d] Starting instance... {{(pid=63279) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1939.303883] env[63279]: DEBUG oslo_vmware.api [None req-0a937d49-2bb5-4c2d-b540-fcaea2c0fe97 tempest-DeleteServersAdminTestJSON-778262859 tempest-DeleteServersAdminTestJSON-778262859-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]5276eac2-b4c4-7836-514e-05e47548af2f, 'name': SearchDatastore_Task, 'duration_secs': 0.00917} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1939.304310] env[63279]: DEBUG oslo_concurrency.lockutils [None req-0a937d49-2bb5-4c2d-b540-fcaea2c0fe97 tempest-DeleteServersAdminTestJSON-778262859 tempest-DeleteServersAdminTestJSON-778262859-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1939.305820] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-0a937d49-2bb5-4c2d-b540-fcaea2c0fe97 tempest-DeleteServersAdminTestJSON-778262859 tempest-DeleteServersAdminTestJSON-778262859-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] 81103d53-99fe-4d1a-816f-7685c59c80ee/81103d53-99fe-4d1a-816f-7685c59c80ee.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1939.306182] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c6743445-774d-4847-88c4-94e0f735e000 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1939.316652] env[63279]: DEBUG oslo_vmware.api [None req-0a937d49-2bb5-4c2d-b540-fcaea2c0fe97 tempest-DeleteServersAdminTestJSON-778262859 tempest-DeleteServersAdminTestJSON-778262859-project-member] Waiting for the task: (returnval){ [ 1939.316652] env[63279]: value = "task-2087032" [ 1939.316652] env[63279]: _type = "Task" [ 1939.316652] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1939.332155] env[63279]: DEBUG oslo_vmware.api [None req-0a937d49-2bb5-4c2d-b540-fcaea2c0fe97 tempest-DeleteServersAdminTestJSON-778262859 tempest-DeleteServersAdminTestJSON-778262859-project-member] Task: {'id': task-2087032, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1939.390347] env[63279]: DEBUG oslo_concurrency.lockutils [None req-93bcea3c-40fd-4c56-b396-c1827de295f1 tempest-InstanceActionsNegativeTestJSON-35594143 tempest-InstanceActionsNegativeTestJSON-35594143-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.287s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1939.398725] env[63279]: DEBUG oslo_concurrency.lockutils [None req-1e3daa27-53b9-4834-adeb-4cde61720229 tempest-ServersNegativeTestMultiTenantJSON-98060902 tempest-ServersNegativeTestMultiTenantJSON-98060902-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 34.162s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1939.400039] env[63279]: INFO nova.compute.claims [None req-1e3daa27-53b9-4834-adeb-4cde61720229 tempest-ServersNegativeTestMultiTenantJSON-98060902 tempest-ServersNegativeTestMultiTenantJSON-98060902-project-member] [instance: cf1b70af-335d-404b-bb4f-fe082dd6f450] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1939.406561] env[63279]: DEBUG oslo_concurrency.lockutils [None req-42b39ba1-997a-4f62-aa19-97a6fd24e99a tempest-ServerActionsV293TestJSON-360186716 tempest-ServerActionsV293TestJSON-360186716-project-member] Releasing lock "refresh_cache-f375b54b-f9de-4529-b752-52c240aed532" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1939.407017] env[63279]: DEBUG nova.compute.manager [None req-42b39ba1-997a-4f62-aa19-97a6fd24e99a tempest-ServerActionsV293TestJSON-360186716 tempest-ServerActionsV293TestJSON-360186716-project-member] [instance: f375b54b-f9de-4529-b752-52c240aed532] Instance network_info: |[{"id": "7dc16370-0621-49ff-9730-abdfd18ff164", "address": "fa:16:3e:a3:62:97", "network": {"id": "6ac200ff-e42c-439c-8fa3-3b5e08a5d5e2", "bridge": "br-int", "label": "tempest-ServerActionsV293TestJSON-1487576264-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "691f1159e7af42429cca85900fac343d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69f65356-c85e-4b7f-ad28-7c7b5e8cf50c", "external-id": "nsx-vlan-transportzone-281", "segmentation_id": 281, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7dc16370-06", "ovs_interfaceid": "7dc16370-0621-49ff-9730-abdfd18ff164", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1939.407543] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-42b39ba1-997a-4f62-aa19-97a6fd24e99a tempest-ServerActionsV293TestJSON-360186716 tempest-ServerActionsV293TestJSON-360186716-project-member] [instance: f375b54b-f9de-4529-b752-52c240aed532] Instance VIF info 
[{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a3:62:97', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '69f65356-c85e-4b7f-ad28-7c7b5e8cf50c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '7dc16370-0621-49ff-9730-abdfd18ff164', 'vif_model': 'vmxnet3'}] {{(pid=63279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1939.417519] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-42b39ba1-997a-4f62-aa19-97a6fd24e99a tempest-ServerActionsV293TestJSON-360186716 tempest-ServerActionsV293TestJSON-360186716-project-member] Creating folder: Project (691f1159e7af42429cca85900fac343d). Parent ref: group-v427491. {{(pid=63279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1939.417960] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-31955d66-627a-4396-bbcf-8c353eda26b7 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1939.435370] env[63279]: WARNING suds.client [-] Web service reported a SOAP processing fault using an unexpected HTTP status code 200. Reporting as an internal server error. [ 1939.437032] env[63279]: DEBUG oslo_vmware.api [-] Fault list: [DuplicateName] {{(pid=63279) _invoke_api /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:337}} [ 1939.437250] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-42b39ba1-997a-4f62-aa19-97a6fd24e99a tempest-ServerActionsV293TestJSON-360186716 tempest-ServerActionsV293TestJSON-360186716-project-member] Folder already exists: Project (691f1159e7af42429cca85900fac343d). Parent ref: group-v427491. {{(pid=63279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1609}} [ 1939.437472] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-42b39ba1-997a-4f62-aa19-97a6fd24e99a tempest-ServerActionsV293TestJSON-360186716 tempest-ServerActionsV293TestJSON-360186716-project-member] Creating folder: Instances. Parent ref: group-v427526. {{(pid=63279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1939.437715] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-57462798-223d-4d68-826e-a91f2b2397c2 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1939.451972] env[63279]: INFO nova.scheduler.client.report [None req-93bcea3c-40fd-4c56-b396-c1827de295f1 tempest-InstanceActionsNegativeTestJSON-35594143 tempest-InstanceActionsNegativeTestJSON-35594143-project-member] Deleted allocations for instance a8107fa5-9e8b-41dd-9679-8e106a3496a5 [ 1939.458449] env[63279]: INFO nova.virt.vmwareapi.vm_util [None req-42b39ba1-997a-4f62-aa19-97a6fd24e99a tempest-ServerActionsV293TestJSON-360186716 tempest-ServerActionsV293TestJSON-360186716-project-member] Created folder: Instances in parent group-v427526. [ 1939.458677] env[63279]: DEBUG oslo.service.loopingcall [None req-42b39ba1-997a-4f62-aa19-97a6fd24e99a tempest-ServerActionsV293TestJSON-360186716 tempest-ServerActionsV293TestJSON-360186716-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1939.464749] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f375b54b-f9de-4529-b752-52c240aed532] Creating VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1939.465117] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b9159d8e-fde6-4b06-acdb-54220b1b3837 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1939.497368] env[63279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1939.497368] env[63279]: value = "task-2087035" [ 1939.497368] env[63279]: _type = "Task" [ 1939.497368] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1939.511025] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087035, 'name': CreateVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1939.592449] env[63279]: DEBUG oslo_concurrency.lockutils [None req-23343be8-6e4e-4eb3-878f-52f6187e35e7 tempest-InstanceActionsTestJSON-1583999850 tempest-InstanceActionsTestJSON-1583999850-project-member] Acquiring lock "refresh_cache-de543869-8ab1-40ed-8f6d-dc506c257843" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1939.592771] env[63279]: DEBUG oslo_concurrency.lockutils [None req-23343be8-6e4e-4eb3-878f-52f6187e35e7 tempest-InstanceActionsTestJSON-1583999850 tempest-InstanceActionsTestJSON-1583999850-project-member] Acquired lock "refresh_cache-de543869-8ab1-40ed-8f6d-dc506c257843" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1939.592841] env[63279]: DEBUG nova.network.neutron [None req-23343be8-6e4e-4eb3-878f-52f6187e35e7 tempest-InstanceActionsTestJSON-1583999850 tempest-InstanceActionsTestJSON-1583999850-project-member] [instance: de543869-8ab1-40ed-8f6d-dc506c257843] Building network info cache for instance {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1939.613027] env[63279]: DEBUG nova.compute.manager [req-a2ed70e3-0e14-4e1a-8040-372baa67b2f5 req-b22fdb53-c0d1-4195-822a-4a3f8a3d0695 service nova] [instance: f375b54b-f9de-4529-b752-52c240aed532] Received event network-vif-plugged-7dc16370-0621-49ff-9730-abdfd18ff164 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 1939.613271] env[63279]: DEBUG oslo_concurrency.lockutils [req-a2ed70e3-0e14-4e1a-8040-372baa67b2f5 req-b22fdb53-c0d1-4195-822a-4a3f8a3d0695 service nova] Acquiring lock "f375b54b-f9de-4529-b752-52c240aed532-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1939.614768] env[63279]: DEBUG oslo_concurrency.lockutils [req-a2ed70e3-0e14-4e1a-8040-372baa67b2f5 req-b22fdb53-c0d1-4195-822a-4a3f8a3d0695 service nova] Lock "f375b54b-f9de-4529-b752-52c240aed532-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.001s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1939.614960] env[63279]: DEBUG oslo_concurrency.lockutils [req-a2ed70e3-0e14-4e1a-8040-372baa67b2f5
req-b22fdb53-c0d1-4195-822a-4a3f8a3d0695 service nova] Lock "f375b54b-f9de-4529-b752-52c240aed532-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1939.615185] env[63279]: DEBUG nova.compute.manager [req-a2ed70e3-0e14-4e1a-8040-372baa67b2f5 req-b22fdb53-c0d1-4195-822a-4a3f8a3d0695 service nova] [instance: f375b54b-f9de-4529-b752-52c240aed532] No waiting events found dispatching network-vif-plugged-7dc16370-0621-49ff-9730-abdfd18ff164 {{(pid=63279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1939.615374] env[63279]: WARNING nova.compute.manager [req-a2ed70e3-0e14-4e1a-8040-372baa67b2f5 req-b22fdb53-c0d1-4195-822a-4a3f8a3d0695 service nova] [instance: f375b54b-f9de-4529-b752-52c240aed532] Received unexpected event network-vif-plugged-7dc16370-0621-49ff-9730-abdfd18ff164 for instance with vm_state building and task_state spawning. [ 1939.617146] env[63279]: DEBUG nova.compute.manager [req-a2ed70e3-0e14-4e1a-8040-372baa67b2f5 req-b22fdb53-c0d1-4195-822a-4a3f8a3d0695 service nova] [instance: f375b54b-f9de-4529-b752-52c240aed532] Received event network-changed-7dc16370-0621-49ff-9730-abdfd18ff164 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 1939.617146] env[63279]: DEBUG nova.compute.manager [req-a2ed70e3-0e14-4e1a-8040-372baa67b2f5 req-b22fdb53-c0d1-4195-822a-4a3f8a3d0695 service nova] [instance: f375b54b-f9de-4529-b752-52c240aed532] Refreshing instance network info cache due to event network-changed-7dc16370-0621-49ff-9730-abdfd18ff164. {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11655}} [ 1939.617146] env[63279]: DEBUG oslo_concurrency.lockutils [req-a2ed70e3-0e14-4e1a-8040-372baa67b2f5 req-b22fdb53-c0d1-4195-822a-4a3f8a3d0695 service nova] Acquiring lock "refresh_cache-f375b54b-f9de-4529-b752-52c240aed532" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1939.617146] env[63279]: DEBUG oslo_concurrency.lockutils [req-a2ed70e3-0e14-4e1a-8040-372baa67b2f5 req-b22fdb53-c0d1-4195-822a-4a3f8a3d0695 service nova] Acquired lock "refresh_cache-f375b54b-f9de-4529-b752-52c240aed532" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1939.617510] env[63279]: DEBUG nova.network.neutron [req-a2ed70e3-0e14-4e1a-8040-372baa67b2f5 req-b22fdb53-c0d1-4195-822a-4a3f8a3d0695 service nova] [instance: f375b54b-f9de-4529-b752-52c240aed532] Refreshing network info cache for port 7dc16370-0621-49ff-9730-abdfd18ff164 {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1939.751583] env[63279]: DEBUG nova.compute.manager [None req-930e8b72-e313-47d3-971d-85e95e45c359 tempest-ServersListShow296Test-756193091 tempest-ServersListShow296Test-756193091-project-member] [instance: d7eea629-0c82-4f56-8a6c-86d18a70814d] Instance disappeared before build. {{(pid=63279) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2446}} [ 1939.831507] env[63279]: DEBUG oslo_vmware.api [None req-0a937d49-2bb5-4c2d-b540-fcaea2c0fe97 tempest-DeleteServersAdminTestJSON-778262859 tempest-DeleteServersAdminTestJSON-778262859-project-member] Task: {'id': task-2087032, 'name': CopyVirtualDisk_Task} progress is 77%.
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1939.964907] env[63279]: DEBUG nova.network.neutron [None req-4a8933e7-208a-443d-9249-2440f3ab60b6 tempest-ServerMetadataNegativeTestJSON-1632547526 tempest-ServerMetadataNegativeTestJSON-1632547526-project-member] [instance: e04f06de-da6a-4562-a50a-ff16bf3a006e] Successfully updated port: 79f1cbee-348d-4f65-807f-291c2a96aa88 {{(pid=63279) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1939.999722] env[63279]: DEBUG oslo_concurrency.lockutils [None req-93bcea3c-40fd-4c56-b396-c1827de295f1 tempest-InstanceActionsNegativeTestJSON-35594143 tempest-InstanceActionsNegativeTestJSON-35594143-project-member] Lock "a8107fa5-9e8b-41dd-9679-8e106a3496a5" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 40.576s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1940.015024] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087035, 'name': CreateVM_Task} progress is 25%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1940.042774] env[63279]: DEBUG oslo_concurrency.lockutils [None req-38cbe7fd-87b8-43d7-9843-9491e5a539a0 tempest-InstanceActionsV221TestJSON-1895476841 tempest-InstanceActionsV221TestJSON-1895476841-project-member] Acquiring lock "ff9701ed-d545-44b4-911a-c4d809d0a771" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1940.043031] env[63279]: DEBUG oslo_concurrency.lockutils [None req-38cbe7fd-87b8-43d7-9843-9491e5a539a0 tempest-InstanceActionsV221TestJSON-1895476841 tempest-InstanceActionsV221TestJSON-1895476841-project-member] Lock "ff9701ed-d545-44b4-911a-c4d809d0a771" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1940.266388] env[63279]: DEBUG oslo_concurrency.lockutils [None req-930e8b72-e313-47d3-971d-85e95e45c359 tempest-ServersListShow296Test-756193091 tempest-ServersListShow296Test-756193091-project-member] Lock "d7eea629-0c82-4f56-8a6c-86d18a70814d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 70.471s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1940.332100] env[63279]: DEBUG oslo_vmware.api [None req-0a937d49-2bb5-4c2d-b540-fcaea2c0fe97 tempest-DeleteServersAdminTestJSON-778262859 tempest-DeleteServersAdminTestJSON-778262859-project-member] Task: {'id': task-2087032, 'name': CopyVirtualDisk_Task} progress is 100%.
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1940.441748] env[63279]: DEBUG nova.network.neutron [None req-23343be8-6e4e-4eb3-878f-52f6187e35e7 tempest-InstanceActionsTestJSON-1583999850 tempest-InstanceActionsTestJSON-1583999850-project-member] [instance: de543869-8ab1-40ed-8f6d-dc506c257843] Updating instance_info_cache with network_info: [{"id": "c8a557ed-47ef-46c1-a803-10a8150cb66a", "address": "fa:16:3e:80:45:7c", "network": {"id": "b0988280-189d-4f04-95fc-fd0a9b112434", "bridge": "br-int", "label": "tempest-InstanceActionsTestJSON-1478095758-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "685b5deb728448eb9eb023905d680288", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ac7039c0-3374-4c08-87fc-af2449b48b02", "external-id": "nsx-vlan-transportzone-592", "segmentation_id": 592, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc8a557ed-47", "ovs_interfaceid": "c8a557ed-47ef-46c1-a803-10a8150cb66a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1940.447081] env[63279]: DEBUG nova.network.neutron [req-a2ed70e3-0e14-4e1a-8040-372baa67b2f5 req-b22fdb53-c0d1-4195-822a-4a3f8a3d0695 service nova] [instance: f375b54b-f9de-4529-b752-52c240aed532] Updated VIF entry in instance network info cache for port 7dc16370-0621-49ff-9730-abdfd18ff164. 
{{(pid=63279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1940.447544] env[63279]: DEBUG nova.network.neutron [req-a2ed70e3-0e14-4e1a-8040-372baa67b2f5 req-b22fdb53-c0d1-4195-822a-4a3f8a3d0695 service nova] [instance: f375b54b-f9de-4529-b752-52c240aed532] Updating instance_info_cache with network_info: [{"id": "7dc16370-0621-49ff-9730-abdfd18ff164", "address": "fa:16:3e:a3:62:97", "network": {"id": "6ac200ff-e42c-439c-8fa3-3b5e08a5d5e2", "bridge": "br-int", "label": "tempest-ServerActionsV293TestJSON-1487576264-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "691f1159e7af42429cca85900fac343d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69f65356-c85e-4b7f-ad28-7c7b5e8cf50c", "external-id": "nsx-vlan-transportzone-281", "segmentation_id": 281, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7dc16370-06", "ovs_interfaceid": "7dc16370-0621-49ff-9730-abdfd18ff164", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1940.471692] env[63279]: DEBUG oslo_concurrency.lockutils [None req-4a8933e7-208a-443d-9249-2440f3ab60b6 tempest-ServerMetadataNegativeTestJSON-1632547526 tempest-ServerMetadataNegativeTestJSON-1632547526-project-member] Acquiring lock "refresh_cache-e04f06de-da6a-4562-a50a-ff16bf3a006e" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1940.471877] env[63279]: DEBUG oslo_concurrency.lockutils [None req-4a8933e7-208a-443d-9249-2440f3ab60b6 tempest-ServerMetadataNegativeTestJSON-1632547526 tempest-ServerMetadataNegativeTestJSON-1632547526-project-member] Acquired lock "refresh_cache-e04f06de-da6a-4562-a50a-ff16bf3a006e" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1940.472056] env[63279]: DEBUG nova.network.neutron [None req-4a8933e7-208a-443d-9249-2440f3ab60b6 tempest-ServerMetadataNegativeTestJSON-1632547526 tempest-ServerMetadataNegativeTestJSON-1632547526-project-member] [instance: e04f06de-da6a-4562-a50a-ff16bf3a006e] Building network info cache for instance {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1940.512657] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087035, 'name': CreateVM_Task, 'duration_secs': 0.683245} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1940.515968] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f375b54b-f9de-4529-b752-52c240aed532] Created VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1940.517597] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-42b39ba1-997a-4f62-aa19-97a6fd24e99a tempest-ServerActionsV293TestJSON-360186716 tempest-ServerActionsV293TestJSON-360186716-project-member] [instance: f375b54b-f9de-4529-b752-52c240aed532] Block device information present: {'root_device_name': '/dev/sda', 'image': [], 'ephemerals': [], 'block_device_mapping': [{'disk_bus': None, 'guest_format': None, 'mount_device': '/dev/sda', 'delete_on_termination': True, 'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-427536', 'volume_id': '29ec2591-5e66-4668-ba47-c425dc65074c', 'name': 'volume-29ec2591-5e66-4668-ba47-c425dc65074c', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'f375b54b-f9de-4529-b752-52c240aed532', 'attached_at': '', 'detached_at': '', 'volume_id': '29ec2591-5e66-4668-ba47-c425dc65074c', 'serial': '29ec2591-5e66-4668-ba47-c425dc65074c'}, 'boot_index': 0, 'device_type': None, 'attachment_id': '73ec3629-9586-4baa-a690-5a1fe1201d4b', 'volume_type': None}], 'swap': None} {{(pid=63279) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 1940.517597] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-42b39ba1-997a-4f62-aa19-97a6fd24e99a tempest-ServerActionsV293TestJSON-360186716 tempest-ServerActionsV293TestJSON-360186716-project-member] [instance: f375b54b-f9de-4529-b752-52c240aed532] Root volume attach. Driver type: vmdk {{(pid=63279) attach_root_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:661}} [ 1940.518342] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-762cc586-7507-4603-a50a-f427921cd8f7 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1940.527780] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9425f894-0aba-43c9-a7b5-ff5ac0809bfa {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1940.538085] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-606515d4-0229-444d-a6b5-e461800585fe {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1940.547693] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.RelocateVM_Task with opID=oslo.vmware-d526f789-dae2-4be1-ba17-b70a8134ea9f {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1940.556280] env[63279]: DEBUG oslo_vmware.api [None req-42b39ba1-997a-4f62-aa19-97a6fd24e99a tempest-ServerActionsV293TestJSON-360186716 tempest-ServerActionsV293TestJSON-360186716-project-member] Waiting for the task: (returnval){ [ 1940.556280] env[63279]: value = "task-2087036" [ 1940.556280] env[63279]: _type = "Task" [ 1940.556280] env[63279]: } to complete. 
{{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1940.569933] env[63279]: DEBUG oslo_vmware.api [None req-42b39ba1-997a-4f62-aa19-97a6fd24e99a tempest-ServerActionsV293TestJSON-360186716 tempest-ServerActionsV293TestJSON-360186716-project-member] Task: {'id': task-2087036, 'name': RelocateVM_Task} progress is 5%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1940.698024] env[63279]: DEBUG nova.objects.instance [None req-f96e6fd1-b8ca-440e-b887-29f99f8f271c tempest-AttachInterfacesUnderV243Test-18748603 tempest-AttachInterfacesUnderV243Test-18748603-project-member] Lazy-loading 'flavor' on Instance uuid fcc5a636-554f-424e-a604-a8e7bd7cf574 {{(pid=63279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1940.770877] env[63279]: DEBUG nova.compute.manager [None req-a0e5192a-b319-4ffb-924f-54f949fdcc17 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: ad435281-55a0-418a-8400-5c461a5c15ef] Starting instance... {{(pid=63279) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1940.836117] env[63279]: DEBUG oslo_vmware.api [None req-0a937d49-2bb5-4c2d-b540-fcaea2c0fe97 tempest-DeleteServersAdminTestJSON-778262859 tempest-DeleteServersAdminTestJSON-778262859-project-member] Task: {'id': task-2087032, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1940.957657] env[63279]: DEBUG oslo_concurrency.lockutils [None req-23343be8-6e4e-4eb3-878f-52f6187e35e7 tempest-InstanceActionsTestJSON-1583999850 tempest-InstanceActionsTestJSON-1583999850-project-member] Releasing lock "refresh_cache-de543869-8ab1-40ed-8f6d-dc506c257843" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1940.962454] env[63279]: DEBUG oslo_concurrency.lockutils [req-a2ed70e3-0e14-4e1a-8040-372baa67b2f5 req-b22fdb53-c0d1-4195-822a-4a3f8a3d0695 service nova] Releasing lock "refresh_cache-f375b54b-f9de-4529-b752-52c240aed532" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1941.040609] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5319362b-a5ef-4607-ade4-0fbc31136106 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1941.044961] env[63279]: DEBUG nova.network.neutron [None req-4a8933e7-208a-443d-9249-2440f3ab60b6 tempest-ServerMetadataNegativeTestJSON-1632547526 tempest-ServerMetadataNegativeTestJSON-1632547526-project-member] [instance: e04f06de-da6a-4562-a50a-ff16bf3a006e] Instance cache missing network info. 
{{(pid=63279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1941.052856] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d97b736-8d03-4c65-a2f2-09f4334c2a4c {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1941.089827] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-320b78b7-3c84-4f89-82d4-76d8d9b72d1d {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1941.096410] env[63279]: DEBUG oslo_vmware.api [None req-42b39ba1-997a-4f62-aa19-97a6fd24e99a tempest-ServerActionsV293TestJSON-360186716 tempest-ServerActionsV293TestJSON-360186716-project-member] Task: {'id': task-2087036, 'name': RelocateVM_Task} progress is 20%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1941.102136] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bdeda2b1-06dd-4874-8d83-9e9a1b80e021 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1941.116765] env[63279]: DEBUG nova.compute.provider_tree [None req-1e3daa27-53b9-4834-adeb-4cde61720229 tempest-ServersNegativeTestMultiTenantJSON-98060902 tempest-ServersNegativeTestMultiTenantJSON-98060902-project-member] Inventory has not changed in ProviderTree for provider: 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1941.203193] env[63279]: DEBUG oslo_concurrency.lockutils [None req-f96e6fd1-b8ca-440e-b887-29f99f8f271c tempest-AttachInterfacesUnderV243Test-18748603 tempest-AttachInterfacesUnderV243Test-18748603-project-member] Acquiring lock "refresh_cache-fcc5a636-554f-424e-a604-a8e7bd7cf574" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1941.203193] env[63279]: DEBUG oslo_concurrency.lockutils [None req-f96e6fd1-b8ca-440e-b887-29f99f8f271c tempest-AttachInterfacesUnderV243Test-18748603 tempest-AttachInterfacesUnderV243Test-18748603-project-member] Acquired lock "refresh_cache-fcc5a636-554f-424e-a604-a8e7bd7cf574" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1941.296663] env[63279]: DEBUG oslo_concurrency.lockutils [None req-a0e5192a-b319-4ffb-924f-54f949fdcc17 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1941.331838] env[63279]: DEBUG oslo_vmware.api [None req-0a937d49-2bb5-4c2d-b540-fcaea2c0fe97 tempest-DeleteServersAdminTestJSON-778262859 tempest-DeleteServersAdminTestJSON-778262859-project-member] Task: {'id': task-2087032, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.567915} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1941.332287] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-0a937d49-2bb5-4c2d-b540-fcaea2c0fe97 tempest-DeleteServersAdminTestJSON-778262859 tempest-DeleteServersAdminTestJSON-778262859-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] 81103d53-99fe-4d1a-816f-7685c59c80ee/81103d53-99fe-4d1a-816f-7685c59c80ee.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1941.332761] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-0a937d49-2bb5-4c2d-b540-fcaea2c0fe97 tempest-DeleteServersAdminTestJSON-778262859 tempest-DeleteServersAdminTestJSON-778262859-project-member] [instance: 81103d53-99fe-4d1a-816f-7685c59c80ee] Extending root virtual disk to 1048576 {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1941.333190] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-bc0418e8-d300-412f-81b6-18ad60914984 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1941.340990] env[63279]: DEBUG oslo_vmware.api [None req-0a937d49-2bb5-4c2d-b540-fcaea2c0fe97 tempest-DeleteServersAdminTestJSON-778262859 tempest-DeleteServersAdminTestJSON-778262859-project-member] Waiting for the task: (returnval){ [ 1941.340990] env[63279]: value = "task-2087037" [ 1941.340990] env[63279]: _type = "Task" [ 1941.340990] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1941.353698] env[63279]: DEBUG oslo_vmware.api [None req-0a937d49-2bb5-4c2d-b540-fcaea2c0fe97 tempest-DeleteServersAdminTestJSON-778262859 tempest-DeleteServersAdminTestJSON-778262859-project-member] Task: {'id': task-2087037, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1941.353698] env[63279]: DEBUG nova.network.neutron [None req-4a8933e7-208a-443d-9249-2440f3ab60b6 tempest-ServerMetadataNegativeTestJSON-1632547526 tempest-ServerMetadataNegativeTestJSON-1632547526-project-member] [instance: e04f06de-da6a-4562-a50a-ff16bf3a006e] Updating instance_info_cache with network_info: [{"id": "79f1cbee-348d-4f65-807f-291c2a96aa88", "address": "fa:16:3e:ec:9f:eb", "network": {"id": "baccbbcb-94b8-4feb-9429-077e1d18a7cb", "bridge": "br-int", "label": "tempest-ServerMetadataNegativeTestJSON-43800213-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "899d2a5422ed4fdcae11793be5f03f04", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bafe8721-91d4-4127-b215-d9e8e27947dc", "external-id": "nsx-vlan-transportzone-680", "segmentation_id": 680, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap79f1cbee-34", "ovs_interfaceid": "79f1cbee-348d-4f65-807f-291c2a96aa88", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1941.465314] env[63279]: DEBUG nova.compute.manager [None req-23343be8-6e4e-4eb3-878f-52f6187e35e7 tempest-InstanceActionsTestJSON-1583999850 tempest-InstanceActionsTestJSON-1583999850-project-member] [instance: de543869-8ab1-40ed-8f6d-dc506c257843] Checking state {{(pid=63279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1941.466085] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a28b1c6-2744-42a9-8518-d001852c7b8d {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1941.567148] env[63279]: DEBUG oslo_vmware.api [None req-42b39ba1-997a-4f62-aa19-97a6fd24e99a tempest-ServerActionsV293TestJSON-360186716 tempest-ServerActionsV293TestJSON-360186716-project-member] Task: {'id': task-2087036, 'name': RelocateVM_Task, 'duration_secs': 0.673649} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1941.567290] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-42b39ba1-997a-4f62-aa19-97a6fd24e99a tempest-ServerActionsV293TestJSON-360186716 tempest-ServerActionsV293TestJSON-360186716-project-member] [instance: f375b54b-f9de-4529-b752-52c240aed532] Volume attach. 
Driver type: vmdk {{(pid=63279) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1941.567593] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-42b39ba1-997a-4f62-aa19-97a6fd24e99a tempest-ServerActionsV293TestJSON-360186716 tempest-ServerActionsV293TestJSON-360186716-project-member] [instance: f375b54b-f9de-4529-b752-52c240aed532] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-427536', 'volume_id': '29ec2591-5e66-4668-ba47-c425dc65074c', 'name': 'volume-29ec2591-5e66-4668-ba47-c425dc65074c', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'f375b54b-f9de-4529-b752-52c240aed532', 'attached_at': '', 'detached_at': '', 'volume_id': '29ec2591-5e66-4668-ba47-c425dc65074c', 'serial': '29ec2591-5e66-4668-ba47-c425dc65074c'} {{(pid=63279) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1941.568262] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6bebe1b5-b026-4141-855c-5237f9a952fa {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1941.586974] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cabaaa2b-8806-469f-8755-b95ee1ce851e {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1941.617961] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-42b39ba1-997a-4f62-aa19-97a6fd24e99a tempest-ServerActionsV293TestJSON-360186716 tempest-ServerActionsV293TestJSON-360186716-project-member] [instance: f375b54b-f9de-4529-b752-52c240aed532] Reconfiguring VM instance instance-00000024 to attach disk [datastore1] volume-29ec2591-5e66-4668-ba47-c425dc65074c/volume-29ec2591-5e66-4668-ba47-c425dc65074c.vmdk or device None with type thin {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1941.621800] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9b672f93-f4ee-4db8-8d36-c16fb92c0571 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1941.633107] env[63279]: DEBUG nova.scheduler.client.report [None req-1e3daa27-53b9-4834-adeb-4cde61720229 tempest-ServersNegativeTestMultiTenantJSON-98060902 tempest-ServersNegativeTestMultiTenantJSON-98060902-project-member] Inventory has not changed for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1941.642396] env[63279]: DEBUG oslo_vmware.api [None req-42b39ba1-997a-4f62-aa19-97a6fd24e99a tempest-ServerActionsV293TestJSON-360186716 tempest-ServerActionsV293TestJSON-360186716-project-member] Waiting for the task: (returnval){ [ 1941.642396] env[63279]: value = "task-2087038" [ 1941.642396] env[63279]: _type = "Task" [ 1941.642396] env[63279]: } to complete. 
{{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1941.651096] env[63279]: DEBUG oslo_vmware.api [None req-42b39ba1-997a-4f62-aa19-97a6fd24e99a tempest-ServerActionsV293TestJSON-360186716 tempest-ServerActionsV293TestJSON-360186716-project-member] Task: {'id': task-2087038, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1941.764014] env[63279]: DEBUG nova.compute.manager [req-4e9ad119-7a5e-4268-b3f0-2ed6ca36228b req-b970d728-57b7-402b-ac8a-ee8c7d2bf60b service nova] [instance: e04f06de-da6a-4562-a50a-ff16bf3a006e] Received event network-vif-plugged-79f1cbee-348d-4f65-807f-291c2a96aa88 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 1941.764326] env[63279]: DEBUG oslo_concurrency.lockutils [req-4e9ad119-7a5e-4268-b3f0-2ed6ca36228b req-b970d728-57b7-402b-ac8a-ee8c7d2bf60b service nova] Acquiring lock "e04f06de-da6a-4562-a50a-ff16bf3a006e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1941.764448] env[63279]: DEBUG oslo_concurrency.lockutils [req-4e9ad119-7a5e-4268-b3f0-2ed6ca36228b req-b970d728-57b7-402b-ac8a-ee8c7d2bf60b service nova] Lock "e04f06de-da6a-4562-a50a-ff16bf3a006e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1941.764619] env[63279]: DEBUG oslo_concurrency.lockutils [req-4e9ad119-7a5e-4268-b3f0-2ed6ca36228b req-b970d728-57b7-402b-ac8a-ee8c7d2bf60b service nova] Lock "e04f06de-da6a-4562-a50a-ff16bf3a006e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1941.764786] env[63279]: DEBUG nova.compute.manager [req-4e9ad119-7a5e-4268-b3f0-2ed6ca36228b req-b970d728-57b7-402b-ac8a-ee8c7d2bf60b service nova] [instance: e04f06de-da6a-4562-a50a-ff16bf3a006e] No waiting events found dispatching network-vif-plugged-79f1cbee-348d-4f65-807f-291c2a96aa88 {{(pid=63279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1941.764956] env[63279]: WARNING nova.compute.manager [req-4e9ad119-7a5e-4268-b3f0-2ed6ca36228b req-b970d728-57b7-402b-ac8a-ee8c7d2bf60b service nova] [instance: e04f06de-da6a-4562-a50a-ff16bf3a006e] Received unexpected event network-vif-plugged-79f1cbee-348d-4f65-807f-291c2a96aa88 for instance with vm_state building and task_state spawning. [ 1941.765134] env[63279]: DEBUG nova.compute.manager [req-4e9ad119-7a5e-4268-b3f0-2ed6ca36228b req-b970d728-57b7-402b-ac8a-ee8c7d2bf60b service nova] [instance: e04f06de-da6a-4562-a50a-ff16bf3a006e] Received event network-changed-79f1cbee-348d-4f65-807f-291c2a96aa88 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 1941.765291] env[63279]: DEBUG nova.compute.manager [req-4e9ad119-7a5e-4268-b3f0-2ed6ca36228b req-b970d728-57b7-402b-ac8a-ee8c7d2bf60b service nova] [instance: e04f06de-da6a-4562-a50a-ff16bf3a006e] Refreshing instance network info cache due to event network-changed-79f1cbee-348d-4f65-807f-291c2a96aa88. 
{{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11655}} [ 1941.765466] env[63279]: DEBUG oslo_concurrency.lockutils [req-4e9ad119-7a5e-4268-b3f0-2ed6ca36228b req-b970d728-57b7-402b-ac8a-ee8c7d2bf60b service nova] Acquiring lock "refresh_cache-e04f06de-da6a-4562-a50a-ff16bf3a006e" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1941.850893] env[63279]: DEBUG oslo_vmware.api [None req-0a937d49-2bb5-4c2d-b540-fcaea2c0fe97 tempest-DeleteServersAdminTestJSON-778262859 tempest-DeleteServersAdminTestJSON-778262859-project-member] Task: {'id': task-2087037, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.078715} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1941.851180] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-0a937d49-2bb5-4c2d-b540-fcaea2c0fe97 tempest-DeleteServersAdminTestJSON-778262859 tempest-DeleteServersAdminTestJSON-778262859-project-member] [instance: 81103d53-99fe-4d1a-816f-7685c59c80ee] Extended root virtual disk {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1941.851948] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9205d345-8fdd-4bef-b451-4a0e0cf2971b {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1941.854953] env[63279]: DEBUG oslo_concurrency.lockutils [None req-4a8933e7-208a-443d-9249-2440f3ab60b6 tempest-ServerMetadataNegativeTestJSON-1632547526 tempest-ServerMetadataNegativeTestJSON-1632547526-project-member] Releasing lock "refresh_cache-e04f06de-da6a-4562-a50a-ff16bf3a006e" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1941.855291] env[63279]: DEBUG nova.compute.manager [None req-4a8933e7-208a-443d-9249-2440f3ab60b6 tempest-ServerMetadataNegativeTestJSON-1632547526 tempest-ServerMetadataNegativeTestJSON-1632547526-project-member] [instance: e04f06de-da6a-4562-a50a-ff16bf3a006e] Instance network_info: |[{"id": "79f1cbee-348d-4f65-807f-291c2a96aa88", "address": "fa:16:3e:ec:9f:eb", "network": {"id": "baccbbcb-94b8-4feb-9429-077e1d18a7cb", "bridge": "br-int", "label": "tempest-ServerMetadataNegativeTestJSON-43800213-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "899d2a5422ed4fdcae11793be5f03f04", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bafe8721-91d4-4127-b215-d9e8e27947dc", "external-id": "nsx-vlan-transportzone-680", "segmentation_id": 680, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap79f1cbee-34", "ovs_interfaceid": "79f1cbee-348d-4f65-807f-291c2a96aa88", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1941.855570] env[63279]: DEBUG oslo_concurrency.lockutils [req-4e9ad119-7a5e-4268-b3f0-2ed6ca36228b 
req-b970d728-57b7-402b-ac8a-ee8c7d2bf60b service nova] Acquired lock "refresh_cache-e04f06de-da6a-4562-a50a-ff16bf3a006e" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1941.855748] env[63279]: DEBUG nova.network.neutron [req-4e9ad119-7a5e-4268-b3f0-2ed6ca36228b req-b970d728-57b7-402b-ac8a-ee8c7d2bf60b service nova] [instance: e04f06de-da6a-4562-a50a-ff16bf3a006e] Refreshing network info cache for port 79f1cbee-348d-4f65-807f-291c2a96aa88 {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1941.857050] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-4a8933e7-208a-443d-9249-2440f3ab60b6 tempest-ServerMetadataNegativeTestJSON-1632547526 tempest-ServerMetadataNegativeTestJSON-1632547526-project-member] [instance: e04f06de-da6a-4562-a50a-ff16bf3a006e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ec:9f:eb', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'bafe8721-91d4-4127-b215-d9e8e27947dc', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '79f1cbee-348d-4f65-807f-291c2a96aa88', 'vif_model': 'vmxnet3'}] {{(pid=63279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1941.867325] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-4a8933e7-208a-443d-9249-2440f3ab60b6 tempest-ServerMetadataNegativeTestJSON-1632547526 tempest-ServerMetadataNegativeTestJSON-1632547526-project-member] Creating folder: Project (899d2a5422ed4fdcae11793be5f03f04). Parent ref: group-v427491. {{(pid=63279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1941.879251] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-02cb0fc5-10d3-4f56-b1a9-33d02f3022fe {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1941.889697] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-0a937d49-2bb5-4c2d-b540-fcaea2c0fe97 tempest-DeleteServersAdminTestJSON-778262859 tempest-DeleteServersAdminTestJSON-778262859-project-member] [instance: 81103d53-99fe-4d1a-816f-7685c59c80ee] Reconfiguring VM instance instance-00000023 to attach disk [datastore1] 81103d53-99fe-4d1a-816f-7685c59c80ee/81103d53-99fe-4d1a-816f-7685c59c80ee.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1941.890372] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c8c7a9b9-0785-4557-89c1-68fe5d98a4ec {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1941.911379] env[63279]: DEBUG oslo_vmware.api [None req-0a937d49-2bb5-4c2d-b540-fcaea2c0fe97 tempest-DeleteServersAdminTestJSON-778262859 tempest-DeleteServersAdminTestJSON-778262859-project-member] Waiting for the task: (returnval){ [ 1941.911379] env[63279]: value = "task-2087040" [ 1941.911379] env[63279]: _type = "Task" [ 1941.911379] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1941.915706] env[63279]: INFO nova.virt.vmwareapi.vm_util [None req-4a8933e7-208a-443d-9249-2440f3ab60b6 tempest-ServerMetadataNegativeTestJSON-1632547526 tempest-ServerMetadataNegativeTestJSON-1632547526-project-member] Created folder: Project (899d2a5422ed4fdcae11793be5f03f04) in parent group-v427491. 
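Editorial note: the recurring "Invoking <Object>.<Method> with opID=oslo.vmware-…", "Waiting for the task: (returnval){…}", "progress is N%" and "completed successfully" lines throughout this section are produced by the oslo.vmware layer that the Nova vmwareapi driver calls into. Below is a minimal stand-alone sketch of that session / task-polling pattern using oslo.vmware directly, for orientation only: the vCenter host, credentials and new VM name are hypothetical placeholders, and this is not Nova's actual code path (Nova wraps the same calls in its vm_util / vmops / volumeops helpers).

```python
# Minimal sketch of the oslo.vmware request/task-polling pattern visible in
# this log ("Invoking ...", "Waiting for the task", "progress is N%").
# Host, credentials and the new name are placeholders, not values from this log.
from oslo_vmware import api, vim_util

session = api.VMwareAPISession(
    'vc.example.org',                  # placeholder vCenter host
    'administrator@vsphere.local',     # placeholder user
    'secret',                          # placeholder password
    api_retry_count=10,                # retry transient SOAP faults
    task_poll_interval=0.5)            # seconds between task polls

# Property-collector query (logged as PropertyCollector.RetrievePropertiesEx):
# fetch up to 100 VirtualMachine managed-object references.
result = session.invoke_api(vim_util, 'get_objects',
                            session.vim, 'VirtualMachine', 100)
if result.objects:
    vm_ref = result.objects[0].obj

    # Start a task-returning vCenter operation (logged here as
    # VirtualMachine.Rename_Task) and block until it finishes; wait_for_task()
    # is what emits the "progress is N%" / "completed successfully" poll lines.
    task = session.invoke_api(session.vim, 'Rename_Task',
                              vm_ref, newName='renamed-by-sketch')
    task_info = session.wait_for_task(task)
    print(task_info.state)

session.logout()
```

The same pattern underlies each vCenter operation in this section (CreateVM_Task, RelocateVM_Task, CopyVirtualDisk_Task, ExtendVirtualDisk_Task, ReconfigVM_Task, Rename_Task, PowerOnVM_Task): an "Invoking …" request line, a "Waiting for the task" block, a poll loop, and a final "completed successfully" record.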
[ 1941.915900] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-4a8933e7-208a-443d-9249-2440f3ab60b6 tempest-ServerMetadataNegativeTestJSON-1632547526 tempest-ServerMetadataNegativeTestJSON-1632547526-project-member] Creating folder: Instances. Parent ref: group-v427598. {{(pid=63279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1941.916897] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a97ddcf8-9018-4748-9018-c0800b056c85 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1941.921723] env[63279]: DEBUG oslo_vmware.api [None req-0a937d49-2bb5-4c2d-b540-fcaea2c0fe97 tempest-DeleteServersAdminTestJSON-778262859 tempest-DeleteServersAdminTestJSON-778262859-project-member] Task: {'id': task-2087040, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1941.929653] env[63279]: INFO nova.virt.vmwareapi.vm_util [None req-4a8933e7-208a-443d-9249-2440f3ab60b6 tempest-ServerMetadataNegativeTestJSON-1632547526 tempest-ServerMetadataNegativeTestJSON-1632547526-project-member] Created folder: Instances in parent group-v427598. [ 1941.929889] env[63279]: DEBUG oslo.service.loopingcall [None req-4a8933e7-208a-443d-9249-2440f3ab60b6 tempest-ServerMetadataNegativeTestJSON-1632547526 tempest-ServerMetadataNegativeTestJSON-1632547526-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1941.930095] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e04f06de-da6a-4562-a50a-ff16bf3a006e] Creating VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1941.930304] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5867afe8-951e-4ad4-b2f6-97bf48b948f9 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1941.950188] env[63279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1941.950188] env[63279]: value = "task-2087042" [ 1941.950188] env[63279]: _type = "Task" [ 1941.950188] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1941.960265] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087042, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1941.983524] env[63279]: DEBUG nova.network.neutron [None req-f96e6fd1-b8ca-440e-b887-29f99f8f271c tempest-AttachInterfacesUnderV243Test-18748603 tempest-AttachInterfacesUnderV243Test-18748603-project-member] [instance: fcc5a636-554f-424e-a604-a8e7bd7cf574] Building network info cache for instance {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1942.140467] env[63279]: DEBUG oslo_concurrency.lockutils [None req-1e3daa27-53b9-4834-adeb-4cde61720229 tempest-ServersNegativeTestMultiTenantJSON-98060902 tempest-ServersNegativeTestMultiTenantJSON-98060902-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.742s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1942.141047] env[63279]: DEBUG nova.compute.manager [None req-1e3daa27-53b9-4834-adeb-4cde61720229 tempest-ServersNegativeTestMultiTenantJSON-98060902 tempest-ServersNegativeTestMultiTenantJSON-98060902-project-member] [instance: cf1b70af-335d-404b-bb4f-fe082dd6f450] Start building networks asynchronously for instance. {{(pid=63279) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1942.143765] env[63279]: DEBUG oslo_concurrency.lockutils [None req-a527ddef-d877-4b24-a0b1-c60958f9b4c7 tempest-ServerAddressesNegativeTestJSON-377386487 tempest-ServerAddressesNegativeTestJSON-377386487-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 34.994s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1942.145355] env[63279]: INFO nova.compute.claims [None req-a527ddef-d877-4b24-a0b1-c60958f9b4c7 tempest-ServerAddressesNegativeTestJSON-377386487 tempest-ServerAddressesNegativeTestJSON-377386487-project-member] [instance: c287072d-0ce9-4075-8895-0f64326ac303] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1942.158125] env[63279]: DEBUG oslo_vmware.api [None req-42b39ba1-997a-4f62-aa19-97a6fd24e99a tempest-ServerActionsV293TestJSON-360186716 tempest-ServerActionsV293TestJSON-360186716-project-member] Task: {'id': task-2087038, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1942.433200] env[63279]: DEBUG oslo_vmware.api [None req-0a937d49-2bb5-4c2d-b540-fcaea2c0fe97 tempest-DeleteServersAdminTestJSON-778262859 tempest-DeleteServersAdminTestJSON-778262859-project-member] Task: {'id': task-2087040, 'name': ReconfigVM_Task, 'duration_secs': 0.313597} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1942.436661] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-0a937d49-2bb5-4c2d-b540-fcaea2c0fe97 tempest-DeleteServersAdminTestJSON-778262859 tempest-DeleteServersAdminTestJSON-778262859-project-member] [instance: 81103d53-99fe-4d1a-816f-7685c59c80ee] Reconfigured VM instance instance-00000023 to attach disk [datastore1] 81103d53-99fe-4d1a-816f-7685c59c80ee/81103d53-99fe-4d1a-816f-7685c59c80ee.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1942.436661] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-55e294ff-4805-416f-924d-1fa1c760edad {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1942.442765] env[63279]: DEBUG oslo_vmware.api [None req-0a937d49-2bb5-4c2d-b540-fcaea2c0fe97 tempest-DeleteServersAdminTestJSON-778262859 tempest-DeleteServersAdminTestJSON-778262859-project-member] Waiting for the task: (returnval){ [ 1942.442765] env[63279]: value = "task-2087043" [ 1942.442765] env[63279]: _type = "Task" [ 1942.442765] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1942.455169] env[63279]: DEBUG oslo_vmware.api [None req-0a937d49-2bb5-4c2d-b540-fcaea2c0fe97 tempest-DeleteServersAdminTestJSON-778262859 tempest-DeleteServersAdminTestJSON-778262859-project-member] Task: {'id': task-2087043, 'name': Rename_Task} progress is 5%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1942.463720] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087042, 'name': CreateVM_Task} progress is 99%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1942.490031] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39c50e57-411d-4cb4-b888-11fcda79e913 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1942.499085] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-23343be8-6e4e-4eb3-878f-52f6187e35e7 tempest-InstanceActionsTestJSON-1583999850 tempest-InstanceActionsTestJSON-1583999850-project-member] [instance: de543869-8ab1-40ed-8f6d-dc506c257843] Doing hard reboot of VM {{(pid=63279) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1063}} [ 1942.499592] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ResetVM_Task with opID=oslo.vmware-3eeed1c6-673d-421f-8c0e-93790cd819cc {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1942.507478] env[63279]: DEBUG oslo_vmware.api [None req-23343be8-6e4e-4eb3-878f-52f6187e35e7 tempest-InstanceActionsTestJSON-1583999850 tempest-InstanceActionsTestJSON-1583999850-project-member] Waiting for the task: (returnval){ [ 1942.507478] env[63279]: value = "task-2087044" [ 1942.507478] env[63279]: _type = "Task" [ 1942.507478] env[63279]: } to complete. 
{{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1942.519626] env[63279]: DEBUG oslo_vmware.api [None req-23343be8-6e4e-4eb3-878f-52f6187e35e7 tempest-InstanceActionsTestJSON-1583999850 tempest-InstanceActionsTestJSON-1583999850-project-member] Task: {'id': task-2087044, 'name': ResetVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1942.650220] env[63279]: DEBUG nova.compute.utils [None req-1e3daa27-53b9-4834-adeb-4cde61720229 tempest-ServersNegativeTestMultiTenantJSON-98060902 tempest-ServersNegativeTestMultiTenantJSON-98060902-project-member] Using /dev/sd instead of None {{(pid=63279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1942.672547] env[63279]: DEBUG nova.compute.manager [None req-1e3daa27-53b9-4834-adeb-4cde61720229 tempest-ServersNegativeTestMultiTenantJSON-98060902 tempest-ServersNegativeTestMultiTenantJSON-98060902-project-member] [instance: cf1b70af-335d-404b-bb4f-fe082dd6f450] Allocating IP information in the background. {{(pid=63279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1942.672547] env[63279]: DEBUG nova.network.neutron [None req-1e3daa27-53b9-4834-adeb-4cde61720229 tempest-ServersNegativeTestMultiTenantJSON-98060902 tempest-ServersNegativeTestMultiTenantJSON-98060902-project-member] [instance: cf1b70af-335d-404b-bb4f-fe082dd6f450] allocate_for_instance() {{(pid=63279) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1942.691734] env[63279]: DEBUG oslo_vmware.api [None req-42b39ba1-997a-4f62-aa19-97a6fd24e99a tempest-ServerActionsV293TestJSON-360186716 tempest-ServerActionsV293TestJSON-360186716-project-member] Task: {'id': task-2087038, 'name': ReconfigVM_Task, 'duration_secs': 0.673198} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1942.692093] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-42b39ba1-997a-4f62-aa19-97a6fd24e99a tempest-ServerActionsV293TestJSON-360186716 tempest-ServerActionsV293TestJSON-360186716-project-member] [instance: f375b54b-f9de-4529-b752-52c240aed532] Reconfigured VM instance instance-00000024 to attach disk [datastore1] volume-29ec2591-5e66-4668-ba47-c425dc65074c/volume-29ec2591-5e66-4668-ba47-c425dc65074c.vmdk or device None with type thin {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1942.697655] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-abca8dd2-af0f-4bab-92d4-a549b8dd5da2 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1942.722410] env[63279]: DEBUG oslo_vmware.api [None req-42b39ba1-997a-4f62-aa19-97a6fd24e99a tempest-ServerActionsV293TestJSON-360186716 tempest-ServerActionsV293TestJSON-360186716-project-member] Waiting for the task: (returnval){ [ 1942.722410] env[63279]: value = "task-2087045" [ 1942.722410] env[63279]: _type = "Task" [ 1942.722410] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1942.735592] env[63279]: DEBUG oslo_vmware.api [None req-42b39ba1-997a-4f62-aa19-97a6fd24e99a tempest-ServerActionsV293TestJSON-360186716 tempest-ServerActionsV293TestJSON-360186716-project-member] Task: {'id': task-2087045, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1942.796598] env[63279]: DEBUG nova.policy [None req-1e3daa27-53b9-4834-adeb-4cde61720229 tempest-ServersNegativeTestMultiTenantJSON-98060902 tempest-ServersNegativeTestMultiTenantJSON-98060902-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '554fb82376314a52b6439fbef362872f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7e60a9f94cc8403eaa79a85a1a17160b', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63279) authorize /opt/stack/nova/nova/policy.py:192}} [ 1942.805108] env[63279]: DEBUG nova.network.neutron [req-4e9ad119-7a5e-4268-b3f0-2ed6ca36228b req-b970d728-57b7-402b-ac8a-ee8c7d2bf60b service nova] [instance: e04f06de-da6a-4562-a50a-ff16bf3a006e] Updated VIF entry in instance network info cache for port 79f1cbee-348d-4f65-807f-291c2a96aa88. {{(pid=63279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1942.805453] env[63279]: DEBUG nova.network.neutron [req-4e9ad119-7a5e-4268-b3f0-2ed6ca36228b req-b970d728-57b7-402b-ac8a-ee8c7d2bf60b service nova] [instance: e04f06de-da6a-4562-a50a-ff16bf3a006e] Updating instance_info_cache with network_info: [{"id": "79f1cbee-348d-4f65-807f-291c2a96aa88", "address": "fa:16:3e:ec:9f:eb", "network": {"id": "baccbbcb-94b8-4feb-9429-077e1d18a7cb", "bridge": "br-int", "label": "tempest-ServerMetadataNegativeTestJSON-43800213-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "899d2a5422ed4fdcae11793be5f03f04", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bafe8721-91d4-4127-b215-d9e8e27947dc", "external-id": "nsx-vlan-transportzone-680", "segmentation_id": 680, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap79f1cbee-34", "ovs_interfaceid": "79f1cbee-348d-4f65-807f-291c2a96aa88", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1942.931429] env[63279]: DEBUG nova.network.neutron [None req-f96e6fd1-b8ca-440e-b887-29f99f8f271c tempest-AttachInterfacesUnderV243Test-18748603 tempest-AttachInterfacesUnderV243Test-18748603-project-member] [instance: fcc5a636-554f-424e-a604-a8e7bd7cf574] Updating instance_info_cache with network_info: [{"id": "e1063b89-be88-474a-a2ec-b61eb11cf9fe", "address": "fa:16:3e:b1:26:30", "network": {"id": "50f30894-1239-497e-9f70-afa5b0c429ea", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-85780566-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": 
[{"address": "10.180.180.252", "type": "floating", "version": 4, "meta": {}}]}, {"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dfdf69a97cf54d5cb8c4fb1c59b6a5d0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f2e45023-22b5-458b-826e-9b7eb69ba028", "external-id": "nsx-vlan-transportzone-614", "segmentation_id": 614, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape1063b89-be", "ovs_interfaceid": "e1063b89-be88-474a-a2ec-b61eb11cf9fe", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1942.962593] env[63279]: DEBUG oslo_vmware.api [None req-0a937d49-2bb5-4c2d-b540-fcaea2c0fe97 tempest-DeleteServersAdminTestJSON-778262859 tempest-DeleteServersAdminTestJSON-778262859-project-member] Task: {'id': task-2087043, 'name': Rename_Task, 'duration_secs': 0.163985} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1942.963579] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-0a937d49-2bb5-4c2d-b540-fcaea2c0fe97 tempest-DeleteServersAdminTestJSON-778262859 tempest-DeleteServersAdminTestJSON-778262859-project-member] [instance: 81103d53-99fe-4d1a-816f-7685c59c80ee] Powering on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1942.966936] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c9985c9e-1a2f-4d49-8aa0-0e48c506a1b0 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1942.972751] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087042, 'name': CreateVM_Task, 'duration_secs': 0.518094} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1942.973619] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e04f06de-da6a-4562-a50a-ff16bf3a006e] Created VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1942.974407] env[63279]: DEBUG oslo_concurrency.lockutils [None req-4a8933e7-208a-443d-9249-2440f3ab60b6 tempest-ServerMetadataNegativeTestJSON-1632547526 tempest-ServerMetadataNegativeTestJSON-1632547526-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1942.974680] env[63279]: DEBUG oslo_concurrency.lockutils [None req-4a8933e7-208a-443d-9249-2440f3ab60b6 tempest-ServerMetadataNegativeTestJSON-1632547526 tempest-ServerMetadataNegativeTestJSON-1632547526-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1942.975256] env[63279]: DEBUG oslo_concurrency.lockutils [None req-4a8933e7-208a-443d-9249-2440f3ab60b6 tempest-ServerMetadataNegativeTestJSON-1632547526 tempest-ServerMetadataNegativeTestJSON-1632547526-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1942.975602] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-22020239-3ec5-47af-ae19-0f9833b0d8ee {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1942.979412] env[63279]: DEBUG oslo_vmware.api [None req-0a937d49-2bb5-4c2d-b540-fcaea2c0fe97 tempest-DeleteServersAdminTestJSON-778262859 tempest-DeleteServersAdminTestJSON-778262859-project-member] Waiting for the task: (returnval){ [ 1942.979412] env[63279]: value = "task-2087046" [ 1942.979412] env[63279]: _type = "Task" [ 1942.979412] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1942.990017] env[63279]: DEBUG oslo_vmware.api [None req-4a8933e7-208a-443d-9249-2440f3ab60b6 tempest-ServerMetadataNegativeTestJSON-1632547526 tempest-ServerMetadataNegativeTestJSON-1632547526-project-member] Waiting for the task: (returnval){ [ 1942.990017] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52cd97c0-85b1-92be-cbd7-b47090544831" [ 1942.990017] env[63279]: _type = "Task" [ 1942.990017] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1942.997991] env[63279]: DEBUG oslo_vmware.api [None req-0a937d49-2bb5-4c2d-b540-fcaea2c0fe97 tempest-DeleteServersAdminTestJSON-778262859 tempest-DeleteServersAdminTestJSON-778262859-project-member] Task: {'id': task-2087046, 'name': PowerOnVM_Task} progress is 33%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1943.005050] env[63279]: DEBUG oslo_vmware.api [None req-4a8933e7-208a-443d-9249-2440f3ab60b6 tempest-ServerMetadataNegativeTestJSON-1632547526 tempest-ServerMetadataNegativeTestJSON-1632547526-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52cd97c0-85b1-92be-cbd7-b47090544831, 'name': SearchDatastore_Task, 'duration_secs': 0.012383} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1943.008243] env[63279]: DEBUG oslo_concurrency.lockutils [None req-4a8933e7-208a-443d-9249-2440f3ab60b6 tempest-ServerMetadataNegativeTestJSON-1632547526 tempest-ServerMetadataNegativeTestJSON-1632547526-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1943.008659] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-4a8933e7-208a-443d-9249-2440f3ab60b6 tempest-ServerMetadataNegativeTestJSON-1632547526 tempest-ServerMetadataNegativeTestJSON-1632547526-project-member] [instance: e04f06de-da6a-4562-a50a-ff16bf3a006e] Processing image 30887889-e45b-4f67-8b3c-16216e594a90 {{(pid=63279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1943.009087] env[63279]: DEBUG oslo_concurrency.lockutils [None req-4a8933e7-208a-443d-9249-2440f3ab60b6 tempest-ServerMetadataNegativeTestJSON-1632547526 tempest-ServerMetadataNegativeTestJSON-1632547526-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1943.009571] env[63279]: DEBUG oslo_concurrency.lockutils [None req-4a8933e7-208a-443d-9249-2440f3ab60b6 tempest-ServerMetadataNegativeTestJSON-1632547526 tempest-ServerMetadataNegativeTestJSON-1632547526-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1943.009911] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-4a8933e7-208a-443d-9249-2440f3ab60b6 tempest-ServerMetadataNegativeTestJSON-1632547526 tempest-ServerMetadataNegativeTestJSON-1632547526-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1943.010835] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6e8d64e4-1aae-4414-8c28-2aa7fbdaf499 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1943.025859] env[63279]: DEBUG oslo_vmware.api [None req-23343be8-6e4e-4eb3-878f-52f6187e35e7 tempest-InstanceActionsTestJSON-1583999850 tempest-InstanceActionsTestJSON-1583999850-project-member] Task: {'id': task-2087044, 'name': ResetVM_Task, 'duration_secs': 0.10036} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1943.025859] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-23343be8-6e4e-4eb3-878f-52f6187e35e7 tempest-InstanceActionsTestJSON-1583999850 tempest-InstanceActionsTestJSON-1583999850-project-member] [instance: de543869-8ab1-40ed-8f6d-dc506c257843] Did hard reboot of VM {{(pid=63279) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1067}} [ 1943.025859] env[63279]: DEBUG nova.compute.manager [None req-23343be8-6e4e-4eb3-878f-52f6187e35e7 tempest-InstanceActionsTestJSON-1583999850 tempest-InstanceActionsTestJSON-1583999850-project-member] [instance: de543869-8ab1-40ed-8f6d-dc506c257843] Checking state {{(pid=63279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1943.026325] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-4a8933e7-208a-443d-9249-2440f3ab60b6 tempest-ServerMetadataNegativeTestJSON-1632547526 tempest-ServerMetadataNegativeTestJSON-1632547526-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1943.026604] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-4a8933e7-208a-443d-9249-2440f3ab60b6 tempest-ServerMetadataNegativeTestJSON-1632547526 tempest-ServerMetadataNegativeTestJSON-1632547526-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1943.030633] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d8e8c11-a872-4d95-8081-195e54d642fd {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1943.034052] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9a31af38-789a-410c-9558-16e9bddeb32a {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1943.046732] env[63279]: DEBUG oslo_vmware.api [None req-4a8933e7-208a-443d-9249-2440f3ab60b6 tempest-ServerMetadataNegativeTestJSON-1632547526 tempest-ServerMetadataNegativeTestJSON-1632547526-project-member] Waiting for the task: (returnval){ [ 1943.046732] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]5230526d-14d5-64dc-2248-bdc378582690" [ 1943.046732] env[63279]: _type = "Task" [ 1943.046732] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1943.056660] env[63279]: DEBUG oslo_vmware.api [None req-4a8933e7-208a-443d-9249-2440f3ab60b6 tempest-ServerMetadataNegativeTestJSON-1632547526 tempest-ServerMetadataNegativeTestJSON-1632547526-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]5230526d-14d5-64dc-2248-bdc378582690, 'name': SearchDatastore_Task, 'duration_secs': 0.01027} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1943.056660] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-33d865ef-85d8-47fb-8747-a9f9b1446165 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1943.064296] env[63279]: DEBUG oslo_vmware.api [None req-4a8933e7-208a-443d-9249-2440f3ab60b6 tempest-ServerMetadataNegativeTestJSON-1632547526 tempest-ServerMetadataNegativeTestJSON-1632547526-project-member] Waiting for the task: (returnval){ [ 1943.064296] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]527a03e0-315b-7b7e-0544-01163cd80a79" [ 1943.064296] env[63279]: _type = "Task" [ 1943.064296] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1943.074361] env[63279]: DEBUG oslo_vmware.api [None req-4a8933e7-208a-443d-9249-2440f3ab60b6 tempest-ServerMetadataNegativeTestJSON-1632547526 tempest-ServerMetadataNegativeTestJSON-1632547526-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]527a03e0-315b-7b7e-0544-01163cd80a79, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1943.158461] env[63279]: DEBUG nova.network.neutron [None req-1e3daa27-53b9-4834-adeb-4cde61720229 tempest-ServersNegativeTestMultiTenantJSON-98060902 tempest-ServersNegativeTestMultiTenantJSON-98060902-project-member] [instance: cf1b70af-335d-404b-bb4f-fe082dd6f450] Successfully created port: 67e6934e-98a0-4355-80f1-869c87d01ea0 {{(pid=63279) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1943.163370] env[63279]: DEBUG nova.compute.manager [None req-1e3daa27-53b9-4834-adeb-4cde61720229 tempest-ServersNegativeTestMultiTenantJSON-98060902 tempest-ServersNegativeTestMultiTenantJSON-98060902-project-member] [instance: cf1b70af-335d-404b-bb4f-fe082dd6f450] Start building block device mappings for instance. {{(pid=63279) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1943.243051] env[63279]: DEBUG oslo_vmware.api [None req-42b39ba1-997a-4f62-aa19-97a6fd24e99a tempest-ServerActionsV293TestJSON-360186716 tempest-ServerActionsV293TestJSON-360186716-project-member] Task: {'id': task-2087045, 'name': ReconfigVM_Task, 'duration_secs': 0.305435} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1943.243051] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-42b39ba1-997a-4f62-aa19-97a6fd24e99a tempest-ServerActionsV293TestJSON-360186716 tempest-ServerActionsV293TestJSON-360186716-project-member] [instance: f375b54b-f9de-4529-b752-52c240aed532] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-427536', 'volume_id': '29ec2591-5e66-4668-ba47-c425dc65074c', 'name': 'volume-29ec2591-5e66-4668-ba47-c425dc65074c', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'f375b54b-f9de-4529-b752-52c240aed532', 'attached_at': '', 'detached_at': '', 'volume_id': '29ec2591-5e66-4668-ba47-c425dc65074c', 'serial': '29ec2591-5e66-4668-ba47-c425dc65074c'} {{(pid=63279) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1943.243575] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-8f6db0ef-9335-48f4-b5e4-62eed56ab4f2 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1943.253201] env[63279]: DEBUG oslo_vmware.api [None req-42b39ba1-997a-4f62-aa19-97a6fd24e99a tempest-ServerActionsV293TestJSON-360186716 tempest-ServerActionsV293TestJSON-360186716-project-member] Waiting for the task: (returnval){ [ 1943.253201] env[63279]: value = "task-2087047" [ 1943.253201] env[63279]: _type = "Task" [ 1943.253201] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1943.263148] env[63279]: DEBUG oslo_vmware.api [None req-42b39ba1-997a-4f62-aa19-97a6fd24e99a tempest-ServerActionsV293TestJSON-360186716 tempest-ServerActionsV293TestJSON-360186716-project-member] Task: {'id': task-2087047, 'name': Rename_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1943.309591] env[63279]: DEBUG oslo_concurrency.lockutils [req-4e9ad119-7a5e-4268-b3f0-2ed6ca36228b req-b970d728-57b7-402b-ac8a-ee8c7d2bf60b service nova] Releasing lock "refresh_cache-e04f06de-da6a-4562-a50a-ff16bf3a006e" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1943.399265] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c72c94c-2e6a-40d6-b387-e0cdf3b8d82e {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1943.407416] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b998237-7023-4d3c-9957-dd703b631c79 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1943.449867] env[63279]: DEBUG oslo_concurrency.lockutils [None req-f96e6fd1-b8ca-440e-b887-29f99f8f271c tempest-AttachInterfacesUnderV243Test-18748603 tempest-AttachInterfacesUnderV243Test-18748603-project-member] Releasing lock "refresh_cache-fcc5a636-554f-424e-a604-a8e7bd7cf574" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1943.450410] env[63279]: DEBUG nova.compute.manager [None req-f96e6fd1-b8ca-440e-b887-29f99f8f271c tempest-AttachInterfacesUnderV243Test-18748603 tempest-AttachInterfacesUnderV243Test-18748603-project-member] [instance: fcc5a636-554f-424e-a604-a8e7bd7cf574] Inject network info {{(pid=63279) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7737}} [ 1943.451206] env[63279]: DEBUG nova.compute.manager [None req-f96e6fd1-b8ca-440e-b887-29f99f8f271c tempest-AttachInterfacesUnderV243Test-18748603 tempest-AttachInterfacesUnderV243Test-18748603-project-member] [instance: fcc5a636-554f-424e-a604-a8e7bd7cf574] network_info to inject: |[{"id": "e1063b89-be88-474a-a2ec-b61eb11cf9fe", "address": "fa:16:3e:b1:26:30", "network": {"id": "50f30894-1239-497e-9f70-afa5b0c429ea", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-85780566-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.252", "type": "floating", "version": 4, "meta": {}}]}, {"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dfdf69a97cf54d5cb8c4fb1c59b6a5d0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f2e45023-22b5-458b-826e-9b7eb69ba028", "external-id": "nsx-vlan-transportzone-614", "segmentation_id": 614, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape1063b89-be", "ovs_interfaceid": "e1063b89-be88-474a-a2ec-b61eb11cf9fe", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63279) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7738}} [ 1943.455570] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None 
req-f96e6fd1-b8ca-440e-b887-29f99f8f271c tempest-AttachInterfacesUnderV243Test-18748603 tempest-AttachInterfacesUnderV243Test-18748603-project-member] [instance: fcc5a636-554f-424e-a604-a8e7bd7cf574] Reconfiguring VM instance to set the machine id {{(pid=63279) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1796}} [ 1943.456151] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8c4235b8-7f7b-4c20-aeda-38b66cbb8802 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1943.466934] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf044465-1c25-47e3-89e3-2b89faacd017 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1943.477497] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db5cca37-6a78-499c-a831-97af2c1615d3 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1943.481596] env[63279]: DEBUG oslo_vmware.api [None req-f96e6fd1-b8ca-440e-b887-29f99f8f271c tempest-AttachInterfacesUnderV243Test-18748603 tempest-AttachInterfacesUnderV243Test-18748603-project-member] Waiting for the task: (returnval){ [ 1943.481596] env[63279]: value = "task-2087048" [ 1943.481596] env[63279]: _type = "Task" [ 1943.481596] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1943.498532] env[63279]: DEBUG nova.compute.provider_tree [None req-a527ddef-d877-4b24-a0b1-c60958f9b4c7 tempest-ServerAddressesNegativeTestJSON-377386487 tempest-ServerAddressesNegativeTestJSON-377386487-project-member] Inventory has not changed in ProviderTree for provider: 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1943.506333] env[63279]: DEBUG oslo_vmware.api [None req-0a937d49-2bb5-4c2d-b540-fcaea2c0fe97 tempest-DeleteServersAdminTestJSON-778262859 tempest-DeleteServersAdminTestJSON-778262859-project-member] Task: {'id': task-2087046, 'name': PowerOnVM_Task, 'duration_secs': 0.45712} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1943.511176] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-0a937d49-2bb5-4c2d-b540-fcaea2c0fe97 tempest-DeleteServersAdminTestJSON-778262859 tempest-DeleteServersAdminTestJSON-778262859-project-member] [instance: 81103d53-99fe-4d1a-816f-7685c59c80ee] Powered on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1943.511700] env[63279]: INFO nova.compute.manager [None req-0a937d49-2bb5-4c2d-b540-fcaea2c0fe97 tempest-DeleteServersAdminTestJSON-778262859 tempest-DeleteServersAdminTestJSON-778262859-project-member] [instance: 81103d53-99fe-4d1a-816f-7685c59c80ee] Took 9.95 seconds to spawn the instance on the hypervisor. 
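Note: the recurring "Waiting for the task ... / progress is N% / completed successfully" entries above are emitted while polling vCenter tasks (the log locations point at wait_for_task/_poll_task in oslo_vmware/api.py). The following is a minimal, self-contained sketch of that polling pattern only; it is not the oslo.vmware implementation. The TaskInfo class, the simulated fake_poll() helper and the 0.1s interval are illustrative assumptions.

    import itertools
    import time
    from dataclasses import dataclass

    @dataclass
    class TaskInfo:
        name: str
        state: str      # 'running', 'success' or 'error'
        progress: int   # percent complete

    def fake_poll(_counter=itertools.count(0, 33)):
        """Stand-in for a vCenter task-info lookup; advances progress each call."""
        pct = min(next(_counter), 100)
        return TaskInfo("PowerOnVM_Task", "success" if pct >= 100 else "running", pct)

    def wait_for_task(poll, interval=0.1):
        """Poll until the task reaches a terminal state, logging progress like above."""
        while True:
            info = poll()
            if info.state == "success":
                print(f"Task: {info.name} completed successfully.")
                return info
            if info.state == "error":
                raise RuntimeError(f"Task {info.name} failed")
            print(f"Task: {info.name} progress is {info.progress}%.")
            time.sleep(interval)

    wait_for_task(fake_poll)

Running the sketch prints a progress line per poll (0%, 33%, 66%, 99%) and then the completion line, which is the same shape as the PowerOnVM_Task / CopyVirtualDisk_Task traces in this log.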
[ 1943.511700] env[63279]: DEBUG nova.compute.manager [None req-0a937d49-2bb5-4c2d-b540-fcaea2c0fe97 tempest-DeleteServersAdminTestJSON-778262859 tempest-DeleteServersAdminTestJSON-778262859-project-member] [instance: 81103d53-99fe-4d1a-816f-7685c59c80ee] Checking state {{(pid=63279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1943.511936] env[63279]: DEBUG oslo_vmware.api [None req-f96e6fd1-b8ca-440e-b887-29f99f8f271c tempest-AttachInterfacesUnderV243Test-18748603 tempest-AttachInterfacesUnderV243Test-18748603-project-member] Task: {'id': task-2087048, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1943.513319] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbbe822f-8f6b-4934-8cd3-3d9411abd518 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1943.550395] env[63279]: DEBUG oslo_concurrency.lockutils [None req-23343be8-6e4e-4eb3-878f-52f6187e35e7 tempest-InstanceActionsTestJSON-1583999850 tempest-InstanceActionsTestJSON-1583999850-project-member] Lock "de543869-8ab1-40ed-8f6d-dc506c257843" "released" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: held 4.498s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1943.580021] env[63279]: DEBUG oslo_vmware.api [None req-4a8933e7-208a-443d-9249-2440f3ab60b6 tempest-ServerMetadataNegativeTestJSON-1632547526 tempest-ServerMetadataNegativeTestJSON-1632547526-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]527a03e0-315b-7b7e-0544-01163cd80a79, 'name': SearchDatastore_Task, 'duration_secs': 0.008917} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1943.580021] env[63279]: DEBUG oslo_concurrency.lockutils [None req-4a8933e7-208a-443d-9249-2440f3ab60b6 tempest-ServerMetadataNegativeTestJSON-1632547526 tempest-ServerMetadataNegativeTestJSON-1632547526-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1943.580021] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-4a8933e7-208a-443d-9249-2440f3ab60b6 tempest-ServerMetadataNegativeTestJSON-1632547526 tempest-ServerMetadataNegativeTestJSON-1632547526-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] e04f06de-da6a-4562-a50a-ff16bf3a006e/e04f06de-da6a-4562-a50a-ff16bf3a006e.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1943.580021] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4037a83a-8ca4-4c1c-b71e-675e4b5cdaa0 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1943.588069] env[63279]: DEBUG oslo_vmware.api [None req-4a8933e7-208a-443d-9249-2440f3ab60b6 tempest-ServerMetadataNegativeTestJSON-1632547526 tempest-ServerMetadataNegativeTestJSON-1632547526-project-member] Waiting for the task: (returnval){ [ 1943.588069] env[63279]: value = "task-2087049" [ 1943.588069] env[63279]: _type = "Task" [ 1943.588069] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1943.598019] env[63279]: DEBUG oslo_vmware.api [None req-4a8933e7-208a-443d-9249-2440f3ab60b6 tempest-ServerMetadataNegativeTestJSON-1632547526 tempest-ServerMetadataNegativeTestJSON-1632547526-project-member] Task: {'id': task-2087049, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1943.724722] env[63279]: DEBUG oslo_concurrency.lockutils [None req-9a5f5cc6-f79e-4299-9ca0-1f387568bf34 tempest-VolumesAssistedSnapshotsTest-67549343 tempest-VolumesAssistedSnapshotsTest-67549343-project-admin] Acquiring lock "5d4909ea-396c-45ba-9ff5-acb8576150b3" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1943.725073] env[63279]: DEBUG oslo_concurrency.lockutils [None req-9a5f5cc6-f79e-4299-9ca0-1f387568bf34 tempest-VolumesAssistedSnapshotsTest-67549343 tempest-VolumesAssistedSnapshotsTest-67549343-project-admin] Lock "5d4909ea-396c-45ba-9ff5-acb8576150b3" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1943.764744] env[63279]: DEBUG oslo_vmware.api [None req-42b39ba1-997a-4f62-aa19-97a6fd24e99a tempest-ServerActionsV293TestJSON-360186716 tempest-ServerActionsV293TestJSON-360186716-project-member] Task: {'id': task-2087047, 'name': Rename_Task, 'duration_secs': 0.178281} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1943.765124] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-42b39ba1-997a-4f62-aa19-97a6fd24e99a tempest-ServerActionsV293TestJSON-360186716 tempest-ServerActionsV293TestJSON-360186716-project-member] [instance: f375b54b-f9de-4529-b752-52c240aed532] Powering on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1943.765515] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-84019c09-cd41-4e5e-9167-be92c5c8d9f1 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1943.772722] env[63279]: DEBUG oslo_vmware.api [None req-42b39ba1-997a-4f62-aa19-97a6fd24e99a tempest-ServerActionsV293TestJSON-360186716 tempest-ServerActionsV293TestJSON-360186716-project-member] Waiting for the task: (returnval){ [ 1943.772722] env[63279]: value = "task-2087050" [ 1943.772722] env[63279]: _type = "Task" [ 1943.772722] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1943.782084] env[63279]: DEBUG oslo_vmware.api [None req-42b39ba1-997a-4f62-aa19-97a6fd24e99a tempest-ServerActionsV293TestJSON-360186716 tempest-ServerActionsV293TestJSON-360186716-project-member] Task: {'id': task-2087050, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1943.908353] env[63279]: DEBUG nova.compute.manager [req-1c22e200-e6a6-4dc2-97e3-9b0e6f62de24 req-e9ba7fdf-f60f-4dcb-9132-ad4762b39b9b service nova] [instance: fcc5a636-554f-424e-a604-a8e7bd7cf574] Received event network-changed-e1063b89-be88-474a-a2ec-b61eb11cf9fe {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 1943.908629] env[63279]: DEBUG nova.compute.manager [req-1c22e200-e6a6-4dc2-97e3-9b0e6f62de24 req-e9ba7fdf-f60f-4dcb-9132-ad4762b39b9b service nova] [instance: fcc5a636-554f-424e-a604-a8e7bd7cf574] Refreshing instance network info cache due to event network-changed-e1063b89-be88-474a-a2ec-b61eb11cf9fe. 
{{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11655}} [ 1943.908779] env[63279]: DEBUG oslo_concurrency.lockutils [req-1c22e200-e6a6-4dc2-97e3-9b0e6f62de24 req-e9ba7fdf-f60f-4dcb-9132-ad4762b39b9b service nova] Acquiring lock "refresh_cache-fcc5a636-554f-424e-a604-a8e7bd7cf574" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1943.908922] env[63279]: DEBUG oslo_concurrency.lockutils [req-1c22e200-e6a6-4dc2-97e3-9b0e6f62de24 req-e9ba7fdf-f60f-4dcb-9132-ad4762b39b9b service nova] Acquired lock "refresh_cache-fcc5a636-554f-424e-a604-a8e7bd7cf574" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1943.909514] env[63279]: DEBUG nova.network.neutron [req-1c22e200-e6a6-4dc2-97e3-9b0e6f62de24 req-e9ba7fdf-f60f-4dcb-9132-ad4762b39b9b service nova] [instance: fcc5a636-554f-424e-a604-a8e7bd7cf574] Refreshing network info cache for port e1063b89-be88-474a-a2ec-b61eb11cf9fe {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1943.992953] env[63279]: DEBUG oslo_vmware.api [None req-f96e6fd1-b8ca-440e-b887-29f99f8f271c tempest-AttachInterfacesUnderV243Test-18748603 tempest-AttachInterfacesUnderV243Test-18748603-project-member] Task: {'id': task-2087048, 'name': ReconfigVM_Task, 'duration_secs': 0.158351} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1943.993407] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-f96e6fd1-b8ca-440e-b887-29f99f8f271c tempest-AttachInterfacesUnderV243Test-18748603 tempest-AttachInterfacesUnderV243Test-18748603-project-member] [instance: fcc5a636-554f-424e-a604-a8e7bd7cf574] Reconfigured VM instance to set the machine id {{(pid=63279) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1799}} [ 1944.001857] env[63279]: DEBUG nova.scheduler.client.report [None req-a527ddef-d877-4b24-a0b1-c60958f9b4c7 tempest-ServerAddressesNegativeTestJSON-377386487 tempest-ServerAddressesNegativeTestJSON-377386487-project-member] Inventory has not changed for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1944.030898] env[63279]: INFO nova.compute.manager [None req-0a937d49-2bb5-4c2d-b540-fcaea2c0fe97 tempest-DeleteServersAdminTestJSON-778262859 tempest-DeleteServersAdminTestJSON-778262859-project-member] [instance: 81103d53-99fe-4d1a-816f-7685c59c80ee] Took 53.45 seconds to build instance. [ 1944.098205] env[63279]: DEBUG oslo_vmware.api [None req-4a8933e7-208a-443d-9249-2440f3ab60b6 tempest-ServerMetadataNegativeTestJSON-1632547526 tempest-ServerMetadataNegativeTestJSON-1632547526-project-member] Task: {'id': task-2087049, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.488939} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1944.098510] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-4a8933e7-208a-443d-9249-2440f3ab60b6 tempest-ServerMetadataNegativeTestJSON-1632547526 tempest-ServerMetadataNegativeTestJSON-1632547526-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] e04f06de-da6a-4562-a50a-ff16bf3a006e/e04f06de-da6a-4562-a50a-ff16bf3a006e.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1944.098737] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-4a8933e7-208a-443d-9249-2440f3ab60b6 tempest-ServerMetadataNegativeTestJSON-1632547526 tempest-ServerMetadataNegativeTestJSON-1632547526-project-member] [instance: e04f06de-da6a-4562-a50a-ff16bf3a006e] Extending root virtual disk to 1048576 {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1944.098994] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-025f045c-a8f1-43e6-ac48-85b12b2f2bf8 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1944.106038] env[63279]: DEBUG oslo_vmware.api [None req-4a8933e7-208a-443d-9249-2440f3ab60b6 tempest-ServerMetadataNegativeTestJSON-1632547526 tempest-ServerMetadataNegativeTestJSON-1632547526-project-member] Waiting for the task: (returnval){ [ 1944.106038] env[63279]: value = "task-2087051" [ 1944.106038] env[63279]: _type = "Task" [ 1944.106038] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1944.116870] env[63279]: DEBUG oslo_vmware.api [None req-4a8933e7-208a-443d-9249-2440f3ab60b6 tempest-ServerMetadataNegativeTestJSON-1632547526 tempest-ServerMetadataNegativeTestJSON-1632547526-project-member] Task: {'id': task-2087051, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1944.173100] env[63279]: DEBUG nova.compute.manager [None req-1e3daa27-53b9-4834-adeb-4cde61720229 tempest-ServersNegativeTestMultiTenantJSON-98060902 tempest-ServersNegativeTestMultiTenantJSON-98060902-project-member] [instance: cf1b70af-335d-404b-bb4f-fe082dd6f450] Start spawning the instance on the hypervisor. 
{{(pid=63279) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1944.199700] env[63279]: DEBUG nova.virt.hardware [None req-1e3daa27-53b9-4834-adeb-4cde61720229 tempest-ServersNegativeTestMultiTenantJSON-98060902 tempest-ServersNegativeTestMultiTenantJSON-98060902-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-13T17:30:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-13T17:30:01Z,direct_url=,disk_format='vmdk',id=30887889-e45b-4f67-8b3c-16216e594a90,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a935e86eee0a4c38adfe0367d2097a61',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-13T17:30:02Z,virtual_size=,visibility=), allow threads: False {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1944.199944] env[63279]: DEBUG nova.virt.hardware [None req-1e3daa27-53b9-4834-adeb-4cde61720229 tempest-ServersNegativeTestMultiTenantJSON-98060902 tempest-ServersNegativeTestMultiTenantJSON-98060902-project-member] Flavor limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1944.200110] env[63279]: DEBUG nova.virt.hardware [None req-1e3daa27-53b9-4834-adeb-4cde61720229 tempest-ServersNegativeTestMultiTenantJSON-98060902 tempest-ServersNegativeTestMultiTenantJSON-98060902-project-member] Image limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1944.200294] env[63279]: DEBUG nova.virt.hardware [None req-1e3daa27-53b9-4834-adeb-4cde61720229 tempest-ServersNegativeTestMultiTenantJSON-98060902 tempest-ServersNegativeTestMultiTenantJSON-98060902-project-member] Flavor pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1944.200437] env[63279]: DEBUG nova.virt.hardware [None req-1e3daa27-53b9-4834-adeb-4cde61720229 tempest-ServersNegativeTestMultiTenantJSON-98060902 tempest-ServersNegativeTestMultiTenantJSON-98060902-project-member] Image pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1944.200580] env[63279]: DEBUG nova.virt.hardware [None req-1e3daa27-53b9-4834-adeb-4cde61720229 tempest-ServersNegativeTestMultiTenantJSON-98060902 tempest-ServersNegativeTestMultiTenantJSON-98060902-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1944.200785] env[63279]: DEBUG nova.virt.hardware [None req-1e3daa27-53b9-4834-adeb-4cde61720229 tempest-ServersNegativeTestMultiTenantJSON-98060902 tempest-ServersNegativeTestMultiTenantJSON-98060902-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1944.200945] env[63279]: DEBUG nova.virt.hardware [None req-1e3daa27-53b9-4834-adeb-4cde61720229 tempest-ServersNegativeTestMultiTenantJSON-98060902 tempest-ServersNegativeTestMultiTenantJSON-98060902-project-member] Build 
topologies for 1 vcpu(s) 1:1:1 {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1944.201542] env[63279]: DEBUG nova.virt.hardware [None req-1e3daa27-53b9-4834-adeb-4cde61720229 tempest-ServersNegativeTestMultiTenantJSON-98060902 tempest-ServersNegativeTestMultiTenantJSON-98060902-project-member] Got 1 possible topologies {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1944.201542] env[63279]: DEBUG nova.virt.hardware [None req-1e3daa27-53b9-4834-adeb-4cde61720229 tempest-ServersNegativeTestMultiTenantJSON-98060902 tempest-ServersNegativeTestMultiTenantJSON-98060902-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1944.201780] env[63279]: DEBUG nova.virt.hardware [None req-1e3daa27-53b9-4834-adeb-4cde61720229 tempest-ServersNegativeTestMultiTenantJSON-98060902 tempest-ServersNegativeTestMultiTenantJSON-98060902-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1944.202705] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-690cc08e-2192-4f20-a6f1-8b8990356e7c {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1944.212932] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-748bc888-e872-4fa2-bc0c-66eb9cd0b514 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1944.228186] env[63279]: DEBUG nova.compute.utils [None req-9a5f5cc6-f79e-4299-9ca0-1f387568bf34 tempest-VolumesAssistedSnapshotsTest-67549343 tempest-VolumesAssistedSnapshotsTest-67549343-project-admin] Using /dev/sd instead of None {{(pid=63279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1944.286485] env[63279]: DEBUG oslo_vmware.api [None req-42b39ba1-997a-4f62-aa19-97a6fd24e99a tempest-ServerActionsV293TestJSON-360186716 tempest-ServerActionsV293TestJSON-360186716-project-member] Task: {'id': task-2087050, 'name': PowerOnVM_Task} progress is 88%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1944.507088] env[63279]: DEBUG oslo_concurrency.lockutils [None req-a527ddef-d877-4b24-a0b1-c60958f9b4c7 tempest-ServerAddressesNegativeTestJSON-377386487 tempest-ServerAddressesNegativeTestJSON-377386487-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.363s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1944.507837] env[63279]: DEBUG nova.compute.manager [None req-a527ddef-d877-4b24-a0b1-c60958f9b4c7 tempest-ServerAddressesNegativeTestJSON-377386487 tempest-ServerAddressesNegativeTestJSON-377386487-project-member] [instance: c287072d-0ce9-4075-8895-0f64326ac303] Start building networks asynchronously for instance. 
{{(pid=63279) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1944.510072] env[63279]: DEBUG oslo_concurrency.lockutils [None req-f3600cbc-9fef-45fa-b2de-6e5d0ca54cdb tempest-ServerRescueTestJSONUnderV235-1989684258 tempest-ServerRescueTestJSONUnderV235-1989684258-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 32.852s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1944.511883] env[63279]: INFO nova.compute.claims [None req-f3600cbc-9fef-45fa-b2de-6e5d0ca54cdb tempest-ServerRescueTestJSONUnderV235-1989684258 tempest-ServerRescueTestJSONUnderV235-1989684258-project-member] [instance: 044335c7-ce3b-4b4a-b1dc-8b9acec538b4] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1944.539644] env[63279]: DEBUG oslo_concurrency.lockutils [None req-0a937d49-2bb5-4c2d-b540-fcaea2c0fe97 tempest-DeleteServersAdminTestJSON-778262859 tempest-DeleteServersAdminTestJSON-778262859-project-member] Lock "81103d53-99fe-4d1a-816f-7685c59c80ee" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 96.906s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1944.617067] env[63279]: DEBUG oslo_vmware.api [None req-4a8933e7-208a-443d-9249-2440f3ab60b6 tempest-ServerMetadataNegativeTestJSON-1632547526 tempest-ServerMetadataNegativeTestJSON-1632547526-project-member] Task: {'id': task-2087051, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.06285} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1944.617431] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-4a8933e7-208a-443d-9249-2440f3ab60b6 tempest-ServerMetadataNegativeTestJSON-1632547526 tempest-ServerMetadataNegativeTestJSON-1632547526-project-member] [instance: e04f06de-da6a-4562-a50a-ff16bf3a006e] Extended root virtual disk {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1944.618182] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e3f4aaf-125c-4cda-ba17-42c82a762c5f {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1944.642706] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-4a8933e7-208a-443d-9249-2440f3ab60b6 tempest-ServerMetadataNegativeTestJSON-1632547526 tempest-ServerMetadataNegativeTestJSON-1632547526-project-member] [instance: e04f06de-da6a-4562-a50a-ff16bf3a006e] Reconfiguring VM instance instance-00000025 to attach disk [datastore1] e04f06de-da6a-4562-a50a-ff16bf3a006e/e04f06de-da6a-4562-a50a-ff16bf3a006e.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1944.643411] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-59d1ac19-a9d6-459a-ad3c-e2c5e080e5d1 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1944.669405] env[63279]: DEBUG oslo_vmware.api [None req-4a8933e7-208a-443d-9249-2440f3ab60b6 tempest-ServerMetadataNegativeTestJSON-1632547526 tempest-ServerMetadataNegativeTestJSON-1632547526-project-member] Waiting for the task: (returnval){ [ 1944.669405] env[63279]: value = "task-2087052" [ 1944.669405] 
env[63279]: _type = "Task" [ 1944.669405] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1944.679917] env[63279]: DEBUG oslo_vmware.api [None req-4a8933e7-208a-443d-9249-2440f3ab60b6 tempest-ServerMetadataNegativeTestJSON-1632547526 tempest-ServerMetadataNegativeTestJSON-1632547526-project-member] Task: {'id': task-2087052, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1944.730803] env[63279]: DEBUG oslo_concurrency.lockutils [None req-9a5f5cc6-f79e-4299-9ca0-1f387568bf34 tempest-VolumesAssistedSnapshotsTest-67549343 tempest-VolumesAssistedSnapshotsTest-67549343-project-admin] Lock "5d4909ea-396c-45ba-9ff5-acb8576150b3" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.006s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1944.783558] env[63279]: DEBUG oslo_vmware.api [None req-42b39ba1-997a-4f62-aa19-97a6fd24e99a tempest-ServerActionsV293TestJSON-360186716 tempest-ServerActionsV293TestJSON-360186716-project-member] Task: {'id': task-2087050, 'name': PowerOnVM_Task, 'duration_secs': 0.798277} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1944.783771] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-42b39ba1-997a-4f62-aa19-97a6fd24e99a tempest-ServerActionsV293TestJSON-360186716 tempest-ServerActionsV293TestJSON-360186716-project-member] [instance: f375b54b-f9de-4529-b752-52c240aed532] Powered on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1944.783977] env[63279]: INFO nova.compute.manager [None req-42b39ba1-997a-4f62-aa19-97a6fd24e99a tempest-ServerActionsV293TestJSON-360186716 tempest-ServerActionsV293TestJSON-360186716-project-member] [instance: f375b54b-f9de-4529-b752-52c240aed532] Took 6.56 seconds to spawn the instance on the hypervisor. [ 1944.784382] env[63279]: DEBUG nova.compute.manager [None req-42b39ba1-997a-4f62-aa19-97a6fd24e99a tempest-ServerActionsV293TestJSON-360186716 tempest-ServerActionsV293TestJSON-360186716-project-member] [instance: f375b54b-f9de-4529-b752-52c240aed532] Checking state {{(pid=63279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1944.785268] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67f639c7-f471-49e0-90eb-7b4fa52bbceb {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1944.790993] env[63279]: DEBUG nova.network.neutron [req-1c22e200-e6a6-4dc2-97e3-9b0e6f62de24 req-e9ba7fdf-f60f-4dcb-9132-ad4762b39b9b service nova] [instance: fcc5a636-554f-424e-a604-a8e7bd7cf574] Updated VIF entry in instance network info cache for port e1063b89-be88-474a-a2ec-b61eb11cf9fe. 
{{(pid=63279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1944.791369] env[63279]: DEBUG nova.network.neutron [req-1c22e200-e6a6-4dc2-97e3-9b0e6f62de24 req-e9ba7fdf-f60f-4dcb-9132-ad4762b39b9b service nova] [instance: fcc5a636-554f-424e-a604-a8e7bd7cf574] Updating instance_info_cache with network_info: [{"id": "e1063b89-be88-474a-a2ec-b61eb11cf9fe", "address": "fa:16:3e:b1:26:30", "network": {"id": "50f30894-1239-497e-9f70-afa5b0c429ea", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-85780566-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.252", "type": "floating", "version": 4, "meta": {}}]}, {"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dfdf69a97cf54d5cb8c4fb1c59b6a5d0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f2e45023-22b5-458b-826e-9b7eb69ba028", "external-id": "nsx-vlan-transportzone-614", "segmentation_id": 614, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape1063b89-be", "ovs_interfaceid": "e1063b89-be88-474a-a2ec-b61eb11cf9fe", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1945.020094] env[63279]: DEBUG nova.compute.utils [None req-a527ddef-d877-4b24-a0b1-c60958f9b4c7 tempest-ServerAddressesNegativeTestJSON-377386487 tempest-ServerAddressesNegativeTestJSON-377386487-project-member] Using /dev/sd instead of None {{(pid=63279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1945.024515] env[63279]: DEBUG nova.compute.manager [None req-a527ddef-d877-4b24-a0b1-c60958f9b4c7 tempest-ServerAddressesNegativeTestJSON-377386487 tempest-ServerAddressesNegativeTestJSON-377386487-project-member] [instance: c287072d-0ce9-4075-8895-0f64326ac303] Allocating IP information in the background. {{(pid=63279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1945.024515] env[63279]: DEBUG nova.network.neutron [None req-a527ddef-d877-4b24-a0b1-c60958f9b4c7 tempest-ServerAddressesNegativeTestJSON-377386487 tempest-ServerAddressesNegativeTestJSON-377386487-project-member] [instance: c287072d-0ce9-4075-8895-0f64326ac303] allocate_for_instance() {{(pid=63279) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1945.042378] env[63279]: DEBUG nova.compute.manager [None req-d078f7b9-924c-4992-81c3-367c64f2edff tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] [instance: 79032b2a-74f7-4c6d-8f71-f848fe372ba2] Starting instance... 
{{(pid=63279) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1945.085704] env[63279]: DEBUG nova.policy [None req-a527ddef-d877-4b24-a0b1-c60958f9b4c7 tempest-ServerAddressesNegativeTestJSON-377386487 tempest-ServerAddressesNegativeTestJSON-377386487-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8ae52ac983874265be089fdc82c15b70', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7a6f286a404d44aab6d17264b25f49f0', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63279) authorize /opt/stack/nova/nova/policy.py:192}} [ 1945.089947] env[63279]: DEBUG nova.network.neutron [None req-1e3daa27-53b9-4834-adeb-4cde61720229 tempest-ServersNegativeTestMultiTenantJSON-98060902 tempest-ServersNegativeTestMultiTenantJSON-98060902-project-member] [instance: cf1b70af-335d-404b-bb4f-fe082dd6f450] Successfully updated port: 67e6934e-98a0-4355-80f1-869c87d01ea0 {{(pid=63279) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1945.183870] env[63279]: DEBUG oslo_vmware.api [None req-4a8933e7-208a-443d-9249-2440f3ab60b6 tempest-ServerMetadataNegativeTestJSON-1632547526 tempest-ServerMetadataNegativeTestJSON-1632547526-project-member] Task: {'id': task-2087052, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1945.297953] env[63279]: DEBUG oslo_concurrency.lockutils [req-1c22e200-e6a6-4dc2-97e3-9b0e6f62de24 req-e9ba7fdf-f60f-4dcb-9132-ad4762b39b9b service nova] Releasing lock "refresh_cache-fcc5a636-554f-424e-a604-a8e7bd7cf574" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1945.312681] env[63279]: INFO nova.compute.manager [None req-42b39ba1-997a-4f62-aa19-97a6fd24e99a tempest-ServerActionsV293TestJSON-360186716 tempest-ServerActionsV293TestJSON-360186716-project-member] [instance: f375b54b-f9de-4529-b752-52c240aed532] Took 52.36 seconds to build instance. [ 1945.415982] env[63279]: DEBUG nova.network.neutron [None req-a527ddef-d877-4b24-a0b1-c60958f9b4c7 tempest-ServerAddressesNegativeTestJSON-377386487 tempest-ServerAddressesNegativeTestJSON-377386487-project-member] [instance: c287072d-0ce9-4075-8895-0f64326ac303] Successfully created port: b6a4dca7-a0ba-4270-8c1f-2afac4d449bb {{(pid=63279) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1946.256046] env[63279]: DEBUG nova.compute.manager [None req-a527ddef-d877-4b24-a0b1-c60958f9b4c7 tempest-ServerAddressesNegativeTestJSON-377386487 tempest-ServerAddressesNegativeTestJSON-377386487-project-member] [instance: c287072d-0ce9-4075-8895-0f64326ac303] Start building block device mappings for instance. 
{{(pid=63279) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1946.263857] env[63279]: DEBUG oslo_concurrency.lockutils [None req-1e3daa27-53b9-4834-adeb-4cde61720229 tempest-ServersNegativeTestMultiTenantJSON-98060902 tempest-ServersNegativeTestMultiTenantJSON-98060902-project-member] Acquiring lock "refresh_cache-cf1b70af-335d-404b-bb4f-fe082dd6f450" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1946.264046] env[63279]: DEBUG oslo_concurrency.lockutils [None req-1e3daa27-53b9-4834-adeb-4cde61720229 tempest-ServersNegativeTestMultiTenantJSON-98060902 tempest-ServersNegativeTestMultiTenantJSON-98060902-project-member] Acquired lock "refresh_cache-cf1b70af-335d-404b-bb4f-fe082dd6f450" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1946.264203] env[63279]: DEBUG nova.network.neutron [None req-1e3daa27-53b9-4834-adeb-4cde61720229 tempest-ServersNegativeTestMultiTenantJSON-98060902 tempest-ServersNegativeTestMultiTenantJSON-98060902-project-member] [instance: cf1b70af-335d-404b-bb4f-fe082dd6f450] Building network info cache for instance {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1946.269022] env[63279]: DEBUG oslo_concurrency.lockutils [None req-42b39ba1-997a-4f62-aa19-97a6fd24e99a tempest-ServerActionsV293TestJSON-360186716 tempest-ServerActionsV293TestJSON-360186716-project-member] Lock "f375b54b-f9de-4529-b752-52c240aed532" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 97.816s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1946.269022] env[63279]: DEBUG oslo_concurrency.lockutils [None req-54fd8829-eaaf-408f-9e32-c8514ef3c821 tempest-InstanceActionsTestJSON-1583999850 tempest-InstanceActionsTestJSON-1583999850-project-member] Acquiring lock "de543869-8ab1-40ed-8f6d-dc506c257843" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1946.269022] env[63279]: DEBUG oslo_concurrency.lockutils [None req-54fd8829-eaaf-408f-9e32-c8514ef3c821 tempest-InstanceActionsTestJSON-1583999850 tempest-InstanceActionsTestJSON-1583999850-project-member] Lock "de543869-8ab1-40ed-8f6d-dc506c257843" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1946.269022] env[63279]: DEBUG oslo_concurrency.lockutils [None req-54fd8829-eaaf-408f-9e32-c8514ef3c821 tempest-InstanceActionsTestJSON-1583999850 tempest-InstanceActionsTestJSON-1583999850-project-member] Acquiring lock "de543869-8ab1-40ed-8f6d-dc506c257843-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1946.269419] env[63279]: DEBUG oslo_concurrency.lockutils [None req-54fd8829-eaaf-408f-9e32-c8514ef3c821 tempest-InstanceActionsTestJSON-1583999850 tempest-InstanceActionsTestJSON-1583999850-project-member] Lock "de543869-8ab1-40ed-8f6d-dc506c257843-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63279) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1946.269419] env[63279]: DEBUG oslo_concurrency.lockutils [None req-54fd8829-eaaf-408f-9e32-c8514ef3c821 tempest-InstanceActionsTestJSON-1583999850 tempest-InstanceActionsTestJSON-1583999850-project-member] Lock "de543869-8ab1-40ed-8f6d-dc506c257843-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1946.269419] env[63279]: DEBUG oslo_concurrency.lockutils [None req-9a5f5cc6-f79e-4299-9ca0-1f387568bf34 tempest-VolumesAssistedSnapshotsTest-67549343 tempest-VolumesAssistedSnapshotsTest-67549343-project-admin] Acquiring lock "5d4909ea-396c-45ba-9ff5-acb8576150b3" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1946.269513] env[63279]: DEBUG oslo_concurrency.lockutils [None req-9a5f5cc6-f79e-4299-9ca0-1f387568bf34 tempest-VolumesAssistedSnapshotsTest-67549343 tempest-VolumesAssistedSnapshotsTest-67549343-project-admin] Lock "5d4909ea-396c-45ba-9ff5-acb8576150b3" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1946.269713] env[63279]: INFO nova.compute.manager [None req-9a5f5cc6-f79e-4299-9ca0-1f387568bf34 tempest-VolumesAssistedSnapshotsTest-67549343 tempest-VolumesAssistedSnapshotsTest-67549343-project-admin] [instance: 5d4909ea-396c-45ba-9ff5-acb8576150b3] Attaching volume 977ada7e-edf6-44b6-8cb5-41e0baf8daf1 to /dev/sdb [ 1946.272732] env[63279]: DEBUG nova.objects.instance [None req-d128cf11-2d83-47b1-b6c4-be749dcd84f8 tempest-AttachInterfacesUnderV243Test-18748603 tempest-AttachInterfacesUnderV243Test-18748603-project-member] Lazy-loading 'flavor' on Instance uuid fcc5a636-554f-424e-a604-a8e7bd7cf574 {{(pid=63279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1946.298046] env[63279]: DEBUG oslo_vmware.api [None req-4a8933e7-208a-443d-9249-2440f3ab60b6 tempest-ServerMetadataNegativeTestJSON-1632547526 tempest-ServerMetadataNegativeTestJSON-1632547526-project-member] Task: {'id': task-2087052, 'name': ReconfigVM_Task, 'duration_secs': 0.817196} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1946.298046] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-4a8933e7-208a-443d-9249-2440f3ab60b6 tempest-ServerMetadataNegativeTestJSON-1632547526 tempest-ServerMetadataNegativeTestJSON-1632547526-project-member] [instance: e04f06de-da6a-4562-a50a-ff16bf3a006e] Reconfigured VM instance instance-00000025 to attach disk [datastore1] e04f06de-da6a-4562-a50a-ff16bf3a006e/e04f06de-da6a-4562-a50a-ff16bf3a006e.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1946.298260] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-38cd63c2-4f6a-447a-a91a-e01127473d61 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1946.307608] env[63279]: DEBUG oslo_vmware.api [None req-4a8933e7-208a-443d-9249-2440f3ab60b6 tempest-ServerMetadataNegativeTestJSON-1632547526 tempest-ServerMetadataNegativeTestJSON-1632547526-project-member] Waiting for the task: (returnval){ [ 1946.307608] env[63279]: value = "task-2087053" [ 1946.307608] env[63279]: _type = "Task" [ 1946.307608] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1946.313318] env[63279]: DEBUG oslo_concurrency.lockutils [None req-d078f7b9-924c-4992-81c3-367c64f2edff tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1946.318177] env[63279]: INFO nova.compute.manager [None req-54fd8829-eaaf-408f-9e32-c8514ef3c821 tempest-InstanceActionsTestJSON-1583999850 tempest-InstanceActionsTestJSON-1583999850-project-member] [instance: de543869-8ab1-40ed-8f6d-dc506c257843] Terminating instance [ 1946.326543] env[63279]: DEBUG oslo_vmware.api [None req-4a8933e7-208a-443d-9249-2440f3ab60b6 tempest-ServerMetadataNegativeTestJSON-1632547526 tempest-ServerMetadataNegativeTestJSON-1632547526-project-member] Task: {'id': task-2087053, 'name': Rename_Task} progress is 10%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1946.345930] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f210dbe5-8cca-4586-92e0-3d0c4274ac4b {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1946.358845] env[63279]: DEBUG nova.network.neutron [None req-1e3daa27-53b9-4834-adeb-4cde61720229 tempest-ServersNegativeTestMultiTenantJSON-98060902 tempest-ServersNegativeTestMultiTenantJSON-98060902-project-member] [instance: cf1b70af-335d-404b-bb4f-fe082dd6f450] Instance cache missing network info. 
{{(pid=63279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1946.363847] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e1f05bc-8110-41ce-b0d8-9e3d4af7da67 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1946.380202] env[63279]: DEBUG nova.virt.block_device [None req-9a5f5cc6-f79e-4299-9ca0-1f387568bf34 tempest-VolumesAssistedSnapshotsTest-67549343 tempest-VolumesAssistedSnapshotsTest-67549343-project-admin] [instance: 5d4909ea-396c-45ba-9ff5-acb8576150b3] Updating existing volume attachment record: 3ac7f8e8-db1c-4ff6-b022-81319bf67a8c {{(pid=63279) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1946.611088] env[63279]: DEBUG nova.compute.manager [req-e52ddeaa-4a52-4a80-b533-e5666af86b9a req-a6e5c3ab-9dad-45d3-bf24-57f180a50f92 service nova] [instance: cf1b70af-335d-404b-bb4f-fe082dd6f450] Received event network-vif-plugged-67e6934e-98a0-4355-80f1-869c87d01ea0 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 1946.611316] env[63279]: DEBUG oslo_concurrency.lockutils [req-e52ddeaa-4a52-4a80-b533-e5666af86b9a req-a6e5c3ab-9dad-45d3-bf24-57f180a50f92 service nova] Acquiring lock "cf1b70af-335d-404b-bb4f-fe082dd6f450-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1946.611527] env[63279]: DEBUG oslo_concurrency.lockutils [req-e52ddeaa-4a52-4a80-b533-e5666af86b9a req-a6e5c3ab-9dad-45d3-bf24-57f180a50f92 service nova] Lock "cf1b70af-335d-404b-bb4f-fe082dd6f450-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1946.611696] env[63279]: DEBUG oslo_concurrency.lockutils [req-e52ddeaa-4a52-4a80-b533-e5666af86b9a req-a6e5c3ab-9dad-45d3-bf24-57f180a50f92 service nova] Lock "cf1b70af-335d-404b-bb4f-fe082dd6f450-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1946.611860] env[63279]: DEBUG nova.compute.manager [req-e52ddeaa-4a52-4a80-b533-e5666af86b9a req-a6e5c3ab-9dad-45d3-bf24-57f180a50f92 service nova] [instance: cf1b70af-335d-404b-bb4f-fe082dd6f450] No waiting events found dispatching network-vif-plugged-67e6934e-98a0-4355-80f1-869c87d01ea0 {{(pid=63279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1946.612031] env[63279]: WARNING nova.compute.manager [req-e52ddeaa-4a52-4a80-b533-e5666af86b9a req-a6e5c3ab-9dad-45d3-bf24-57f180a50f92 service nova] [instance: cf1b70af-335d-404b-bb4f-fe082dd6f450] Received unexpected event network-vif-plugged-67e6934e-98a0-4355-80f1-869c87d01ea0 for instance with vm_state building and task_state spawning. 
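Note: the "No waiting events found dispatching network-vif-plugged-... / WARNING ... Received unexpected event ..." pair above is the compute manager matching an external Neutron notification against registered waiters and finding none, because the instance is still building. A simplified, self-contained sketch of that pop-or-warn flow follows; the _waiting_events dict, helper names and event-name format are assumptions for illustration, not Nova's actual structures.

    import threading

    _waiting_events = {}   # {(instance_uuid, event_name): threading.Event}

    def expect_event(instance_uuid, event_name):
        """Register interest before starting an operation that will emit the event."""
        ev = threading.Event()
        _waiting_events[(instance_uuid, event_name)] = ev
        return ev

    def dispatch_external_event(instance_uuid, event_name, vm_state):
        """Wake a registered waiter, or warn when nobody is waiting for the event."""
        waiter = _waiting_events.pop((instance_uuid, event_name), None)
        if waiter is not None:
            waiter.set()
            return
        print(f"No waiting events found dispatching {event_name}")
        print(f"WARNING: Received unexpected event {event_name} "
              f"for instance with vm_state {vm_state}")

    # The log above shows the second branch: the plug notification for port
    # 67e6934e-... arrived while instance cf1b70af-... was still 'building'.
    dispatch_external_event("cf1b70af-335d-404b-bb4f-fe082dd6f450",
                            "network-vif-plugged-67e6934e-98a0-4355-80f1-869c87d01ea0",
                            "building")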
[ 1946.612194] env[63279]: DEBUG nova.compute.manager [req-e52ddeaa-4a52-4a80-b533-e5666af86b9a req-a6e5c3ab-9dad-45d3-bf24-57f180a50f92 service nova] [instance: cf1b70af-335d-404b-bb4f-fe082dd6f450] Received event network-changed-67e6934e-98a0-4355-80f1-869c87d01ea0 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 1946.612383] env[63279]: DEBUG nova.compute.manager [req-e52ddeaa-4a52-4a80-b533-e5666af86b9a req-a6e5c3ab-9dad-45d3-bf24-57f180a50f92 service nova] [instance: cf1b70af-335d-404b-bb4f-fe082dd6f450] Refreshing instance network info cache due to event network-changed-67e6934e-98a0-4355-80f1-869c87d01ea0. {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11655}} [ 1946.612995] env[63279]: DEBUG oslo_concurrency.lockutils [req-e52ddeaa-4a52-4a80-b533-e5666af86b9a req-a6e5c3ab-9dad-45d3-bf24-57f180a50f92 service nova] Acquiring lock "refresh_cache-cf1b70af-335d-404b-bb4f-fe082dd6f450" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1946.796070] env[63279]: DEBUG nova.compute.manager [None req-387411dd-d774-4ddb-8c67-e6827dd0c0cb tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] [instance: eca98392-98be-405b-b799-463ef9ee3dc8] Starting instance... {{(pid=63279) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1946.800100] env[63279]: DEBUG oslo_concurrency.lockutils [None req-d128cf11-2d83-47b1-b6c4-be749dcd84f8 tempest-AttachInterfacesUnderV243Test-18748603 tempest-AttachInterfacesUnderV243Test-18748603-project-member] Acquiring lock "refresh_cache-fcc5a636-554f-424e-a604-a8e7bd7cf574" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1946.800100] env[63279]: DEBUG oslo_concurrency.lockutils [None req-d128cf11-2d83-47b1-b6c4-be749dcd84f8 tempest-AttachInterfacesUnderV243Test-18748603 tempest-AttachInterfacesUnderV243Test-18748603-project-member] Acquired lock "refresh_cache-fcc5a636-554f-424e-a604-a8e7bd7cf574" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1946.828232] env[63279]: DEBUG nova.compute.manager [None req-54fd8829-eaaf-408f-9e32-c8514ef3c821 tempest-InstanceActionsTestJSON-1583999850 tempest-InstanceActionsTestJSON-1583999850-project-member] [instance: de543869-8ab1-40ed-8f6d-dc506c257843] Start destroying the instance on the hypervisor. {{(pid=63279) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1946.828511] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-54fd8829-eaaf-408f-9e32-c8514ef3c821 tempest-InstanceActionsTestJSON-1583999850 tempest-InstanceActionsTestJSON-1583999850-project-member] [instance: de543869-8ab1-40ed-8f6d-dc506c257843] Destroying instance {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1946.828840] env[63279]: DEBUG oslo_vmware.api [None req-4a8933e7-208a-443d-9249-2440f3ab60b6 tempest-ServerMetadataNegativeTestJSON-1632547526 tempest-ServerMetadataNegativeTestJSON-1632547526-project-member] Task: {'id': task-2087053, 'name': Rename_Task, 'duration_secs': 0.379441} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1946.829624] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fad4173d-b1b2-4438-a200-2a7c3518a2a1 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1946.835505] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-4a8933e7-208a-443d-9249-2440f3ab60b6 tempest-ServerMetadataNegativeTestJSON-1632547526 tempest-ServerMetadataNegativeTestJSON-1632547526-project-member] [instance: e04f06de-da6a-4562-a50a-ff16bf3a006e] Powering on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1946.842100] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a77cea61-123a-4d86-8a92-b1b1119af313 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1946.852196] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-54fd8829-eaaf-408f-9e32-c8514ef3c821 tempest-InstanceActionsTestJSON-1583999850 tempest-InstanceActionsTestJSON-1583999850-project-member] [instance: de543869-8ab1-40ed-8f6d-dc506c257843] Powering off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1946.853572] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-874a1b26-7a39-4eac-a3b0-6248756a39be {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1946.857337] env[63279]: DEBUG oslo_vmware.api [None req-4a8933e7-208a-443d-9249-2440f3ab60b6 tempest-ServerMetadataNegativeTestJSON-1632547526 tempest-ServerMetadataNegativeTestJSON-1632547526-project-member] Waiting for the task: (returnval){ [ 1946.857337] env[63279]: value = "task-2087057" [ 1946.857337] env[63279]: _type = "Task" [ 1946.857337] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1946.864795] env[63279]: DEBUG oslo_vmware.api [None req-54fd8829-eaaf-408f-9e32-c8514ef3c821 tempest-InstanceActionsTestJSON-1583999850 tempest-InstanceActionsTestJSON-1583999850-project-member] Waiting for the task: (returnval){ [ 1946.864795] env[63279]: value = "task-2087058" [ 1946.864795] env[63279]: _type = "Task" [ 1946.864795] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1946.870300] env[63279]: DEBUG oslo_vmware.api [None req-4a8933e7-208a-443d-9249-2440f3ab60b6 tempest-ServerMetadataNegativeTestJSON-1632547526 tempest-ServerMetadataNegativeTestJSON-1632547526-project-member] Task: {'id': task-2087057, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1946.879126] env[63279]: DEBUG oslo_vmware.api [None req-54fd8829-eaaf-408f-9e32-c8514ef3c821 tempest-InstanceActionsTestJSON-1583999850 tempest-InstanceActionsTestJSON-1583999850-project-member] Task: {'id': task-2087058, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1947.070327] env[63279]: DEBUG nova.network.neutron [None req-1e3daa27-53b9-4834-adeb-4cde61720229 tempest-ServersNegativeTestMultiTenantJSON-98060902 tempest-ServersNegativeTestMultiTenantJSON-98060902-project-member] [instance: cf1b70af-335d-404b-bb4f-fe082dd6f450] Updating instance_info_cache with network_info: [{"id": "67e6934e-98a0-4355-80f1-869c87d01ea0", "address": "fa:16:3e:80:f3:4b", "network": {"id": "ac45106d-c291-4b98-a490-29c7ee886e53", "bridge": "br-int", "label": "tempest-ServersNegativeTestMultiTenantJSON-1401218882-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7e60a9f94cc8403eaa79a85a1a17160b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a0a76279-3c11-4bef-b124-2a2ee13fa377", "external-id": "nsx-vlan-transportzone-738", "segmentation_id": 738, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap67e6934e-98", "ovs_interfaceid": "67e6934e-98a0-4355-80f1-869c87d01ea0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1947.095595] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8376fa4-54d9-43da-a325-c30a346491df {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1947.104904] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad18065a-d4a9-4f74-a4cb-db5e927ef7e4 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1947.144393] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ddf17654-0dd3-458f-b057-eee721f1b44d {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1947.152478] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-343955f6-05bb-4235-af0d-f59d0d9ea3e2 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1947.169024] env[63279]: DEBUG nova.compute.provider_tree [None req-f3600cbc-9fef-45fa-b2de-6e5d0ca54cdb tempest-ServerRescueTestJSONUnderV235-1989684258 tempest-ServerRescueTestJSONUnderV235-1989684258-project-member] Inventory has not changed in ProviderTree for provider: 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1947.286832] env[63279]: DEBUG nova.compute.manager [None req-a527ddef-d877-4b24-a0b1-c60958f9b4c7 tempest-ServerAddressesNegativeTestJSON-377386487 tempest-ServerAddressesNegativeTestJSON-377386487-project-member] [instance: c287072d-0ce9-4075-8895-0f64326ac303] Start spawning the instance on the hypervisor. 
{{(pid=63279) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1947.322064] env[63279]: DEBUG oslo_concurrency.lockutils [None req-387411dd-d774-4ddb-8c67-e6827dd0c0cb tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1947.323161] env[63279]: DEBUG nova.virt.hardware [None req-a527ddef-d877-4b24-a0b1-c60958f9b4c7 tempest-ServerAddressesNegativeTestJSON-377386487 tempest-ServerAddressesNegativeTestJSON-377386487-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-13T17:30:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-13T17:30:01Z,direct_url=,disk_format='vmdk',id=30887889-e45b-4f67-8b3c-16216e594a90,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a935e86eee0a4c38adfe0367d2097a61',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-13T17:30:02Z,virtual_size=,visibility=), allow threads: False {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1947.323369] env[63279]: DEBUG nova.virt.hardware [None req-a527ddef-d877-4b24-a0b1-c60958f9b4c7 tempest-ServerAddressesNegativeTestJSON-377386487 tempest-ServerAddressesNegativeTestJSON-377386487-project-member] Flavor limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1947.323602] env[63279]: DEBUG nova.virt.hardware [None req-a527ddef-d877-4b24-a0b1-c60958f9b4c7 tempest-ServerAddressesNegativeTestJSON-377386487 tempest-ServerAddressesNegativeTestJSON-377386487-project-member] Image limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1947.323818] env[63279]: DEBUG nova.virt.hardware [None req-a527ddef-d877-4b24-a0b1-c60958f9b4c7 tempest-ServerAddressesNegativeTestJSON-377386487 tempest-ServerAddressesNegativeTestJSON-377386487-project-member] Flavor pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1947.324088] env[63279]: DEBUG nova.virt.hardware [None req-a527ddef-d877-4b24-a0b1-c60958f9b4c7 tempest-ServerAddressesNegativeTestJSON-377386487 tempest-ServerAddressesNegativeTestJSON-377386487-project-member] Image pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1947.324197] env[63279]: DEBUG nova.virt.hardware [None req-a527ddef-d877-4b24-a0b1-c60958f9b4c7 tempest-ServerAddressesNegativeTestJSON-377386487 tempest-ServerAddressesNegativeTestJSON-377386487-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1947.324463] env[63279]: DEBUG nova.virt.hardware [None req-a527ddef-d877-4b24-a0b1-c60958f9b4c7 tempest-ServerAddressesNegativeTestJSON-377386487 tempest-ServerAddressesNegativeTestJSON-377386487-project-member] Topology preferred 
VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1947.324665] env[63279]: DEBUG nova.virt.hardware [None req-a527ddef-d877-4b24-a0b1-c60958f9b4c7 tempest-ServerAddressesNegativeTestJSON-377386487 tempest-ServerAddressesNegativeTestJSON-377386487-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1947.324895] env[63279]: DEBUG nova.virt.hardware [None req-a527ddef-d877-4b24-a0b1-c60958f9b4c7 tempest-ServerAddressesNegativeTestJSON-377386487 tempest-ServerAddressesNegativeTestJSON-377386487-project-member] Got 1 possible topologies {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1947.325051] env[63279]: DEBUG nova.virt.hardware [None req-a527ddef-d877-4b24-a0b1-c60958f9b4c7 tempest-ServerAddressesNegativeTestJSON-377386487 tempest-ServerAddressesNegativeTestJSON-377386487-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1947.325287] env[63279]: DEBUG nova.virt.hardware [None req-a527ddef-d877-4b24-a0b1-c60958f9b4c7 tempest-ServerAddressesNegativeTestJSON-377386487 tempest-ServerAddressesNegativeTestJSON-377386487-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1947.326461] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94c8bdd3-fdb4-4574-a637-52528ec65dc4 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1947.337839] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3b8567e-4c98-4409-b8a5-1355397c3396 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1947.368531] env[63279]: DEBUG oslo_vmware.api [None req-4a8933e7-208a-443d-9249-2440f3ab60b6 tempest-ServerMetadataNegativeTestJSON-1632547526 tempest-ServerMetadataNegativeTestJSON-1632547526-project-member] Task: {'id': task-2087057, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1947.376865] env[63279]: DEBUG oslo_vmware.api [None req-54fd8829-eaaf-408f-9e32-c8514ef3c821 tempest-InstanceActionsTestJSON-1583999850 tempest-InstanceActionsTestJSON-1583999850-project-member] Task: {'id': task-2087058, 'name': PowerOffVM_Task, 'duration_secs': 0.253673} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1947.377150] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-54fd8829-eaaf-408f-9e32-c8514ef3c821 tempest-InstanceActionsTestJSON-1583999850 tempest-InstanceActionsTestJSON-1583999850-project-member] [instance: de543869-8ab1-40ed-8f6d-dc506c257843] Powered off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1947.377337] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-54fd8829-eaaf-408f-9e32-c8514ef3c821 tempest-InstanceActionsTestJSON-1583999850 tempest-InstanceActionsTestJSON-1583999850-project-member] [instance: de543869-8ab1-40ed-8f6d-dc506c257843] Unregistering the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1947.377678] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-397e4c50-4650-4217-bbf8-e670605d8678 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1947.433219] env[63279]: DEBUG nova.network.neutron [None req-a527ddef-d877-4b24-a0b1-c60958f9b4c7 tempest-ServerAddressesNegativeTestJSON-377386487 tempest-ServerAddressesNegativeTestJSON-377386487-project-member] [instance: c287072d-0ce9-4075-8895-0f64326ac303] Successfully updated port: b6a4dca7-a0ba-4270-8c1f-2afac4d449bb {{(pid=63279) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1947.530843] env[63279]: DEBUG nova.network.neutron [None req-d128cf11-2d83-47b1-b6c4-be749dcd84f8 tempest-AttachInterfacesUnderV243Test-18748603 tempest-AttachInterfacesUnderV243Test-18748603-project-member] [instance: fcc5a636-554f-424e-a604-a8e7bd7cf574] Building network info cache for instance {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1947.573463] env[63279]: DEBUG oslo_concurrency.lockutils [None req-1e3daa27-53b9-4834-adeb-4cde61720229 tempest-ServersNegativeTestMultiTenantJSON-98060902 tempest-ServersNegativeTestMultiTenantJSON-98060902-project-member] Releasing lock "refresh_cache-cf1b70af-335d-404b-bb4f-fe082dd6f450" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1947.573841] env[63279]: DEBUG nova.compute.manager [None req-1e3daa27-53b9-4834-adeb-4cde61720229 tempest-ServersNegativeTestMultiTenantJSON-98060902 tempest-ServersNegativeTestMultiTenantJSON-98060902-project-member] [instance: cf1b70af-335d-404b-bb4f-fe082dd6f450] Instance network_info: |[{"id": "67e6934e-98a0-4355-80f1-869c87d01ea0", "address": "fa:16:3e:80:f3:4b", "network": {"id": "ac45106d-c291-4b98-a490-29c7ee886e53", "bridge": "br-int", "label": "tempest-ServersNegativeTestMultiTenantJSON-1401218882-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7e60a9f94cc8403eaa79a85a1a17160b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a0a76279-3c11-4bef-b124-2a2ee13fa377", "external-id": "nsx-vlan-transportzone-738", "segmentation_id": 738, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap67e6934e-98", 
"ovs_interfaceid": "67e6934e-98a0-4355-80f1-869c87d01ea0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1947.574183] env[63279]: DEBUG oslo_concurrency.lockutils [req-e52ddeaa-4a52-4a80-b533-e5666af86b9a req-a6e5c3ab-9dad-45d3-bf24-57f180a50f92 service nova] Acquired lock "refresh_cache-cf1b70af-335d-404b-bb4f-fe082dd6f450" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1947.574365] env[63279]: DEBUG nova.network.neutron [req-e52ddeaa-4a52-4a80-b533-e5666af86b9a req-a6e5c3ab-9dad-45d3-bf24-57f180a50f92 service nova] [instance: cf1b70af-335d-404b-bb4f-fe082dd6f450] Refreshing network info cache for port 67e6934e-98a0-4355-80f1-869c87d01ea0 {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1947.579019] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-1e3daa27-53b9-4834-adeb-4cde61720229 tempest-ServersNegativeTestMultiTenantJSON-98060902 tempest-ServersNegativeTestMultiTenantJSON-98060902-project-member] [instance: cf1b70af-335d-404b-bb4f-fe082dd6f450] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:80:f3:4b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a0a76279-3c11-4bef-b124-2a2ee13fa377', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '67e6934e-98a0-4355-80f1-869c87d01ea0', 'vif_model': 'vmxnet3'}] {{(pid=63279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1947.584774] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-1e3daa27-53b9-4834-adeb-4cde61720229 tempest-ServersNegativeTestMultiTenantJSON-98060902 tempest-ServersNegativeTestMultiTenantJSON-98060902-project-member] Creating folder: Project (7e60a9f94cc8403eaa79a85a1a17160b). Parent ref: group-v427491. {{(pid=63279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1947.586099] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e2bce910-c75c-4ca2-8944-c4f7aae16fbf {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1947.598262] env[63279]: INFO nova.virt.vmwareapi.vm_util [None req-1e3daa27-53b9-4834-adeb-4cde61720229 tempest-ServersNegativeTestMultiTenantJSON-98060902 tempest-ServersNegativeTestMultiTenantJSON-98060902-project-member] Created folder: Project (7e60a9f94cc8403eaa79a85a1a17160b) in parent group-v427491. [ 1947.598487] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-1e3daa27-53b9-4834-adeb-4cde61720229 tempest-ServersNegativeTestMultiTenantJSON-98060902 tempest-ServersNegativeTestMultiTenantJSON-98060902-project-member] Creating folder: Instances. Parent ref: group-v427604. {{(pid=63279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1947.598781] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-282dfaf3-bfc9-4d6f-95f5-01a91e6554c7 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1947.611779] env[63279]: INFO nova.virt.vmwareapi.vm_util [None req-1e3daa27-53b9-4834-adeb-4cde61720229 tempest-ServersNegativeTestMultiTenantJSON-98060902 tempest-ServersNegativeTestMultiTenantJSON-98060902-project-member] Created folder: Instances in parent group-v427604. 
[ 1947.612031] env[63279]: DEBUG oslo.service.loopingcall [None req-1e3daa27-53b9-4834-adeb-4cde61720229 tempest-ServersNegativeTestMultiTenantJSON-98060902 tempest-ServersNegativeTestMultiTenantJSON-98060902-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1947.614864] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cf1b70af-335d-404b-bb4f-fe082dd6f450] Creating VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1947.614864] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-60423d81-021e-4b42-b1d7-95beee49fcfa {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1947.634047] env[63279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1947.634047] env[63279]: value = "task-2087062" [ 1947.634047] env[63279]: _type = "Task" [ 1947.634047] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1947.642630] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087062, 'name': CreateVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1947.674118] env[63279]: DEBUG nova.scheduler.client.report [None req-f3600cbc-9fef-45fa-b2de-6e5d0ca54cdb tempest-ServerRescueTestJSONUnderV235-1989684258 tempest-ServerRescueTestJSONUnderV235-1989684258-project-member] Inventory has not changed for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1947.784896] env[63279]: DEBUG nova.compute.manager [req-912498f9-b5e6-442e-ad72-fb48b0a4a3d2 req-c6e05e5b-c6af-4959-b91e-e68db8d4a969 service nova] [instance: c287072d-0ce9-4075-8895-0f64326ac303] Received event network-vif-plugged-b6a4dca7-a0ba-4270-8c1f-2afac4d449bb {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 1947.785227] env[63279]: DEBUG oslo_concurrency.lockutils [req-912498f9-b5e6-442e-ad72-fb48b0a4a3d2 req-c6e05e5b-c6af-4959-b91e-e68db8d4a969 service nova] Acquiring lock "c287072d-0ce9-4075-8895-0f64326ac303-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1947.785524] env[63279]: DEBUG oslo_concurrency.lockutils [req-912498f9-b5e6-442e-ad72-fb48b0a4a3d2 req-c6e05e5b-c6af-4959-b91e-e68db8d4a969 service nova] Lock "c287072d-0ce9-4075-8895-0f64326ac303-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1947.785777] env[63279]: DEBUG oslo_concurrency.lockutils [req-912498f9-b5e6-442e-ad72-fb48b0a4a3d2 req-c6e05e5b-c6af-4959-b91e-e68db8d4a969 service nova] Lock 
"c287072d-0ce9-4075-8895-0f64326ac303-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1947.786028] env[63279]: DEBUG nova.compute.manager [req-912498f9-b5e6-442e-ad72-fb48b0a4a3d2 req-c6e05e5b-c6af-4959-b91e-e68db8d4a969 service nova] [instance: c287072d-0ce9-4075-8895-0f64326ac303] No waiting events found dispatching network-vif-plugged-b6a4dca7-a0ba-4270-8c1f-2afac4d449bb {{(pid=63279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1947.786287] env[63279]: WARNING nova.compute.manager [req-912498f9-b5e6-442e-ad72-fb48b0a4a3d2 req-c6e05e5b-c6af-4959-b91e-e68db8d4a969 service nova] [instance: c287072d-0ce9-4075-8895-0f64326ac303] Received unexpected event network-vif-plugged-b6a4dca7-a0ba-4270-8c1f-2afac4d449bb for instance with vm_state building and task_state spawning. [ 1947.868218] env[63279]: DEBUG oslo_vmware.api [None req-4a8933e7-208a-443d-9249-2440f3ab60b6 tempest-ServerMetadataNegativeTestJSON-1632547526 tempest-ServerMetadataNegativeTestJSON-1632547526-project-member] Task: {'id': task-2087057, 'name': PowerOnVM_Task} progress is 89%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1947.938333] env[63279]: DEBUG oslo_concurrency.lockutils [None req-a527ddef-d877-4b24-a0b1-c60958f9b4c7 tempest-ServerAddressesNegativeTestJSON-377386487 tempest-ServerAddressesNegativeTestJSON-377386487-project-member] Acquiring lock "refresh_cache-c287072d-0ce9-4075-8895-0f64326ac303" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1947.938333] env[63279]: DEBUG oslo_concurrency.lockutils [None req-a527ddef-d877-4b24-a0b1-c60958f9b4c7 tempest-ServerAddressesNegativeTestJSON-377386487 tempest-ServerAddressesNegativeTestJSON-377386487-project-member] Acquired lock "refresh_cache-c287072d-0ce9-4075-8895-0f64326ac303" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1947.938333] env[63279]: DEBUG nova.network.neutron [None req-a527ddef-d877-4b24-a0b1-c60958f9b4c7 tempest-ServerAddressesNegativeTestJSON-377386487 tempest-ServerAddressesNegativeTestJSON-377386487-project-member] [instance: c287072d-0ce9-4075-8895-0f64326ac303] Building network info cache for instance {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1948.038521] env[63279]: DEBUG oslo_concurrency.lockutils [None req-983fb64c-2d77-4322-9ea8-58ba6eceb42b tempest-DeleteServersAdminTestJSON-778262859 tempest-DeleteServersAdminTestJSON-778262859-project-member] Acquiring lock "81103d53-99fe-4d1a-816f-7685c59c80ee" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1948.038985] env[63279]: DEBUG oslo_concurrency.lockutils [None req-983fb64c-2d77-4322-9ea8-58ba6eceb42b tempest-DeleteServersAdminTestJSON-778262859 tempest-DeleteServersAdminTestJSON-778262859-project-member] Lock "81103d53-99fe-4d1a-816f-7685c59c80ee" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1948.039221] env[63279]: DEBUG oslo_concurrency.lockutils [None 
req-983fb64c-2d77-4322-9ea8-58ba6eceb42b tempest-DeleteServersAdminTestJSON-778262859 tempest-DeleteServersAdminTestJSON-778262859-project-member] Acquiring lock "81103d53-99fe-4d1a-816f-7685c59c80ee-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1948.039418] env[63279]: DEBUG oslo_concurrency.lockutils [None req-983fb64c-2d77-4322-9ea8-58ba6eceb42b tempest-DeleteServersAdminTestJSON-778262859 tempest-DeleteServersAdminTestJSON-778262859-project-member] Lock "81103d53-99fe-4d1a-816f-7685c59c80ee-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1948.039904] env[63279]: DEBUG oslo_concurrency.lockutils [None req-983fb64c-2d77-4322-9ea8-58ba6eceb42b tempest-DeleteServersAdminTestJSON-778262859 tempest-DeleteServersAdminTestJSON-778262859-project-member] Lock "81103d53-99fe-4d1a-816f-7685c59c80ee-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1948.042116] env[63279]: INFO nova.compute.manager [None req-983fb64c-2d77-4322-9ea8-58ba6eceb42b tempest-DeleteServersAdminTestJSON-778262859 tempest-DeleteServersAdminTestJSON-778262859-project-member] [instance: 81103d53-99fe-4d1a-816f-7685c59c80ee] Terminating instance [ 1948.153074] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087062, 'name': CreateVM_Task, 'duration_secs': 0.388118} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1948.153817] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cf1b70af-335d-404b-bb4f-fe082dd6f450] Created VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1948.154727] env[63279]: DEBUG oslo_concurrency.lockutils [None req-1e3daa27-53b9-4834-adeb-4cde61720229 tempest-ServersNegativeTestMultiTenantJSON-98060902 tempest-ServersNegativeTestMultiTenantJSON-98060902-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1948.154727] env[63279]: DEBUG oslo_concurrency.lockutils [None req-1e3daa27-53b9-4834-adeb-4cde61720229 tempest-ServersNegativeTestMultiTenantJSON-98060902 tempest-ServersNegativeTestMultiTenantJSON-98060902-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1948.155110] env[63279]: DEBUG oslo_concurrency.lockutils [None req-1e3daa27-53b9-4834-adeb-4cde61720229 tempest-ServersNegativeTestMultiTenantJSON-98060902 tempest-ServersNegativeTestMultiTenantJSON-98060902-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1948.155560] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-52a5f736-1218-4f46-bb09-d3f5dd491035 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1948.161044] env[63279]: DEBUG oslo_vmware.api [None req-1e3daa27-53b9-4834-adeb-4cde61720229 tempest-ServersNegativeTestMultiTenantJSON-98060902 tempest-ServersNegativeTestMultiTenantJSON-98060902-project-member] Waiting for the task: (returnval){ [ 1948.161044] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]5203adc5-ded1-02f0-cfcc-3371e633772b" [ 1948.161044] env[63279]: _type = "Task" [ 1948.161044] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1948.171298] env[63279]: DEBUG oslo_vmware.api [None req-1e3daa27-53b9-4834-adeb-4cde61720229 tempest-ServersNegativeTestMultiTenantJSON-98060902 tempest-ServersNegativeTestMultiTenantJSON-98060902-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]5203adc5-ded1-02f0-cfcc-3371e633772b, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1948.181201] env[63279]: DEBUG oslo_concurrency.lockutils [None req-f3600cbc-9fef-45fa-b2de-6e5d0ca54cdb tempest-ServerRescueTestJSONUnderV235-1989684258 tempest-ServerRescueTestJSONUnderV235-1989684258-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.671s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1948.182022] env[63279]: DEBUG nova.compute.manager [None req-f3600cbc-9fef-45fa-b2de-6e5d0ca54cdb tempest-ServerRescueTestJSONUnderV235-1989684258 tempest-ServerRescueTestJSONUnderV235-1989684258-project-member] [instance: 044335c7-ce3b-4b4a-b1dc-8b9acec538b4] Start building networks asynchronously for instance. {{(pid=63279) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1948.184973] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 27.976s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1948.185400] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1948.185612] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=63279) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1948.185927] env[63279]: DEBUG oslo_concurrency.lockutils [None req-25a71d01-b823-4a97-991b-a30e1ca6a359 tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 27.777s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1948.186344] env[63279]: DEBUG nova.objects.instance [None req-25a71d01-b823-4a97-991b-a30e1ca6a359 tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Lazy-loading 'resources' on Instance uuid f20e8eb2-847b-4988-abf9-ed5f9f65c25c {{(pid=63279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1948.190907] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6903fa2-ad51-4627-9027-7af63456d4a3 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1948.201777] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4163ab02-f0c6-40c2-9d87-7b14a475acb2 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1948.219531] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e913824-9c02-42cc-999e-f83bf1d7bbef {{(pid=63279) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1948.233371] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8ef1a93-13e9-40e8-9472-f1644529b3e2 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1948.264358] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=179680MB free_disk=169GB free_vcpus=48 pci_devices=None {{(pid=63279) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1948.264634] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1948.371870] env[63279]: DEBUG oslo_vmware.api [None req-4a8933e7-208a-443d-9249-2440f3ab60b6 tempest-ServerMetadataNegativeTestJSON-1632547526 tempest-ServerMetadataNegativeTestJSON-1632547526-project-member] Task: {'id': task-2087057, 'name': PowerOnVM_Task, 'duration_secs': 1.096697} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1948.372548] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-4a8933e7-208a-443d-9249-2440f3ab60b6 tempest-ServerMetadataNegativeTestJSON-1632547526 tempest-ServerMetadataNegativeTestJSON-1632547526-project-member] [instance: e04f06de-da6a-4562-a50a-ff16bf3a006e] Powered on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1948.372548] env[63279]: INFO nova.compute.manager [None req-4a8933e7-208a-443d-9249-2440f3ab60b6 tempest-ServerMetadataNegativeTestJSON-1632547526 tempest-ServerMetadataNegativeTestJSON-1632547526-project-member] [instance: e04f06de-da6a-4562-a50a-ff16bf3a006e] Took 9.24 seconds to spawn the instance on the hypervisor. 
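The resource-tracker audit above ("Auditing locally available compute resources for cpu-1") reports the raw hypervisor view (48 free vCPUs, 169 GB free disk) and works against the inventory logged earlier in this run for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137. Placement turns each inventory record into schedulable capacity as (total - reserved) * allocation_ratio, while max_unit still caps any single allocation (16 VCPU, 65530 MB, 169 GB here). A quick check of that arithmetic with the values copied from the logged inventory:

    # capacity = (total - reserved) * allocation_ratio, per resource class
    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
    }
    for rc, inv in inventory.items():
        capacity = int((inv['total'] - inv['reserved']) * inv['allocation_ratio'])
        print(rc, capacity)
    # VCPU 192, MEMORY_MB 196078, DISK_GB 400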
[ 1948.373049] env[63279]: DEBUG nova.compute.manager [None req-4a8933e7-208a-443d-9249-2440f3ab60b6 tempest-ServerMetadataNegativeTestJSON-1632547526 tempest-ServerMetadataNegativeTestJSON-1632547526-project-member] [instance: e04f06de-da6a-4562-a50a-ff16bf3a006e] Checking state {{(pid=63279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1948.373789] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1adacc90-7967-4698-bbb8-0b23a6954b15 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1948.387014] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-54fd8829-eaaf-408f-9e32-c8514ef3c821 tempest-InstanceActionsTestJSON-1583999850 tempest-InstanceActionsTestJSON-1583999850-project-member] [instance: de543869-8ab1-40ed-8f6d-dc506c257843] Unregistered the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1948.388470] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-54fd8829-eaaf-408f-9e32-c8514ef3c821 tempest-InstanceActionsTestJSON-1583999850 tempest-InstanceActionsTestJSON-1583999850-project-member] [instance: de543869-8ab1-40ed-8f6d-dc506c257843] Deleting contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1948.388470] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-54fd8829-eaaf-408f-9e32-c8514ef3c821 tempest-InstanceActionsTestJSON-1583999850 tempest-InstanceActionsTestJSON-1583999850-project-member] Deleting the datastore file [datastore1] de543869-8ab1-40ed-8f6d-dc506c257843 {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1948.388470] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-704668a2-2864-4416-9fa7-29204498d652 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1948.395833] env[63279]: DEBUG oslo_vmware.api [None req-54fd8829-eaaf-408f-9e32-c8514ef3c821 tempest-InstanceActionsTestJSON-1583999850 tempest-InstanceActionsTestJSON-1583999850-project-member] Waiting for the task: (returnval){ [ 1948.395833] env[63279]: value = "task-2087063" [ 1948.395833] env[63279]: _type = "Task" [ 1948.395833] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1948.405487] env[63279]: DEBUG oslo_vmware.api [None req-54fd8829-eaaf-408f-9e32-c8514ef3c821 tempest-InstanceActionsTestJSON-1583999850 tempest-InstanceActionsTestJSON-1583999850-project-member] Task: {'id': task-2087063, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1948.413197] env[63279]: DEBUG nova.network.neutron [req-e52ddeaa-4a52-4a80-b533-e5666af86b9a req-a6e5c3ab-9dad-45d3-bf24-57f180a50f92 service nova] [instance: cf1b70af-335d-404b-bb4f-fe082dd6f450] Updated VIF entry in instance network info cache for port 67e6934e-98a0-4355-80f1-869c87d01ea0. 
{{(pid=63279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1948.413549] env[63279]: DEBUG nova.network.neutron [req-e52ddeaa-4a52-4a80-b533-e5666af86b9a req-a6e5c3ab-9dad-45d3-bf24-57f180a50f92 service nova] [instance: cf1b70af-335d-404b-bb4f-fe082dd6f450] Updating instance_info_cache with network_info: [{"id": "67e6934e-98a0-4355-80f1-869c87d01ea0", "address": "fa:16:3e:80:f3:4b", "network": {"id": "ac45106d-c291-4b98-a490-29c7ee886e53", "bridge": "br-int", "label": "tempest-ServersNegativeTestMultiTenantJSON-1401218882-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7e60a9f94cc8403eaa79a85a1a17160b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a0a76279-3c11-4bef-b124-2a2ee13fa377", "external-id": "nsx-vlan-transportzone-738", "segmentation_id": 738, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap67e6934e-98", "ovs_interfaceid": "67e6934e-98a0-4355-80f1-869c87d01ea0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1948.421388] env[63279]: DEBUG nova.network.neutron [None req-d128cf11-2d83-47b1-b6c4-be749dcd84f8 tempest-AttachInterfacesUnderV243Test-18748603 tempest-AttachInterfacesUnderV243Test-18748603-project-member] [instance: fcc5a636-554f-424e-a604-a8e7bd7cf574] Updating instance_info_cache with network_info: [{"id": "e1063b89-be88-474a-a2ec-b61eb11cf9fe", "address": "fa:16:3e:b1:26:30", "network": {"id": "50f30894-1239-497e-9f70-afa5b0c429ea", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-85780566-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.252", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dfdf69a97cf54d5cb8c4fb1c59b6a5d0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f2e45023-22b5-458b-826e-9b7eb69ba028", "external-id": "nsx-vlan-transportzone-614", "segmentation_id": 614, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape1063b89-be", "ovs_interfaceid": "e1063b89-be88-474a-a2ec-b61eb11cf9fe", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1948.484048] env[63279]: DEBUG nova.network.neutron [None req-a527ddef-d877-4b24-a0b1-c60958f9b4c7 tempest-ServerAddressesNegativeTestJSON-377386487 tempest-ServerAddressesNegativeTestJSON-377386487-project-member] [instance: c287072d-0ce9-4075-8895-0f64326ac303] 
Instance cache missing network info. {{(pid=63279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1948.514541] env[63279]: DEBUG oslo_vmware.rw_handles [None req-e9eb7f67-3e31-4b74-ba7f-946bf6f2c529 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/520a359c-b81b-0031-5fef-ff578add4e25/disk-0.vmdk. {{(pid=63279) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1948.515623] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04c31125-2254-42da-993b-0d7748e789e0 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1948.522189] env[63279]: DEBUG oslo_vmware.rw_handles [None req-e9eb7f67-3e31-4b74-ba7f-946bf6f2c529 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/520a359c-b81b-0031-5fef-ff578add4e25/disk-0.vmdk is in state: ready. {{(pid=63279) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1948.522388] env[63279]: ERROR oslo_vmware.rw_handles [None req-e9eb7f67-3e31-4b74-ba7f-946bf6f2c529 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/520a359c-b81b-0031-5fef-ff578add4e25/disk-0.vmdk due to incomplete transfer. [ 1948.522675] env[63279]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-4a21c8d0-defe-408b-a98b-d1caa6561d1a {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1948.531511] env[63279]: DEBUG oslo_vmware.rw_handles [None req-e9eb7f67-3e31-4b74-ba7f-946bf6f2c529 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/520a359c-b81b-0031-5fef-ff578add4e25/disk-0.vmdk. 
{{(pid=63279) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1948.531790] env[63279]: DEBUG nova.virt.vmwareapi.images [None req-e9eb7f67-3e31-4b74-ba7f-946bf6f2c529 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] [instance: 5bb445d3-1b12-4a1b-ad2a-cbc929b13aee] Uploaded image f5563867-31c5-4766-b374-f1fdf29bdd84 to the Glance image server {{(pid=63279) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1948.534654] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-e9eb7f67-3e31-4b74-ba7f-946bf6f2c529 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] [instance: 5bb445d3-1b12-4a1b-ad2a-cbc929b13aee] Destroying the VM {{(pid=63279) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1948.535260] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-1a93a32e-4d5c-4a30-973e-02ba1802edd5 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1948.541348] env[63279]: DEBUG oslo_vmware.api [None req-e9eb7f67-3e31-4b74-ba7f-946bf6f2c529 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] Waiting for the task: (returnval){ [ 1948.541348] env[63279]: value = "task-2087064" [ 1948.541348] env[63279]: _type = "Task" [ 1948.541348] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1948.546579] env[63279]: DEBUG nova.compute.manager [None req-983fb64c-2d77-4322-9ea8-58ba6eceb42b tempest-DeleteServersAdminTestJSON-778262859 tempest-DeleteServersAdminTestJSON-778262859-project-member] [instance: 81103d53-99fe-4d1a-816f-7685c59c80ee] Start destroying the instance on the hypervisor. {{(pid=63279) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1948.547287] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-983fb64c-2d77-4322-9ea8-58ba6eceb42b tempest-DeleteServersAdminTestJSON-778262859 tempest-DeleteServersAdminTestJSON-778262859-project-member] [instance: 81103d53-99fe-4d1a-816f-7685c59c80ee] Destroying instance {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1948.547779] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc4fab8f-88af-4ca4-8f2b-90d8ab6f0222 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1948.562495] env[63279]: DEBUG oslo_vmware.api [None req-e9eb7f67-3e31-4b74-ba7f-946bf6f2c529 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] Task: {'id': task-2087064, 'name': Destroy_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1948.562977] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-983fb64c-2d77-4322-9ea8-58ba6eceb42b tempest-DeleteServersAdminTestJSON-778262859 tempest-DeleteServersAdminTestJSON-778262859-project-member] [instance: 81103d53-99fe-4d1a-816f-7685c59c80ee] Powering off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1948.563496] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d25ae4bf-e661-476d-8867-4c7d290ffe8f {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1948.566481] env[63279]: DEBUG oslo_concurrency.lockutils [None req-5b55e395-c1a6-4f2a-ad2f-483781b8bb67 tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] Acquiring lock "1a55008a-4d8c-403d-a8f4-966aa7346f4c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1948.566798] env[63279]: DEBUG oslo_concurrency.lockutils [None req-5b55e395-c1a6-4f2a-ad2f-483781b8bb67 tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] Lock "1a55008a-4d8c-403d-a8f4-966aa7346f4c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1948.567099] env[63279]: DEBUG oslo_concurrency.lockutils [None req-5b55e395-c1a6-4f2a-ad2f-483781b8bb67 tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] Acquiring lock "1a55008a-4d8c-403d-a8f4-966aa7346f4c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1948.567383] env[63279]: DEBUG oslo_concurrency.lockutils [None req-5b55e395-c1a6-4f2a-ad2f-483781b8bb67 tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] Lock "1a55008a-4d8c-403d-a8f4-966aa7346f4c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1948.567657] env[63279]: DEBUG oslo_concurrency.lockutils [None req-5b55e395-c1a6-4f2a-ad2f-483781b8bb67 tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] Lock "1a55008a-4d8c-403d-a8f4-966aa7346f4c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1948.573031] env[63279]: DEBUG oslo_vmware.api [None req-983fb64c-2d77-4322-9ea8-58ba6eceb42b tempest-DeleteServersAdminTestJSON-778262859 tempest-DeleteServersAdminTestJSON-778262859-project-member] Waiting for the task: (returnval){ [ 1948.573031] env[63279]: value = "task-2087065" [ 1948.573031] env[63279]: _type = "Task" [ 1948.573031] env[63279]: } to complete. 
{{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1948.573623] env[63279]: INFO nova.compute.manager [None req-5b55e395-c1a6-4f2a-ad2f-483781b8bb67 tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] [instance: 1a55008a-4d8c-403d-a8f4-966aa7346f4c] Terminating instance [ 1948.590412] env[63279]: DEBUG oslo_vmware.api [None req-983fb64c-2d77-4322-9ea8-58ba6eceb42b tempest-DeleteServersAdminTestJSON-778262859 tempest-DeleteServersAdminTestJSON-778262859-project-member] Task: {'id': task-2087065, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1948.674722] env[63279]: DEBUG oslo_vmware.api [None req-1e3daa27-53b9-4834-adeb-4cde61720229 tempest-ServersNegativeTestMultiTenantJSON-98060902 tempest-ServersNegativeTestMultiTenantJSON-98060902-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]5203adc5-ded1-02f0-cfcc-3371e633772b, 'name': SearchDatastore_Task, 'duration_secs': 0.010167} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1948.674722] env[63279]: DEBUG oslo_concurrency.lockutils [None req-1e3daa27-53b9-4834-adeb-4cde61720229 tempest-ServersNegativeTestMultiTenantJSON-98060902 tempest-ServersNegativeTestMultiTenantJSON-98060902-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1948.674722] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-1e3daa27-53b9-4834-adeb-4cde61720229 tempest-ServersNegativeTestMultiTenantJSON-98060902 tempest-ServersNegativeTestMultiTenantJSON-98060902-project-member] [instance: cf1b70af-335d-404b-bb4f-fe082dd6f450] Processing image 30887889-e45b-4f67-8b3c-16216e594a90 {{(pid=63279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1948.674722] env[63279]: DEBUG oslo_concurrency.lockutils [None req-1e3daa27-53b9-4834-adeb-4cde61720229 tempest-ServersNegativeTestMultiTenantJSON-98060902 tempest-ServersNegativeTestMultiTenantJSON-98060902-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1948.674974] env[63279]: DEBUG oslo_concurrency.lockutils [None req-1e3daa27-53b9-4834-adeb-4cde61720229 tempest-ServersNegativeTestMultiTenantJSON-98060902 tempest-ServersNegativeTestMultiTenantJSON-98060902-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1948.675169] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-1e3daa27-53b9-4834-adeb-4cde61720229 tempest-ServersNegativeTestMultiTenantJSON-98060902 tempest-ServersNegativeTestMultiTenantJSON-98060902-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1948.675534] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e588a81f-ad1b-440a-b152-8a70025b41ed 
{{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1948.690262] env[63279]: DEBUG nova.compute.utils [None req-f3600cbc-9fef-45fa-b2de-6e5d0ca54cdb tempest-ServerRescueTestJSONUnderV235-1989684258 tempest-ServerRescueTestJSONUnderV235-1989684258-project-member] Using /dev/sd instead of None {{(pid=63279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1948.692071] env[63279]: DEBUG nova.compute.manager [None req-f3600cbc-9fef-45fa-b2de-6e5d0ca54cdb tempest-ServerRescueTestJSONUnderV235-1989684258 tempest-ServerRescueTestJSONUnderV235-1989684258-project-member] [instance: 044335c7-ce3b-4b4a-b1dc-8b9acec538b4] Allocating IP information in the background. {{(pid=63279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1948.692290] env[63279]: DEBUG nova.network.neutron [None req-f3600cbc-9fef-45fa-b2de-6e5d0ca54cdb tempest-ServerRescueTestJSONUnderV235-1989684258 tempest-ServerRescueTestJSONUnderV235-1989684258-project-member] [instance: 044335c7-ce3b-4b4a-b1dc-8b9acec538b4] allocate_for_instance() {{(pid=63279) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1948.703797] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-1e3daa27-53b9-4834-adeb-4cde61720229 tempest-ServersNegativeTestMultiTenantJSON-98060902 tempest-ServersNegativeTestMultiTenantJSON-98060902-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1948.704065] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-1e3daa27-53b9-4834-adeb-4cde61720229 tempest-ServersNegativeTestMultiTenantJSON-98060902 tempest-ServersNegativeTestMultiTenantJSON-98060902-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1948.705544] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e5c6fb58-ccb3-48d0-9b75-b577c08df2ca {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1948.710595] env[63279]: DEBUG nova.network.neutron [None req-a527ddef-d877-4b24-a0b1-c60958f9b4c7 tempest-ServerAddressesNegativeTestJSON-377386487 tempest-ServerAddressesNegativeTestJSON-377386487-project-member] [instance: c287072d-0ce9-4075-8895-0f64326ac303] Updating instance_info_cache with network_info: [{"id": "b6a4dca7-a0ba-4270-8c1f-2afac4d449bb", "address": "fa:16:3e:78:ce:e6", "network": {"id": "f1132cd7-bc01-4f0f-a7d3-e9d52254864b", "bridge": "br-int", "label": "tempest-ServerAddressesNegativeTestJSON-852485082-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7a6f286a404d44aab6d17264b25f49f0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "459b8c74-0aa6-42b6-996a-42b1c5d7e5c6", "external-id": "nsx-vlan-transportzone-467", "segmentation_id": 467, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb6a4dca7-a0", "ovs_interfaceid": "b6a4dca7-a0ba-4270-8c1f-2afac4d449bb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1948.716171] env[63279]: DEBUG oslo_vmware.api [None req-1e3daa27-53b9-4834-adeb-4cde61720229 tempest-ServersNegativeTestMultiTenantJSON-98060902 tempest-ServersNegativeTestMultiTenantJSON-98060902-project-member] Waiting for the task: (returnval){ [ 1948.716171] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52a4b21b-fcf7-0cf2-8238-745225ffad25" [ 1948.716171] env[63279]: _type = "Task" [ 1948.716171] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1948.730202] env[63279]: DEBUG oslo_vmware.api [None req-1e3daa27-53b9-4834-adeb-4cde61720229 tempest-ServersNegativeTestMultiTenantJSON-98060902 tempest-ServersNegativeTestMultiTenantJSON-98060902-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52a4b21b-fcf7-0cf2-8238-745225ffad25, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1948.872126] env[63279]: DEBUG nova.policy [None req-f3600cbc-9fef-45fa-b2de-6e5d0ca54cdb tempest-ServerRescueTestJSONUnderV235-1989684258 tempest-ServerRescueTestJSONUnderV235-1989684258-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a5a0f7d7b9b54c908fb899f54f4c0de4', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2ea8d4af9e9b4df8930ef52450936dcf', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63279) authorize /opt/stack/nova/nova/policy.py:192}} [ 1948.898597] env[63279]: INFO nova.compute.manager [None req-4a8933e7-208a-443d-9249-2440f3ab60b6 tempest-ServerMetadataNegativeTestJSON-1632547526 tempest-ServerMetadataNegativeTestJSON-1632547526-project-member] [instance: e04f06de-da6a-4562-a50a-ff16bf3a006e] Took 48.64 seconds to build instance. [ 1948.910097] env[63279]: DEBUG oslo_vmware.api [None req-54fd8829-eaaf-408f-9e32-c8514ef3c821 tempest-InstanceActionsTestJSON-1583999850 tempest-InstanceActionsTestJSON-1583999850-project-member] Task: {'id': task-2087063, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.278575} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1948.914674] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-54fd8829-eaaf-408f-9e32-c8514ef3c821 tempest-InstanceActionsTestJSON-1583999850 tempest-InstanceActionsTestJSON-1583999850-project-member] Deleted the datastore file {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1948.914674] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-54fd8829-eaaf-408f-9e32-c8514ef3c821 tempest-InstanceActionsTestJSON-1583999850 tempest-InstanceActionsTestJSON-1583999850-project-member] [instance: de543869-8ab1-40ed-8f6d-dc506c257843] Deleted contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1948.914674] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-54fd8829-eaaf-408f-9e32-c8514ef3c821 tempest-InstanceActionsTestJSON-1583999850 tempest-InstanceActionsTestJSON-1583999850-project-member] [instance: de543869-8ab1-40ed-8f6d-dc506c257843] Instance destroyed {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1948.914674] env[63279]: INFO nova.compute.manager [None req-54fd8829-eaaf-408f-9e32-c8514ef3c821 tempest-InstanceActionsTestJSON-1583999850 tempest-InstanceActionsTestJSON-1583999850-project-member] [instance: de543869-8ab1-40ed-8f6d-dc506c257843] Took 2.09 seconds to destroy the instance on the hypervisor. [ 1948.914674] env[63279]: DEBUG oslo.service.loopingcall [None req-54fd8829-eaaf-408f-9e32-c8514ef3c821 tempest-InstanceActionsTestJSON-1583999850 tempest-InstanceActionsTestJSON-1583999850-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
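The "Waiting for function ..._deallocate_network_with_retries to return" line above comes from oslo.service's looping-call machinery, which keeps re-invoking a wrapper until it signals completion. A hedged sketch of that retry-until-done pattern follows, using FixedIntervalLoopingCall for simplicity; Nova's actual wrapper may use a different looping-call variant, and deallocate_network() here is a simulated stand-in, not the Neutron call.

# Illustrative retry loop in the style of the looping-call wait recorded above.
from oslo_service import loopingcall

attempts = {"n": 0}  # simple counter so the demo can simulate a transient failure

def deallocate_network():
    if attempts["n"] < 2:
        raise RuntimeError("transient Neutron failure")  # simulated

def _deallocate_network_with_retries():
    attempts["n"] += 1
    try:
        deallocate_network()
    except Exception:
        if attempts["n"] < 3:
            return  # returning lets the looping call try again on the next tick
        raise       # give up after a few attempts; the caller sees the error
    # Success: stop the loop and hand a value back to .wait().
    raise loopingcall.LoopingCallDone(True)

timer = loopingcall.FixedIntervalLoopingCall(_deallocate_network_with_retries)
print(timer.start(interval=1).wait())  # blocks until LoopingCallDone -> True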
{{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1948.915268] env[63279]: DEBUG nova.compute.manager [-] [instance: de543869-8ab1-40ed-8f6d-dc506c257843] Deallocating network for instance {{(pid=63279) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1948.915268] env[63279]: DEBUG nova.network.neutron [-] [instance: de543869-8ab1-40ed-8f6d-dc506c257843] deallocate_for_instance() {{(pid=63279) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1948.920036] env[63279]: DEBUG oslo_concurrency.lockutils [req-e52ddeaa-4a52-4a80-b533-e5666af86b9a req-a6e5c3ab-9dad-45d3-bf24-57f180a50f92 service nova] Releasing lock "refresh_cache-cf1b70af-335d-404b-bb4f-fe082dd6f450" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1948.926711] env[63279]: DEBUG oslo_concurrency.lockutils [None req-d128cf11-2d83-47b1-b6c4-be749dcd84f8 tempest-AttachInterfacesUnderV243Test-18748603 tempest-AttachInterfacesUnderV243Test-18748603-project-member] Releasing lock "refresh_cache-fcc5a636-554f-424e-a604-a8e7bd7cf574" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1948.926949] env[63279]: DEBUG nova.compute.manager [None req-d128cf11-2d83-47b1-b6c4-be749dcd84f8 tempest-AttachInterfacesUnderV243Test-18748603 tempest-AttachInterfacesUnderV243Test-18748603-project-member] [instance: fcc5a636-554f-424e-a604-a8e7bd7cf574] Inject network info {{(pid=63279) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7737}} [ 1948.927232] env[63279]: DEBUG nova.compute.manager [None req-d128cf11-2d83-47b1-b6c4-be749dcd84f8 tempest-AttachInterfacesUnderV243Test-18748603 tempest-AttachInterfacesUnderV243Test-18748603-project-member] [instance: fcc5a636-554f-424e-a604-a8e7bd7cf574] network_info to inject: |[{"id": "e1063b89-be88-474a-a2ec-b61eb11cf9fe", "address": "fa:16:3e:b1:26:30", "network": {"id": "50f30894-1239-497e-9f70-afa5b0c429ea", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-85780566-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.252", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dfdf69a97cf54d5cb8c4fb1c59b6a5d0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f2e45023-22b5-458b-826e-9b7eb69ba028", "external-id": "nsx-vlan-transportzone-614", "segmentation_id": 614, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape1063b89-be", "ovs_interfaceid": "e1063b89-be88-474a-a2ec-b61eb11cf9fe", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63279) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7738}} [ 1948.932295] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-d128cf11-2d83-47b1-b6c4-be749dcd84f8 tempest-AttachInterfacesUnderV243Test-18748603 tempest-AttachInterfacesUnderV243Test-18748603-project-member] [instance: fcc5a636-554f-424e-a604-a8e7bd7cf574] Reconfiguring VM instance to set 
the machine id {{(pid=63279) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1796}} [ 1948.935824] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c14fe844-650d-42d5-90f2-762277c08cc3 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1948.953731] env[63279]: DEBUG oslo_vmware.api [None req-d128cf11-2d83-47b1-b6c4-be749dcd84f8 tempest-AttachInterfacesUnderV243Test-18748603 tempest-AttachInterfacesUnderV243Test-18748603-project-member] Waiting for the task: (returnval){ [ 1948.953731] env[63279]: value = "task-2087067" [ 1948.953731] env[63279]: _type = "Task" [ 1948.953731] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1948.969348] env[63279]: DEBUG oslo_vmware.api [None req-d128cf11-2d83-47b1-b6c4-be749dcd84f8 tempest-AttachInterfacesUnderV243Test-18748603 tempest-AttachInterfacesUnderV243Test-18748603-project-member] Task: {'id': task-2087067, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1949.052524] env[63279]: DEBUG oslo_vmware.api [None req-e9eb7f67-3e31-4b74-ba7f-946bf6f2c529 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] Task: {'id': task-2087064, 'name': Destroy_Task} progress is 33%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1949.084864] env[63279]: DEBUG nova.compute.manager [None req-5b55e395-c1a6-4f2a-ad2f-483781b8bb67 tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] [instance: 1a55008a-4d8c-403d-a8f4-966aa7346f4c] Start destroying the instance on the hypervisor. {{(pid=63279) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1949.085106] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-5b55e395-c1a6-4f2a-ad2f-483781b8bb67 tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] [instance: 1a55008a-4d8c-403d-a8f4-966aa7346f4c] Destroying instance {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1949.086416] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18572f42-c7e3-4e51-bb67-096a97b65c65 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1949.096677] env[63279]: DEBUG oslo_vmware.api [None req-983fb64c-2d77-4322-9ea8-58ba6eceb42b tempest-DeleteServersAdminTestJSON-778262859 tempest-DeleteServersAdminTestJSON-778262859-project-member] Task: {'id': task-2087065, 'name': PowerOffVM_Task, 'duration_secs': 0.323259} completed successfully. 
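The recurring "Task: {'id': task-..., 'name': ...} progress is N%" lines come from oslo.vmware's task poller, which callers reach through VMwareAPISession.wait_for_task(). A sketch under stated assumptions: `session` is an already-authenticated oslo.vmware VMwareAPISession (as created near the start of this log) and `task_ref` is the value returned by an invocation such as VirtualMachine.ReconfigVM_Task; only wait_for_task() and VimException are the real API, the wrapper function is illustrative.

# Illustrative wrapper around oslo.vmware task polling (not Nova code).
from oslo_vmware import exceptions as vexc

def wait_for_vcenter_task(session, task_ref):
    try:
        # Polls vCenter until the task leaves the running state; the
        # "progress is N%" debug lines above are emitted by this poller.
        return session.wait_for_task(task_ref)
    except vexc.VimException:
        # Raised when vCenter reports the task as failed or errored out.
        print("task %s did not complete successfully" % task_ref)
        raise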
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1949.097677] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-5b55e395-c1a6-4f2a-ad2f-483781b8bb67 tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] [instance: 1a55008a-4d8c-403d-a8f4-966aa7346f4c] Powering off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1949.097677] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-983fb64c-2d77-4322-9ea8-58ba6eceb42b tempest-DeleteServersAdminTestJSON-778262859 tempest-DeleteServersAdminTestJSON-778262859-project-member] [instance: 81103d53-99fe-4d1a-816f-7685c59c80ee] Powered off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1949.097834] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-983fb64c-2d77-4322-9ea8-58ba6eceb42b tempest-DeleteServersAdminTestJSON-778262859 tempest-DeleteServersAdminTestJSON-778262859-project-member] [instance: 81103d53-99fe-4d1a-816f-7685c59c80ee] Unregistering the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1949.098466] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e652b4c1-842b-4c89-9094-70b153df3269 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1949.099569] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-cebd845b-338e-4d5d-abbd-0b8d82893ba6 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1949.106591] env[63279]: DEBUG oslo_vmware.api [None req-5b55e395-c1a6-4f2a-ad2f-483781b8bb67 tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] Waiting for the task: (returnval){ [ 1949.106591] env[63279]: value = "task-2087068" [ 1949.106591] env[63279]: _type = "Task" [ 1949.106591] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1949.117259] env[63279]: DEBUG oslo_vmware.api [None req-5b55e395-c1a6-4f2a-ad2f-483781b8bb67 tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] Task: {'id': task-2087068, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1949.201809] env[63279]: DEBUG nova.compute.manager [None req-f3600cbc-9fef-45fa-b2de-6e5d0ca54cdb tempest-ServerRescueTestJSONUnderV235-1989684258 tempest-ServerRescueTestJSONUnderV235-1989684258-project-member] [instance: 044335c7-ce3b-4b4a-b1dc-8b9acec538b4] Start building block device mappings for instance. 
{{(pid=63279) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1949.217691] env[63279]: DEBUG oslo_concurrency.lockutils [None req-a527ddef-d877-4b24-a0b1-c60958f9b4c7 tempest-ServerAddressesNegativeTestJSON-377386487 tempest-ServerAddressesNegativeTestJSON-377386487-project-member] Releasing lock "refresh_cache-c287072d-0ce9-4075-8895-0f64326ac303" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1949.218188] env[63279]: DEBUG nova.compute.manager [None req-a527ddef-d877-4b24-a0b1-c60958f9b4c7 tempest-ServerAddressesNegativeTestJSON-377386487 tempest-ServerAddressesNegativeTestJSON-377386487-project-member] [instance: c287072d-0ce9-4075-8895-0f64326ac303] Instance network_info: |[{"id": "b6a4dca7-a0ba-4270-8c1f-2afac4d449bb", "address": "fa:16:3e:78:ce:e6", "network": {"id": "f1132cd7-bc01-4f0f-a7d3-e9d52254864b", "bridge": "br-int", "label": "tempest-ServerAddressesNegativeTestJSON-852485082-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7a6f286a404d44aab6d17264b25f49f0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "459b8c74-0aa6-42b6-996a-42b1c5d7e5c6", "external-id": "nsx-vlan-transportzone-467", "segmentation_id": 467, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb6a4dca7-a0", "ovs_interfaceid": "b6a4dca7-a0ba-4270-8c1f-2afac4d449bb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1949.221030] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-a527ddef-d877-4b24-a0b1-c60958f9b4c7 tempest-ServerAddressesNegativeTestJSON-377386487 tempest-ServerAddressesNegativeTestJSON-377386487-project-member] [instance: c287072d-0ce9-4075-8895-0f64326ac303] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:78:ce:e6', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '459b8c74-0aa6-42b6-996a-42b1c5d7e5c6', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b6a4dca7-a0ba-4270-8c1f-2afac4d449bb', 'vif_model': 'vmxnet3'}] {{(pid=63279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1949.229171] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-a527ddef-d877-4b24-a0b1-c60958f9b4c7 tempest-ServerAddressesNegativeTestJSON-377386487 tempest-ServerAddressesNegativeTestJSON-377386487-project-member] Creating folder: Project (7a6f286a404d44aab6d17264b25f49f0). Parent ref: group-v427491. 
{{(pid=63279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1949.238844] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9bf383d2-e5a2-4197-8512-800e2d5cc61a {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1949.251076] env[63279]: DEBUG oslo_vmware.api [None req-1e3daa27-53b9-4834-adeb-4cde61720229 tempest-ServersNegativeTestMultiTenantJSON-98060902 tempest-ServersNegativeTestMultiTenantJSON-98060902-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52a4b21b-fcf7-0cf2-8238-745225ffad25, 'name': SearchDatastore_Task, 'duration_secs': 0.018885} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1949.251514] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a6ef50c0-3cc6-493b-a44f-a4445a53ebaf {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1949.255035] env[63279]: INFO nova.virt.vmwareapi.vm_util [None req-a527ddef-d877-4b24-a0b1-c60958f9b4c7 tempest-ServerAddressesNegativeTestJSON-377386487 tempest-ServerAddressesNegativeTestJSON-377386487-project-member] Created folder: Project (7a6f286a404d44aab6d17264b25f49f0) in parent group-v427491. [ 1949.255169] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-a527ddef-d877-4b24-a0b1-c60958f9b4c7 tempest-ServerAddressesNegativeTestJSON-377386487 tempest-ServerAddressesNegativeTestJSON-377386487-project-member] Creating folder: Instances. Parent ref: group-v427607. {{(pid=63279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1949.261185] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-34912a3d-1265-46f3-be54-7307f69c3a8b {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1949.271433] env[63279]: DEBUG oslo_vmware.api [None req-1e3daa27-53b9-4834-adeb-4cde61720229 tempest-ServersNegativeTestMultiTenantJSON-98060902 tempest-ServersNegativeTestMultiTenantJSON-98060902-project-member] Waiting for the task: (returnval){ [ 1949.271433] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]528f5ef4-d187-8180-fc3c-04f3ba733a39" [ 1949.271433] env[63279]: _type = "Task" [ 1949.271433] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1949.279264] env[63279]: INFO nova.virt.vmwareapi.vm_util [None req-a527ddef-d877-4b24-a0b1-c60958f9b4c7 tempest-ServerAddressesNegativeTestJSON-377386487 tempest-ServerAddressesNegativeTestJSON-377386487-project-member] Created folder: Instances in parent group-v427607. [ 1949.279334] env[63279]: DEBUG oslo.service.loopingcall [None req-a527ddef-d877-4b24-a0b1-c60958f9b4c7 tempest-ServerAddressesNegativeTestJSON-377386487 tempest-ServerAddressesNegativeTestJSON-377386487-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1949.279898] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c287072d-0ce9-4075-8895-0f64326ac303] Creating VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1949.280146] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c342a1d6-4b1d-4df6-8fde-ef019e80501f {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1949.301843] env[63279]: DEBUG nova.network.neutron [None req-f3600cbc-9fef-45fa-b2de-6e5d0ca54cdb tempest-ServerRescueTestJSONUnderV235-1989684258 tempest-ServerRescueTestJSONUnderV235-1989684258-project-member] [instance: 044335c7-ce3b-4b4a-b1dc-8b9acec538b4] Successfully created port: 8aa70697-1a19-4867-93bc-f264c16829ce {{(pid=63279) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1949.306072] env[63279]: DEBUG oslo_vmware.api [None req-1e3daa27-53b9-4834-adeb-4cde61720229 tempest-ServersNegativeTestMultiTenantJSON-98060902 tempest-ServersNegativeTestMultiTenantJSON-98060902-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]528f5ef4-d187-8180-fc3c-04f3ba733a39, 'name': SearchDatastore_Task, 'duration_secs': 0.01101} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1949.307328] env[63279]: DEBUG oslo_concurrency.lockutils [None req-1e3daa27-53b9-4834-adeb-4cde61720229 tempest-ServersNegativeTestMultiTenantJSON-98060902 tempest-ServersNegativeTestMultiTenantJSON-98060902-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1949.308566] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-1e3daa27-53b9-4834-adeb-4cde61720229 tempest-ServersNegativeTestMultiTenantJSON-98060902 tempest-ServersNegativeTestMultiTenantJSON-98060902-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] cf1b70af-335d-404b-bb4f-fe082dd6f450/cf1b70af-335d-404b-bb4f-fe082dd6f450.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1949.308915] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-32d7edeb-a83e-406a-839e-a71184a66c4c {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1949.313247] env[63279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1949.313247] env[63279]: value = "task-2087072" [ 1949.313247] env[63279]: _type = "Task" [ 1949.313247] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1949.321933] env[63279]: DEBUG oslo_vmware.api [None req-1e3daa27-53b9-4834-adeb-4cde61720229 tempest-ServersNegativeTestMultiTenantJSON-98060902 tempest-ServersNegativeTestMultiTenantJSON-98060902-project-member] Waiting for the task: (returnval){ [ 1949.321933] env[63279]: value = "task-2087073" [ 1949.321933] env[63279]: _type = "Task" [ 1949.321933] env[63279]: } to complete. 
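The CopyVirtualDisk_Task above copies the cached Glance image from the shared devstack-image-cache_base folder into a per-instance folder on the same datastore. A pure-Python illustration of those "[datastore] folder/file" path strings follows; ds_path() is a made-up helper, not Nova's ds_util/ds_obj code, and the identifiers are copied from the log.

# Reconstructs the source/destination datastore paths seen in the copy step.
def ds_path(datastore, *parts):
    # Formats "[datastore1] devstack-image-cache_base/<image>/<image>.vmdk"
    return "[%s] %s" % (datastore, "/".join(parts))

image_id = "30887889-e45b-4f67-8b3c-16216e594a90"      # from the log
instance_id = "cf1b70af-335d-404b-bb4f-fe082dd6f450"   # from the log

source = ds_path("datastore1", "devstack-image-cache_base", image_id,
                 image_id + ".vmdk")
dest = ds_path("datastore1", instance_id, instance_id + ".vmdk")
print(source)  # cached copy of the image
print(dest)    # per-instance root disk produced by CopyVirtualDisk_Task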
{{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1949.330094] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087072, 'name': CreateVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1949.336734] env[63279]: DEBUG oslo_vmware.api [None req-1e3daa27-53b9-4834-adeb-4cde61720229 tempest-ServersNegativeTestMultiTenantJSON-98060902 tempest-ServersNegativeTestMultiTenantJSON-98060902-project-member] Task: {'id': task-2087073, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1949.349528] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-983fb64c-2d77-4322-9ea8-58ba6eceb42b tempest-DeleteServersAdminTestJSON-778262859 tempest-DeleteServersAdminTestJSON-778262859-project-member] [instance: 81103d53-99fe-4d1a-816f-7685c59c80ee] Unregistered the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1949.349528] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-983fb64c-2d77-4322-9ea8-58ba6eceb42b tempest-DeleteServersAdminTestJSON-778262859 tempest-DeleteServersAdminTestJSON-778262859-project-member] [instance: 81103d53-99fe-4d1a-816f-7685c59c80ee] Deleting contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1949.349528] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-983fb64c-2d77-4322-9ea8-58ba6eceb42b tempest-DeleteServersAdminTestJSON-778262859 tempest-DeleteServersAdminTestJSON-778262859-project-member] Deleting the datastore file [datastore1] 81103d53-99fe-4d1a-816f-7685c59c80ee {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1949.349528] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1d63594d-c390-4324-9be4-04d897f1ed54 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1949.360341] env[63279]: DEBUG nova.compute.manager [req-89ffad74-c20e-4945-9118-59bc4d95574f req-2b56e2a3-90bf-4dce-a5e5-3a9cda03fbf7 service nova] [instance: fcc5a636-554f-424e-a604-a8e7bd7cf574] Received event network-changed-e1063b89-be88-474a-a2ec-b61eb11cf9fe {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 1949.361035] env[63279]: DEBUG nova.compute.manager [req-89ffad74-c20e-4945-9118-59bc4d95574f req-2b56e2a3-90bf-4dce-a5e5-3a9cda03fbf7 service nova] [instance: fcc5a636-554f-424e-a604-a8e7bd7cf574] Refreshing instance network info cache due to event network-changed-e1063b89-be88-474a-a2ec-b61eb11cf9fe. 
{{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11655}} [ 1949.361035] env[63279]: DEBUG oslo_concurrency.lockutils [req-89ffad74-c20e-4945-9118-59bc4d95574f req-2b56e2a3-90bf-4dce-a5e5-3a9cda03fbf7 service nova] Acquiring lock "refresh_cache-fcc5a636-554f-424e-a604-a8e7bd7cf574" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1949.361035] env[63279]: DEBUG oslo_concurrency.lockutils [req-89ffad74-c20e-4945-9118-59bc4d95574f req-2b56e2a3-90bf-4dce-a5e5-3a9cda03fbf7 service nova] Acquired lock "refresh_cache-fcc5a636-554f-424e-a604-a8e7bd7cf574" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1949.361256] env[63279]: DEBUG nova.network.neutron [req-89ffad74-c20e-4945-9118-59bc4d95574f req-2b56e2a3-90bf-4dce-a5e5-3a9cda03fbf7 service nova] [instance: fcc5a636-554f-424e-a604-a8e7bd7cf574] Refreshing network info cache for port e1063b89-be88-474a-a2ec-b61eb11cf9fe {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1949.365350] env[63279]: DEBUG oslo_vmware.api [None req-983fb64c-2d77-4322-9ea8-58ba6eceb42b tempest-DeleteServersAdminTestJSON-778262859 tempest-DeleteServersAdminTestJSON-778262859-project-member] Waiting for the task: (returnval){ [ 1949.365350] env[63279]: value = "task-2087074" [ 1949.365350] env[63279]: _type = "Task" [ 1949.365350] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1949.380762] env[63279]: DEBUG oslo_vmware.api [None req-983fb64c-2d77-4322-9ea8-58ba6eceb42b tempest-DeleteServersAdminTestJSON-778262859 tempest-DeleteServersAdminTestJSON-778262859-project-member] Task: {'id': task-2087074, 'name': DeleteDatastoreFile_Task} progress is 0%. 
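The network-changed event for port e1063b89-... above is handled while holding a "refresh_cache-<instance uuid>" named lock, so the cached network_info cannot be read or rewritten concurrently (for example by the _inject_network_info call earlier in the log). A sketch of that pattern follows; lockutils is the real helper, while refresh_network_info_cache() is a hypothetical stand-in for the Neutron lookup that repopulates the cache.

# Sketch of the "refresh_cache-<uuid>" locking pattern recorded above.
from oslo_concurrency import lockutils

def refresh_network_info_cache(instance_uuid, port_id):
    print("refreshing cached network_info for %s (port %s)"
          % (instance_uuid, port_id))

def handle_network_changed(instance_uuid, port_id):
    lock_name = "refresh_cache-%s" % instance_uuid
    with lockutils.lock(lock_name):
        # While this lock is held, other workers cannot touch the cache entry.
        refresh_network_info_cache(instance_uuid, port_id)

handle_network_changed("fcc5a636-554f-424e-a604-a8e7bd7cf574",
                       "e1063b89-be88-474a-a2ec-b61eb11cf9fe")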
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1949.405055] env[63279]: DEBUG oslo_concurrency.lockutils [None req-4a8933e7-208a-443d-9249-2440f3ab60b6 tempest-ServerMetadataNegativeTestJSON-1632547526 tempest-ServerMetadataNegativeTestJSON-1632547526-project-member] Lock "e04f06de-da6a-4562-a50a-ff16bf3a006e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 94.353s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1949.417164] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33d9ebd1-e08d-45f6-9ae1-c9f2800d4a09 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1949.426558] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dcac7b05-773b-4877-8efb-d8b9f32c401a {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1949.467809] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-268a6325-acb7-4807-87b4-9e001b31dd85 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1949.476269] env[63279]: DEBUG oslo_vmware.api [None req-d128cf11-2d83-47b1-b6c4-be749dcd84f8 tempest-AttachInterfacesUnderV243Test-18748603 tempest-AttachInterfacesUnderV243Test-18748603-project-member] Task: {'id': task-2087067, 'name': ReconfigVM_Task, 'duration_secs': 0.14461} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1949.478574] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-d128cf11-2d83-47b1-b6c4-be749dcd84f8 tempest-AttachInterfacesUnderV243Test-18748603 tempest-AttachInterfacesUnderV243Test-18748603-project-member] [instance: fcc5a636-554f-424e-a604-a8e7bd7cf574] Reconfigured VM instance to set the machine id {{(pid=63279) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1799}} [ 1949.482996] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c946112-d72c-47ff-92de-18bb04c2657e {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1949.497967] env[63279]: DEBUG nova.compute.provider_tree [None req-25a71d01-b823-4a97-991b-a30e1ca6a359 tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Inventory has not changed in ProviderTree for provider: 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1949.529565] env[63279]: DEBUG oslo_concurrency.lockutils [None req-fef68aa8-4469-4e71-b0f4-6afc3e4409aa tempest-ServersTestManualDisk-725019117 tempest-ServersTestManualDisk-725019117-project-member] Acquiring lock "ecedded1-7169-49a4-8a9e-2fe4086db986" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1949.529977] env[63279]: DEBUG oslo_concurrency.lockutils [None req-fef68aa8-4469-4e71-b0f4-6afc3e4409aa tempest-ServersTestManualDisk-725019117 tempest-ServersTestManualDisk-725019117-project-member] Lock 
"ecedded1-7169-49a4-8a9e-2fe4086db986" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1949.530287] env[63279]: DEBUG oslo_concurrency.lockutils [None req-fef68aa8-4469-4e71-b0f4-6afc3e4409aa tempest-ServersTestManualDisk-725019117 tempest-ServersTestManualDisk-725019117-project-member] Acquiring lock "ecedded1-7169-49a4-8a9e-2fe4086db986-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1949.530606] env[63279]: DEBUG oslo_concurrency.lockutils [None req-fef68aa8-4469-4e71-b0f4-6afc3e4409aa tempest-ServersTestManualDisk-725019117 tempest-ServersTestManualDisk-725019117-project-member] Lock "ecedded1-7169-49a4-8a9e-2fe4086db986-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1949.530881] env[63279]: DEBUG oslo_concurrency.lockutils [None req-fef68aa8-4469-4e71-b0f4-6afc3e4409aa tempest-ServersTestManualDisk-725019117 tempest-ServersTestManualDisk-725019117-project-member] Lock "ecedded1-7169-49a4-8a9e-2fe4086db986-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1949.534184] env[63279]: INFO nova.compute.manager [None req-fef68aa8-4469-4e71-b0f4-6afc3e4409aa tempest-ServersTestManualDisk-725019117 tempest-ServersTestManualDisk-725019117-project-member] [instance: ecedded1-7169-49a4-8a9e-2fe4086db986] Terminating instance [ 1949.556190] env[63279]: DEBUG oslo_vmware.api [None req-e9eb7f67-3e31-4b74-ba7f-946bf6f2c529 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] Task: {'id': task-2087064, 'name': Destroy_Task, 'duration_secs': 0.84139} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1949.556405] env[63279]: INFO nova.virt.vmwareapi.vm_util [None req-e9eb7f67-3e31-4b74-ba7f-946bf6f2c529 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] [instance: 5bb445d3-1b12-4a1b-ad2a-cbc929b13aee] Destroyed the VM [ 1949.556766] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-e9eb7f67-3e31-4b74-ba7f-946bf6f2c529 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] [instance: 5bb445d3-1b12-4a1b-ad2a-cbc929b13aee] Deleting Snapshot of the VM instance {{(pid=63279) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1949.557115] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-b7c30d70-d7f2-42eb-9451-eeeccd0f189a {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1949.566307] env[63279]: DEBUG oslo_vmware.api [None req-e9eb7f67-3e31-4b74-ba7f-946bf6f2c529 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] Waiting for the task: (returnval){ [ 1949.566307] env[63279]: value = "task-2087075" [ 1949.566307] env[63279]: _type = "Task" [ 1949.566307] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1949.576887] env[63279]: DEBUG oslo_vmware.api [None req-e9eb7f67-3e31-4b74-ba7f-946bf6f2c529 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] Task: {'id': task-2087075, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1949.619868] env[63279]: DEBUG oslo_vmware.api [None req-5b55e395-c1a6-4f2a-ad2f-483781b8bb67 tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] Task: {'id': task-2087068, 'name': PowerOffVM_Task, 'duration_secs': 0.183393} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1949.620224] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-5b55e395-c1a6-4f2a-ad2f-483781b8bb67 tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] [instance: 1a55008a-4d8c-403d-a8f4-966aa7346f4c] Powered off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1949.620446] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-5b55e395-c1a6-4f2a-ad2f-483781b8bb67 tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] [instance: 1a55008a-4d8c-403d-a8f4-966aa7346f4c] Unregistering the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1949.620722] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6cf35936-baa8-4648-9615-18d0f6588d5a {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1949.751455] env[63279]: DEBUG nova.network.neutron [-] [instance: de543869-8ab1-40ed-8f6d-dc506c257843] Updating instance_info_cache with network_info: [] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1949.824882] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087072, 'name': CreateVM_Task, 'duration_secs': 0.504893} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1949.832128] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c287072d-0ce9-4075-8895-0f64326ac303] Created VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1949.833327] env[63279]: DEBUG oslo_concurrency.lockutils [None req-a527ddef-d877-4b24-a0b1-c60958f9b4c7 tempest-ServerAddressesNegativeTestJSON-377386487 tempest-ServerAddressesNegativeTestJSON-377386487-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1949.833524] env[63279]: DEBUG oslo_concurrency.lockutils [None req-a527ddef-d877-4b24-a0b1-c60958f9b4c7 tempest-ServerAddressesNegativeTestJSON-377386487 tempest-ServerAddressesNegativeTestJSON-377386487-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1949.833930] env[63279]: DEBUG oslo_concurrency.lockutils [None req-a527ddef-d877-4b24-a0b1-c60958f9b4c7 tempest-ServerAddressesNegativeTestJSON-377386487 tempest-ServerAddressesNegativeTestJSON-377386487-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1949.834697] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-28f9dc0d-40ba-4742-9fab-395ea8570a67 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1949.841985] env[63279]: DEBUG oslo_vmware.api [None req-1e3daa27-53b9-4834-adeb-4cde61720229 tempest-ServersNegativeTestMultiTenantJSON-98060902 
tempest-ServersNegativeTestMultiTenantJSON-98060902-project-member] Task: {'id': task-2087073, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1949.845714] env[63279]: DEBUG oslo_vmware.api [None req-a527ddef-d877-4b24-a0b1-c60958f9b4c7 tempest-ServerAddressesNegativeTestJSON-377386487 tempest-ServerAddressesNegativeTestJSON-377386487-project-member] Waiting for the task: (returnval){ [ 1949.845714] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]525beee3-20a2-f373-1da2-edeae06749af" [ 1949.845714] env[63279]: _type = "Task" [ 1949.845714] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1949.852596] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-5b55e395-c1a6-4f2a-ad2f-483781b8bb67 tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] [instance: 1a55008a-4d8c-403d-a8f4-966aa7346f4c] Unregistered the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1949.852821] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-5b55e395-c1a6-4f2a-ad2f-483781b8bb67 tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] [instance: 1a55008a-4d8c-403d-a8f4-966aa7346f4c] Deleting contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1949.853032] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-5b55e395-c1a6-4f2a-ad2f-483781b8bb67 tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] Deleting the datastore file [datastore1] 1a55008a-4d8c-403d-a8f4-966aa7346f4c {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1949.856177] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a08bcef9-c4cb-40b8-a18f-283ec93bc131 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1949.858095] env[63279]: DEBUG oslo_vmware.api [None req-a527ddef-d877-4b24-a0b1-c60958f9b4c7 tempest-ServerAddressesNegativeTestJSON-377386487 tempest-ServerAddressesNegativeTestJSON-377386487-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]525beee3-20a2-f373-1da2-edeae06749af, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1949.864935] env[63279]: DEBUG oslo_vmware.api [None req-5b55e395-c1a6-4f2a-ad2f-483781b8bb67 tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] Waiting for the task: (returnval){ [ 1949.864935] env[63279]: value = "task-2087077" [ 1949.864935] env[63279]: _type = "Task" [ 1949.864935] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1949.877608] env[63279]: DEBUG oslo_vmware.api [None req-5b55e395-c1a6-4f2a-ad2f-483781b8bb67 tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] Task: {'id': task-2087077, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1949.880908] env[63279]: DEBUG oslo_vmware.api [None req-983fb64c-2d77-4322-9ea8-58ba6eceb42b tempest-DeleteServersAdminTestJSON-778262859 tempest-DeleteServersAdminTestJSON-778262859-project-member] Task: {'id': task-2087074, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.377578} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1949.881064] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-983fb64c-2d77-4322-9ea8-58ba6eceb42b tempest-DeleteServersAdminTestJSON-778262859 tempest-DeleteServersAdminTestJSON-778262859-project-member] Deleted the datastore file {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1949.881355] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-983fb64c-2d77-4322-9ea8-58ba6eceb42b tempest-DeleteServersAdminTestJSON-778262859 tempest-DeleteServersAdminTestJSON-778262859-project-member] [instance: 81103d53-99fe-4d1a-816f-7685c59c80ee] Deleted contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1949.883960] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-983fb64c-2d77-4322-9ea8-58ba6eceb42b tempest-DeleteServersAdminTestJSON-778262859 tempest-DeleteServersAdminTestJSON-778262859-project-member] [instance: 81103d53-99fe-4d1a-816f-7685c59c80ee] Instance destroyed {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1949.883960] env[63279]: INFO nova.compute.manager [None req-983fb64c-2d77-4322-9ea8-58ba6eceb42b tempest-DeleteServersAdminTestJSON-778262859 tempest-DeleteServersAdminTestJSON-778262859-project-member] [instance: 81103d53-99fe-4d1a-816f-7685c59c80ee] Took 1.33 seconds to destroy the instance on the hypervisor. [ 1949.883960] env[63279]: DEBUG oslo.service.loopingcall [None req-983fb64c-2d77-4322-9ea8-58ba6eceb42b tempest-DeleteServersAdminTestJSON-778262859 tempest-DeleteServersAdminTestJSON-778262859-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1949.883960] env[63279]: DEBUG nova.compute.manager [-] [instance: 81103d53-99fe-4d1a-816f-7685c59c80ee] Deallocating network for instance {{(pid=63279) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1949.883960] env[63279]: DEBUG nova.network.neutron [-] [instance: 81103d53-99fe-4d1a-816f-7685c59c80ee] deallocate_for_instance() {{(pid=63279) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1949.908541] env[63279]: DEBUG nova.compute.manager [None req-ccac180c-f8b1-4fd5-a765-713ff5939c3d tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] [instance: 32789822-cb54-43e7-beae-b5ed3002f4ad] Starting instance... 
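Taken together, the records for instance 81103d53-... above trace the destroy path: PowerOffVM_Task, then UnregisterVM, then DeleteDatastoreFile_Task on the instance folder, and finally network deallocation. The outline below restates that ordering only; all four callables are hypothetical placeholders injected so the sketch runs without a vCenter session, and this is not Nova's vmops code.

# Ordering of the destroy sequence as recorded in the log (hypothetical helpers).
def destroy_instance(power_off, unregister, delete_datastore_dir,
                     deallocate_network):
    power_off()              # VirtualMachine.PowerOffVM_Task
    unregister()             # VirtualMachine.UnregisterVM
    delete_datastore_dir()   # FileManager.DeleteDatastoreFile_Task
    deallocate_network()     # deallocate_for_instance() against Neutron

destroy_instance(lambda: print("powered off"),
                 lambda: print("unregistered"),
                 lambda: print("datastore files deleted"),
                 lambda: print("network deallocated"))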
{{(pid=63279) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1949.915598] env[63279]: DEBUG oslo_concurrency.lockutils [None req-27d050e2-e902-4611-87fd-a6b52583e82d tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] Acquiring lock "0e12ab9b-a701-4e0f-9d96-939090f50494" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1949.915812] env[63279]: DEBUG oslo_concurrency.lockutils [None req-27d050e2-e902-4611-87fd-a6b52583e82d tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] Lock "0e12ab9b-a701-4e0f-9d96-939090f50494" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1950.002959] env[63279]: DEBUG nova.scheduler.client.report [None req-25a71d01-b823-4a97-991b-a30e1ca6a359 tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Inventory has not changed for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1950.039454] env[63279]: DEBUG nova.compute.manager [None req-fef68aa8-4469-4e71-b0f4-6afc3e4409aa tempest-ServersTestManualDisk-725019117 tempest-ServersTestManualDisk-725019117-project-member] [instance: ecedded1-7169-49a4-8a9e-2fe4086db986] Start destroying the instance on the hypervisor. 
{{(pid=63279) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1950.039815] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-fef68aa8-4469-4e71-b0f4-6afc3e4409aa tempest-ServersTestManualDisk-725019117 tempest-ServersTestManualDisk-725019117-project-member] [instance: ecedded1-7169-49a4-8a9e-2fe4086db986] Destroying instance {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1950.041371] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ec4db68-fcaa-4b6a-9c97-3277419d9d6a {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1950.050570] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-fef68aa8-4469-4e71-b0f4-6afc3e4409aa tempest-ServersTestManualDisk-725019117 tempest-ServersTestManualDisk-725019117-project-member] [instance: ecedded1-7169-49a4-8a9e-2fe4086db986] Powering off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1950.051340] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-8ce95f4f-d788-491e-8128-949ac5d95a81 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1950.057879] env[63279]: DEBUG oslo_vmware.api [None req-fef68aa8-4469-4e71-b0f4-6afc3e4409aa tempest-ServersTestManualDisk-725019117 tempest-ServersTestManualDisk-725019117-project-member] Waiting for the task: (returnval){ [ 1950.057879] env[63279]: value = "task-2087078" [ 1950.057879] env[63279]: _type = "Task" [ 1950.057879] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1950.066828] env[63279]: DEBUG oslo_vmware.api [None req-fef68aa8-4469-4e71-b0f4-6afc3e4409aa tempest-ServersTestManualDisk-725019117 tempest-ServersTestManualDisk-725019117-project-member] Task: {'id': task-2087078, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1950.078742] env[63279]: DEBUG oslo_vmware.api [None req-e9eb7f67-3e31-4b74-ba7f-946bf6f2c529 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] Task: {'id': task-2087075, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1950.216604] env[63279]: DEBUG nova.compute.manager [None req-f3600cbc-9fef-45fa-b2de-6e5d0ca54cdb tempest-ServerRescueTestJSONUnderV235-1989684258 tempest-ServerRescueTestJSONUnderV235-1989684258-project-member] [instance: 044335c7-ce3b-4b4a-b1dc-8b9acec538b4] Start spawning the instance on the hypervisor. 
{{(pid=63279) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1950.249683] env[63279]: DEBUG nova.virt.hardware [None req-f3600cbc-9fef-45fa-b2de-6e5d0ca54cdb tempest-ServerRescueTestJSONUnderV235-1989684258 tempest-ServerRescueTestJSONUnderV235-1989684258-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-13T17:30:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-13T17:30:01Z,direct_url=,disk_format='vmdk',id=30887889-e45b-4f67-8b3c-16216e594a90,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a935e86eee0a4c38adfe0367d2097a61',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-13T17:30:02Z,virtual_size=,visibility=), allow threads: False {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1950.250012] env[63279]: DEBUG nova.virt.hardware [None req-f3600cbc-9fef-45fa-b2de-6e5d0ca54cdb tempest-ServerRescueTestJSONUnderV235-1989684258 tempest-ServerRescueTestJSONUnderV235-1989684258-project-member] Flavor limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1950.251423] env[63279]: DEBUG nova.virt.hardware [None req-f3600cbc-9fef-45fa-b2de-6e5d0ca54cdb tempest-ServerRescueTestJSONUnderV235-1989684258 tempest-ServerRescueTestJSONUnderV235-1989684258-project-member] Image limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1950.251691] env[63279]: DEBUG nova.virt.hardware [None req-f3600cbc-9fef-45fa-b2de-6e5d0ca54cdb tempest-ServerRescueTestJSONUnderV235-1989684258 tempest-ServerRescueTestJSONUnderV235-1989684258-project-member] Flavor pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1950.251903] env[63279]: DEBUG nova.virt.hardware [None req-f3600cbc-9fef-45fa-b2de-6e5d0ca54cdb tempest-ServerRescueTestJSONUnderV235-1989684258 tempest-ServerRescueTestJSONUnderV235-1989684258-project-member] Image pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1950.252064] env[63279]: DEBUG nova.virt.hardware [None req-f3600cbc-9fef-45fa-b2de-6e5d0ca54cdb tempest-ServerRescueTestJSONUnderV235-1989684258 tempest-ServerRescueTestJSONUnderV235-1989684258-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1950.252330] env[63279]: DEBUG nova.virt.hardware [None req-f3600cbc-9fef-45fa-b2de-6e5d0ca54cdb tempest-ServerRescueTestJSONUnderV235-1989684258 tempest-ServerRescueTestJSONUnderV235-1989684258-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1950.252519] env[63279]: DEBUG nova.virt.hardware [None req-f3600cbc-9fef-45fa-b2de-6e5d0ca54cdb tempest-ServerRescueTestJSONUnderV235-1989684258 tempest-ServerRescueTestJSONUnderV235-1989684258-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63279) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1950.252699] env[63279]: DEBUG nova.virt.hardware [None req-f3600cbc-9fef-45fa-b2de-6e5d0ca54cdb tempest-ServerRescueTestJSONUnderV235-1989684258 tempest-ServerRescueTestJSONUnderV235-1989684258-project-member] Got 1 possible topologies {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1950.252893] env[63279]: DEBUG nova.virt.hardware [None req-f3600cbc-9fef-45fa-b2de-6e5d0ca54cdb tempest-ServerRescueTestJSONUnderV235-1989684258 tempest-ServerRescueTestJSONUnderV235-1989684258-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1950.253108] env[63279]: DEBUG nova.virt.hardware [None req-f3600cbc-9fef-45fa-b2de-6e5d0ca54cdb tempest-ServerRescueTestJSONUnderV235-1989684258 tempest-ServerRescueTestJSONUnderV235-1989684258-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1950.254178] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-430827d8-5ad3-4240-b9ee-33e8c373e76b {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1950.260017] env[63279]: INFO nova.compute.manager [-] [instance: de543869-8ab1-40ed-8f6d-dc506c257843] Took 1.35 seconds to deallocate network for instance. [ 1950.275544] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f58dc787-df9a-43b6-8d85-d8e62487c6a2 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1950.284353] env[63279]: DEBUG nova.compute.manager [req-0e1244de-30b6-4e05-a8b9-32382b5d7cbd req-69a3a5af-fdf6-4a10-b600-7396d6165c71 service nova] [instance: f375b54b-f9de-4529-b752-52c240aed532] Received event network-changed-7dc16370-0621-49ff-9730-abdfd18ff164 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 1950.284353] env[63279]: DEBUG nova.compute.manager [req-0e1244de-30b6-4e05-a8b9-32382b5d7cbd req-69a3a5af-fdf6-4a10-b600-7396d6165c71 service nova] [instance: f375b54b-f9de-4529-b752-52c240aed532] Refreshing instance network info cache due to event network-changed-7dc16370-0621-49ff-9730-abdfd18ff164. 
{{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11655}} [ 1950.284353] env[63279]: DEBUG oslo_concurrency.lockutils [req-0e1244de-30b6-4e05-a8b9-32382b5d7cbd req-69a3a5af-fdf6-4a10-b600-7396d6165c71 service nova] Acquiring lock "refresh_cache-f375b54b-f9de-4529-b752-52c240aed532" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1950.284488] env[63279]: DEBUG oslo_concurrency.lockutils [req-0e1244de-30b6-4e05-a8b9-32382b5d7cbd req-69a3a5af-fdf6-4a10-b600-7396d6165c71 service nova] Acquired lock "refresh_cache-f375b54b-f9de-4529-b752-52c240aed532" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1950.284640] env[63279]: DEBUG nova.network.neutron [req-0e1244de-30b6-4e05-a8b9-32382b5d7cbd req-69a3a5af-fdf6-4a10-b600-7396d6165c71 service nova] [instance: f375b54b-f9de-4529-b752-52c240aed532] Refreshing network info cache for port 7dc16370-0621-49ff-9730-abdfd18ff164 {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1950.338078] env[63279]: DEBUG oslo_vmware.api [None req-1e3daa27-53b9-4834-adeb-4cde61720229 tempest-ServersNegativeTestMultiTenantJSON-98060902 tempest-ServersNegativeTestMultiTenantJSON-98060902-project-member] Task: {'id': task-2087073, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.539055} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1950.339523] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-1e3daa27-53b9-4834-adeb-4cde61720229 tempest-ServersNegativeTestMultiTenantJSON-98060902 tempest-ServersNegativeTestMultiTenantJSON-98060902-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] cf1b70af-335d-404b-bb4f-fe082dd6f450/cf1b70af-335d-404b-bb4f-fe082dd6f450.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1950.339523] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-1e3daa27-53b9-4834-adeb-4cde61720229 tempest-ServersNegativeTestMultiTenantJSON-98060902 tempest-ServersNegativeTestMultiTenantJSON-98060902-project-member] [instance: cf1b70af-335d-404b-bb4f-fe082dd6f450] Extending root virtual disk to 1048576 {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1950.339523] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e2250ba9-4597-4d88-ad26-c4a25a5609c2 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1950.352034] env[63279]: DEBUG oslo_vmware.api [None req-1e3daa27-53b9-4834-adeb-4cde61720229 tempest-ServersNegativeTestMultiTenantJSON-98060902 tempest-ServersNegativeTestMultiTenantJSON-98060902-project-member] Waiting for the task: (returnval){ [ 1950.352034] env[63279]: value = "task-2087079" [ 1950.352034] env[63279]: _type = "Task" [ 1950.352034] env[63279]: } to complete. 
{{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1950.366807] env[63279]: DEBUG oslo_vmware.api [None req-a527ddef-d877-4b24-a0b1-c60958f9b4c7 tempest-ServerAddressesNegativeTestJSON-377386487 tempest-ServerAddressesNegativeTestJSON-377386487-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]525beee3-20a2-f373-1da2-edeae06749af, 'name': SearchDatastore_Task, 'duration_secs': 0.011889} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1950.378261] env[63279]: DEBUG oslo_concurrency.lockutils [None req-a527ddef-d877-4b24-a0b1-c60958f9b4c7 tempest-ServerAddressesNegativeTestJSON-377386487 tempest-ServerAddressesNegativeTestJSON-377386487-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1950.378261] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-a527ddef-d877-4b24-a0b1-c60958f9b4c7 tempest-ServerAddressesNegativeTestJSON-377386487 tempest-ServerAddressesNegativeTestJSON-377386487-project-member] [instance: c287072d-0ce9-4075-8895-0f64326ac303] Processing image 30887889-e45b-4f67-8b3c-16216e594a90 {{(pid=63279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1950.378627] env[63279]: DEBUG oslo_concurrency.lockutils [None req-a527ddef-d877-4b24-a0b1-c60958f9b4c7 tempest-ServerAddressesNegativeTestJSON-377386487 tempest-ServerAddressesNegativeTestJSON-377386487-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1950.378690] env[63279]: DEBUG oslo_concurrency.lockutils [None req-a527ddef-d877-4b24-a0b1-c60958f9b4c7 tempest-ServerAddressesNegativeTestJSON-377386487 tempest-ServerAddressesNegativeTestJSON-377386487-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1950.378896] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-a527ddef-d877-4b24-a0b1-c60958f9b4c7 tempest-ServerAddressesNegativeTestJSON-377386487 tempest-ServerAddressesNegativeTestJSON-377386487-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1950.379623] env[63279]: DEBUG oslo_vmware.api [None req-1e3daa27-53b9-4834-adeb-4cde61720229 tempest-ServersNegativeTestMultiTenantJSON-98060902 tempest-ServersNegativeTestMultiTenantJSON-98060902-project-member] Task: {'id': task-2087079, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1950.380336] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-880e8ea9-247e-43d2-96fb-4609ed1902cd {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1950.388885] env[63279]: DEBUG oslo_vmware.api [None req-5b55e395-c1a6-4f2a-ad2f-483781b8bb67 tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] Task: {'id': task-2087077, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.155723} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1950.390275] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-5b55e395-c1a6-4f2a-ad2f-483781b8bb67 tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] Deleted the datastore file {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1950.390762] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-5b55e395-c1a6-4f2a-ad2f-483781b8bb67 tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] [instance: 1a55008a-4d8c-403d-a8f4-966aa7346f4c] Deleted contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1950.390998] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-5b55e395-c1a6-4f2a-ad2f-483781b8bb67 tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] [instance: 1a55008a-4d8c-403d-a8f4-966aa7346f4c] Instance destroyed {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1950.391203] env[63279]: INFO nova.compute.manager [None req-5b55e395-c1a6-4f2a-ad2f-483781b8bb67 tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] [instance: 1a55008a-4d8c-403d-a8f4-966aa7346f4c] Took 1.31 seconds to destroy the instance on the hypervisor. [ 1950.391469] env[63279]: DEBUG oslo.service.loopingcall [None req-5b55e395-c1a6-4f2a-ad2f-483781b8bb67 tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1950.391778] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-a527ddef-d877-4b24-a0b1-c60958f9b4c7 tempest-ServerAddressesNegativeTestJSON-377386487 tempest-ServerAddressesNegativeTestJSON-377386487-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1950.391860] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-a527ddef-d877-4b24-a0b1-c60958f9b4c7 tempest-ServerAddressesNegativeTestJSON-377386487 tempest-ServerAddressesNegativeTestJSON-377386487-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1950.392987] env[63279]: DEBUG nova.compute.manager [-] [instance: 1a55008a-4d8c-403d-a8f4-966aa7346f4c] Deallocating network for instance {{(pid=63279) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1950.393130] env[63279]: DEBUG nova.network.neutron [-] [instance: 1a55008a-4d8c-403d-a8f4-966aa7346f4c] deallocate_for_instance() {{(pid=63279) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1950.395012] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fb8cd849-03dd-4b2a-9dd8-7c27a2021e88 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1950.402203] env[63279]: DEBUG oslo_vmware.api [None req-a527ddef-d877-4b24-a0b1-c60958f9b4c7 tempest-ServerAddressesNegativeTestJSON-377386487 tempest-ServerAddressesNegativeTestJSON-377386487-project-member] Waiting for the task: (returnval){ [ 1950.402203] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52307291-2f60-cbac-e080-3cbde86412a7" [ 1950.402203] env[63279]: _type = "Task" [ 1950.402203] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1950.412211] env[63279]: DEBUG oslo_vmware.api [None req-a527ddef-d877-4b24-a0b1-c60958f9b4c7 tempest-ServerAddressesNegativeTestJSON-377386487 tempest-ServerAddressesNegativeTestJSON-377386487-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52307291-2f60-cbac-e080-3cbde86412a7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1950.435956] env[63279]: DEBUG oslo_concurrency.lockutils [None req-ccac180c-f8b1-4fd5-a765-713ff5939c3d tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1950.454819] env[63279]: DEBUG nova.network.neutron [req-89ffad74-c20e-4945-9118-59bc4d95574f req-2b56e2a3-90bf-4dce-a5e5-3a9cda03fbf7 service nova] [instance: fcc5a636-554f-424e-a604-a8e7bd7cf574] Updated VIF entry in instance network info cache for port e1063b89-be88-474a-a2ec-b61eb11cf9fe. 
{{(pid=63279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1950.455142] env[63279]: DEBUG nova.network.neutron [req-89ffad74-c20e-4945-9118-59bc4d95574f req-2b56e2a3-90bf-4dce-a5e5-3a9cda03fbf7 service nova] [instance: fcc5a636-554f-424e-a604-a8e7bd7cf574] Updating instance_info_cache with network_info: [{"id": "e1063b89-be88-474a-a2ec-b61eb11cf9fe", "address": "fa:16:3e:b1:26:30", "network": {"id": "50f30894-1239-497e-9f70-afa5b0c429ea", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-85780566-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.252", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dfdf69a97cf54d5cb8c4fb1c59b6a5d0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f2e45023-22b5-458b-826e-9b7eb69ba028", "external-id": "nsx-vlan-transportzone-614", "segmentation_id": 614, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape1063b89-be", "ovs_interfaceid": "e1063b89-be88-474a-a2ec-b61eb11cf9fe", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1950.511346] env[63279]: DEBUG oslo_concurrency.lockutils [None req-25a71d01-b823-4a97-991b-a30e1ca6a359 tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.325s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1950.515365] env[63279]: DEBUG oslo_concurrency.lockutils [None req-8ff9a3bb-99fe-487d-a326-1ba0f47e3992 tempest-ServersAdminNegativeTestJSON-968304097 tempest-ServersAdminNegativeTestJSON-968304097-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 29.264s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1950.515627] env[63279]: DEBUG nova.objects.instance [None req-8ff9a3bb-99fe-487d-a326-1ba0f47e3992 tempest-ServersAdminNegativeTestJSON-968304097 tempest-ServersAdminNegativeTestJSON-968304097-project-member] Lazy-loading 'resources' on Instance uuid 0224e4ea-c13c-4abd-9626-6038c0bbe4e9 {{(pid=63279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1950.537928] env[63279]: INFO nova.scheduler.client.report [None req-25a71d01-b823-4a97-991b-a30e1ca6a359 tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Deleted allocations for instance f20e8eb2-847b-4988-abf9-ed5f9f65c25c [ 1950.569369] env[63279]: DEBUG oslo_vmware.api [None req-fef68aa8-4469-4e71-b0f4-6afc3e4409aa tempest-ServersTestManualDisk-725019117 tempest-ServersTestManualDisk-725019117-project-member] Task: {'id': task-2087078, 'name': PowerOffVM_Task, 'duration_secs': 0.26869} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1950.572466] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-fef68aa8-4469-4e71-b0f4-6afc3e4409aa tempest-ServersTestManualDisk-725019117 tempest-ServersTestManualDisk-725019117-project-member] [instance: ecedded1-7169-49a4-8a9e-2fe4086db986] Powered off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1950.572641] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-fef68aa8-4469-4e71-b0f4-6afc3e4409aa tempest-ServersTestManualDisk-725019117 tempest-ServersTestManualDisk-725019117-project-member] [instance: ecedded1-7169-49a4-8a9e-2fe4086db986] Unregistering the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1950.572891] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0be67f08-4347-4e08-9631-163b3cf6c74c {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1950.579833] env[63279]: DEBUG oslo_vmware.api [None req-e9eb7f67-3e31-4b74-ba7f-946bf6f2c529 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] Task: {'id': task-2087075, 'name': RemoveSnapshot_Task} progress is 97%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1950.679554] env[63279]: DEBUG oslo_concurrency.lockutils [None req-cd3a3546-110f-4a9d-aad2-eb6e674d439f tempest-AttachInterfacesUnderV243Test-18748603 tempest-AttachInterfacesUnderV243Test-18748603-project-member] Acquiring lock "fcc5a636-554f-424e-a604-a8e7bd7cf574" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1950.679912] env[63279]: DEBUG oslo_concurrency.lockutils [None req-cd3a3546-110f-4a9d-aad2-eb6e674d439f tempest-AttachInterfacesUnderV243Test-18748603 tempest-AttachInterfacesUnderV243Test-18748603-project-member] Lock "fcc5a636-554f-424e-a604-a8e7bd7cf574" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1950.680192] env[63279]: DEBUG oslo_concurrency.lockutils [None req-cd3a3546-110f-4a9d-aad2-eb6e674d439f tempest-AttachInterfacesUnderV243Test-18748603 tempest-AttachInterfacesUnderV243Test-18748603-project-member] Acquiring lock "fcc5a636-554f-424e-a604-a8e7bd7cf574-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1950.680353] env[63279]: DEBUG oslo_concurrency.lockutils [None req-cd3a3546-110f-4a9d-aad2-eb6e674d439f tempest-AttachInterfacesUnderV243Test-18748603 tempest-AttachInterfacesUnderV243Test-18748603-project-member] Lock "fcc5a636-554f-424e-a604-a8e7bd7cf574-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1950.680536] env[63279]: DEBUG oslo_concurrency.lockutils [None req-cd3a3546-110f-4a9d-aad2-eb6e674d439f tempest-AttachInterfacesUnderV243Test-18748603 tempest-AttachInterfacesUnderV243Test-18748603-project-member] Lock 
"fcc5a636-554f-424e-a604-a8e7bd7cf574-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1950.683378] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-fef68aa8-4469-4e71-b0f4-6afc3e4409aa tempest-ServersTestManualDisk-725019117 tempest-ServersTestManualDisk-725019117-project-member] [instance: ecedded1-7169-49a4-8a9e-2fe4086db986] Unregistered the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1950.683772] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-fef68aa8-4469-4e71-b0f4-6afc3e4409aa tempest-ServersTestManualDisk-725019117 tempest-ServersTestManualDisk-725019117-project-member] [instance: ecedded1-7169-49a4-8a9e-2fe4086db986] Deleting contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1950.683772] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-fef68aa8-4469-4e71-b0f4-6afc3e4409aa tempest-ServersTestManualDisk-725019117 tempest-ServersTestManualDisk-725019117-project-member] Deleting the datastore file [datastore1] ecedded1-7169-49a4-8a9e-2fe4086db986 {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1950.684252] env[63279]: INFO nova.compute.manager [None req-cd3a3546-110f-4a9d-aad2-eb6e674d439f tempest-AttachInterfacesUnderV243Test-18748603 tempest-AttachInterfacesUnderV243Test-18748603-project-member] [instance: fcc5a636-554f-424e-a604-a8e7bd7cf574] Terminating instance [ 1950.685595] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-df6ece0c-0d91-467f-9aef-7e99bde23ceb {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1950.693371] env[63279]: DEBUG oslo_vmware.api [None req-fef68aa8-4469-4e71-b0f4-6afc3e4409aa tempest-ServersTestManualDisk-725019117 tempest-ServersTestManualDisk-725019117-project-member] Waiting for the task: (returnval){ [ 1950.693371] env[63279]: value = "task-2087081" [ 1950.693371] env[63279]: _type = "Task" [ 1950.693371] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1950.702105] env[63279]: DEBUG oslo_vmware.api [None req-fef68aa8-4469-4e71-b0f4-6afc3e4409aa tempest-ServersTestManualDisk-725019117 tempest-ServersTestManualDisk-725019117-project-member] Task: {'id': task-2087081, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1950.767501] env[63279]: DEBUG oslo_concurrency.lockutils [None req-54fd8829-eaaf-408f-9e32-c8514ef3c821 tempest-InstanceActionsTestJSON-1583999850 tempest-InstanceActionsTestJSON-1583999850-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1950.864471] env[63279]: DEBUG oslo_vmware.api [None req-1e3daa27-53b9-4834-adeb-4cde61720229 tempest-ServersNegativeTestMultiTenantJSON-98060902 tempest-ServersNegativeTestMultiTenantJSON-98060902-project-member] Task: {'id': task-2087079, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.072216} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1950.864779] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-1e3daa27-53b9-4834-adeb-4cde61720229 tempest-ServersNegativeTestMultiTenantJSON-98060902 tempest-ServersNegativeTestMultiTenantJSON-98060902-project-member] [instance: cf1b70af-335d-404b-bb4f-fe082dd6f450] Extended root virtual disk {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1950.865630] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83c3d7d2-d3ff-4a95-97a6-a83e848fe4cd {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1950.890239] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-1e3daa27-53b9-4834-adeb-4cde61720229 tempest-ServersNegativeTestMultiTenantJSON-98060902 tempest-ServersNegativeTestMultiTenantJSON-98060902-project-member] [instance: cf1b70af-335d-404b-bb4f-fe082dd6f450] Reconfiguring VM instance instance-00000026 to attach disk [datastore1] cf1b70af-335d-404b-bb4f-fe082dd6f450/cf1b70af-335d-404b-bb4f-fe082dd6f450.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1950.891057] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e0902cf9-bb37-412c-89e0-09e15903c3ba {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1950.921421] env[63279]: DEBUG oslo_vmware.api [None req-a527ddef-d877-4b24-a0b1-c60958f9b4c7 tempest-ServerAddressesNegativeTestJSON-377386487 tempest-ServerAddressesNegativeTestJSON-377386487-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52307291-2f60-cbac-e080-3cbde86412a7, 'name': SearchDatastore_Task, 'duration_secs': 0.012261} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1950.923402] env[63279]: DEBUG oslo_vmware.api [None req-1e3daa27-53b9-4834-adeb-4cde61720229 tempest-ServersNegativeTestMultiTenantJSON-98060902 tempest-ServersNegativeTestMultiTenantJSON-98060902-project-member] Waiting for the task: (returnval){ [ 1950.923402] env[63279]: value = "task-2087082" [ 1950.923402] env[63279]: _type = "Task" [ 1950.923402] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1950.923603] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e4e2bf55-dd1d-4fd3-a78b-a6ba660787ad {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1950.934749] env[63279]: DEBUG oslo_vmware.api [None req-1e3daa27-53b9-4834-adeb-4cde61720229 tempest-ServersNegativeTestMultiTenantJSON-98060902 tempest-ServersNegativeTestMultiTenantJSON-98060902-project-member] Task: {'id': task-2087082, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1950.935720] env[63279]: DEBUG oslo_vmware.api [None req-a527ddef-d877-4b24-a0b1-c60958f9b4c7 tempest-ServerAddressesNegativeTestJSON-377386487 tempest-ServerAddressesNegativeTestJSON-377386487-project-member] Waiting for the task: (returnval){ [ 1950.935720] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]521841af-cbef-5ff7-6b13-1a843e229682" [ 1950.935720] env[63279]: _type = "Task" [ 1950.935720] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1950.944421] env[63279]: DEBUG nova.network.neutron [-] [instance: 81103d53-99fe-4d1a-816f-7685c59c80ee] Updating instance_info_cache with network_info: [] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1950.949021] env[63279]: DEBUG oslo_vmware.api [None req-a527ddef-d877-4b24-a0b1-c60958f9b4c7 tempest-ServerAddressesNegativeTestJSON-377386487 tempest-ServerAddressesNegativeTestJSON-377386487-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]521841af-cbef-5ff7-6b13-1a843e229682, 'name': SearchDatastore_Task, 'duration_secs': 0.00959} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1950.949021] env[63279]: DEBUG oslo_concurrency.lockutils [None req-a527ddef-d877-4b24-a0b1-c60958f9b4c7 tempest-ServerAddressesNegativeTestJSON-377386487 tempest-ServerAddressesNegativeTestJSON-377386487-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1950.949021] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-a527ddef-d877-4b24-a0b1-c60958f9b4c7 tempest-ServerAddressesNegativeTestJSON-377386487 tempest-ServerAddressesNegativeTestJSON-377386487-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] c287072d-0ce9-4075-8895-0f64326ac303/c287072d-0ce9-4075-8895-0f64326ac303.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1950.949021] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d63677c4-6cb3-44e6-890e-586588a2790b {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1950.953791] env[63279]: DEBUG oslo_vmware.api [None req-a527ddef-d877-4b24-a0b1-c60958f9b4c7 tempest-ServerAddressesNegativeTestJSON-377386487 tempest-ServerAddressesNegativeTestJSON-377386487-project-member] Waiting for the task: (returnval){ [ 1950.953791] env[63279]: value = "task-2087083" [ 1950.953791] env[63279]: _type = "Task" [ 1950.953791] env[63279]: } to complete. 
{{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1950.957757] env[63279]: DEBUG oslo_concurrency.lockutils [req-89ffad74-c20e-4945-9118-59bc4d95574f req-2b56e2a3-90bf-4dce-a5e5-3a9cda03fbf7 service nova] Releasing lock "refresh_cache-fcc5a636-554f-424e-a604-a8e7bd7cf574" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1950.964765] env[63279]: DEBUG oslo_vmware.api [None req-a527ddef-d877-4b24-a0b1-c60958f9b4c7 tempest-ServerAddressesNegativeTestJSON-377386487 tempest-ServerAddressesNegativeTestJSON-377386487-project-member] Task: {'id': task-2087083, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1950.978175] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-9a5f5cc6-f79e-4299-9ca0-1f387568bf34 tempest-VolumesAssistedSnapshotsTest-67549343 tempest-VolumesAssistedSnapshotsTest-67549343-project-admin] [instance: 5d4909ea-396c-45ba-9ff5-acb8576150b3] Volume attach. Driver type: vmdk {{(pid=63279) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1950.978471] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-9a5f5cc6-f79e-4299-9ca0-1f387568bf34 tempest-VolumesAssistedSnapshotsTest-67549343 tempest-VolumesAssistedSnapshotsTest-67549343-project-admin] [instance: 5d4909ea-396c-45ba-9ff5-acb8576150b3] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-427603', 'volume_id': '977ada7e-edf6-44b6-8cb5-41e0baf8daf1', 'name': 'volume-977ada7e-edf6-44b6-8cb5-41e0baf8daf1', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '5d4909ea-396c-45ba-9ff5-acb8576150b3', 'attached_at': '', 'detached_at': '', 'volume_id': '977ada7e-edf6-44b6-8cb5-41e0baf8daf1', 'serial': '977ada7e-edf6-44b6-8cb5-41e0baf8daf1'} {{(pid=63279) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1950.979358] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87486cb2-4944-446c-8f0b-af927b9e4e9c {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1951.000152] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70fbe4d2-7458-4b05-b8e3-ad7a3bbf5730 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1951.031412] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-9a5f5cc6-f79e-4299-9ca0-1f387568bf34 tempest-VolumesAssistedSnapshotsTest-67549343 tempest-VolumesAssistedSnapshotsTest-67549343-project-admin] [instance: 5d4909ea-396c-45ba-9ff5-acb8576150b3] Reconfiguring VM instance instance-00000010 to attach disk [datastore1] volume-977ada7e-edf6-44b6-8cb5-41e0baf8daf1/volume-977ada7e-edf6-44b6-8cb5-41e0baf8daf1.vmdk or device None with type thin {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1951.032910] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-72e57436-bf83-440c-a674-d5a8f85037a3 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1951.056116] env[63279]: DEBUG oslo_concurrency.lockutils [None 
req-25a71d01-b823-4a97-991b-a30e1ca6a359 tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Lock "f20e8eb2-847b-4988-abf9-ed5f9f65c25c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 34.082s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1951.060716] env[63279]: DEBUG oslo_vmware.api [None req-9a5f5cc6-f79e-4299-9ca0-1f387568bf34 tempest-VolumesAssistedSnapshotsTest-67549343 tempest-VolumesAssistedSnapshotsTest-67549343-project-admin] Waiting for the task: (returnval){ [ 1951.060716] env[63279]: value = "task-2087084" [ 1951.060716] env[63279]: _type = "Task" [ 1951.060716] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1951.071216] env[63279]: DEBUG oslo_vmware.api [None req-9a5f5cc6-f79e-4299-9ca0-1f387568bf34 tempest-VolumesAssistedSnapshotsTest-67549343 tempest-VolumesAssistedSnapshotsTest-67549343-project-admin] Task: {'id': task-2087084, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1951.083698] env[63279]: DEBUG oslo_vmware.api [None req-e9eb7f67-3e31-4b74-ba7f-946bf6f2c529 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] Task: {'id': task-2087075, 'name': RemoveSnapshot_Task, 'duration_secs': 1.209162} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1951.083876] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-e9eb7f67-3e31-4b74-ba7f-946bf6f2c529 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] [instance: 5bb445d3-1b12-4a1b-ad2a-cbc929b13aee] Deleted Snapshot of the VM instance {{(pid=63279) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1951.084074] env[63279]: INFO nova.compute.manager [None req-e9eb7f67-3e31-4b74-ba7f-946bf6f2c529 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] [instance: 5bb445d3-1b12-4a1b-ad2a-cbc929b13aee] Took 17.61 seconds to snapshot the instance on the hypervisor. [ 1951.164960] env[63279]: DEBUG nova.network.neutron [req-0e1244de-30b6-4e05-a8b9-32382b5d7cbd req-69a3a5af-fdf6-4a10-b600-7396d6165c71 service nova] [instance: f375b54b-f9de-4529-b752-52c240aed532] Updated VIF entry in instance network info cache for port 7dc16370-0621-49ff-9730-abdfd18ff164. 
{{(pid=63279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1951.165356] env[63279]: DEBUG nova.network.neutron [req-0e1244de-30b6-4e05-a8b9-32382b5d7cbd req-69a3a5af-fdf6-4a10-b600-7396d6165c71 service nova] [instance: f375b54b-f9de-4529-b752-52c240aed532] Updating instance_info_cache with network_info: [{"id": "7dc16370-0621-49ff-9730-abdfd18ff164", "address": "fa:16:3e:a3:62:97", "network": {"id": "6ac200ff-e42c-439c-8fa3-3b5e08a5d5e2", "bridge": "br-int", "label": "tempest-ServerActionsV293TestJSON-1487576264-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.143", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "691f1159e7af42429cca85900fac343d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69f65356-c85e-4b7f-ad28-7c7b5e8cf50c", "external-id": "nsx-vlan-transportzone-281", "segmentation_id": 281, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7dc16370-06", "ovs_interfaceid": "7dc16370-0621-49ff-9730-abdfd18ff164", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1951.190786] env[63279]: DEBUG nova.compute.manager [None req-cd3a3546-110f-4a9d-aad2-eb6e674d439f tempest-AttachInterfacesUnderV243Test-18748603 tempest-AttachInterfacesUnderV243Test-18748603-project-member] [instance: fcc5a636-554f-424e-a604-a8e7bd7cf574] Start destroying the instance on the hypervisor. {{(pid=63279) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1951.191724] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-cd3a3546-110f-4a9d-aad2-eb6e674d439f tempest-AttachInterfacesUnderV243Test-18748603 tempest-AttachInterfacesUnderV243Test-18748603-project-member] [instance: fcc5a636-554f-424e-a604-a8e7bd7cf574] Destroying instance {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1951.191944] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02900f71-cebf-4842-9ace-69a748949646 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1951.209674] env[63279]: DEBUG oslo_vmware.api [None req-fef68aa8-4469-4e71-b0f4-6afc3e4409aa tempest-ServersTestManualDisk-725019117 tempest-ServersTestManualDisk-725019117-project-member] Task: {'id': task-2087081, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.264329} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1951.214130] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-fef68aa8-4469-4e71-b0f4-6afc3e4409aa tempest-ServersTestManualDisk-725019117 tempest-ServersTestManualDisk-725019117-project-member] Deleted the datastore file {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1951.214330] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-fef68aa8-4469-4e71-b0f4-6afc3e4409aa tempest-ServersTestManualDisk-725019117 tempest-ServersTestManualDisk-725019117-project-member] [instance: ecedded1-7169-49a4-8a9e-2fe4086db986] Deleted contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1951.214506] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-fef68aa8-4469-4e71-b0f4-6afc3e4409aa tempest-ServersTestManualDisk-725019117 tempest-ServersTestManualDisk-725019117-project-member] [instance: ecedded1-7169-49a4-8a9e-2fe4086db986] Instance destroyed {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1951.214674] env[63279]: INFO nova.compute.manager [None req-fef68aa8-4469-4e71-b0f4-6afc3e4409aa tempest-ServersTestManualDisk-725019117 tempest-ServersTestManualDisk-725019117-project-member] [instance: ecedded1-7169-49a4-8a9e-2fe4086db986] Took 1.18 seconds to destroy the instance on the hypervisor. [ 1951.214912] env[63279]: DEBUG oslo.service.loopingcall [None req-fef68aa8-4469-4e71-b0f4-6afc3e4409aa tempest-ServersTestManualDisk-725019117 tempest-ServersTestManualDisk-725019117-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1951.215163] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-cd3a3546-110f-4a9d-aad2-eb6e674d439f tempest-AttachInterfacesUnderV243Test-18748603 tempest-AttachInterfacesUnderV243Test-18748603-project-member] [instance: fcc5a636-554f-424e-a604-a8e7bd7cf574] Powering off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1951.215580] env[63279]: DEBUG nova.compute.manager [-] [instance: ecedded1-7169-49a4-8a9e-2fe4086db986] Deallocating network for instance {{(pid=63279) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1951.215827] env[63279]: DEBUG nova.network.neutron [-] [instance: ecedded1-7169-49a4-8a9e-2fe4086db986] deallocate_for_instance() {{(pid=63279) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1951.217629] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0604d89e-f4ce-483d-996f-a2e1a9480d80 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1951.224730] env[63279]: DEBUG oslo_vmware.api [None req-cd3a3546-110f-4a9d-aad2-eb6e674d439f tempest-AttachInterfacesUnderV243Test-18748603 tempest-AttachInterfacesUnderV243Test-18748603-project-member] Waiting for the task: (returnval){ [ 1951.224730] env[63279]: value = "task-2087085" [ 1951.224730] env[63279]: _type = "Task" [ 1951.224730] env[63279]: } to complete. 
{{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1951.238065] env[63279]: DEBUG oslo_vmware.api [None req-cd3a3546-110f-4a9d-aad2-eb6e674d439f tempest-AttachInterfacesUnderV243Test-18748603 tempest-AttachInterfacesUnderV243Test-18748603-project-member] Task: {'id': task-2087085, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1951.439655] env[63279]: DEBUG oslo_vmware.api [None req-1e3daa27-53b9-4834-adeb-4cde61720229 tempest-ServersNegativeTestMultiTenantJSON-98060902 tempest-ServersNegativeTestMultiTenantJSON-98060902-project-member] Task: {'id': task-2087082, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1951.447521] env[63279]: INFO nova.compute.manager [-] [instance: 81103d53-99fe-4d1a-816f-7685c59c80ee] Took 1.57 seconds to deallocate network for instance. [ 1951.469456] env[63279]: DEBUG oslo_vmware.api [None req-a527ddef-d877-4b24-a0b1-c60958f9b4c7 tempest-ServerAddressesNegativeTestJSON-377386487 tempest-ServerAddressesNegativeTestJSON-377386487-project-member] Task: {'id': task-2087083, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1951.518668] env[63279]: DEBUG nova.compute.manager [req-1ea6ba8f-7d3f-4eb3-8eb5-90391d8658c3 req-24f0adbc-f27f-4d9a-bb0b-d651e778277b service nova] [instance: de543869-8ab1-40ed-8f6d-dc506c257843] Received event network-vif-deleted-c8a557ed-47ef-46c1-a803-10a8150cb66a {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 1951.518894] env[63279]: DEBUG nova.compute.manager [req-1ea6ba8f-7d3f-4eb3-8eb5-90391d8658c3 req-24f0adbc-f27f-4d9a-bb0b-d651e778277b service nova] [instance: 81103d53-99fe-4d1a-816f-7685c59c80ee] Received event network-vif-deleted-b059c76d-75a5-432c-b648-12c287223d83 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 1951.571654] env[63279]: DEBUG oslo_vmware.api [None req-9a5f5cc6-f79e-4299-9ca0-1f387568bf34 tempest-VolumesAssistedSnapshotsTest-67549343 tempest-VolumesAssistedSnapshotsTest-67549343-project-admin] Task: {'id': task-2087084, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1951.670274] env[63279]: DEBUG oslo_concurrency.lockutils [req-0e1244de-30b6-4e05-a8b9-32382b5d7cbd req-69a3a5af-fdf6-4a10-b600-7396d6165c71 service nova] Releasing lock "refresh_cache-f375b54b-f9de-4529-b752-52c240aed532" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1951.670584] env[63279]: DEBUG nova.compute.manager [req-0e1244de-30b6-4e05-a8b9-32382b5d7cbd req-69a3a5af-fdf6-4a10-b600-7396d6165c71 service nova] [instance: c287072d-0ce9-4075-8895-0f64326ac303] Received event network-changed-b6a4dca7-a0ba-4270-8c1f-2afac4d449bb {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 1951.670775] env[63279]: DEBUG nova.compute.manager [req-0e1244de-30b6-4e05-a8b9-32382b5d7cbd req-69a3a5af-fdf6-4a10-b600-7396d6165c71 service nova] [instance: c287072d-0ce9-4075-8895-0f64326ac303] Refreshing instance network info cache due to event network-changed-b6a4dca7-a0ba-4270-8c1f-2afac4d449bb. 
{{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11655}} [ 1951.671051] env[63279]: DEBUG oslo_concurrency.lockutils [req-0e1244de-30b6-4e05-a8b9-32382b5d7cbd req-69a3a5af-fdf6-4a10-b600-7396d6165c71 service nova] Acquiring lock "refresh_cache-c287072d-0ce9-4075-8895-0f64326ac303" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1951.671212] env[63279]: DEBUG oslo_concurrency.lockutils [req-0e1244de-30b6-4e05-a8b9-32382b5d7cbd req-69a3a5af-fdf6-4a10-b600-7396d6165c71 service nova] Acquired lock "refresh_cache-c287072d-0ce9-4075-8895-0f64326ac303" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1951.671389] env[63279]: DEBUG nova.network.neutron [req-0e1244de-30b6-4e05-a8b9-32382b5d7cbd req-69a3a5af-fdf6-4a10-b600-7396d6165c71 service nova] [instance: c287072d-0ce9-4075-8895-0f64326ac303] Refreshing network info cache for port b6a4dca7-a0ba-4270-8c1f-2afac4d449bb {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1951.724545] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78174f8f-e6cd-4e97-b2e5-15af1d218ad4 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1951.741228] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07547aa4-10bf-4885-9bda-e6e07f6d35a9 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1951.745679] env[63279]: DEBUG oslo_vmware.api [None req-cd3a3546-110f-4a9d-aad2-eb6e674d439f tempest-AttachInterfacesUnderV243Test-18748603 tempest-AttachInterfacesUnderV243Test-18748603-project-member] Task: {'id': task-2087085, 'name': PowerOffVM_Task} progress is 100%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1951.783286] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1281217c-6915-4bc6-b24d-2450095ad4a3 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1951.797142] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c61afec-e33a-4d42-a2be-6d73f4e6e8be {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1951.816329] env[63279]: DEBUG nova.compute.provider_tree [None req-8ff9a3bb-99fe-487d-a326-1ba0f47e3992 tempest-ServersAdminNegativeTestJSON-968304097 tempest-ServersAdminNegativeTestJSON-968304097-project-member] Inventory has not changed in ProviderTree for provider: 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1951.877438] env[63279]: DEBUG nova.network.neutron [None req-f3600cbc-9fef-45fa-b2de-6e5d0ca54cdb tempest-ServerRescueTestJSONUnderV235-1989684258 tempest-ServerRescueTestJSONUnderV235-1989684258-project-member] [instance: 044335c7-ce3b-4b4a-b1dc-8b9acec538b4] Successfully updated port: 8aa70697-1a19-4867-93bc-f264c16829ce {{(pid=63279) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1951.941721] env[63279]: DEBUG oslo_vmware.api [None req-1e3daa27-53b9-4834-adeb-4cde61720229 tempest-ServersNegativeTestMultiTenantJSON-98060902 tempest-ServersNegativeTestMultiTenantJSON-98060902-project-member] Task: {'id': task-2087082, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1951.967583] env[63279]: DEBUG oslo_concurrency.lockutils [None req-983fb64c-2d77-4322-9ea8-58ba6eceb42b tempest-DeleteServersAdminTestJSON-778262859 tempest-DeleteServersAdminTestJSON-778262859-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1951.967940] env[63279]: DEBUG oslo_vmware.api [None req-a527ddef-d877-4b24-a0b1-c60958f9b4c7 tempest-ServerAddressesNegativeTestJSON-377386487 tempest-ServerAddressesNegativeTestJSON-377386487-project-member] Task: {'id': task-2087083, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1952.072108] env[63279]: DEBUG oslo_vmware.api [None req-9a5f5cc6-f79e-4299-9ca0-1f387568bf34 tempest-VolumesAssistedSnapshotsTest-67549343 tempest-VolumesAssistedSnapshotsTest-67549343-project-admin] Task: {'id': task-2087084, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1952.167614] env[63279]: DEBUG oslo_concurrency.lockutils [None req-6f8ada2f-3ee5-41f1-9418-e9beb222761d tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Acquiring lock "ff2f355a-9687-4491-b243-6133e4b7b866" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1952.167614] env[63279]: DEBUG oslo_concurrency.lockutils [None req-6f8ada2f-3ee5-41f1-9418-e9beb222761d tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Lock "ff2f355a-9687-4491-b243-6133e4b7b866" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1952.167614] env[63279]: DEBUG oslo_concurrency.lockutils [None req-6f8ada2f-3ee5-41f1-9418-e9beb222761d tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Acquiring lock "ff2f355a-9687-4491-b243-6133e4b7b866-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1952.167868] env[63279]: DEBUG oslo_concurrency.lockutils [None req-6f8ada2f-3ee5-41f1-9418-e9beb222761d tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Lock "ff2f355a-9687-4491-b243-6133e4b7b866-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1952.167868] env[63279]: DEBUG oslo_concurrency.lockutils [None req-6f8ada2f-3ee5-41f1-9418-e9beb222761d tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Lock "ff2f355a-9687-4491-b243-6133e4b7b866-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1952.169991] env[63279]: INFO nova.compute.manager [None req-6f8ada2f-3ee5-41f1-9418-e9beb222761d tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] [instance: ff2f355a-9687-4491-b243-6133e4b7b866] Terminating instance [ 1952.238744] env[63279]: DEBUG oslo_vmware.api [None req-cd3a3546-110f-4a9d-aad2-eb6e674d439f tempest-AttachInterfacesUnderV243Test-18748603 tempest-AttachInterfacesUnderV243Test-18748603-project-member] Task: {'id': task-2087085, 'name': PowerOffVM_Task, 'duration_secs': 0.572249} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1952.241598] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-cd3a3546-110f-4a9d-aad2-eb6e674d439f tempest-AttachInterfacesUnderV243Test-18748603 tempest-AttachInterfacesUnderV243Test-18748603-project-member] [instance: fcc5a636-554f-424e-a604-a8e7bd7cf574] Powered off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1952.241856] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-cd3a3546-110f-4a9d-aad2-eb6e674d439f tempest-AttachInterfacesUnderV243Test-18748603 tempest-AttachInterfacesUnderV243Test-18748603-project-member] [instance: fcc5a636-554f-424e-a604-a8e7bd7cf574] Unregistering the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1952.243321] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1e43c43d-d3bd-4532-966a-4167145c22dd {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1952.318785] env[63279]: DEBUG nova.scheduler.client.report [None req-8ff9a3bb-99fe-487d-a326-1ba0f47e3992 tempest-ServersAdminNegativeTestJSON-968304097 tempest-ServersAdminNegativeTestJSON-968304097-project-member] Inventory has not changed for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1952.347841] env[63279]: DEBUG nova.network.neutron [-] [instance: 1a55008a-4d8c-403d-a8f4-966aa7346f4c] Updating instance_info_cache with network_info: [] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1952.351819] env[63279]: DEBUG nova.network.neutron [-] [instance: ecedded1-7169-49a4-8a9e-2fe4086db986] Updating instance_info_cache with network_info: [] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1952.356955] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-cd3a3546-110f-4a9d-aad2-eb6e674d439f tempest-AttachInterfacesUnderV243Test-18748603 tempest-AttachInterfacesUnderV243Test-18748603-project-member] [instance: fcc5a636-554f-424e-a604-a8e7bd7cf574] Unregistered the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1952.356955] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-cd3a3546-110f-4a9d-aad2-eb6e674d439f tempest-AttachInterfacesUnderV243Test-18748603 tempest-AttachInterfacesUnderV243Test-18748603-project-member] [instance: fcc5a636-554f-424e-a604-a8e7bd7cf574] Deleting contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1952.356955] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-cd3a3546-110f-4a9d-aad2-eb6e674d439f tempest-AttachInterfacesUnderV243Test-18748603 tempest-AttachInterfacesUnderV243Test-18748603-project-member] Deleting the datastore file [datastore1] fcc5a636-554f-424e-a604-a8e7bd7cf574 {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1952.356955] env[63279]: 
DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5a6d746a-5520-4194-8cc1-58c00d9d029a {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1952.363053] env[63279]: DEBUG oslo_vmware.api [None req-cd3a3546-110f-4a9d-aad2-eb6e674d439f tempest-AttachInterfacesUnderV243Test-18748603 tempest-AttachInterfacesUnderV243Test-18748603-project-member] Waiting for the task: (returnval){ [ 1952.363053] env[63279]: value = "task-2087087" [ 1952.363053] env[63279]: _type = "Task" [ 1952.363053] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1952.372961] env[63279]: DEBUG oslo_vmware.api [None req-cd3a3546-110f-4a9d-aad2-eb6e674d439f tempest-AttachInterfacesUnderV243Test-18748603 tempest-AttachInterfacesUnderV243Test-18748603-project-member] Task: {'id': task-2087087, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1952.383149] env[63279]: DEBUG oslo_concurrency.lockutils [None req-f3600cbc-9fef-45fa-b2de-6e5d0ca54cdb tempest-ServerRescueTestJSONUnderV235-1989684258 tempest-ServerRescueTestJSONUnderV235-1989684258-project-member] Acquiring lock "refresh_cache-044335c7-ce3b-4b4a-b1dc-8b9acec538b4" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1952.383281] env[63279]: DEBUG oslo_concurrency.lockutils [None req-f3600cbc-9fef-45fa-b2de-6e5d0ca54cdb tempest-ServerRescueTestJSONUnderV235-1989684258 tempest-ServerRescueTestJSONUnderV235-1989684258-project-member] Acquired lock "refresh_cache-044335c7-ce3b-4b4a-b1dc-8b9acec538b4" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1952.383419] env[63279]: DEBUG nova.network.neutron [None req-f3600cbc-9fef-45fa-b2de-6e5d0ca54cdb tempest-ServerRescueTestJSONUnderV235-1989684258 tempest-ServerRescueTestJSONUnderV235-1989684258-project-member] [instance: 044335c7-ce3b-4b4a-b1dc-8b9acec538b4] Building network info cache for instance {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1952.446206] env[63279]: DEBUG oslo_vmware.api [None req-1e3daa27-53b9-4834-adeb-4cde61720229 tempest-ServersNegativeTestMultiTenantJSON-98060902 tempest-ServersNegativeTestMultiTenantJSON-98060902-project-member] Task: {'id': task-2087082, 'name': ReconfigVM_Task, 'duration_secs': 1.42013} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1952.446477] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-1e3daa27-53b9-4834-adeb-4cde61720229 tempest-ServersNegativeTestMultiTenantJSON-98060902 tempest-ServersNegativeTestMultiTenantJSON-98060902-project-member] [instance: cf1b70af-335d-404b-bb4f-fe082dd6f450] Reconfigured VM instance instance-00000026 to attach disk [datastore1] cf1b70af-335d-404b-bb4f-fe082dd6f450/cf1b70af-335d-404b-bb4f-fe082dd6f450.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1952.447177] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-5acbf98a-1f19-41cc-87b7-691b0157d4e6 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1952.453855] env[63279]: DEBUG oslo_vmware.api [None req-1e3daa27-53b9-4834-adeb-4cde61720229 tempest-ServersNegativeTestMultiTenantJSON-98060902 tempest-ServersNegativeTestMultiTenantJSON-98060902-project-member] Waiting for the task: (returnval){ [ 1952.453855] env[63279]: value = "task-2087088" [ 1952.453855] env[63279]: _type = "Task" [ 1952.453855] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1952.474836] env[63279]: DEBUG oslo_vmware.api [None req-a527ddef-d877-4b24-a0b1-c60958f9b4c7 tempest-ServerAddressesNegativeTestJSON-377386487 tempest-ServerAddressesNegativeTestJSON-377386487-project-member] Task: {'id': task-2087083, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.045242} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1952.474836] env[63279]: DEBUG oslo_vmware.api [None req-1e3daa27-53b9-4834-adeb-4cde61720229 tempest-ServersNegativeTestMultiTenantJSON-98060902 tempest-ServersNegativeTestMultiTenantJSON-98060902-project-member] Task: {'id': task-2087088, 'name': Rename_Task} progress is 6%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1952.474979] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-a527ddef-d877-4b24-a0b1-c60958f9b4c7 tempest-ServerAddressesNegativeTestJSON-377386487 tempest-ServerAddressesNegativeTestJSON-377386487-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] c287072d-0ce9-4075-8895-0f64326ac303/c287072d-0ce9-4075-8895-0f64326ac303.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1952.475166] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-a527ddef-d877-4b24-a0b1-c60958f9b4c7 tempest-ServerAddressesNegativeTestJSON-377386487 tempest-ServerAddressesNegativeTestJSON-377386487-project-member] [instance: c287072d-0ce9-4075-8895-0f64326ac303] Extending root virtual disk to 1048576 {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1952.475473] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-3b2c1f7e-bea6-46a8-9fb0-15519eff8714 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1952.484570] env[63279]: DEBUG oslo_vmware.api [None req-a527ddef-d877-4b24-a0b1-c60958f9b4c7 tempest-ServerAddressesNegativeTestJSON-377386487 tempest-ServerAddressesNegativeTestJSON-377386487-project-member] Waiting for the task: (returnval){ [ 1952.484570] env[63279]: value = "task-2087089" [ 1952.484570] env[63279]: _type = "Task" [ 1952.484570] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1952.499372] env[63279]: DEBUG oslo_vmware.api [None req-a527ddef-d877-4b24-a0b1-c60958f9b4c7 tempest-ServerAddressesNegativeTestJSON-377386487 tempest-ServerAddressesNegativeTestJSON-377386487-project-member] Task: {'id': task-2087089, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1952.576243] env[63279]: DEBUG oslo_vmware.api [None req-9a5f5cc6-f79e-4299-9ca0-1f387568bf34 tempest-VolumesAssistedSnapshotsTest-67549343 tempest-VolumesAssistedSnapshotsTest-67549343-project-admin] Task: {'id': task-2087084, 'name': ReconfigVM_Task, 'duration_secs': 1.341343} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1952.576243] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-9a5f5cc6-f79e-4299-9ca0-1f387568bf34 tempest-VolumesAssistedSnapshotsTest-67549343 tempest-VolumesAssistedSnapshotsTest-67549343-project-admin] [instance: 5d4909ea-396c-45ba-9ff5-acb8576150b3] Reconfigured VM instance instance-00000010 to attach disk [datastore1] volume-977ada7e-edf6-44b6-8cb5-41e0baf8daf1/volume-977ada7e-edf6-44b6-8cb5-41e0baf8daf1.vmdk or device None with type thin {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1952.581246] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2cef531c-bdb7-4f4f-963c-46471d544077 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1952.591862] env[63279]: DEBUG nova.network.neutron [req-0e1244de-30b6-4e05-a8b9-32382b5d7cbd req-69a3a5af-fdf6-4a10-b600-7396d6165c71 service nova] [instance: c287072d-0ce9-4075-8895-0f64326ac303] Updated VIF entry in instance network info cache for port b6a4dca7-a0ba-4270-8c1f-2afac4d449bb. {{(pid=63279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1952.592231] env[63279]: DEBUG nova.network.neutron [req-0e1244de-30b6-4e05-a8b9-32382b5d7cbd req-69a3a5af-fdf6-4a10-b600-7396d6165c71 service nova] [instance: c287072d-0ce9-4075-8895-0f64326ac303] Updating instance_info_cache with network_info: [{"id": "b6a4dca7-a0ba-4270-8c1f-2afac4d449bb", "address": "fa:16:3e:78:ce:e6", "network": {"id": "f1132cd7-bc01-4f0f-a7d3-e9d52254864b", "bridge": "br-int", "label": "tempest-ServerAddressesNegativeTestJSON-852485082-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7a6f286a404d44aab6d17264b25f49f0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "459b8c74-0aa6-42b6-996a-42b1c5d7e5c6", "external-id": "nsx-vlan-transportzone-467", "segmentation_id": 467, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb6a4dca7-a0", "ovs_interfaceid": "b6a4dca7-a0ba-4270-8c1f-2afac4d449bb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1952.600937] env[63279]: DEBUG nova.compute.manager [req-bba88562-7346-452c-9979-97ebf5897009 req-79ca59bb-5880-4221-9d84-4d1d19361d8c service nova] [instance: 044335c7-ce3b-4b4a-b1dc-8b9acec538b4] Received event network-vif-plugged-8aa70697-1a19-4867-93bc-f264c16829ce {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 1952.600937] env[63279]: DEBUG oslo_concurrency.lockutils [req-bba88562-7346-452c-9979-97ebf5897009 req-79ca59bb-5880-4221-9d84-4d1d19361d8c service nova] Acquiring lock "044335c7-ce3b-4b4a-b1dc-8b9acec538b4-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63279) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1952.600937] env[63279]: DEBUG oslo_concurrency.lockutils [req-bba88562-7346-452c-9979-97ebf5897009 req-79ca59bb-5880-4221-9d84-4d1d19361d8c service nova] Lock "044335c7-ce3b-4b4a-b1dc-8b9acec538b4-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1952.600937] env[63279]: DEBUG oslo_concurrency.lockutils [req-bba88562-7346-452c-9979-97ebf5897009 req-79ca59bb-5880-4221-9d84-4d1d19361d8c service nova] Lock "044335c7-ce3b-4b4a-b1dc-8b9acec538b4-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1952.600937] env[63279]: DEBUG nova.compute.manager [req-bba88562-7346-452c-9979-97ebf5897009 req-79ca59bb-5880-4221-9d84-4d1d19361d8c service nova] [instance: 044335c7-ce3b-4b4a-b1dc-8b9acec538b4] No waiting events found dispatching network-vif-plugged-8aa70697-1a19-4867-93bc-f264c16829ce {{(pid=63279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1952.601305] env[63279]: WARNING nova.compute.manager [req-bba88562-7346-452c-9979-97ebf5897009 req-79ca59bb-5880-4221-9d84-4d1d19361d8c service nova] [instance: 044335c7-ce3b-4b4a-b1dc-8b9acec538b4] Received unexpected event network-vif-plugged-8aa70697-1a19-4867-93bc-f264c16829ce for instance with vm_state building and task_state spawning. [ 1952.601305] env[63279]: DEBUG nova.compute.manager [req-bba88562-7346-452c-9979-97ebf5897009 req-79ca59bb-5880-4221-9d84-4d1d19361d8c service nova] [instance: 1a55008a-4d8c-403d-a8f4-966aa7346f4c] Received event network-vif-deleted-02e5d9ab-c16c-4141-a131-b7e9f2a2a194 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 1952.601305] env[63279]: DEBUG nova.compute.manager [req-bba88562-7346-452c-9979-97ebf5897009 req-79ca59bb-5880-4221-9d84-4d1d19361d8c service nova] [instance: 044335c7-ce3b-4b4a-b1dc-8b9acec538b4] Received event network-changed-8aa70697-1a19-4867-93bc-f264c16829ce {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 1952.601305] env[63279]: DEBUG nova.compute.manager [req-bba88562-7346-452c-9979-97ebf5897009 req-79ca59bb-5880-4221-9d84-4d1d19361d8c service nova] [instance: 044335c7-ce3b-4b4a-b1dc-8b9acec538b4] Refreshing instance network info cache due to event network-changed-8aa70697-1a19-4867-93bc-f264c16829ce. {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11655}} [ 1952.601473] env[63279]: DEBUG oslo_concurrency.lockutils [req-bba88562-7346-452c-9979-97ebf5897009 req-79ca59bb-5880-4221-9d84-4d1d19361d8c service nova] Acquiring lock "refresh_cache-044335c7-ce3b-4b4a-b1dc-8b9acec538b4" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1952.607176] env[63279]: DEBUG oslo_vmware.api [None req-9a5f5cc6-f79e-4299-9ca0-1f387568bf34 tempest-VolumesAssistedSnapshotsTest-67549343 tempest-VolumesAssistedSnapshotsTest-67549343-project-admin] Waiting for the task: (returnval){ [ 1952.607176] env[63279]: value = "task-2087090" [ 1952.607176] env[63279]: _type = "Task" [ 1952.607176] env[63279]: } to complete. 
{{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1952.621377] env[63279]: DEBUG oslo_vmware.api [None req-9a5f5cc6-f79e-4299-9ca0-1f387568bf34 tempest-VolumesAssistedSnapshotsTest-67549343 tempest-VolumesAssistedSnapshotsTest-67549343-project-admin] Task: {'id': task-2087090, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1952.673480] env[63279]: DEBUG nova.compute.manager [None req-6f8ada2f-3ee5-41f1-9418-e9beb222761d tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] [instance: ff2f355a-9687-4491-b243-6133e4b7b866] Start destroying the instance on the hypervisor. {{(pid=63279) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1952.673697] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-6f8ada2f-3ee5-41f1-9418-e9beb222761d tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] [instance: ff2f355a-9687-4491-b243-6133e4b7b866] Destroying instance {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1952.675740] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47519d99-39f3-4263-bab5-eb006df0a779 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1952.687344] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-6f8ada2f-3ee5-41f1-9418-e9beb222761d tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] [instance: ff2f355a-9687-4491-b243-6133e4b7b866] Powering off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1952.687663] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a3d8ab56-c6cf-43fc-8fa2-9b8e00bee29e {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1952.693762] env[63279]: DEBUG oslo_vmware.api [None req-6f8ada2f-3ee5-41f1-9418-e9beb222761d tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Waiting for the task: (returnval){ [ 1952.693762] env[63279]: value = "task-2087091" [ 1952.693762] env[63279]: _type = "Task" [ 1952.693762] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1952.701937] env[63279]: DEBUG oslo_vmware.api [None req-6f8ada2f-3ee5-41f1-9418-e9beb222761d tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Task: {'id': task-2087091, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1952.826843] env[63279]: DEBUG oslo_concurrency.lockutils [None req-8ff9a3bb-99fe-487d-a326-1ba0f47e3992 tempest-ServersAdminNegativeTestJSON-968304097 tempest-ServersAdminNegativeTestJSON-968304097-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.312s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1952.829406] env[63279]: DEBUG oslo_concurrency.lockutils [None req-c91533fe-264f-4fd8-930f-c4d6558a1a1d tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 26.746s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1952.829686] env[63279]: DEBUG nova.objects.instance [None req-c91533fe-264f-4fd8-930f-c4d6558a1a1d tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Lazy-loading 'resources' on Instance uuid 5572bb1c-b378-4531-8459-10c2a2b7afdf {{(pid=63279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1952.853019] env[63279]: INFO nova.compute.manager [-] [instance: 1a55008a-4d8c-403d-a8f4-966aa7346f4c] Took 2.46 seconds to deallocate network for instance. [ 1952.855223] env[63279]: INFO nova.compute.manager [-] [instance: ecedded1-7169-49a4-8a9e-2fe4086db986] Took 1.64 seconds to deallocate network for instance. [ 1952.860622] env[63279]: INFO nova.scheduler.client.report [None req-8ff9a3bb-99fe-487d-a326-1ba0f47e3992 tempest-ServersAdminNegativeTestJSON-968304097 tempest-ServersAdminNegativeTestJSON-968304097-project-member] Deleted allocations for instance 0224e4ea-c13c-4abd-9626-6038c0bbe4e9 [ 1952.877419] env[63279]: DEBUG oslo_vmware.api [None req-cd3a3546-110f-4a9d-aad2-eb6e674d439f tempest-AttachInterfacesUnderV243Test-18748603 tempest-AttachInterfacesUnderV243Test-18748603-project-member] Task: {'id': task-2087087, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.160747} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1952.877757] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-cd3a3546-110f-4a9d-aad2-eb6e674d439f tempest-AttachInterfacesUnderV243Test-18748603 tempest-AttachInterfacesUnderV243Test-18748603-project-member] Deleted the datastore file {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1952.877987] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-cd3a3546-110f-4a9d-aad2-eb6e674d439f tempest-AttachInterfacesUnderV243Test-18748603 tempest-AttachInterfacesUnderV243Test-18748603-project-member] [instance: fcc5a636-554f-424e-a604-a8e7bd7cf574] Deleted contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1952.878273] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-cd3a3546-110f-4a9d-aad2-eb6e674d439f tempest-AttachInterfacesUnderV243Test-18748603 tempest-AttachInterfacesUnderV243Test-18748603-project-member] [instance: fcc5a636-554f-424e-a604-a8e7bd7cf574] Instance destroyed {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1952.878610] env[63279]: INFO nova.compute.manager [None req-cd3a3546-110f-4a9d-aad2-eb6e674d439f tempest-AttachInterfacesUnderV243Test-18748603 tempest-AttachInterfacesUnderV243Test-18748603-project-member] [instance: fcc5a636-554f-424e-a604-a8e7bd7cf574] Took 1.69 seconds to destroy the instance on the hypervisor. [ 1952.878774] env[63279]: DEBUG oslo.service.loopingcall [None req-cd3a3546-110f-4a9d-aad2-eb6e674d439f tempest-AttachInterfacesUnderV243Test-18748603 tempest-AttachInterfacesUnderV243Test-18748603-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1952.879654] env[63279]: DEBUG nova.compute.manager [-] [instance: fcc5a636-554f-424e-a604-a8e7bd7cf574] Deallocating network for instance {{(pid=63279) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1952.879725] env[63279]: DEBUG nova.network.neutron [-] [instance: fcc5a636-554f-424e-a604-a8e7bd7cf574] deallocate_for_instance() {{(pid=63279) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1952.930045] env[63279]: DEBUG nova.network.neutron [None req-f3600cbc-9fef-45fa-b2de-6e5d0ca54cdb tempest-ServerRescueTestJSONUnderV235-1989684258 tempest-ServerRescueTestJSONUnderV235-1989684258-project-member] [instance: 044335c7-ce3b-4b4a-b1dc-8b9acec538b4] Instance cache missing network info. {{(pid=63279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1952.963884] env[63279]: DEBUG oslo_vmware.api [None req-1e3daa27-53b9-4834-adeb-4cde61720229 tempest-ServersNegativeTestMultiTenantJSON-98060902 tempest-ServersNegativeTestMultiTenantJSON-98060902-project-member] Task: {'id': task-2087088, 'name': Rename_Task, 'duration_secs': 0.184429} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1952.966546] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-1e3daa27-53b9-4834-adeb-4cde61720229 tempest-ServersNegativeTestMultiTenantJSON-98060902 tempest-ServersNegativeTestMultiTenantJSON-98060902-project-member] [instance: cf1b70af-335d-404b-bb4f-fe082dd6f450] Powering on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1952.966911] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d7effe94-9b84-46d6-ad08-a5106913792d {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1952.974046] env[63279]: DEBUG oslo_vmware.api [None req-1e3daa27-53b9-4834-adeb-4cde61720229 tempest-ServersNegativeTestMultiTenantJSON-98060902 tempest-ServersNegativeTestMultiTenantJSON-98060902-project-member] Waiting for the task: (returnval){ [ 1952.974046] env[63279]: value = "task-2087092" [ 1952.974046] env[63279]: _type = "Task" [ 1952.974046] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1952.983853] env[63279]: DEBUG oslo_vmware.api [None req-1e3daa27-53b9-4834-adeb-4cde61720229 tempest-ServersNegativeTestMultiTenantJSON-98060902 tempest-ServersNegativeTestMultiTenantJSON-98060902-project-member] Task: {'id': task-2087092, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1953.001609] env[63279]: DEBUG oslo_vmware.api [None req-a527ddef-d877-4b24-a0b1-c60958f9b4c7 tempest-ServerAddressesNegativeTestJSON-377386487 tempest-ServerAddressesNegativeTestJSON-377386487-project-member] Task: {'id': task-2087089, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.071968} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1953.002276] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-a527ddef-d877-4b24-a0b1-c60958f9b4c7 tempest-ServerAddressesNegativeTestJSON-377386487 tempest-ServerAddressesNegativeTestJSON-377386487-project-member] [instance: c287072d-0ce9-4075-8895-0f64326ac303] Extended root virtual disk {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1953.002729] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2cf16a1-8e08-4e14-9e65-572fec9eec29 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1953.029138] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-a527ddef-d877-4b24-a0b1-c60958f9b4c7 tempest-ServerAddressesNegativeTestJSON-377386487 tempest-ServerAddressesNegativeTestJSON-377386487-project-member] [instance: c287072d-0ce9-4075-8895-0f64326ac303] Reconfiguring VM instance instance-00000027 to attach disk [datastore1] c287072d-0ce9-4075-8895-0f64326ac303/c287072d-0ce9-4075-8895-0f64326ac303.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1953.036018] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3759862f-5dcb-4192-aef0-b045a60ce4ab {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1953.058259] env[63279]: DEBUG oslo_vmware.api [None req-a527ddef-d877-4b24-a0b1-c60958f9b4c7 tempest-ServerAddressesNegativeTestJSON-377386487 tempest-ServerAddressesNegativeTestJSON-377386487-project-member] Waiting for the task: (returnval){ [ 1953.058259] env[63279]: value = "task-2087093" [ 1953.058259] env[63279]: _type = "Task" [ 1953.058259] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1953.067128] env[63279]: DEBUG oslo_vmware.api [None req-a527ddef-d877-4b24-a0b1-c60958f9b4c7 tempest-ServerAddressesNegativeTestJSON-377386487 tempest-ServerAddressesNegativeTestJSON-377386487-project-member] Task: {'id': task-2087093, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1953.104827] env[63279]: DEBUG oslo_concurrency.lockutils [req-0e1244de-30b6-4e05-a8b9-32382b5d7cbd req-69a3a5af-fdf6-4a10-b600-7396d6165c71 service nova] Releasing lock "refresh_cache-c287072d-0ce9-4075-8895-0f64326ac303" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1953.121532] env[63279]: DEBUG oslo_vmware.api [None req-9a5f5cc6-f79e-4299-9ca0-1f387568bf34 tempest-VolumesAssistedSnapshotsTest-67549343 tempest-VolumesAssistedSnapshotsTest-67549343-project-admin] Task: {'id': task-2087090, 'name': ReconfigVM_Task, 'duration_secs': 0.14371} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1953.122857] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-9a5f5cc6-f79e-4299-9ca0-1f387568bf34 tempest-VolumesAssistedSnapshotsTest-67549343 tempest-VolumesAssistedSnapshotsTest-67549343-project-admin] [instance: 5d4909ea-396c-45ba-9ff5-acb8576150b3] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-427603', 'volume_id': '977ada7e-edf6-44b6-8cb5-41e0baf8daf1', 'name': 'volume-977ada7e-edf6-44b6-8cb5-41e0baf8daf1', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '5d4909ea-396c-45ba-9ff5-acb8576150b3', 'attached_at': '', 'detached_at': '', 'volume_id': '977ada7e-edf6-44b6-8cb5-41e0baf8daf1', 'serial': '977ada7e-edf6-44b6-8cb5-41e0baf8daf1'} {{(pid=63279) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1953.205577] env[63279]: DEBUG oslo_vmware.api [None req-6f8ada2f-3ee5-41f1-9418-e9beb222761d tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Task: {'id': task-2087091, 'name': PowerOffVM_Task, 'duration_secs': 0.185254} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1953.205577] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-6f8ada2f-3ee5-41f1-9418-e9beb222761d tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] [instance: ff2f355a-9687-4491-b243-6133e4b7b866] Powered off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1953.205577] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-6f8ada2f-3ee5-41f1-9418-e9beb222761d tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] [instance: ff2f355a-9687-4491-b243-6133e4b7b866] Unregistering the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1953.205858] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-5ef60d1a-741f-4dbc-8a3f-d2baf32a7d45 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1953.228397] env[63279]: DEBUG nova.network.neutron [None req-f3600cbc-9fef-45fa-b2de-6e5d0ca54cdb tempest-ServerRescueTestJSONUnderV235-1989684258 tempest-ServerRescueTestJSONUnderV235-1989684258-project-member] [instance: 044335c7-ce3b-4b4a-b1dc-8b9acec538b4] Updating instance_info_cache with network_info: [{"id": "8aa70697-1a19-4867-93bc-f264c16829ce", "address": "fa:16:3e:27:56:a2", "network": {"id": "d065f991-0cec-4435-b5a1-ec50614f5486", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-780609743-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "2ea8d4af9e9b4df8930ef52450936dcf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3f9ffee1-f413-4f28-8bc4-3fb2cf299789", "external-id": "nsx-vlan-transportzone-599", 
"segmentation_id": 599, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8aa70697-1a", "ovs_interfaceid": "8aa70697-1a19-4867-93bc-f264c16829ce", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1953.327869] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-6f8ada2f-3ee5-41f1-9418-e9beb222761d tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] [instance: ff2f355a-9687-4491-b243-6133e4b7b866] Unregistered the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1953.328052] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-6f8ada2f-3ee5-41f1-9418-e9beb222761d tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] [instance: ff2f355a-9687-4491-b243-6133e4b7b866] Deleting contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1953.329016] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-6f8ada2f-3ee5-41f1-9418-e9beb222761d tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Deleting the datastore file [datastore1] ff2f355a-9687-4491-b243-6133e4b7b866 {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1953.329016] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6b87658f-422f-4216-93cd-cf9d73081d1e {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1953.341038] env[63279]: DEBUG oslo_vmware.api [None req-6f8ada2f-3ee5-41f1-9418-e9beb222761d tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Waiting for the task: (returnval){ [ 1953.341038] env[63279]: value = "task-2087095" [ 1953.341038] env[63279]: _type = "Task" [ 1953.341038] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1953.354017] env[63279]: DEBUG oslo_vmware.api [None req-6f8ada2f-3ee5-41f1-9418-e9beb222761d tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Task: {'id': task-2087095, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1953.364522] env[63279]: DEBUG oslo_concurrency.lockutils [None req-5b55e395-c1a6-4f2a-ad2f-483781b8bb67 tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1953.369553] env[63279]: DEBUG oslo_concurrency.lockutils [None req-fef68aa8-4469-4e71-b0f4-6afc3e4409aa tempest-ServersTestManualDisk-725019117 tempest-ServersTestManualDisk-725019117-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1953.372364] env[63279]: DEBUG oslo_concurrency.lockutils [None req-8ff9a3bb-99fe-487d-a326-1ba0f47e3992 tempest-ServersAdminNegativeTestJSON-968304097 tempest-ServersAdminNegativeTestJSON-968304097-project-member] Lock "0224e4ea-c13c-4abd-9626-6038c0bbe4e9" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 35.638s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1953.485686] env[63279]: DEBUG oslo_vmware.api [None req-1e3daa27-53b9-4834-adeb-4cde61720229 tempest-ServersNegativeTestMultiTenantJSON-98060902 tempest-ServersNegativeTestMultiTenantJSON-98060902-project-member] Task: {'id': task-2087092, 'name': PowerOnVM_Task, 'duration_secs': 0.485489} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1953.487514] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-1e3daa27-53b9-4834-adeb-4cde61720229 tempest-ServersNegativeTestMultiTenantJSON-98060902 tempest-ServersNegativeTestMultiTenantJSON-98060902-project-member] [instance: cf1b70af-335d-404b-bb4f-fe082dd6f450] Powered on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1953.488478] env[63279]: INFO nova.compute.manager [None req-1e3daa27-53b9-4834-adeb-4cde61720229 tempest-ServersNegativeTestMultiTenantJSON-98060902 tempest-ServersNegativeTestMultiTenantJSON-98060902-project-member] [instance: cf1b70af-335d-404b-bb4f-fe082dd6f450] Took 9.31 seconds to spawn the instance on the hypervisor. [ 1953.488478] env[63279]: DEBUG nova.compute.manager [None req-1e3daa27-53b9-4834-adeb-4cde61720229 tempest-ServersNegativeTestMultiTenantJSON-98060902 tempest-ServersNegativeTestMultiTenantJSON-98060902-project-member] [instance: cf1b70af-335d-404b-bb4f-fe082dd6f450] Checking state {{(pid=63279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1953.490466] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37f5e790-6fbc-4a81-9619-c2053f258d3b {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1953.569027] env[63279]: DEBUG oslo_vmware.api [None req-a527ddef-d877-4b24-a0b1-c60958f9b4c7 tempest-ServerAddressesNegativeTestJSON-377386487 tempest-ServerAddressesNegativeTestJSON-377386487-project-member] Task: {'id': task-2087093, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1953.730759] env[63279]: DEBUG oslo_concurrency.lockutils [None req-f3600cbc-9fef-45fa-b2de-6e5d0ca54cdb tempest-ServerRescueTestJSONUnderV235-1989684258 tempest-ServerRescueTestJSONUnderV235-1989684258-project-member] Releasing lock "refresh_cache-044335c7-ce3b-4b4a-b1dc-8b9acec538b4" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1953.731103] env[63279]: DEBUG nova.compute.manager [None req-f3600cbc-9fef-45fa-b2de-6e5d0ca54cdb tempest-ServerRescueTestJSONUnderV235-1989684258 tempest-ServerRescueTestJSONUnderV235-1989684258-project-member] [instance: 044335c7-ce3b-4b4a-b1dc-8b9acec538b4] Instance network_info: |[{"id": "8aa70697-1a19-4867-93bc-f264c16829ce", "address": "fa:16:3e:27:56:a2", "network": {"id": "d065f991-0cec-4435-b5a1-ec50614f5486", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-780609743-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "2ea8d4af9e9b4df8930ef52450936dcf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3f9ffee1-f413-4f28-8bc4-3fb2cf299789", "external-id": "nsx-vlan-transportzone-599", "segmentation_id": 599, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8aa70697-1a", "ovs_interfaceid": "8aa70697-1a19-4867-93bc-f264c16829ce", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1953.732668] env[63279]: DEBUG oslo_concurrency.lockutils [req-bba88562-7346-452c-9979-97ebf5897009 req-79ca59bb-5880-4221-9d84-4d1d19361d8c service nova] Acquired lock "refresh_cache-044335c7-ce3b-4b4a-b1dc-8b9acec538b4" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1953.732668] env[63279]: DEBUG nova.network.neutron [req-bba88562-7346-452c-9979-97ebf5897009 req-79ca59bb-5880-4221-9d84-4d1d19361d8c service nova] [instance: 044335c7-ce3b-4b4a-b1dc-8b9acec538b4] Refreshing network info cache for port 8aa70697-1a19-4867-93bc-f264c16829ce {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1953.733053] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-f3600cbc-9fef-45fa-b2de-6e5d0ca54cdb tempest-ServerRescueTestJSONUnderV235-1989684258 tempest-ServerRescueTestJSONUnderV235-1989684258-project-member] [instance: 044335c7-ce3b-4b4a-b1dc-8b9acec538b4] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:27:56:a2', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3f9ffee1-f413-4f28-8bc4-3fb2cf299789', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '8aa70697-1a19-4867-93bc-f264c16829ce', 'vif_model': 'vmxnet3'}] {{(pid=63279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1953.742180] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-f3600cbc-9fef-45fa-b2de-6e5d0ca54cdb tempest-ServerRescueTestJSONUnderV235-1989684258 
tempest-ServerRescueTestJSONUnderV235-1989684258-project-member] Creating folder: Project (2ea8d4af9e9b4df8930ef52450936dcf). Parent ref: group-v427491. {{(pid=63279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1953.746988] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-14593948-64b9-4585-bda6-34ff0fd29e2a {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1953.760244] env[63279]: INFO nova.virt.vmwareapi.vm_util [None req-f3600cbc-9fef-45fa-b2de-6e5d0ca54cdb tempest-ServerRescueTestJSONUnderV235-1989684258 tempest-ServerRescueTestJSONUnderV235-1989684258-project-member] Created folder: Project (2ea8d4af9e9b4df8930ef52450936dcf) in parent group-v427491. [ 1953.760244] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-f3600cbc-9fef-45fa-b2de-6e5d0ca54cdb tempest-ServerRescueTestJSONUnderV235-1989684258 tempest-ServerRescueTestJSONUnderV235-1989684258-project-member] Creating folder: Instances. Parent ref: group-v427610. {{(pid=63279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1953.762048] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-99c9c8a6-7203-408f-ad99-2deb28f7d6cd {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1953.772498] env[63279]: INFO nova.virt.vmwareapi.vm_util [None req-f3600cbc-9fef-45fa-b2de-6e5d0ca54cdb tempest-ServerRescueTestJSONUnderV235-1989684258 tempest-ServerRescueTestJSONUnderV235-1989684258-project-member] Created folder: Instances in parent group-v427610. [ 1953.772498] env[63279]: DEBUG oslo.service.loopingcall [None req-f3600cbc-9fef-45fa-b2de-6e5d0ca54cdb tempest-ServerRescueTestJSONUnderV235-1989684258 tempest-ServerRescueTestJSONUnderV235-1989684258-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1953.772960] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 044335c7-ce3b-4b4a-b1dc-8b9acec538b4] Creating VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1953.773659] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-10238051-09a7-4201-9e31-cda38a1a5f8b {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1953.794813] env[63279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1953.794813] env[63279]: value = "task-2087098" [ 1953.794813] env[63279]: _type = "Task" [ 1953.794813] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1953.806871] env[63279]: DEBUG nova.compute.manager [req-62a353aa-5bd7-4a21-85bf-36c7585a07d2 req-4e68e112-5955-44ff-a214-94f080c33fec service nova] [instance: ecedded1-7169-49a4-8a9e-2fe4086db986] Received event network-vif-deleted-bbcf46a8-cec2-4c54-8c6a-c1ba126b1676 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 1953.807540] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087098, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1953.850905] env[63279]: DEBUG oslo_vmware.api [None req-6f8ada2f-3ee5-41f1-9418-e9beb222761d tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Task: {'id': task-2087095, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.270152} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1953.851228] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-6f8ada2f-3ee5-41f1-9418-e9beb222761d tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Deleted the datastore file {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1953.851417] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-6f8ada2f-3ee5-41f1-9418-e9beb222761d tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] [instance: ff2f355a-9687-4491-b243-6133e4b7b866] Deleted contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1953.851647] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-6f8ada2f-3ee5-41f1-9418-e9beb222761d tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] [instance: ff2f355a-9687-4491-b243-6133e4b7b866] Instance destroyed {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1953.851831] env[63279]: INFO nova.compute.manager [None req-6f8ada2f-3ee5-41f1-9418-e9beb222761d tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] [instance: ff2f355a-9687-4491-b243-6133e4b7b866] Took 1.18 seconds to destroy the instance on the hypervisor. [ 1953.852012] env[63279]: DEBUG oslo.service.loopingcall [None req-6f8ada2f-3ee5-41f1-9418-e9beb222761d tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1953.855120] env[63279]: DEBUG nova.compute.manager [-] [instance: ff2f355a-9687-4491-b243-6133e4b7b866] Deallocating network for instance {{(pid=63279) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1953.855120] env[63279]: DEBUG nova.network.neutron [-] [instance: ff2f355a-9687-4491-b243-6133e4b7b866] deallocate_for_instance() {{(pid=63279) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1954.002694] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d34f47b4-c901-4696-a012-3896e63f0612 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1954.018442] env[63279]: INFO nova.compute.manager [None req-1e3daa27-53b9-4834-adeb-4cde61720229 tempest-ServersNegativeTestMultiTenantJSON-98060902 tempest-ServersNegativeTestMultiTenantJSON-98060902-project-member] [instance: cf1b70af-335d-404b-bb4f-fe082dd6f450] Took 48.80 seconds to build instance. 
[ 1954.020447] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c6dda62-3473-487b-8270-18c81634c76e {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1954.027188] env[63279]: DEBUG nova.network.neutron [-] [instance: fcc5a636-554f-424e-a604-a8e7bd7cf574] Updating instance_info_cache with network_info: [] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1954.059467] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4ed45cb-5970-4d7b-935f-88bb19cb9823 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1954.075109] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38c0a2f3-5c8c-44ac-bf41-abcfeb1b4dab {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1954.079517] env[63279]: DEBUG oslo_vmware.api [None req-a527ddef-d877-4b24-a0b1-c60958f9b4c7 tempest-ServerAddressesNegativeTestJSON-377386487 tempest-ServerAddressesNegativeTestJSON-377386487-project-member] Task: {'id': task-2087093, 'name': ReconfigVM_Task, 'duration_secs': 0.543586} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1954.079771] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-a527ddef-d877-4b24-a0b1-c60958f9b4c7 tempest-ServerAddressesNegativeTestJSON-377386487 tempest-ServerAddressesNegativeTestJSON-377386487-project-member] [instance: c287072d-0ce9-4075-8895-0f64326ac303] Reconfigured VM instance instance-00000027 to attach disk [datastore1] c287072d-0ce9-4075-8895-0f64326ac303/c287072d-0ce9-4075-8895-0f64326ac303.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1954.084024] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-30e9a4ac-c3d1-4f02-b02b-1efbc245f68a {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1954.091374] env[63279]: DEBUG nova.compute.provider_tree [None req-c91533fe-264f-4fd8-930f-c4d6558a1a1d tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Inventory has not changed in ProviderTree for provider: 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1954.095690] env[63279]: DEBUG oslo_vmware.api [None req-a527ddef-d877-4b24-a0b1-c60958f9b4c7 tempest-ServerAddressesNegativeTestJSON-377386487 tempest-ServerAddressesNegativeTestJSON-377386487-project-member] Waiting for the task: (returnval){ [ 1954.095690] env[63279]: value = "task-2087099" [ 1954.095690] env[63279]: _type = "Task" [ 1954.095690] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1954.105286] env[63279]: DEBUG oslo_vmware.api [None req-a527ddef-d877-4b24-a0b1-c60958f9b4c7 tempest-ServerAddressesNegativeTestJSON-377386487 tempest-ServerAddressesNegativeTestJSON-377386487-project-member] Task: {'id': task-2087099, 'name': Rename_Task} progress is 5%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1954.180954] env[63279]: DEBUG nova.objects.instance [None req-9a5f5cc6-f79e-4299-9ca0-1f387568bf34 tempest-VolumesAssistedSnapshotsTest-67549343 tempest-VolumesAssistedSnapshotsTest-67549343-project-admin] Lazy-loading 'flavor' on Instance uuid 5d4909ea-396c-45ba-9ff5-acb8576150b3 {{(pid=63279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1954.222498] env[63279]: DEBUG nova.compute.manager [None req-2bc8ba72-0021-44dc-a431-f9b77e745975 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] [instance: acf95fad-316c-4605-9008-24d4d7c05892] Checking state {{(pid=63279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1954.222498] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e0bc0d2-1898-4e9a-8fbd-eb06f0ed675c {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1954.305126] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087098, 'name': CreateVM_Task, 'duration_secs': 0.396663} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1954.305315] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 044335c7-ce3b-4b4a-b1dc-8b9acec538b4] Created VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1954.306101] env[63279]: DEBUG oslo_concurrency.lockutils [None req-f3600cbc-9fef-45fa-b2de-6e5d0ca54cdb tempest-ServerRescueTestJSONUnderV235-1989684258 tempest-ServerRescueTestJSONUnderV235-1989684258-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1954.306284] env[63279]: DEBUG oslo_concurrency.lockutils [None req-f3600cbc-9fef-45fa-b2de-6e5d0ca54cdb tempest-ServerRescueTestJSONUnderV235-1989684258 tempest-ServerRescueTestJSONUnderV235-1989684258-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1954.306975] env[63279]: DEBUG oslo_concurrency.lockutils [None req-f3600cbc-9fef-45fa-b2de-6e5d0ca54cdb tempest-ServerRescueTestJSONUnderV235-1989684258 tempest-ServerRescueTestJSONUnderV235-1989684258-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1954.306975] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fe72844c-b79f-45b8-814e-f645371add47 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1954.312245] env[63279]: DEBUG oslo_vmware.api [None req-f3600cbc-9fef-45fa-b2de-6e5d0ca54cdb tempest-ServerRescueTestJSONUnderV235-1989684258 tempest-ServerRescueTestJSONUnderV235-1989684258-project-member] Waiting for the task: (returnval){ [ 1954.312245] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52a4271b-e8ba-6590-8292-d2f2ca14f376" [ 1954.312245] env[63279]: _type = "Task" [ 1954.312245] env[63279]: } to complete. 
{{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1954.320668] env[63279]: DEBUG oslo_vmware.api [None req-f3600cbc-9fef-45fa-b2de-6e5d0ca54cdb tempest-ServerRescueTestJSONUnderV235-1989684258 tempest-ServerRescueTestJSONUnderV235-1989684258-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52a4271b-e8ba-6590-8292-d2f2ca14f376, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1954.482746] env[63279]: DEBUG nova.network.neutron [req-bba88562-7346-452c-9979-97ebf5897009 req-79ca59bb-5880-4221-9d84-4d1d19361d8c service nova] [instance: 044335c7-ce3b-4b4a-b1dc-8b9acec538b4] Updated VIF entry in instance network info cache for port 8aa70697-1a19-4867-93bc-f264c16829ce. {{(pid=63279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1954.482746] env[63279]: DEBUG nova.network.neutron [req-bba88562-7346-452c-9979-97ebf5897009 req-79ca59bb-5880-4221-9d84-4d1d19361d8c service nova] [instance: 044335c7-ce3b-4b4a-b1dc-8b9acec538b4] Updating instance_info_cache with network_info: [{"id": "8aa70697-1a19-4867-93bc-f264c16829ce", "address": "fa:16:3e:27:56:a2", "network": {"id": "d065f991-0cec-4435-b5a1-ec50614f5486", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-780609743-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "2ea8d4af9e9b4df8930ef52450936dcf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3f9ffee1-f413-4f28-8bc4-3fb2cf299789", "external-id": "nsx-vlan-transportzone-599", "segmentation_id": 599, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8aa70697-1a", "ovs_interfaceid": "8aa70697-1a19-4867-93bc-f264c16829ce", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1954.525219] env[63279]: DEBUG oslo_concurrency.lockutils [None req-1e3daa27-53b9-4834-adeb-4cde61720229 tempest-ServersNegativeTestMultiTenantJSON-98060902 tempest-ServersNegativeTestMultiTenantJSON-98060902-project-member] Lock "cf1b70af-335d-404b-bb4f-fe082dd6f450" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 98.479s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1954.532314] env[63279]: INFO nova.compute.manager [-] [instance: fcc5a636-554f-424e-a604-a8e7bd7cf574] Took 1.65 seconds to deallocate network for instance. 
[ 1954.597017] env[63279]: DEBUG nova.scheduler.client.report [None req-c91533fe-264f-4fd8-930f-c4d6558a1a1d tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Inventory has not changed for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1954.608517] env[63279]: DEBUG oslo_vmware.api [None req-a527ddef-d877-4b24-a0b1-c60958f9b4c7 tempest-ServerAddressesNegativeTestJSON-377386487 tempest-ServerAddressesNegativeTestJSON-377386487-project-member] Task: {'id': task-2087099, 'name': Rename_Task} progress is 14%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1954.679960] env[63279]: DEBUG nova.network.neutron [-] [instance: ff2f355a-9687-4491-b243-6133e4b7b866] Updating instance_info_cache with network_info: [] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1954.686309] env[63279]: DEBUG oslo_concurrency.lockutils [None req-9a5f5cc6-f79e-4299-9ca0-1f387568bf34 tempest-VolumesAssistedSnapshotsTest-67549343 tempest-VolumesAssistedSnapshotsTest-67549343-project-admin] Lock "5d4909ea-396c-45ba-9ff5-acb8576150b3" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 8.416s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1954.736263] env[63279]: INFO nova.compute.manager [None req-2bc8ba72-0021-44dc-a431-f9b77e745975 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] [instance: acf95fad-316c-4605-9008-24d4d7c05892] instance snapshotting [ 1954.739802] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-414935ed-a0fd-4882-8c50-24e3c11fe339 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1954.761625] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7bdfadd-7ca4-471e-9d14-697ee85a248e {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1954.827258] env[63279]: DEBUG oslo_vmware.api [None req-f3600cbc-9fef-45fa-b2de-6e5d0ca54cdb tempest-ServerRescueTestJSONUnderV235-1989684258 tempest-ServerRescueTestJSONUnderV235-1989684258-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52a4271b-e8ba-6590-8292-d2f2ca14f376, 'name': SearchDatastore_Task, 'duration_secs': 0.01068} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1954.827586] env[63279]: DEBUG oslo_concurrency.lockutils [None req-f3600cbc-9fef-45fa-b2de-6e5d0ca54cdb tempest-ServerRescueTestJSONUnderV235-1989684258 tempest-ServerRescueTestJSONUnderV235-1989684258-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1954.827820] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-f3600cbc-9fef-45fa-b2de-6e5d0ca54cdb tempest-ServerRescueTestJSONUnderV235-1989684258 tempest-ServerRescueTestJSONUnderV235-1989684258-project-member] [instance: 044335c7-ce3b-4b4a-b1dc-8b9acec538b4] Processing image 30887889-e45b-4f67-8b3c-16216e594a90 {{(pid=63279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1954.828623] env[63279]: DEBUG oslo_concurrency.lockutils [None req-f3600cbc-9fef-45fa-b2de-6e5d0ca54cdb tempest-ServerRescueTestJSONUnderV235-1989684258 tempest-ServerRescueTestJSONUnderV235-1989684258-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1954.828835] env[63279]: DEBUG oslo_concurrency.lockutils [None req-f3600cbc-9fef-45fa-b2de-6e5d0ca54cdb tempest-ServerRescueTestJSONUnderV235-1989684258 tempest-ServerRescueTestJSONUnderV235-1989684258-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1954.829060] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-f3600cbc-9fef-45fa-b2de-6e5d0ca54cdb tempest-ServerRescueTestJSONUnderV235-1989684258 tempest-ServerRescueTestJSONUnderV235-1989684258-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1954.829623] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9a8f7302-d20c-4a0a-81d6-d5d97b533295 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1954.839913] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-f3600cbc-9fef-45fa-b2de-6e5d0ca54cdb tempest-ServerRescueTestJSONUnderV235-1989684258 tempest-ServerRescueTestJSONUnderV235-1989684258-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1954.840332] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-f3600cbc-9fef-45fa-b2de-6e5d0ca54cdb tempest-ServerRescueTestJSONUnderV235-1989684258 tempest-ServerRescueTestJSONUnderV235-1989684258-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1954.841249] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-811c0404-4b34-4eb9-bcf8-b4a0abe4cb79 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1954.846931] env[63279]: DEBUG oslo_vmware.api [None req-f3600cbc-9fef-45fa-b2de-6e5d0ca54cdb tempest-ServerRescueTestJSONUnderV235-1989684258 tempest-ServerRescueTestJSONUnderV235-1989684258-project-member] Waiting for the task: (returnval){ [ 1954.846931] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52a9abdc-3a0a-434a-b0e9-cdc54839f32a" [ 1954.846931] env[63279]: _type = "Task" [ 1954.846931] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1954.861869] env[63279]: DEBUG oslo_vmware.api [None req-f3600cbc-9fef-45fa-b2de-6e5d0ca54cdb tempest-ServerRescueTestJSONUnderV235-1989684258 tempest-ServerRescueTestJSONUnderV235-1989684258-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52a9abdc-3a0a-434a-b0e9-cdc54839f32a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1954.878902] env[63279]: DEBUG nova.compute.manager [req-ad9c0957-4f00-4299-a545-cbc20df52639 req-3581e270-b5f0-4d94-a1b1-32b5e71b695c service nova] [instance: fcc5a636-554f-424e-a604-a8e7bd7cf574] Received event network-vif-deleted-e1063b89-be88-474a-a2ec-b61eb11cf9fe {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 1954.879307] env[63279]: DEBUG nova.compute.manager [req-ad9c0957-4f00-4299-a545-cbc20df52639 req-3581e270-b5f0-4d94-a1b1-32b5e71b695c service nova] [instance: ff2f355a-9687-4491-b243-6133e4b7b866] Received event network-vif-deleted-e3bb5b3e-6ab2-41f8-95fb-5e459b56bec8 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 1954.985537] env[63279]: DEBUG oslo_concurrency.lockutils [req-bba88562-7346-452c-9979-97ebf5897009 req-79ca59bb-5880-4221-9d84-4d1d19361d8c service nova] Releasing lock "refresh_cache-044335c7-ce3b-4b4a-b1dc-8b9acec538b4" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1955.030640] env[63279]: DEBUG nova.compute.manager [None req-2fef03c7-ebe2-4875-b420-6389c982d497 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] [instance: 1b2ca21b-feea-4fc1-9ddc-99f144e4241a] Starting instance... 
{{(pid=63279) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1955.040256] env[63279]: DEBUG oslo_concurrency.lockutils [None req-cd3a3546-110f-4a9d-aad2-eb6e674d439f tempest-AttachInterfacesUnderV243Test-18748603 tempest-AttachInterfacesUnderV243Test-18748603-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1955.104360] env[63279]: DEBUG oslo_concurrency.lockutils [None req-c91533fe-264f-4fd8-930f-c4d6558a1a1d tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.274s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1955.110992] env[63279]: DEBUG oslo_concurrency.lockutils [None req-9711dcd5-32f1-47ed-9935-39b9be9d1117 tempest-ServerTagsTestJSON-1345092865 tempest-ServerTagsTestJSON-1345092865-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 26.074s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1955.114563] env[63279]: INFO nova.compute.claims [None req-9711dcd5-32f1-47ed-9935-39b9be9d1117 tempest-ServerTagsTestJSON-1345092865 tempest-ServerTagsTestJSON-1345092865-project-member] [instance: df410051-d551-4a90-81f7-5630f5521a10] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1955.117488] env[63279]: DEBUG oslo_vmware.api [None req-a527ddef-d877-4b24-a0b1-c60958f9b4c7 tempest-ServerAddressesNegativeTestJSON-377386487 tempest-ServerAddressesNegativeTestJSON-377386487-project-member] Task: {'id': task-2087099, 'name': Rename_Task, 'duration_secs': 0.794228} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1955.119132] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-a527ddef-d877-4b24-a0b1-c60958f9b4c7 tempest-ServerAddressesNegativeTestJSON-377386487 tempest-ServerAddressesNegativeTestJSON-377386487-project-member] [instance: c287072d-0ce9-4075-8895-0f64326ac303] Powering on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1955.119576] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5fba2bc9-3bea-4b07-8f89-adba9fc38ce4 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1955.127904] env[63279]: DEBUG oslo_vmware.api [None req-a527ddef-d877-4b24-a0b1-c60958f9b4c7 tempest-ServerAddressesNegativeTestJSON-377386487 tempest-ServerAddressesNegativeTestJSON-377386487-project-member] Waiting for the task: (returnval){ [ 1955.127904] env[63279]: value = "task-2087100" [ 1955.127904] env[63279]: _type = "Task" [ 1955.127904] env[63279]: } to complete. 
{{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1955.133617] env[63279]: INFO nova.scheduler.client.report [None req-c91533fe-264f-4fd8-930f-c4d6558a1a1d tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Deleted allocations for instance 5572bb1c-b378-4531-8459-10c2a2b7afdf [ 1955.145904] env[63279]: DEBUG oslo_vmware.api [None req-a527ddef-d877-4b24-a0b1-c60958f9b4c7 tempest-ServerAddressesNegativeTestJSON-377386487 tempest-ServerAddressesNegativeTestJSON-377386487-project-member] Task: {'id': task-2087100, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1955.146304] env[63279]: DEBUG oslo_concurrency.lockutils [None req-c642d793-e6da-4184-9ed0-9d696e5f426f tempest-ServerMetadataNegativeTestJSON-1632547526 tempest-ServerMetadataNegativeTestJSON-1632547526-project-member] Acquiring lock "e04f06de-da6a-4562-a50a-ff16bf3a006e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1955.146631] env[63279]: DEBUG oslo_concurrency.lockutils [None req-c642d793-e6da-4184-9ed0-9d696e5f426f tempest-ServerMetadataNegativeTestJSON-1632547526 tempest-ServerMetadataNegativeTestJSON-1632547526-project-member] Lock "e04f06de-da6a-4562-a50a-ff16bf3a006e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1955.146927] env[63279]: DEBUG oslo_concurrency.lockutils [None req-c642d793-e6da-4184-9ed0-9d696e5f426f tempest-ServerMetadataNegativeTestJSON-1632547526 tempest-ServerMetadataNegativeTestJSON-1632547526-project-member] Acquiring lock "e04f06de-da6a-4562-a50a-ff16bf3a006e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1955.147249] env[63279]: DEBUG oslo_concurrency.lockutils [None req-c642d793-e6da-4184-9ed0-9d696e5f426f tempest-ServerMetadataNegativeTestJSON-1632547526 tempest-ServerMetadataNegativeTestJSON-1632547526-project-member] Lock "e04f06de-da6a-4562-a50a-ff16bf3a006e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1955.147525] env[63279]: DEBUG oslo_concurrency.lockutils [None req-c642d793-e6da-4184-9ed0-9d696e5f426f tempest-ServerMetadataNegativeTestJSON-1632547526 tempest-ServerMetadataNegativeTestJSON-1632547526-project-member] Lock "e04f06de-da6a-4562-a50a-ff16bf3a006e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1955.151915] env[63279]: INFO nova.compute.manager [None req-c642d793-e6da-4184-9ed0-9d696e5f426f tempest-ServerMetadataNegativeTestJSON-1632547526 tempest-ServerMetadataNegativeTestJSON-1632547526-project-member] [instance: e04f06de-da6a-4562-a50a-ff16bf3a006e] Terminating instance [ 1955.183017] env[63279]: INFO nova.compute.manager [-] [instance: ff2f355a-9687-4491-b243-6133e4b7b866] Took 1.33 seconds to 
deallocate network for instance. [ 1955.275921] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-2bc8ba72-0021-44dc-a431-f9b77e745975 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] [instance: acf95fad-316c-4605-9008-24d4d7c05892] Creating Snapshot of the VM instance {{(pid=63279) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1955.276292] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-f99c9fec-ebbd-4abe-b3c0-86272f60b735 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1955.288120] env[63279]: DEBUG oslo_vmware.api [None req-2bc8ba72-0021-44dc-a431-f9b77e745975 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] Waiting for the task: (returnval){ [ 1955.288120] env[63279]: value = "task-2087101" [ 1955.288120] env[63279]: _type = "Task" [ 1955.288120] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1955.303260] env[63279]: DEBUG oslo_vmware.api [None req-2bc8ba72-0021-44dc-a431-f9b77e745975 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] Task: {'id': task-2087101, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1955.360568] env[63279]: DEBUG oslo_concurrency.lockutils [None req-1ddab78a-6903-4b6e-8851-2899eff0407b tempest-VolumesAssistedSnapshotsTest-67549343 tempest-VolumesAssistedSnapshotsTest-67549343-project-admin] Acquiring lock "5d4909ea-396c-45ba-9ff5-acb8576150b3" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1955.360992] env[63279]: DEBUG oslo_concurrency.lockutils [None req-1ddab78a-6903-4b6e-8851-2899eff0407b tempest-VolumesAssistedSnapshotsTest-67549343 tempest-VolumesAssistedSnapshotsTest-67549343-project-admin] Lock "5d4909ea-396c-45ba-9ff5-acb8576150b3" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.001s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1955.362459] env[63279]: DEBUG oslo_vmware.api [None req-f3600cbc-9fef-45fa-b2de-6e5d0ca54cdb tempest-ServerRescueTestJSONUnderV235-1989684258 tempest-ServerRescueTestJSONUnderV235-1989684258-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52a9abdc-3a0a-434a-b0e9-cdc54839f32a, 'name': SearchDatastore_Task, 'duration_secs': 0.018249} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1955.363485] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1adb71d6-0451-4b4a-9c19-96d79321c8fa {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1955.371031] env[63279]: DEBUG oslo_vmware.api [None req-f3600cbc-9fef-45fa-b2de-6e5d0ca54cdb tempest-ServerRescueTestJSONUnderV235-1989684258 tempest-ServerRescueTestJSONUnderV235-1989684258-project-member] Waiting for the task: (returnval){ [ 1955.371031] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52fd1a09-a256-6889-96d8-8dafcc45fbea" [ 1955.371031] env[63279]: _type = "Task" [ 1955.371031] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1955.383911] env[63279]: DEBUG oslo_vmware.api [None req-f3600cbc-9fef-45fa-b2de-6e5d0ca54cdb tempest-ServerRescueTestJSONUnderV235-1989684258 tempest-ServerRescueTestJSONUnderV235-1989684258-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52fd1a09-a256-6889-96d8-8dafcc45fbea, 'name': SearchDatastore_Task, 'duration_secs': 0.010921} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1955.384203] env[63279]: DEBUG oslo_concurrency.lockutils [None req-f3600cbc-9fef-45fa-b2de-6e5d0ca54cdb tempest-ServerRescueTestJSONUnderV235-1989684258 tempest-ServerRescueTestJSONUnderV235-1989684258-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1955.384485] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-f3600cbc-9fef-45fa-b2de-6e5d0ca54cdb tempest-ServerRescueTestJSONUnderV235-1989684258 tempest-ServerRescueTestJSONUnderV235-1989684258-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] 044335c7-ce3b-4b4a-b1dc-8b9acec538b4/044335c7-ce3b-4b4a-b1dc-8b9acec538b4.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1955.384767] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2e2c0ba3-61f6-4cac-8de2-3a8340c7bbbd {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1955.393983] env[63279]: DEBUG oslo_vmware.api [None req-f3600cbc-9fef-45fa-b2de-6e5d0ca54cdb tempest-ServerRescueTestJSONUnderV235-1989684258 tempest-ServerRescueTestJSONUnderV235-1989684258-project-member] Waiting for the task: (returnval){ [ 1955.393983] env[63279]: value = "task-2087102" [ 1955.393983] env[63279]: _type = "Task" [ 1955.393983] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1955.406340] env[63279]: DEBUG oslo_vmware.api [None req-f3600cbc-9fef-45fa-b2de-6e5d0ca54cdb tempest-ServerRescueTestJSONUnderV235-1989684258 tempest-ServerRescueTestJSONUnderV235-1989684258-project-member] Task: {'id': task-2087102, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1955.561826] env[63279]: DEBUG oslo_concurrency.lockutils [None req-2fef03c7-ebe2-4875-b420-6389c982d497 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1955.643164] env[63279]: DEBUG oslo_concurrency.lockutils [None req-c91533fe-264f-4fd8-930f-c4d6558a1a1d tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Lock "5572bb1c-b378-4531-8459-10c2a2b7afdf" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 32.559s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1955.645513] env[63279]: DEBUG oslo_vmware.api [None req-a527ddef-d877-4b24-a0b1-c60958f9b4c7 tempest-ServerAddressesNegativeTestJSON-377386487 tempest-ServerAddressesNegativeTestJSON-377386487-project-member] Task: {'id': task-2087100, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1955.657623] env[63279]: DEBUG nova.compute.manager [None req-c642d793-e6da-4184-9ed0-9d696e5f426f tempest-ServerMetadataNegativeTestJSON-1632547526 tempest-ServerMetadataNegativeTestJSON-1632547526-project-member] [instance: e04f06de-da6a-4562-a50a-ff16bf3a006e] Start destroying the instance on the hypervisor. {{(pid=63279) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1955.657949] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-c642d793-e6da-4184-9ed0-9d696e5f426f tempest-ServerMetadataNegativeTestJSON-1632547526 tempest-ServerMetadataNegativeTestJSON-1632547526-project-member] [instance: e04f06de-da6a-4562-a50a-ff16bf3a006e] Destroying instance {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1955.659053] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4bd247f-da62-4e40-b58b-36b86c249404 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1955.670227] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-c642d793-e6da-4184-9ed0-9d696e5f426f tempest-ServerMetadataNegativeTestJSON-1632547526 tempest-ServerMetadataNegativeTestJSON-1632547526-project-member] [instance: e04f06de-da6a-4562-a50a-ff16bf3a006e] Powering off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1955.670540] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2c088ef8-72a6-49f0-8d11-47325d655737 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1955.679256] env[63279]: DEBUG oslo_vmware.api [None req-c642d793-e6da-4184-9ed0-9d696e5f426f tempest-ServerMetadataNegativeTestJSON-1632547526 tempest-ServerMetadataNegativeTestJSON-1632547526-project-member] Waiting for the task: (returnval){ [ 1955.679256] env[63279]: value = "task-2087103" [ 1955.679256] env[63279]: _type = "Task" [ 1955.679256] env[63279]: } to complete. 
{{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1955.690966] env[63279]: DEBUG oslo_concurrency.lockutils [None req-6f8ada2f-3ee5-41f1-9418-e9beb222761d tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1955.691298] env[63279]: DEBUG oslo_vmware.api [None req-c642d793-e6da-4184-9ed0-9d696e5f426f tempest-ServerMetadataNegativeTestJSON-1632547526 tempest-ServerMetadataNegativeTestJSON-1632547526-project-member] Task: {'id': task-2087103, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1955.785079] env[63279]: DEBUG oslo_concurrency.lockutils [None req-c68279eb-6991-4d9c-8f4c-d9227ffe7480 tempest-ServersNegativeTestMultiTenantJSON-98060902 tempest-ServersNegativeTestMultiTenantJSON-98060902-project-member] Acquiring lock "cf1b70af-335d-404b-bb4f-fe082dd6f450" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1955.785452] env[63279]: DEBUG oslo_concurrency.lockutils [None req-c68279eb-6991-4d9c-8f4c-d9227ffe7480 tempest-ServersNegativeTestMultiTenantJSON-98060902 tempest-ServersNegativeTestMultiTenantJSON-98060902-project-member] Lock "cf1b70af-335d-404b-bb4f-fe082dd6f450" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1955.785642] env[63279]: DEBUG oslo_concurrency.lockutils [None req-c68279eb-6991-4d9c-8f4c-d9227ffe7480 tempest-ServersNegativeTestMultiTenantJSON-98060902 tempest-ServersNegativeTestMultiTenantJSON-98060902-project-member] Acquiring lock "cf1b70af-335d-404b-bb4f-fe082dd6f450-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1955.785817] env[63279]: DEBUG oslo_concurrency.lockutils [None req-c68279eb-6991-4d9c-8f4c-d9227ffe7480 tempest-ServersNegativeTestMultiTenantJSON-98060902 tempest-ServersNegativeTestMultiTenantJSON-98060902-project-member] Lock "cf1b70af-335d-404b-bb4f-fe082dd6f450-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1955.786066] env[63279]: DEBUG oslo_concurrency.lockutils [None req-c68279eb-6991-4d9c-8f4c-d9227ffe7480 tempest-ServersNegativeTestMultiTenantJSON-98060902 tempest-ServersNegativeTestMultiTenantJSON-98060902-project-member] Lock "cf1b70af-335d-404b-bb4f-fe082dd6f450-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1955.788797] env[63279]: INFO nova.compute.manager [None req-c68279eb-6991-4d9c-8f4c-d9227ffe7480 tempest-ServersNegativeTestMultiTenantJSON-98060902 tempest-ServersNegativeTestMultiTenantJSON-98060902-project-member] [instance: 
cf1b70af-335d-404b-bb4f-fe082dd6f450] Terminating instance [ 1955.805493] env[63279]: DEBUG oslo_vmware.api [None req-2bc8ba72-0021-44dc-a431-f9b77e745975 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] Task: {'id': task-2087101, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1955.864789] env[63279]: INFO nova.compute.manager [None req-1ddab78a-6903-4b6e-8851-2899eff0407b tempest-VolumesAssistedSnapshotsTest-67549343 tempest-VolumesAssistedSnapshotsTest-67549343-project-admin] [instance: 5d4909ea-396c-45ba-9ff5-acb8576150b3] Detaching volume 977ada7e-edf6-44b6-8cb5-41e0baf8daf1 [ 1955.903820] env[63279]: DEBUG oslo_vmware.api [None req-f3600cbc-9fef-45fa-b2de-6e5d0ca54cdb tempest-ServerRescueTestJSONUnderV235-1989684258 tempest-ServerRescueTestJSONUnderV235-1989684258-project-member] Task: {'id': task-2087102, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.488142} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1955.904912] env[63279]: INFO nova.virt.block_device [None req-1ddab78a-6903-4b6e-8851-2899eff0407b tempest-VolumesAssistedSnapshotsTest-67549343 tempest-VolumesAssistedSnapshotsTest-67549343-project-admin] [instance: 5d4909ea-396c-45ba-9ff5-acb8576150b3] Attempting to driver detach volume 977ada7e-edf6-44b6-8cb5-41e0baf8daf1 from mountpoint /dev/sdb [ 1955.905145] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-1ddab78a-6903-4b6e-8851-2899eff0407b tempest-VolumesAssistedSnapshotsTest-67549343 tempest-VolumesAssistedSnapshotsTest-67549343-project-admin] [instance: 5d4909ea-396c-45ba-9ff5-acb8576150b3] Volume detach. 
Driver type: vmdk {{(pid=63279) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1955.905337] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-1ddab78a-6903-4b6e-8851-2899eff0407b tempest-VolumesAssistedSnapshotsTest-67549343 tempest-VolumesAssistedSnapshotsTest-67549343-project-admin] [instance: 5d4909ea-396c-45ba-9ff5-acb8576150b3] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-427603', 'volume_id': '977ada7e-edf6-44b6-8cb5-41e0baf8daf1', 'name': 'volume-977ada7e-edf6-44b6-8cb5-41e0baf8daf1', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '5d4909ea-396c-45ba-9ff5-acb8576150b3', 'attached_at': '', 'detached_at': '', 'volume_id': '977ada7e-edf6-44b6-8cb5-41e0baf8daf1', 'serial': '977ada7e-edf6-44b6-8cb5-41e0baf8daf1'} {{(pid=63279) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1955.905636] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-f3600cbc-9fef-45fa-b2de-6e5d0ca54cdb tempest-ServerRescueTestJSONUnderV235-1989684258 tempest-ServerRescueTestJSONUnderV235-1989684258-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] 044335c7-ce3b-4b4a-b1dc-8b9acec538b4/044335c7-ce3b-4b4a-b1dc-8b9acec538b4.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1955.905840] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-f3600cbc-9fef-45fa-b2de-6e5d0ca54cdb tempest-ServerRescueTestJSONUnderV235-1989684258 tempest-ServerRescueTestJSONUnderV235-1989684258-project-member] [instance: 044335c7-ce3b-4b4a-b1dc-8b9acec538b4] Extending root virtual disk to 1048576 {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1955.906592] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c46e256-404a-4833-b552-4c7b532de453 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1955.909260] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a36de274-262b-4130-ace5-ad9538fabe52 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1955.931817] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-013f6a26-7122-4dba-b2bd-f392107a0038 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1955.934623] env[63279]: DEBUG oslo_vmware.api [None req-f3600cbc-9fef-45fa-b2de-6e5d0ca54cdb tempest-ServerRescueTestJSONUnderV235-1989684258 tempest-ServerRescueTestJSONUnderV235-1989684258-project-member] Waiting for the task: (returnval){ [ 1955.934623] env[63279]: value = "task-2087104" [ 1955.934623] env[63279]: _type = "Task" [ 1955.934623] env[63279]: } to complete. 
{{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1955.940925] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6965ffa0-e94f-4d23-afe8-fe3c11e444e0 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1955.946750] env[63279]: DEBUG oslo_vmware.api [None req-f3600cbc-9fef-45fa-b2de-6e5d0ca54cdb tempest-ServerRescueTestJSONUnderV235-1989684258 tempest-ServerRescueTestJSONUnderV235-1989684258-project-member] Task: {'id': task-2087104, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1955.968717] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5344b6cb-4d57-49aa-be63-e3aa9484ed25 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1955.984363] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-1ddab78a-6903-4b6e-8851-2899eff0407b tempest-VolumesAssistedSnapshotsTest-67549343 tempest-VolumesAssistedSnapshotsTest-67549343-project-admin] The volume has not been displaced from its original location: [datastore1] volume-977ada7e-edf6-44b6-8cb5-41e0baf8daf1/volume-977ada7e-edf6-44b6-8cb5-41e0baf8daf1.vmdk. No consolidation needed. {{(pid=63279) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1955.989614] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-1ddab78a-6903-4b6e-8851-2899eff0407b tempest-VolumesAssistedSnapshotsTest-67549343 tempest-VolumesAssistedSnapshotsTest-67549343-project-admin] [instance: 5d4909ea-396c-45ba-9ff5-acb8576150b3] Reconfiguring VM instance instance-00000010 to detach disk 2001 {{(pid=63279) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1955.989941] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c67367c0-87bf-4019-a895-630d09328d4f {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1956.008493] env[63279]: DEBUG oslo_vmware.api [None req-1ddab78a-6903-4b6e-8851-2899eff0407b tempest-VolumesAssistedSnapshotsTest-67549343 tempest-VolumesAssistedSnapshotsTest-67549343-project-admin] Waiting for the task: (returnval){ [ 1956.008493] env[63279]: value = "task-2087105" [ 1956.008493] env[63279]: _type = "Task" [ 1956.008493] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1956.016277] env[63279]: DEBUG oslo_vmware.api [None req-1ddab78a-6903-4b6e-8851-2899eff0407b tempest-VolumesAssistedSnapshotsTest-67549343 tempest-VolumesAssistedSnapshotsTest-67549343-project-admin] Task: {'id': task-2087105, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1956.138937] env[63279]: DEBUG oslo_vmware.api [None req-a527ddef-d877-4b24-a0b1-c60958f9b4c7 tempest-ServerAddressesNegativeTestJSON-377386487 tempest-ServerAddressesNegativeTestJSON-377386487-project-member] Task: {'id': task-2087100, 'name': PowerOnVM_Task, 'duration_secs': 0.569491} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1956.139244] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-a527ddef-d877-4b24-a0b1-c60958f9b4c7 tempest-ServerAddressesNegativeTestJSON-377386487 tempest-ServerAddressesNegativeTestJSON-377386487-project-member] [instance: c287072d-0ce9-4075-8895-0f64326ac303] Powered on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1956.139447] env[63279]: INFO nova.compute.manager [None req-a527ddef-d877-4b24-a0b1-c60958f9b4c7 tempest-ServerAddressesNegativeTestJSON-377386487 tempest-ServerAddressesNegativeTestJSON-377386487-project-member] [instance: c287072d-0ce9-4075-8895-0f64326ac303] Took 8.85 seconds to spawn the instance on the hypervisor. [ 1956.139628] env[63279]: DEBUG nova.compute.manager [None req-a527ddef-d877-4b24-a0b1-c60958f9b4c7 tempest-ServerAddressesNegativeTestJSON-377386487 tempest-ServerAddressesNegativeTestJSON-377386487-project-member] [instance: c287072d-0ce9-4075-8895-0f64326ac303] Checking state {{(pid=63279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1956.140398] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d45fcbc-60de-40c3-a252-e13aca44c36a {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1956.189849] env[63279]: DEBUG oslo_vmware.api [None req-c642d793-e6da-4184-9ed0-9d696e5f426f tempest-ServerMetadataNegativeTestJSON-1632547526 tempest-ServerMetadataNegativeTestJSON-1632547526-project-member] Task: {'id': task-2087103, 'name': PowerOffVM_Task, 'duration_secs': 0.252608} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1956.190167] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-c642d793-e6da-4184-9ed0-9d696e5f426f tempest-ServerMetadataNegativeTestJSON-1632547526 tempest-ServerMetadataNegativeTestJSON-1632547526-project-member] [instance: e04f06de-da6a-4562-a50a-ff16bf3a006e] Powered off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1956.190342] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-c642d793-e6da-4184-9ed0-9d696e5f426f tempest-ServerMetadataNegativeTestJSON-1632547526 tempest-ServerMetadataNegativeTestJSON-1632547526-project-member] [instance: e04f06de-da6a-4562-a50a-ff16bf3a006e] Unregistering the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1956.191432] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9520c874-b7e0-4e99-878a-a5363c25c81d {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1956.300226] env[63279]: DEBUG nova.compute.manager [None req-c68279eb-6991-4d9c-8f4c-d9227ffe7480 tempest-ServersNegativeTestMultiTenantJSON-98060902 tempest-ServersNegativeTestMultiTenantJSON-98060902-project-member] [instance: cf1b70af-335d-404b-bb4f-fe082dd6f450] Start destroying the instance on the hypervisor. 
{{(pid=63279) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1956.300454] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-c68279eb-6991-4d9c-8f4c-d9227ffe7480 tempest-ServersNegativeTestMultiTenantJSON-98060902 tempest-ServersNegativeTestMultiTenantJSON-98060902-project-member] [instance: cf1b70af-335d-404b-bb4f-fe082dd6f450] Destroying instance {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1956.300959] env[63279]: DEBUG oslo_vmware.api [None req-2bc8ba72-0021-44dc-a431-f9b77e745975 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] Task: {'id': task-2087101, 'name': CreateSnapshot_Task, 'duration_secs': 0.643623} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1956.301744] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03912f47-c8cf-4c8e-920b-0b6dc20a86d5 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1956.305629] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-2bc8ba72-0021-44dc-a431-f9b77e745975 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] [instance: acf95fad-316c-4605-9008-24d4d7c05892] Created Snapshot of the VM instance {{(pid=63279) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1956.305876] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-491a576d-433c-40c0-901e-136641926135 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1956.312936] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-c642d793-e6da-4184-9ed0-9d696e5f426f tempest-ServerMetadataNegativeTestJSON-1632547526 tempest-ServerMetadataNegativeTestJSON-1632547526-project-member] [instance: e04f06de-da6a-4562-a50a-ff16bf3a006e] Unregistered the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1956.313380] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-c642d793-e6da-4184-9ed0-9d696e5f426f tempest-ServerMetadataNegativeTestJSON-1632547526 tempest-ServerMetadataNegativeTestJSON-1632547526-project-member] [instance: e04f06de-da6a-4562-a50a-ff16bf3a006e] Deleting contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1956.313576] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-c642d793-e6da-4184-9ed0-9d696e5f426f tempest-ServerMetadataNegativeTestJSON-1632547526 tempest-ServerMetadataNegativeTestJSON-1632547526-project-member] Deleting the datastore file [datastore1] e04f06de-da6a-4562-a50a-ff16bf3a006e {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1956.315332] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5880380e-ac21-484b-a981-d564f29718a6 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1956.327876] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-c68279eb-6991-4d9c-8f4c-d9227ffe7480 tempest-ServersNegativeTestMultiTenantJSON-98060902 tempest-ServersNegativeTestMultiTenantJSON-98060902-project-member] [instance: cf1b70af-335d-404b-bb4f-fe082dd6f450] Powering off the VM {{(pid=63279) power_off_instance 
/opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1956.333532] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-64fee55b-c87b-4952-9ce4-3a1ee6b0683d {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1956.342700] env[63279]: DEBUG oslo_vmware.api [None req-c68279eb-6991-4d9c-8f4c-d9227ffe7480 tempest-ServersNegativeTestMultiTenantJSON-98060902 tempest-ServersNegativeTestMultiTenantJSON-98060902-project-member] Waiting for the task: (returnval){ [ 1956.342700] env[63279]: value = "task-2087108" [ 1956.342700] env[63279]: _type = "Task" [ 1956.342700] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1956.343383] env[63279]: DEBUG oslo_vmware.api [None req-c642d793-e6da-4184-9ed0-9d696e5f426f tempest-ServerMetadataNegativeTestJSON-1632547526 tempest-ServerMetadataNegativeTestJSON-1632547526-project-member] Waiting for the task: (returnval){ [ 1956.343383] env[63279]: value = "task-2087107" [ 1956.343383] env[63279]: _type = "Task" [ 1956.343383] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1956.356997] env[63279]: DEBUG oslo_vmware.api [None req-c68279eb-6991-4d9c-8f4c-d9227ffe7480 tempest-ServersNegativeTestMultiTenantJSON-98060902 tempest-ServersNegativeTestMultiTenantJSON-98060902-project-member] Task: {'id': task-2087108, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1956.363302] env[63279]: DEBUG oslo_vmware.api [None req-c642d793-e6da-4184-9ed0-9d696e5f426f tempest-ServerMetadataNegativeTestJSON-1632547526 tempest-ServerMetadataNegativeTestJSON-1632547526-project-member] Task: {'id': task-2087107, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1956.446059] env[63279]: DEBUG oslo_vmware.api [None req-f3600cbc-9fef-45fa-b2de-6e5d0ca54cdb tempest-ServerRescueTestJSONUnderV235-1989684258 tempest-ServerRescueTestJSONUnderV235-1989684258-project-member] Task: {'id': task-2087104, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.101458} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1956.448826] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-f3600cbc-9fef-45fa-b2de-6e5d0ca54cdb tempest-ServerRescueTestJSONUnderV235-1989684258 tempest-ServerRescueTestJSONUnderV235-1989684258-project-member] [instance: 044335c7-ce3b-4b4a-b1dc-8b9acec538b4] Extended root virtual disk {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1956.450132] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c44f0a7-8503-42e1-800f-2ed4723d0336 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1956.474054] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-f3600cbc-9fef-45fa-b2de-6e5d0ca54cdb tempest-ServerRescueTestJSONUnderV235-1989684258 tempest-ServerRescueTestJSONUnderV235-1989684258-project-member] [instance: 044335c7-ce3b-4b4a-b1dc-8b9acec538b4] Reconfiguring VM instance instance-00000028 to attach disk [datastore1] 044335c7-ce3b-4b4a-b1dc-8b9acec538b4/044335c7-ce3b-4b4a-b1dc-8b9acec538b4.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1956.477534] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-352a04d4-c322-4676-854c-e2616d565a10 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1956.499513] env[63279]: DEBUG oslo_vmware.api [None req-f3600cbc-9fef-45fa-b2de-6e5d0ca54cdb tempest-ServerRescueTestJSONUnderV235-1989684258 tempest-ServerRescueTestJSONUnderV235-1989684258-project-member] Waiting for the task: (returnval){ [ 1956.499513] env[63279]: value = "task-2087109" [ 1956.499513] env[63279]: _type = "Task" [ 1956.499513] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1956.509998] env[63279]: DEBUG oslo_vmware.api [None req-f3600cbc-9fef-45fa-b2de-6e5d0ca54cdb tempest-ServerRescueTestJSONUnderV235-1989684258 tempest-ServerRescueTestJSONUnderV235-1989684258-project-member] Task: {'id': task-2087109, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1956.517970] env[63279]: DEBUG oslo_vmware.api [None req-1ddab78a-6903-4b6e-8851-2899eff0407b tempest-VolumesAssistedSnapshotsTest-67549343 tempest-VolumesAssistedSnapshotsTest-67549343-project-admin] Task: {'id': task-2087105, 'name': ReconfigVM_Task, 'duration_secs': 0.26473} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1956.518753] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-1ddab78a-6903-4b6e-8851-2899eff0407b tempest-VolumesAssistedSnapshotsTest-67549343 tempest-VolumesAssistedSnapshotsTest-67549343-project-admin] [instance: 5d4909ea-396c-45ba-9ff5-acb8576150b3] Reconfigured VM instance instance-00000010 to detach disk 2001 {{(pid=63279) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1956.526507] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-dbd0a6c6-f63e-44e4-82e4-4d9859e5934c {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1956.544033] env[63279]: DEBUG oslo_vmware.api [None req-1ddab78a-6903-4b6e-8851-2899eff0407b tempest-VolumesAssistedSnapshotsTest-67549343 tempest-VolumesAssistedSnapshotsTest-67549343-project-admin] Waiting for the task: (returnval){ [ 1956.544033] env[63279]: value = "task-2087110" [ 1956.544033] env[63279]: _type = "Task" [ 1956.544033] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1956.554421] env[63279]: DEBUG oslo_vmware.api [None req-1ddab78a-6903-4b6e-8851-2899eff0407b tempest-VolumesAssistedSnapshotsTest-67549343 tempest-VolumesAssistedSnapshotsTest-67549343-project-admin] Task: {'id': task-2087110, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1956.664544] env[63279]: INFO nova.compute.manager [None req-a527ddef-d877-4b24-a0b1-c60958f9b4c7 tempest-ServerAddressesNegativeTestJSON-377386487 tempest-ServerAddressesNegativeTestJSON-377386487-project-member] [instance: c287072d-0ce9-4075-8895-0f64326ac303] Took 49.53 seconds to build instance. 
[ 1956.745715] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f35ca977-ce08-40a2-9f41-c596e4a4a4f2 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1956.753820] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1018bfa0-19ee-4d76-b1d3-03c49281ed0e {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1956.784320] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90cdf0e1-7b0d-433b-8bf8-5ed5ddc4cbba {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1956.792099] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4273c969-a258-4195-8dbc-6f69a1b79de7 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1956.807976] env[63279]: DEBUG nova.compute.provider_tree [None req-9711dcd5-32f1-47ed-9935-39b9be9d1117 tempest-ServerTagsTestJSON-1345092865 tempest-ServerTagsTestJSON-1345092865-project-member] Inventory has not changed in ProviderTree for provider: 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1956.840374] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-2bc8ba72-0021-44dc-a431-f9b77e745975 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] [instance: acf95fad-316c-4605-9008-24d4d7c05892] Creating linked-clone VM from snapshot {{(pid=63279) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1956.840646] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-db0d37fc-6897-4745-a055-3cf591630db2 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1956.851085] env[63279]: DEBUG oslo_vmware.api [None req-2bc8ba72-0021-44dc-a431-f9b77e745975 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] Waiting for the task: (returnval){ [ 1956.851085] env[63279]: value = "task-2087111" [ 1956.851085] env[63279]: _type = "Task" [ 1956.851085] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1956.859508] env[63279]: DEBUG oslo_vmware.api [None req-c642d793-e6da-4184-9ed0-9d696e5f426f tempest-ServerMetadataNegativeTestJSON-1632547526 tempest-ServerMetadataNegativeTestJSON-1632547526-project-member] Task: {'id': task-2087107, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.203582} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1956.859758] env[63279]: DEBUG oslo_vmware.api [None req-c68279eb-6991-4d9c-8f4c-d9227ffe7480 tempest-ServersNegativeTestMultiTenantJSON-98060902 tempest-ServersNegativeTestMultiTenantJSON-98060902-project-member] Task: {'id': task-2087108, 'name': PowerOffVM_Task, 'duration_secs': 0.18477} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1956.860307] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-c642d793-e6da-4184-9ed0-9d696e5f426f tempest-ServerMetadataNegativeTestJSON-1632547526 tempest-ServerMetadataNegativeTestJSON-1632547526-project-member] Deleted the datastore file {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1956.860517] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-c642d793-e6da-4184-9ed0-9d696e5f426f tempest-ServerMetadataNegativeTestJSON-1632547526 tempest-ServerMetadataNegativeTestJSON-1632547526-project-member] [instance: e04f06de-da6a-4562-a50a-ff16bf3a006e] Deleted contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1956.860732] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-c642d793-e6da-4184-9ed0-9d696e5f426f tempest-ServerMetadataNegativeTestJSON-1632547526 tempest-ServerMetadataNegativeTestJSON-1632547526-project-member] [instance: e04f06de-da6a-4562-a50a-ff16bf3a006e] Instance destroyed {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1956.860990] env[63279]: INFO nova.compute.manager [None req-c642d793-e6da-4184-9ed0-9d696e5f426f tempest-ServerMetadataNegativeTestJSON-1632547526 tempest-ServerMetadataNegativeTestJSON-1632547526-project-member] [instance: e04f06de-da6a-4562-a50a-ff16bf3a006e] Took 1.20 seconds to destroy the instance on the hypervisor. [ 1956.861253] env[63279]: DEBUG oslo.service.loopingcall [None req-c642d793-e6da-4184-9ed0-9d696e5f426f tempest-ServerMetadataNegativeTestJSON-1632547526 tempest-ServerMetadataNegativeTestJSON-1632547526-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1956.861459] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-c68279eb-6991-4d9c-8f4c-d9227ffe7480 tempest-ServersNegativeTestMultiTenantJSON-98060902 tempest-ServersNegativeTestMultiTenantJSON-98060902-project-member] [instance: cf1b70af-335d-404b-bb4f-fe082dd6f450] Powered off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1956.861611] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-c68279eb-6991-4d9c-8f4c-d9227ffe7480 tempest-ServersNegativeTestMultiTenantJSON-98060902 tempest-ServersNegativeTestMultiTenantJSON-98060902-project-member] [instance: cf1b70af-335d-404b-bb4f-fe082dd6f450] Unregistering the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1956.864553] env[63279]: DEBUG nova.compute.manager [-] [instance: e04f06de-da6a-4562-a50a-ff16bf3a006e] Deallocating network for instance {{(pid=63279) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1956.864636] env[63279]: DEBUG nova.network.neutron [-] [instance: e04f06de-da6a-4562-a50a-ff16bf3a006e] deallocate_for_instance() {{(pid=63279) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1956.866102] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9982e61a-f712-41ec-9573-989f31447f03 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1956.867433] env[63279]: DEBUG oslo_vmware.api [None req-2bc8ba72-0021-44dc-a431-f9b77e745975 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] Task: {'id': task-2087111, 'name': CloneVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1957.009975] env[63279]: DEBUG oslo_vmware.api [None req-f3600cbc-9fef-45fa-b2de-6e5d0ca54cdb tempest-ServerRescueTestJSONUnderV235-1989684258 tempest-ServerRescueTestJSONUnderV235-1989684258-project-member] Task: {'id': task-2087109, 'name': ReconfigVM_Task, 'duration_secs': 0.312317} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1957.010795] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-f3600cbc-9fef-45fa-b2de-6e5d0ca54cdb tempest-ServerRescueTestJSONUnderV235-1989684258 tempest-ServerRescueTestJSONUnderV235-1989684258-project-member] [instance: 044335c7-ce3b-4b4a-b1dc-8b9acec538b4] Reconfigured VM instance instance-00000028 to attach disk [datastore1] 044335c7-ce3b-4b4a-b1dc-8b9acec538b4/044335c7-ce3b-4b4a-b1dc-8b9acec538b4.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1957.010967] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d270f3fd-b876-4be2-bf43-42c5884bb7d6 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1957.017601] env[63279]: DEBUG oslo_vmware.api [None req-f3600cbc-9fef-45fa-b2de-6e5d0ca54cdb tempest-ServerRescueTestJSONUnderV235-1989684258 tempest-ServerRescueTestJSONUnderV235-1989684258-project-member] Waiting for the task: (returnval){ [ 1957.017601] env[63279]: value = "task-2087113" [ 1957.017601] env[63279]: _type = "Task" [ 1957.017601] env[63279]: } to complete. 
{{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1957.026255] env[63279]: DEBUG oslo_vmware.api [None req-f3600cbc-9fef-45fa-b2de-6e5d0ca54cdb tempest-ServerRescueTestJSONUnderV235-1989684258 tempest-ServerRescueTestJSONUnderV235-1989684258-project-member] Task: {'id': task-2087113, 'name': Rename_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1957.054887] env[63279]: DEBUG oslo_vmware.api [None req-1ddab78a-6903-4b6e-8851-2899eff0407b tempest-VolumesAssistedSnapshotsTest-67549343 tempest-VolumesAssistedSnapshotsTest-67549343-project-admin] Task: {'id': task-2087110, 'name': ReconfigVM_Task, 'duration_secs': 0.161102} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1957.055217] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-1ddab78a-6903-4b6e-8851-2899eff0407b tempest-VolumesAssistedSnapshotsTest-67549343 tempest-VolumesAssistedSnapshotsTest-67549343-project-admin] [instance: 5d4909ea-396c-45ba-9ff5-acb8576150b3] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-427603', 'volume_id': '977ada7e-edf6-44b6-8cb5-41e0baf8daf1', 'name': 'volume-977ada7e-edf6-44b6-8cb5-41e0baf8daf1', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '5d4909ea-396c-45ba-9ff5-acb8576150b3', 'attached_at': '', 'detached_at': '', 'volume_id': '977ada7e-edf6-44b6-8cb5-41e0baf8daf1', 'serial': '977ada7e-edf6-44b6-8cb5-41e0baf8daf1'} {{(pid=63279) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1957.169465] env[63279]: DEBUG oslo_concurrency.lockutils [None req-a527ddef-d877-4b24-a0b1-c60958f9b4c7 tempest-ServerAddressesNegativeTestJSON-377386487 tempest-ServerAddressesNegativeTestJSON-377386487-project-member] Lock "c287072d-0ce9-4075-8895-0f64326ac303" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 97.877s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1957.181056] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-c68279eb-6991-4d9c-8f4c-d9227ffe7480 tempest-ServersNegativeTestMultiTenantJSON-98060902 tempest-ServersNegativeTestMultiTenantJSON-98060902-project-member] [instance: cf1b70af-335d-404b-bb4f-fe082dd6f450] Unregistered the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1957.181120] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-c68279eb-6991-4d9c-8f4c-d9227ffe7480 tempest-ServersNegativeTestMultiTenantJSON-98060902 tempest-ServersNegativeTestMultiTenantJSON-98060902-project-member] [instance: cf1b70af-335d-404b-bb4f-fe082dd6f450] Deleting contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1957.181602] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-c68279eb-6991-4d9c-8f4c-d9227ffe7480 tempest-ServersNegativeTestMultiTenantJSON-98060902 tempest-ServersNegativeTestMultiTenantJSON-98060902-project-member] Deleting the datastore file [datastore1] cf1b70af-335d-404b-bb4f-fe082dd6f450 {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1957.181602] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with 
opID=oslo.vmware-ebd44ba7-92f5-4b2a-8b5b-e85c405d113a {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1957.190463] env[63279]: DEBUG oslo_vmware.api [None req-c68279eb-6991-4d9c-8f4c-d9227ffe7480 tempest-ServersNegativeTestMultiTenantJSON-98060902 tempest-ServersNegativeTestMultiTenantJSON-98060902-project-member] Waiting for the task: (returnval){ [ 1957.190463] env[63279]: value = "task-2087114" [ 1957.190463] env[63279]: _type = "Task" [ 1957.190463] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1957.201886] env[63279]: DEBUG oslo_vmware.api [None req-c68279eb-6991-4d9c-8f4c-d9227ffe7480 tempest-ServersNegativeTestMultiTenantJSON-98060902 tempest-ServersNegativeTestMultiTenantJSON-98060902-project-member] Task: {'id': task-2087114, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1957.310833] env[63279]: DEBUG nova.scheduler.client.report [None req-9711dcd5-32f1-47ed-9935-39b9be9d1117 tempest-ServerTagsTestJSON-1345092865 tempest-ServerTagsTestJSON-1345092865-project-member] Inventory has not changed for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1957.363884] env[63279]: DEBUG oslo_vmware.api [None req-2bc8ba72-0021-44dc-a431-f9b77e745975 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] Task: {'id': task-2087111, 'name': CloneVM_Task} progress is 94%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1957.500141] env[63279]: DEBUG nova.compute.manager [req-fbed0dce-9849-46c0-bcba-c9dd8f6e0982 req-f73d81e1-57c6-47fc-b027-7121623bf996 service nova] [instance: e04f06de-da6a-4562-a50a-ff16bf3a006e] Received event network-vif-deleted-79f1cbee-348d-4f65-807f-291c2a96aa88 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 1957.500141] env[63279]: INFO nova.compute.manager [req-fbed0dce-9849-46c0-bcba-c9dd8f6e0982 req-f73d81e1-57c6-47fc-b027-7121623bf996 service nova] [instance: e04f06de-da6a-4562-a50a-ff16bf3a006e] Neutron deleted interface 79f1cbee-348d-4f65-807f-291c2a96aa88; detaching it from the instance and deleting it from the info cache [ 1957.500141] env[63279]: DEBUG nova.network.neutron [req-fbed0dce-9849-46c0-bcba-c9dd8f6e0982 req-f73d81e1-57c6-47fc-b027-7121623bf996 service nova] [instance: e04f06de-da6a-4562-a50a-ff16bf3a006e] Updating instance_info_cache with network_info: [] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1957.532686] env[63279]: DEBUG oslo_vmware.api [None req-f3600cbc-9fef-45fa-b2de-6e5d0ca54cdb tempest-ServerRescueTestJSONUnderV235-1989684258 tempest-ServerRescueTestJSONUnderV235-1989684258-project-member] Task: {'id': task-2087113, 'name': Rename_Task, 'duration_secs': 0.13828} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1957.532686] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-f3600cbc-9fef-45fa-b2de-6e5d0ca54cdb tempest-ServerRescueTestJSONUnderV235-1989684258 tempest-ServerRescueTestJSONUnderV235-1989684258-project-member] [instance: 044335c7-ce3b-4b4a-b1dc-8b9acec538b4] Powering on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1957.532686] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-bed84dae-246e-467b-8721-5cf15b654880 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1957.539518] env[63279]: DEBUG oslo_vmware.api [None req-f3600cbc-9fef-45fa-b2de-6e5d0ca54cdb tempest-ServerRescueTestJSONUnderV235-1989684258 tempest-ServerRescueTestJSONUnderV235-1989684258-project-member] Waiting for the task: (returnval){ [ 1957.539518] env[63279]: value = "task-2087115" [ 1957.539518] env[63279]: _type = "Task" [ 1957.539518] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1957.559944] env[63279]: DEBUG oslo_vmware.api [None req-f3600cbc-9fef-45fa-b2de-6e5d0ca54cdb tempest-ServerRescueTestJSONUnderV235-1989684258 tempest-ServerRescueTestJSONUnderV235-1989684258-project-member] Task: {'id': task-2087115, 'name': PowerOnVM_Task} progress is 33%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1957.630471] env[63279]: DEBUG nova.objects.instance [None req-1ddab78a-6903-4b6e-8851-2899eff0407b tempest-VolumesAssistedSnapshotsTest-67549343 tempest-VolumesAssistedSnapshotsTest-67549343-project-admin] Lazy-loading 'flavor' on Instance uuid 5d4909ea-396c-45ba-9ff5-acb8576150b3 {{(pid=63279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1957.672315] env[63279]: DEBUG nova.compute.manager [None req-2fef03c7-ebe2-4875-b420-6389c982d497 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] [instance: fb124cfa-24b4-4712-b8cc-c87df5d6231b] Starting instance... {{(pid=63279) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1957.700950] env[63279]: DEBUG oslo_vmware.api [None req-c68279eb-6991-4d9c-8f4c-d9227ffe7480 tempest-ServersNegativeTestMultiTenantJSON-98060902 tempest-ServersNegativeTestMultiTenantJSON-98060902-project-member] Task: {'id': task-2087114, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.185932} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1957.701847] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-c68279eb-6991-4d9c-8f4c-d9227ffe7480 tempest-ServersNegativeTestMultiTenantJSON-98060902 tempest-ServersNegativeTestMultiTenantJSON-98060902-project-member] Deleted the datastore file {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1957.702083] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-c68279eb-6991-4d9c-8f4c-d9227ffe7480 tempest-ServersNegativeTestMultiTenantJSON-98060902 tempest-ServersNegativeTestMultiTenantJSON-98060902-project-member] [instance: cf1b70af-335d-404b-bb4f-fe082dd6f450] Deleted contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1957.703371] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-c68279eb-6991-4d9c-8f4c-d9227ffe7480 tempest-ServersNegativeTestMultiTenantJSON-98060902 tempest-ServersNegativeTestMultiTenantJSON-98060902-project-member] [instance: cf1b70af-335d-404b-bb4f-fe082dd6f450] Instance destroyed {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1957.703371] env[63279]: INFO nova.compute.manager [None req-c68279eb-6991-4d9c-8f4c-d9227ffe7480 tempest-ServersNegativeTestMultiTenantJSON-98060902 tempest-ServersNegativeTestMultiTenantJSON-98060902-project-member] [instance: cf1b70af-335d-404b-bb4f-fe082dd6f450] Took 1.40 seconds to destroy the instance on the hypervisor. [ 1957.703371] env[63279]: DEBUG oslo.service.loopingcall [None req-c68279eb-6991-4d9c-8f4c-d9227ffe7480 tempest-ServersNegativeTestMultiTenantJSON-98060902 tempest-ServersNegativeTestMultiTenantJSON-98060902-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1957.703371] env[63279]: DEBUG nova.compute.manager [-] [instance: cf1b70af-335d-404b-bb4f-fe082dd6f450] Deallocating network for instance {{(pid=63279) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1957.703371] env[63279]: DEBUG nova.network.neutron [-] [instance: cf1b70af-335d-404b-bb4f-fe082dd6f450] deallocate_for_instance() {{(pid=63279) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1957.818668] env[63279]: DEBUG oslo_concurrency.lockutils [None req-9711dcd5-32f1-47ed-9935-39b9be9d1117 tempest-ServerTagsTestJSON-1345092865 tempest-ServerTagsTestJSON-1345092865-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.707s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1957.818668] env[63279]: DEBUG nova.compute.manager [None req-9711dcd5-32f1-47ed-9935-39b9be9d1117 tempest-ServerTagsTestJSON-1345092865 tempest-ServerTagsTestJSON-1345092865-project-member] [instance: df410051-d551-4a90-81f7-5630f5521a10] Start building networks asynchronously for instance. 
{{(pid=63279) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1957.822593] env[63279]: DEBUG oslo_concurrency.lockutils [None req-3fabae08-e926-4119-bd9e-b65ca32a47fe tempest-FloatingIPsAssociationNegativeTestJSON-1305161251 tempest-FloatingIPsAssociationNegativeTestJSON-1305161251-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 25.115s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1957.824355] env[63279]: INFO nova.compute.claims [None req-3fabae08-e926-4119-bd9e-b65ca32a47fe tempest-FloatingIPsAssociationNegativeTestJSON-1305161251 tempest-FloatingIPsAssociationNegativeTestJSON-1305161251-project-member] [instance: 3f888d81-e73f-4486-bb64-849c873449bf] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1957.834351] env[63279]: DEBUG oslo_concurrency.lockutils [None req-38ca3fe5-945f-4d2c-98c3-74f4e2b459ed tempest-ServerAddressesNegativeTestJSON-377386487 tempest-ServerAddressesNegativeTestJSON-377386487-project-member] Acquiring lock "c287072d-0ce9-4075-8895-0f64326ac303" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1957.834608] env[63279]: DEBUG oslo_concurrency.lockutils [None req-38ca3fe5-945f-4d2c-98c3-74f4e2b459ed tempest-ServerAddressesNegativeTestJSON-377386487 tempest-ServerAddressesNegativeTestJSON-377386487-project-member] Lock "c287072d-0ce9-4075-8895-0f64326ac303" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1957.834861] env[63279]: DEBUG oslo_concurrency.lockutils [None req-38ca3fe5-945f-4d2c-98c3-74f4e2b459ed tempest-ServerAddressesNegativeTestJSON-377386487 tempest-ServerAddressesNegativeTestJSON-377386487-project-member] Acquiring lock "c287072d-0ce9-4075-8895-0f64326ac303-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1957.834998] env[63279]: DEBUG oslo_concurrency.lockutils [None req-38ca3fe5-945f-4d2c-98c3-74f4e2b459ed tempest-ServerAddressesNegativeTestJSON-377386487 tempest-ServerAddressesNegativeTestJSON-377386487-project-member] Lock "c287072d-0ce9-4075-8895-0f64326ac303-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1957.835226] env[63279]: DEBUG oslo_concurrency.lockutils [None req-38ca3fe5-945f-4d2c-98c3-74f4e2b459ed tempest-ServerAddressesNegativeTestJSON-377386487 tempest-ServerAddressesNegativeTestJSON-377386487-project-member] Lock "c287072d-0ce9-4075-8895-0f64326ac303-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1957.839321] env[63279]: INFO nova.compute.manager [None req-38ca3fe5-945f-4d2c-98c3-74f4e2b459ed tempest-ServerAddressesNegativeTestJSON-377386487 tempest-ServerAddressesNegativeTestJSON-377386487-project-member] [instance: c287072d-0ce9-4075-8895-0f64326ac303] 
Terminating instance [ 1957.862892] env[63279]: DEBUG oslo_vmware.api [None req-2bc8ba72-0021-44dc-a431-f9b77e745975 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] Task: {'id': task-2087111, 'name': CloneVM_Task} progress is 94%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1957.893559] env[63279]: DEBUG nova.network.neutron [-] [instance: e04f06de-da6a-4562-a50a-ff16bf3a006e] Updating instance_info_cache with network_info: [] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1958.005526] env[63279]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-7752669c-a4fc-4ed5-822a-e98e3c72088c {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1958.015098] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55d4832c-1497-463e-871d-d59c31119a68 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1958.047725] env[63279]: DEBUG nova.compute.manager [req-fbed0dce-9849-46c0-bcba-c9dd8f6e0982 req-f73d81e1-57c6-47fc-b027-7121623bf996 service nova] [instance: e04f06de-da6a-4562-a50a-ff16bf3a006e] Detach interface failed, port_id=79f1cbee-348d-4f65-807f-291c2a96aa88, reason: Instance e04f06de-da6a-4562-a50a-ff16bf3a006e could not be found. {{(pid=63279) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11484}} [ 1958.056327] env[63279]: DEBUG oslo_vmware.api [None req-f3600cbc-9fef-45fa-b2de-6e5d0ca54cdb tempest-ServerRescueTestJSONUnderV235-1989684258 tempest-ServerRescueTestJSONUnderV235-1989684258-project-member] Task: {'id': task-2087115, 'name': PowerOnVM_Task, 'duration_secs': 0.454951} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1958.056608] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-f3600cbc-9fef-45fa-b2de-6e5d0ca54cdb tempest-ServerRescueTestJSONUnderV235-1989684258 tempest-ServerRescueTestJSONUnderV235-1989684258-project-member] [instance: 044335c7-ce3b-4b4a-b1dc-8b9acec538b4] Powered on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1958.056826] env[63279]: INFO nova.compute.manager [None req-f3600cbc-9fef-45fa-b2de-6e5d0ca54cdb tempest-ServerRescueTestJSONUnderV235-1989684258 tempest-ServerRescueTestJSONUnderV235-1989684258-project-member] [instance: 044335c7-ce3b-4b4a-b1dc-8b9acec538b4] Took 7.84 seconds to spawn the instance on the hypervisor. 
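The "Waiting for the task: (returnval){...} to complete" and "Task: {...} progress is N%" entries above come from oslo.vmware's task-polling loop (wait_for_task / _poll_task in oslo_vmware/api.py, the file the log itself cites). Below is a minimal sketch of driving that same loop directly; the vCenter host, credentials, and the choice of PowerOffVM_Task are illustrative placeholders rather than values from this log, and the call really would power the chosen VM off.

    from oslo_vmware import api as vmware_api
    from oslo_vmware import vim_util

    # Placeholder connection details; a real deployment reads these from the
    # [vmware] section of nova.conf.
    session = vmware_api.VMwareAPISession(
        'vc.example.test', 'administrator@vsphere.local', 'secret',
        api_retry_count=10, task_poll_interval=0.5)

    # Fetch an arbitrary VirtualMachine managed object reference to act on
    # (assumes the vCenter has at least one VM).
    retrieve_result = session.invoke_api(
        vim_util, 'get_objects', session.vim, 'VirtualMachine', 10)
    vm_ref = retrieve_result.objects[0].obj

    # Start an asynchronous vSphere task, then block while oslo.vmware polls
    # TaskInfo until it reaches 'success' (raising on 'error') -- exactly the
    # "Waiting for the task ... to complete" / "progress is N%" lines above.
    task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
    session.wait_for_task(task)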
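The "Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return" entries are produced by oslo.service's looping-call helpers, which re-run a callable at an interval until it signals completion. The sketch below shows that generic pattern with a FixedIntervalLoopingCall; the function body and return value are invented for illustration, and Nova's actual helper may use a back-off variant of the same machinery.

    from oslo_service import loopingcall

    attempts = {'count': 0}

    def _deallocate_with_retries():
        # Stand-in for the retried body; the real helper retries the network
        # deallocation and re-raises once its retry budget is exhausted.
        attempts['count'] += 1
        if attempts['count'] < 3:
            return  # returning keeps the loop running for another interval
        raise loopingcall.LoopingCallDone(retvalue=True)

    timer = loopingcall.FixedIntervalLoopingCall(_deallocate_with_retries)
    succeeded = timer.start(interval=1.0).wait()  # blocks ~2s, returns True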
[ 1958.057017] env[63279]: DEBUG nova.compute.manager [None req-f3600cbc-9fef-45fa-b2de-6e5d0ca54cdb tempest-ServerRescueTestJSONUnderV235-1989684258 tempest-ServerRescueTestJSONUnderV235-1989684258-project-member] [instance: 044335c7-ce3b-4b4a-b1dc-8b9acec538b4] Checking state {{(pid=63279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1958.057838] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72dccf79-7f46-4c4c-a4bc-8f011e46d950 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1958.197841] env[63279]: DEBUG oslo_concurrency.lockutils [None req-2fef03c7-ebe2-4875-b420-6389c982d497 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1958.329802] env[63279]: DEBUG nova.compute.utils [None req-9711dcd5-32f1-47ed-9935-39b9be9d1117 tempest-ServerTagsTestJSON-1345092865 tempest-ServerTagsTestJSON-1345092865-project-member] Using /dev/sd instead of None {{(pid=63279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1958.333663] env[63279]: DEBUG nova.compute.manager [None req-9711dcd5-32f1-47ed-9935-39b9be9d1117 tempest-ServerTagsTestJSON-1345092865 tempest-ServerTagsTestJSON-1345092865-project-member] [instance: df410051-d551-4a90-81f7-5630f5521a10] Allocating IP information in the background. {{(pid=63279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1958.333918] env[63279]: DEBUG nova.network.neutron [None req-9711dcd5-32f1-47ed-9935-39b9be9d1117 tempest-ServerTagsTestJSON-1345092865 tempest-ServerTagsTestJSON-1345092865-project-member] [instance: df410051-d551-4a90-81f7-5630f5521a10] allocate_for_instance() {{(pid=63279) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1958.346494] env[63279]: DEBUG nova.compute.manager [None req-38ca3fe5-945f-4d2c-98c3-74f4e2b459ed tempest-ServerAddressesNegativeTestJSON-377386487 tempest-ServerAddressesNegativeTestJSON-377386487-project-member] [instance: c287072d-0ce9-4075-8895-0f64326ac303] Start destroying the instance on the hypervisor. {{(pid=63279) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1958.348999] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-38ca3fe5-945f-4d2c-98c3-74f4e2b459ed tempest-ServerAddressesNegativeTestJSON-377386487 tempest-ServerAddressesNegativeTestJSON-377386487-project-member] [instance: c287072d-0ce9-4075-8895-0f64326ac303] Destroying instance {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1958.351112] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb63ce94-686e-48ab-b641-f8ba96e5ad5f {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1958.364221] env[63279]: DEBUG oslo_vmware.api [None req-2bc8ba72-0021-44dc-a431-f9b77e745975 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] Task: {'id': task-2087111, 'name': CloneVM_Task} progress is 95%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1958.366505] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-38ca3fe5-945f-4d2c-98c3-74f4e2b459ed tempest-ServerAddressesNegativeTestJSON-377386487 tempest-ServerAddressesNegativeTestJSON-377386487-project-member] [instance: c287072d-0ce9-4075-8895-0f64326ac303] Powering off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1958.366782] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c535378e-9491-49df-937b-3c49ca5146d6 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1958.373799] env[63279]: DEBUG oslo_vmware.api [None req-38ca3fe5-945f-4d2c-98c3-74f4e2b459ed tempest-ServerAddressesNegativeTestJSON-377386487 tempest-ServerAddressesNegativeTestJSON-377386487-project-member] Waiting for the task: (returnval){ [ 1958.373799] env[63279]: value = "task-2087116" [ 1958.373799] env[63279]: _type = "Task" [ 1958.373799] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1958.382194] env[63279]: DEBUG oslo_vmware.api [None req-38ca3fe5-945f-4d2c-98c3-74f4e2b459ed tempest-ServerAddressesNegativeTestJSON-377386487 tempest-ServerAddressesNegativeTestJSON-377386487-project-member] Task: {'id': task-2087116, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1958.391150] env[63279]: DEBUG nova.policy [None req-9711dcd5-32f1-47ed-9935-39b9be9d1117 tempest-ServerTagsTestJSON-1345092865 tempest-ServerTagsTestJSON-1345092865-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '2e4129c1c0ea45e2974332889fc4f2f2', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e1f262353ca64ee68a601c1269c1281d', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63279) authorize /opt/stack/nova/nova/policy.py:192}} [ 1958.396720] env[63279]: INFO nova.compute.manager [-] [instance: e04f06de-da6a-4562-a50a-ff16bf3a006e] Took 1.53 seconds to deallocate network for instance. [ 1958.511658] env[63279]: DEBUG nova.network.neutron [-] [instance: cf1b70af-335d-404b-bb4f-fe082dd6f450] Updating instance_info_cache with network_info: [] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1958.579254] env[63279]: INFO nova.compute.manager [None req-f3600cbc-9fef-45fa-b2de-6e5d0ca54cdb tempest-ServerRescueTestJSONUnderV235-1989684258 tempest-ServerRescueTestJSONUnderV235-1989684258-project-member] [instance: 044335c7-ce3b-4b4a-b1dc-8b9acec538b4] Took 46.93 seconds to build instance. 
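The "Acquiring lock ... by ...", "acquired ... :: waited N.NNNs" and ""released" ... :: held N.NNNs" bookkeeping that runs through this section is emitted at DEBUG level by oslo.concurrency's lockutils wrappers. A small sketch of both forms follows; the lock names are borrowed from the log purely as examples, and the function is not Nova's resource tracker.

    from oslo_concurrency import lockutils

    # Decorator form: every call serializes on the named in-process
    # semaphore, and the wrapper logs the "acquired ... waited" /
    # "released ... held" lines seen above.
    @lockutils.synchronized('compute_resources')
    def claim(instance_uuid):
        return instance_uuid          # critical section

    claim('c287072d-0ce9-4075-8895-0f64326ac303')

    # Context-manager form; external=True would additionally take a file
    # lock (given a configured lock_path) so that other processes on the
    # same host are serialized too.
    with lockutils.lock('c287072d-0ce9-4075-8895-0f64326ac303-events',
                        external=False):
        pass                          # critical section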
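The "Policy check for network:attach_external_network failed with credentials {...}" entry above is an oslo.policy evaluation that the member-only token does not satisfy; it is evidently non-fatal here, since the build continues without an external network. A generic oslo.policy sketch of such a check is below; the 'role:admin' default is hypothetical and is not Nova's actual rule definition.

    from oslo_config import cfg
    from oslo_policy import policy

    CONF = cfg.CONF
    CONF([], project='policy-demo')   # parse empty CLI args for this demo

    enforcer = policy.Enforcer(CONF)
    # Hypothetical in-code default for the rule named in the log.
    enforcer.register_defaults([policy.RuleDefault(
        'network:attach_external_network', 'role:admin')])

    credentials = {'roles': ['member', 'reader'],
                   'project_id': 'e1f262353ca64ee68a601c1269c1281d',
                   'is_admin': False}
    target = {'project_id': credentials['project_id']}

    # Returns False for a plain member token, which is what the failed
    # check recorded above amounts to.
    allowed = enforcer.enforce('network:attach_external_network',
                               target, credentials)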
[ 1958.639134] env[63279]: DEBUG oslo_concurrency.lockutils [None req-1ddab78a-6903-4b6e-8851-2899eff0407b tempest-VolumesAssistedSnapshotsTest-67549343 tempest-VolumesAssistedSnapshotsTest-67549343-project-admin] Lock "5d4909ea-396c-45ba-9ff5-acb8576150b3" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.278s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1958.794918] env[63279]: DEBUG nova.network.neutron [None req-9711dcd5-32f1-47ed-9935-39b9be9d1117 tempest-ServerTagsTestJSON-1345092865 tempest-ServerTagsTestJSON-1345092865-project-member] [instance: df410051-d551-4a90-81f7-5630f5521a10] Successfully created port: cd9181e2-70ee-4534-8313-5f40cf4f8f9c {{(pid=63279) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1958.834229] env[63279]: DEBUG nova.compute.manager [None req-9711dcd5-32f1-47ed-9935-39b9be9d1117 tempest-ServerTagsTestJSON-1345092865 tempest-ServerTagsTestJSON-1345092865-project-member] [instance: df410051-d551-4a90-81f7-5630f5521a10] Start building block device mappings for instance. {{(pid=63279) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1958.866040] env[63279]: DEBUG oslo_vmware.api [None req-2bc8ba72-0021-44dc-a431-f9b77e745975 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] Task: {'id': task-2087111, 'name': CloneVM_Task, 'duration_secs': 1.857277} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1958.867282] env[63279]: INFO nova.virt.vmwareapi.vmops [None req-2bc8ba72-0021-44dc-a431-f9b77e745975 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] [instance: acf95fad-316c-4605-9008-24d4d7c05892] Created linked-clone VM from snapshot [ 1958.867608] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4191dac3-4c53-48d6-a88a-48996b928b9c {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1958.876784] env[63279]: DEBUG nova.virt.vmwareapi.images [None req-2bc8ba72-0021-44dc-a431-f9b77e745975 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] [instance: acf95fad-316c-4605-9008-24d4d7c05892] Uploading image c12b287c-ce13-4bd1-a83d-94dad39d2a94 {{(pid=63279) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1958.888219] env[63279]: DEBUG oslo_vmware.api [None req-38ca3fe5-945f-4d2c-98c3-74f4e2b459ed tempest-ServerAddressesNegativeTestJSON-377386487 tempest-ServerAddressesNegativeTestJSON-377386487-project-member] Task: {'id': task-2087116, 'name': PowerOffVM_Task, 'duration_secs': 0.163817} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1958.891498] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-38ca3fe5-945f-4d2c-98c3-74f4e2b459ed tempest-ServerAddressesNegativeTestJSON-377386487 tempest-ServerAddressesNegativeTestJSON-377386487-project-member] [instance: c287072d-0ce9-4075-8895-0f64326ac303] Powered off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1958.891763] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-38ca3fe5-945f-4d2c-98c3-74f4e2b459ed tempest-ServerAddressesNegativeTestJSON-377386487 tempest-ServerAddressesNegativeTestJSON-377386487-project-member] [instance: c287072d-0ce9-4075-8895-0f64326ac303] Unregistering the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1958.893277] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-afcfb201-821d-4944-9dbd-b75f1bcad08e {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1958.904278] env[63279]: DEBUG oslo_concurrency.lockutils [None req-c642d793-e6da-4184-9ed0-9d696e5f426f tempest-ServerMetadataNegativeTestJSON-1632547526 tempest-ServerMetadataNegativeTestJSON-1632547526-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1958.914040] env[63279]: DEBUG oslo_vmware.rw_handles [None req-2bc8ba72-0021-44dc-a431-f9b77e745975 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1958.914040] env[63279]: value = "vm-427614" [ 1958.914040] env[63279]: _type = "VirtualMachine" [ 1958.914040] env[63279]: }. {{(pid=63279) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1958.914574] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-009bb506-e78d-4d71-bf49-e79bbb9ab91e {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1958.921532] env[63279]: DEBUG oslo_vmware.rw_handles [None req-2bc8ba72-0021-44dc-a431-f9b77e745975 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] Lease: (returnval){ [ 1958.921532] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52b4e8bf-bacf-0352-131d-2c25827de2a6" [ 1958.921532] env[63279]: _type = "HttpNfcLease" [ 1958.921532] env[63279]: } obtained for exporting VM: (result){ [ 1958.921532] env[63279]: value = "vm-427614" [ 1958.921532] env[63279]: _type = "VirtualMachine" [ 1958.921532] env[63279]: }. {{(pid=63279) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1958.921795] env[63279]: DEBUG oslo_vmware.api [None req-2bc8ba72-0021-44dc-a431-f9b77e745975 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] Waiting for the lease: (returnval){ [ 1958.921795] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52b4e8bf-bacf-0352-131d-2c25827de2a6" [ 1958.921795] env[63279]: _type = "HttpNfcLease" [ 1958.921795] env[63279]: } to be ready. 
{{(pid=63279) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1958.932139] env[63279]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1958.932139] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52b4e8bf-bacf-0352-131d-2c25827de2a6" [ 1958.932139] env[63279]: _type = "HttpNfcLease" [ 1958.932139] env[63279]: } is initializing. {{(pid=63279) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1959.016567] env[63279]: INFO nova.compute.manager [-] [instance: cf1b70af-335d-404b-bb4f-fe082dd6f450] Took 1.31 seconds to deallocate network for instance. [ 1959.044166] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-38ca3fe5-945f-4d2c-98c3-74f4e2b459ed tempest-ServerAddressesNegativeTestJSON-377386487 tempest-ServerAddressesNegativeTestJSON-377386487-project-member] [instance: c287072d-0ce9-4075-8895-0f64326ac303] Unregistered the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1959.044166] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-38ca3fe5-945f-4d2c-98c3-74f4e2b459ed tempest-ServerAddressesNegativeTestJSON-377386487 tempest-ServerAddressesNegativeTestJSON-377386487-project-member] [instance: c287072d-0ce9-4075-8895-0f64326ac303] Deleting contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1959.044166] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-38ca3fe5-945f-4d2c-98c3-74f4e2b459ed tempest-ServerAddressesNegativeTestJSON-377386487 tempest-ServerAddressesNegativeTestJSON-377386487-project-member] Deleting the datastore file [datastore1] c287072d-0ce9-4075-8895-0f64326ac303 {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1959.044166] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b21056a2-c288-440f-8933-b4f1e6d74558 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1959.056700] env[63279]: DEBUG oslo_vmware.api [None req-38ca3fe5-945f-4d2c-98c3-74f4e2b459ed tempest-ServerAddressesNegativeTestJSON-377386487 tempest-ServerAddressesNegativeTestJSON-377386487-project-member] Waiting for the task: (returnval){ [ 1959.056700] env[63279]: value = "task-2087120" [ 1959.056700] env[63279]: _type = "Task" [ 1959.056700] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1959.066511] env[63279]: DEBUG oslo_vmware.api [None req-38ca3fe5-945f-4d2c-98c3-74f4e2b459ed tempest-ServerAddressesNegativeTestJSON-377386487 tempest-ServerAddressesNegativeTestJSON-377386487-project-member] Task: {'id': task-2087120, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1959.081212] env[63279]: DEBUG oslo_concurrency.lockutils [None req-f3600cbc-9fef-45fa-b2de-6e5d0ca54cdb tempest-ServerRescueTestJSONUnderV235-1989684258 tempest-ServerRescueTestJSONUnderV235-1989684258-project-member] Lock "044335c7-ce3b-4b4a-b1dc-8b9acec538b4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 97.992s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1959.433892] env[63279]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1959.433892] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52b4e8bf-bacf-0352-131d-2c25827de2a6" [ 1959.433892] env[63279]: _type = "HttpNfcLease" [ 1959.433892] env[63279]: } is ready. {{(pid=63279) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1959.437186] env[63279]: DEBUG oslo_vmware.rw_handles [None req-2bc8ba72-0021-44dc-a431-f9b77e745975 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1959.437186] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52b4e8bf-bacf-0352-131d-2c25827de2a6" [ 1959.437186] env[63279]: _type = "HttpNfcLease" [ 1959.437186] env[63279]: }. {{(pid=63279) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1959.437186] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14dde287-6113-4e64-8864-7f1f71989dba {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1959.444757] env[63279]: DEBUG oslo_vmware.rw_handles [None req-2bc8ba72-0021-44dc-a431-f9b77e745975 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5299020e-2a87-4b32-3866-0e89e96f687e/disk-0.vmdk from lease info. {{(pid=63279) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1959.445030] env[63279]: DEBUG oslo_vmware.rw_handles [None req-2bc8ba72-0021-44dc-a431-f9b77e745975 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5299020e-2a87-4b32-3866-0e89e96f687e/disk-0.vmdk for reading. 
{{(pid=63279) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1959.447776] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c550e06-abdd-4468-b259-46244126c742 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1959.509054] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e54232a5-4e1e-4a41-92e2-29bbb0b63d13 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1959.540209] env[63279]: DEBUG oslo_concurrency.lockutils [None req-c68279eb-6991-4d9c-8f4c-d9227ffe7480 tempest-ServersNegativeTestMultiTenantJSON-98060902 tempest-ServersNegativeTestMultiTenantJSON-98060902-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1959.543521] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb95ddcd-23e6-47b1-89cc-3b507b21694d {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1959.547906] env[63279]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-dde0023a-72bb-44ac-a0c9-2377f11d63eb {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1959.558899] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73d087fe-5ed2-44ce-94df-7740ff6eb7b4 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1959.582109] env[63279]: DEBUG oslo_vmware.api [None req-38ca3fe5-945f-4d2c-98c3-74f4e2b459ed tempest-ServerAddressesNegativeTestJSON-377386487 tempest-ServerAddressesNegativeTestJSON-377386487-project-member] Task: {'id': task-2087120, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.139944} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1959.582582] env[63279]: DEBUG nova.compute.provider_tree [None req-3fabae08-e926-4119-bd9e-b65ca32a47fe tempest-FloatingIPsAssociationNegativeTestJSON-1305161251 tempest-FloatingIPsAssociationNegativeTestJSON-1305161251-project-member] Inventory has not changed in ProviderTree for provider: 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1959.586587] env[63279]: DEBUG nova.compute.manager [req-bb402915-ff5a-4799-90f7-6a1b8e086269 req-d5e132dd-44fa-487b-bd25-4435a0ef7fc5 service nova] [instance: cf1b70af-335d-404b-bb4f-fe082dd6f450] Received event network-vif-deleted-67e6934e-98a0-4355-80f1-869c87d01ea0 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 1959.588140] env[63279]: DEBUG nova.compute.manager [None req-71c39255-559f-4f75-b101-e68493303b43 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] [instance: 58392790-b297-4894-8d81-e5cbda69872b] Starting instance... 
{{(pid=63279) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1959.590618] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-38ca3fe5-945f-4d2c-98c3-74f4e2b459ed tempest-ServerAddressesNegativeTestJSON-377386487 tempest-ServerAddressesNegativeTestJSON-377386487-project-member] Deleted the datastore file {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1959.590696] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-38ca3fe5-945f-4d2c-98c3-74f4e2b459ed tempest-ServerAddressesNegativeTestJSON-377386487 tempest-ServerAddressesNegativeTestJSON-377386487-project-member] [instance: c287072d-0ce9-4075-8895-0f64326ac303] Deleted contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1959.590822] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-38ca3fe5-945f-4d2c-98c3-74f4e2b459ed tempest-ServerAddressesNegativeTestJSON-377386487 tempest-ServerAddressesNegativeTestJSON-377386487-project-member] [instance: c287072d-0ce9-4075-8895-0f64326ac303] Instance destroyed {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1959.590996] env[63279]: INFO nova.compute.manager [None req-38ca3fe5-945f-4d2c-98c3-74f4e2b459ed tempest-ServerAddressesNegativeTestJSON-377386487 tempest-ServerAddressesNegativeTestJSON-377386487-project-member] [instance: c287072d-0ce9-4075-8895-0f64326ac303] Took 1.24 seconds to destroy the instance on the hypervisor. [ 1959.591249] env[63279]: DEBUG oslo.service.loopingcall [None req-38ca3fe5-945f-4d2c-98c3-74f4e2b459ed tempest-ServerAddressesNegativeTestJSON-377386487 tempest-ServerAddressesNegativeTestJSON-377386487-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1959.593803] env[63279]: DEBUG nova.compute.manager [-] [instance: c287072d-0ce9-4075-8895-0f64326ac303] Deallocating network for instance {{(pid=63279) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1959.593803] env[63279]: DEBUG nova.network.neutron [-] [instance: c287072d-0ce9-4075-8895-0f64326ac303] deallocate_for_instance() {{(pid=63279) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1959.850198] env[63279]: DEBUG nova.compute.manager [None req-9711dcd5-32f1-47ed-9935-39b9be9d1117 tempest-ServerTagsTestJSON-1345092865 tempest-ServerTagsTestJSON-1345092865-project-member] [instance: df410051-d551-4a90-81f7-5630f5521a10] Start spawning the instance on the hypervisor. 
{{(pid=63279) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1959.881114] env[63279]: DEBUG nova.virt.hardware [None req-9711dcd5-32f1-47ed-9935-39b9be9d1117 tempest-ServerTagsTestJSON-1345092865 tempest-ServerTagsTestJSON-1345092865-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-13T17:30:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-13T17:30:01Z,direct_url=,disk_format='vmdk',id=30887889-e45b-4f67-8b3c-16216e594a90,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a935e86eee0a4c38adfe0367d2097a61',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-13T17:30:02Z,virtual_size=,visibility=), allow threads: False {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1959.881114] env[63279]: DEBUG nova.virt.hardware [None req-9711dcd5-32f1-47ed-9935-39b9be9d1117 tempest-ServerTagsTestJSON-1345092865 tempest-ServerTagsTestJSON-1345092865-project-member] Flavor limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1959.881114] env[63279]: DEBUG nova.virt.hardware [None req-9711dcd5-32f1-47ed-9935-39b9be9d1117 tempest-ServerTagsTestJSON-1345092865 tempest-ServerTagsTestJSON-1345092865-project-member] Image limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1959.881502] env[63279]: DEBUG nova.virt.hardware [None req-9711dcd5-32f1-47ed-9935-39b9be9d1117 tempest-ServerTagsTestJSON-1345092865 tempest-ServerTagsTestJSON-1345092865-project-member] Flavor pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1959.881680] env[63279]: DEBUG nova.virt.hardware [None req-9711dcd5-32f1-47ed-9935-39b9be9d1117 tempest-ServerTagsTestJSON-1345092865 tempest-ServerTagsTestJSON-1345092865-project-member] Image pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1959.881833] env[63279]: DEBUG nova.virt.hardware [None req-9711dcd5-32f1-47ed-9935-39b9be9d1117 tempest-ServerTagsTestJSON-1345092865 tempest-ServerTagsTestJSON-1345092865-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1959.882057] env[63279]: DEBUG nova.virt.hardware [None req-9711dcd5-32f1-47ed-9935-39b9be9d1117 tempest-ServerTagsTestJSON-1345092865 tempest-ServerTagsTestJSON-1345092865-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1959.882433] env[63279]: DEBUG nova.virt.hardware [None req-9711dcd5-32f1-47ed-9935-39b9be9d1117 tempest-ServerTagsTestJSON-1345092865 tempest-ServerTagsTestJSON-1345092865-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1959.882433] env[63279]: DEBUG nova.virt.hardware [None req-9711dcd5-32f1-47ed-9935-39b9be9d1117 
tempest-ServerTagsTestJSON-1345092865 tempest-ServerTagsTestJSON-1345092865-project-member] Got 1 possible topologies {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1959.882539] env[63279]: DEBUG nova.virt.hardware [None req-9711dcd5-32f1-47ed-9935-39b9be9d1117 tempest-ServerTagsTestJSON-1345092865 tempest-ServerTagsTestJSON-1345092865-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1959.882723] env[63279]: DEBUG nova.virt.hardware [None req-9711dcd5-32f1-47ed-9935-39b9be9d1117 tempest-ServerTagsTestJSON-1345092865 tempest-ServerTagsTestJSON-1345092865-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1959.883641] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49aa918f-12b7-4bc4-b328-f17690f1d2ad {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1959.898355] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6021ac2a-a4c4-4d8e-979d-f14576d35de1 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1960.054286] env[63279]: INFO nova.compute.manager [None req-67b75232-018a-4ecc-9a77-28f4b0247c05 tempest-ServerRescueTestJSONUnderV235-1989684258 tempest-ServerRescueTestJSONUnderV235-1989684258-project-member] [instance: 044335c7-ce3b-4b4a-b1dc-8b9acec538b4] Rescuing [ 1960.054584] env[63279]: DEBUG oslo_concurrency.lockutils [None req-67b75232-018a-4ecc-9a77-28f4b0247c05 tempest-ServerRescueTestJSONUnderV235-1989684258 tempest-ServerRescueTestJSONUnderV235-1989684258-project-member] Acquiring lock "refresh_cache-044335c7-ce3b-4b4a-b1dc-8b9acec538b4" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1960.054744] env[63279]: DEBUG oslo_concurrency.lockutils [None req-67b75232-018a-4ecc-9a77-28f4b0247c05 tempest-ServerRescueTestJSONUnderV235-1989684258 tempest-ServerRescueTestJSONUnderV235-1989684258-project-member] Acquired lock "refresh_cache-044335c7-ce3b-4b4a-b1dc-8b9acec538b4" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1960.054920] env[63279]: DEBUG nova.network.neutron [None req-67b75232-018a-4ecc-9a77-28f4b0247c05 tempest-ServerRescueTestJSONUnderV235-1989684258 tempest-ServerRescueTestJSONUnderV235-1989684258-project-member] [instance: 044335c7-ce3b-4b4a-b1dc-8b9acec538b4] Building network info cache for instance {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1960.063992] env[63279]: DEBUG nova.compute.manager [req-eea81197-cfe2-415d-b00d-cca8dc913633 req-e00b32e6-ae2e-4baa-b586-6367c6dbf16d service nova] [instance: c287072d-0ce9-4075-8895-0f64326ac303] Received event network-vif-deleted-b6a4dca7-a0ba-4270-8c1f-2afac4d449bb {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 1960.064208] env[63279]: INFO nova.compute.manager [req-eea81197-cfe2-415d-b00d-cca8dc913633 req-e00b32e6-ae2e-4baa-b586-6367c6dbf16d service nova] [instance: c287072d-0ce9-4075-8895-0f64326ac303] Neutron deleted interface b6a4dca7-a0ba-4270-8c1f-2afac4d449bb; detaching it from the instance and 
deleting it from the info cache [ 1960.064386] env[63279]: DEBUG nova.network.neutron [req-eea81197-cfe2-415d-b00d-cca8dc913633 req-e00b32e6-ae2e-4baa-b586-6367c6dbf16d service nova] [instance: c287072d-0ce9-4075-8895-0f64326ac303] Updating instance_info_cache with network_info: [] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1960.089624] env[63279]: DEBUG nova.scheduler.client.report [None req-3fabae08-e926-4119-bd9e-b65ca32a47fe tempest-FloatingIPsAssociationNegativeTestJSON-1305161251 tempest-FloatingIPsAssociationNegativeTestJSON-1305161251-project-member] Inventory has not changed for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1960.133806] env[63279]: DEBUG oslo_concurrency.lockutils [None req-71c39255-559f-4f75-b101-e68493303b43 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1960.427075] env[63279]: DEBUG nova.network.neutron [-] [instance: c287072d-0ce9-4075-8895-0f64326ac303] Updating instance_info_cache with network_info: [] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1960.572165] env[63279]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-6681ae71-c803-42ca-b9ab-db13b75c9972 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1960.588413] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-184222c3-d4fd-47b0-b7c9-2280f149f366 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1960.606581] env[63279]: DEBUG oslo_concurrency.lockutils [None req-3fabae08-e926-4119-bd9e-b65ca32a47fe tempest-FloatingIPsAssociationNegativeTestJSON-1305161251 tempest-FloatingIPsAssociationNegativeTestJSON-1305161251-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.784s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1960.608017] env[63279]: DEBUG nova.compute.manager [None req-3fabae08-e926-4119-bd9e-b65ca32a47fe tempest-FloatingIPsAssociationNegativeTestJSON-1305161251 tempest-FloatingIPsAssociationNegativeTestJSON-1305161251-project-member] [instance: 3f888d81-e73f-4486-bb64-849c873449bf] Start building networks asynchronously for instance. 
{{(pid=63279) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1960.611119] env[63279]: DEBUG oslo_concurrency.lockutils [None req-db66a17d-ea76-4394-a0de-e69c5b562193 tempest-AttachInterfacesV270Test-1602135614 tempest-AttachInterfacesV270Test-1602135614-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 25.433s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1960.618597] env[63279]: INFO nova.compute.claims [None req-db66a17d-ea76-4394-a0de-e69c5b562193 tempest-AttachInterfacesV270Test-1602135614 tempest-AttachInterfacesV270Test-1602135614-project-member] [instance: ba2d6111-d93d-4216-b641-864b542ea253] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1960.648523] env[63279]: DEBUG nova.compute.manager [req-eea81197-cfe2-415d-b00d-cca8dc913633 req-e00b32e6-ae2e-4baa-b586-6367c6dbf16d service nova] [instance: c287072d-0ce9-4075-8895-0f64326ac303] Detach interface failed, port_id=b6a4dca7-a0ba-4270-8c1f-2afac4d449bb, reason: Instance c287072d-0ce9-4075-8895-0f64326ac303 could not be found. {{(pid=63279) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11484}} [ 1960.867384] env[63279]: DEBUG nova.network.neutron [None req-9711dcd5-32f1-47ed-9935-39b9be9d1117 tempest-ServerTagsTestJSON-1345092865 tempest-ServerTagsTestJSON-1345092865-project-member] [instance: df410051-d551-4a90-81f7-5630f5521a10] Successfully updated port: cd9181e2-70ee-4534-8313-5f40cf4f8f9c {{(pid=63279) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1960.871667] env[63279]: DEBUG nova.network.neutron [None req-67b75232-018a-4ecc-9a77-28f4b0247c05 tempest-ServerRescueTestJSONUnderV235-1989684258 tempest-ServerRescueTestJSONUnderV235-1989684258-project-member] [instance: 044335c7-ce3b-4b4a-b1dc-8b9acec538b4] Updating instance_info_cache with network_info: [{"id": "8aa70697-1a19-4867-93bc-f264c16829ce", "address": "fa:16:3e:27:56:a2", "network": {"id": "d065f991-0cec-4435-b5a1-ec50614f5486", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-780609743-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "2ea8d4af9e9b4df8930ef52450936dcf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3f9ffee1-f413-4f28-8bc4-3fb2cf299789", "external-id": "nsx-vlan-transportzone-599", "segmentation_id": 599, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8aa70697-1a", "ovs_interfaceid": "8aa70697-1a19-4867-93bc-f264c16829ce", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1960.932226] env[63279]: INFO nova.compute.manager [-] [instance: c287072d-0ce9-4075-8895-0f64326ac303] Took 1.34 seconds to deallocate network for instance. 
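The "Inventory has not changed for provider 0ba7c625-... based on inventory data: {...}" entries just above show the resource tracker diffing its local inventory against the Placement service and skipping the write when nothing differs. When an update is needed, it amounts to a generation-guarded PUT of that same document; a rough sketch against the Placement REST API follows, with the endpoint, token, and generation as placeholders.

    import requests

    PLACEMENT = 'https://placement.example.test'      # from the Keystone catalog
    TOKEN = '<service token>'                          # placeholder
    PROVIDER = '0ba7c625-a0fc-4d3c-b804-196d00f00137'  # provider UUID in the log

    payload = {
        # Must match the provider's current generation or the PUT returns 409.
        'resource_provider_generation': 42,
        'inventories': {
            'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16,
                     'step_size': 1, 'allocation_ratio': 4.0},
            'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1,
                          'max_unit': 65530, 'step_size': 1,
                          'allocation_ratio': 1.0},
            'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1,
                        'max_unit': 169, 'step_size': 1,
                        'allocation_ratio': 1.0},
        },
    }

    resp = requests.put(
        f'{PLACEMENT}/resource_providers/{PROVIDER}/inventories',
        json=payload,
        headers={'X-Auth-Token': TOKEN,
                 'OpenStack-API-Version': 'placement 1.26'})
    resp.raise_for_status()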
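Earlier in this section the linked-clone image upload obtained an HttpNfcLease via ExportVm, waited for the lease to turn "ready", and then read the disk from the "Found VMDK URL". A rough sketch of those lease steps using oslo.vmware primitives is below, reusing the hypothetical session and vm_ref from the task-polling sketch above; the HTTP read itself and error/abort handling are omitted.

    import time

    from oslo_vmware import vim_util

    # Ask vCenter for an export lease on the VM.
    lease = session.invoke_api(session.vim, 'ExportVm', vm_ref)

    # Poll the lease state until it leaves 'initializing' (the "_poll_lease"
    # lines above); a real caller would also handle the 'error' state.
    while session.invoke_api(vim_util, 'get_object_property',
                             session.vim, lease, 'state') == 'initializing':
        time.sleep(1)

    # The lease info lists one deviceUrl per exported disk; its url field is
    # the NFC URL seen in the "Found VMDK URL" entry (oslo.vmware substitutes
    # the real ESX host for a '*' placeholder before opening it).
    info = session.invoke_api(vim_util, 'get_object_property',
                              session.vim, lease, 'info')
    vmdk_url = info.deviceUrl[0].url

    # While streaming the disk, keep the lease alive with progress updates,
    # then mark it complete (or HttpNfcLeaseAbort on failure).
    session.invoke_api(session.vim, 'HttpNfcLeaseProgress', lease, percent=50)
    session.invoke_api(session.vim, 'HttpNfcLeaseComplete', lease)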
[ 1961.120513] env[63279]: DEBUG nova.compute.utils [None req-3fabae08-e926-4119-bd9e-b65ca32a47fe tempest-FloatingIPsAssociationNegativeTestJSON-1305161251 tempest-FloatingIPsAssociationNegativeTestJSON-1305161251-project-member] Using /dev/sd instead of None {{(pid=63279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1961.125868] env[63279]: DEBUG nova.compute.manager [None req-3fabae08-e926-4119-bd9e-b65ca32a47fe tempest-FloatingIPsAssociationNegativeTestJSON-1305161251 tempest-FloatingIPsAssociationNegativeTestJSON-1305161251-project-member] [instance: 3f888d81-e73f-4486-bb64-849c873449bf] Allocating IP information in the background. {{(pid=63279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1961.125868] env[63279]: DEBUG nova.network.neutron [None req-3fabae08-e926-4119-bd9e-b65ca32a47fe tempest-FloatingIPsAssociationNegativeTestJSON-1305161251 tempest-FloatingIPsAssociationNegativeTestJSON-1305161251-project-member] [instance: 3f888d81-e73f-4486-bb64-849c873449bf] allocate_for_instance() {{(pid=63279) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1961.178831] env[63279]: DEBUG nova.policy [None req-3fabae08-e926-4119-bd9e-b65ca32a47fe tempest-FloatingIPsAssociationNegativeTestJSON-1305161251 tempest-FloatingIPsAssociationNegativeTestJSON-1305161251-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1112352a29ca41fb8e7afd18dac561de', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ebd277b3520646d8b0cb2f260d42e447', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63279) authorize /opt/stack/nova/nova/policy.py:192}} [ 1961.377033] env[63279]: DEBUG oslo_concurrency.lockutils [None req-9711dcd5-32f1-47ed-9935-39b9be9d1117 tempest-ServerTagsTestJSON-1345092865 tempest-ServerTagsTestJSON-1345092865-project-member] Acquiring lock "refresh_cache-df410051-d551-4a90-81f7-5630f5521a10" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1961.377033] env[63279]: DEBUG oslo_concurrency.lockutils [None req-9711dcd5-32f1-47ed-9935-39b9be9d1117 tempest-ServerTagsTestJSON-1345092865 tempest-ServerTagsTestJSON-1345092865-project-member] Acquired lock "refresh_cache-df410051-d551-4a90-81f7-5630f5521a10" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1961.377033] env[63279]: DEBUG nova.network.neutron [None req-9711dcd5-32f1-47ed-9935-39b9be9d1117 tempest-ServerTagsTestJSON-1345092865 tempest-ServerTagsTestJSON-1345092865-project-member] [instance: df410051-d551-4a90-81f7-5630f5521a10] Building network info cache for instance {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1961.377033] env[63279]: DEBUG oslo_concurrency.lockutils [None req-67b75232-018a-4ecc-9a77-28f4b0247c05 tempest-ServerRescueTestJSONUnderV235-1989684258 tempest-ServerRescueTestJSONUnderV235-1989684258-project-member] Releasing lock "refresh_cache-044335c7-ce3b-4b4a-b1dc-8b9acec538b4" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1961.441558] env[63279]: DEBUG oslo_concurrency.lockutils [None req-38ca3fe5-945f-4d2c-98c3-74f4e2b459ed 
tempest-ServerAddressesNegativeTestJSON-377386487 tempest-ServerAddressesNegativeTestJSON-377386487-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1961.546040] env[63279]: DEBUG oslo_concurrency.lockutils [None req-968ae4c0-2363-46b4-8c8d-1bd213d554cd tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] Acquiring lock "5b5f87cb-cf35-418f-b5bd-b953524a285c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1961.546134] env[63279]: DEBUG oslo_concurrency.lockutils [None req-968ae4c0-2363-46b4-8c8d-1bd213d554cd tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] Lock "5b5f87cb-cf35-418f-b5bd-b953524a285c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1961.578706] env[63279]: DEBUG nova.network.neutron [None req-3fabae08-e926-4119-bd9e-b65ca32a47fe tempest-FloatingIPsAssociationNegativeTestJSON-1305161251 tempest-FloatingIPsAssociationNegativeTestJSON-1305161251-project-member] [instance: 3f888d81-e73f-4486-bb64-849c873449bf] Successfully created port: 77f48a61-90d3-4ff9-b672-52bd1900e2a2 {{(pid=63279) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1961.629316] env[63279]: DEBUG nova.compute.manager [None req-3fabae08-e926-4119-bd9e-b65ca32a47fe tempest-FloatingIPsAssociationNegativeTestJSON-1305161251 tempest-FloatingIPsAssociationNegativeTestJSON-1305161251-project-member] [instance: 3f888d81-e73f-4486-bb64-849c873449bf] Start building block device mappings for instance. 
{{(pid=63279) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1961.687865] env[63279]: DEBUG nova.compute.manager [req-41482820-5681-46b4-adb5-772d3cbb14b5 req-3c7d313b-9aa7-4340-8a7d-330ab6425ba5 service nova] [instance: df410051-d551-4a90-81f7-5630f5521a10] Received event network-vif-plugged-cd9181e2-70ee-4534-8313-5f40cf4f8f9c {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 1961.688128] env[63279]: DEBUG oslo_concurrency.lockutils [req-41482820-5681-46b4-adb5-772d3cbb14b5 req-3c7d313b-9aa7-4340-8a7d-330ab6425ba5 service nova] Acquiring lock "df410051-d551-4a90-81f7-5630f5521a10-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1961.688349] env[63279]: DEBUG oslo_concurrency.lockutils [req-41482820-5681-46b4-adb5-772d3cbb14b5 req-3c7d313b-9aa7-4340-8a7d-330ab6425ba5 service nova] Lock "df410051-d551-4a90-81f7-5630f5521a10-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1961.688523] env[63279]: DEBUG oslo_concurrency.lockutils [req-41482820-5681-46b4-adb5-772d3cbb14b5 req-3c7d313b-9aa7-4340-8a7d-330ab6425ba5 service nova] Lock "df410051-d551-4a90-81f7-5630f5521a10-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1961.688700] env[63279]: DEBUG nova.compute.manager [req-41482820-5681-46b4-adb5-772d3cbb14b5 req-3c7d313b-9aa7-4340-8a7d-330ab6425ba5 service nova] [instance: df410051-d551-4a90-81f7-5630f5521a10] No waiting events found dispatching network-vif-plugged-cd9181e2-70ee-4534-8313-5f40cf4f8f9c {{(pid=63279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1961.688872] env[63279]: WARNING nova.compute.manager [req-41482820-5681-46b4-adb5-772d3cbb14b5 req-3c7d313b-9aa7-4340-8a7d-330ab6425ba5 service nova] [instance: df410051-d551-4a90-81f7-5630f5521a10] Received unexpected event network-vif-plugged-cd9181e2-70ee-4534-8313-5f40cf4f8f9c for instance with vm_state building and task_state spawning. [ 1961.689696] env[63279]: DEBUG nova.compute.manager [req-41482820-5681-46b4-adb5-772d3cbb14b5 req-3c7d313b-9aa7-4340-8a7d-330ab6425ba5 service nova] [instance: df410051-d551-4a90-81f7-5630f5521a10] Received event network-changed-cd9181e2-70ee-4534-8313-5f40cf4f8f9c {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 1961.690041] env[63279]: DEBUG nova.compute.manager [req-41482820-5681-46b4-adb5-772d3cbb14b5 req-3c7d313b-9aa7-4340-8a7d-330ab6425ba5 service nova] [instance: df410051-d551-4a90-81f7-5630f5521a10] Refreshing instance network info cache due to event network-changed-cd9181e2-70ee-4534-8313-5f40cf4f8f9c. 
{{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11655}} [ 1961.690380] env[63279]: DEBUG oslo_concurrency.lockutils [req-41482820-5681-46b4-adb5-772d3cbb14b5 req-3c7d313b-9aa7-4340-8a7d-330ab6425ba5 service nova] Acquiring lock "refresh_cache-df410051-d551-4a90-81f7-5630f5521a10" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1961.934787] env[63279]: DEBUG nova.network.neutron [None req-9711dcd5-32f1-47ed-9935-39b9be9d1117 tempest-ServerTagsTestJSON-1345092865 tempest-ServerTagsTestJSON-1345092865-project-member] [instance: df410051-d551-4a90-81f7-5630f5521a10] Instance cache missing network info. {{(pid=63279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1962.173664] env[63279]: DEBUG nova.network.neutron [None req-9711dcd5-32f1-47ed-9935-39b9be9d1117 tempest-ServerTagsTestJSON-1345092865 tempest-ServerTagsTestJSON-1345092865-project-member] [instance: df410051-d551-4a90-81f7-5630f5521a10] Updating instance_info_cache with network_info: [{"id": "cd9181e2-70ee-4534-8313-5f40cf4f8f9c", "address": "fa:16:3e:50:ba:cb", "network": {"id": "c4d2f3a0-75cc-49b9-b577-c1c7bd90b05e", "bridge": "br-int", "label": "tempest-ServerTagsTestJSON-1636208118-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e1f262353ca64ee68a601c1269c1281d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "72781990-3cb3-42eb-9eb1-4040dedbf66f", "external-id": "cl2-zone-812", "segmentation_id": 812, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcd9181e2-70", "ovs_interfaceid": "cd9181e2-70ee-4534-8313-5f40cf4f8f9c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1962.282348] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b260d69f-773b-4d63-9d65-6a4b209d319f {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1962.291410] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86ee77df-ced1-41a2-920c-91999ca5d0ec {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1962.323058] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11dafecb-bb2c-4cdb-a65d-b88ea1d8ca00 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1962.332429] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc69aadc-6b7a-41bb-952f-c994844ad192 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1962.347386] env[63279]: DEBUG nova.compute.provider_tree [None req-db66a17d-ea76-4394-a0de-e69c5b562193 
tempest-AttachInterfacesV270Test-1602135614 tempest-AttachInterfacesV270Test-1602135614-project-member] Inventory has not changed in ProviderTree for provider: 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1962.406069] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b2797209-49d4-41bc-9939-7636d6375b08 tempest-VolumesAssistedSnapshotsTest-1202308672 tempest-VolumesAssistedSnapshotsTest-1202308672-project-member] Acquiring lock "5d4909ea-396c-45ba-9ff5-acb8576150b3" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1962.406069] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b2797209-49d4-41bc-9939-7636d6375b08 tempest-VolumesAssistedSnapshotsTest-1202308672 tempest-VolumesAssistedSnapshotsTest-1202308672-project-member] Lock "5d4909ea-396c-45ba-9ff5-acb8576150b3" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1962.406069] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b2797209-49d4-41bc-9939-7636d6375b08 tempest-VolumesAssistedSnapshotsTest-1202308672 tempest-VolumesAssistedSnapshotsTest-1202308672-project-member] Acquiring lock "5d4909ea-396c-45ba-9ff5-acb8576150b3-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1962.406069] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b2797209-49d4-41bc-9939-7636d6375b08 tempest-VolumesAssistedSnapshotsTest-1202308672 tempest-VolumesAssistedSnapshotsTest-1202308672-project-member] Lock "5d4909ea-396c-45ba-9ff5-acb8576150b3-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1962.406369] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b2797209-49d4-41bc-9939-7636d6375b08 tempest-VolumesAssistedSnapshotsTest-1202308672 tempest-VolumesAssistedSnapshotsTest-1202308672-project-member] Lock "5d4909ea-396c-45ba-9ff5-acb8576150b3-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1962.408643] env[63279]: INFO nova.compute.manager [None req-b2797209-49d4-41bc-9939-7636d6375b08 tempest-VolumesAssistedSnapshotsTest-1202308672 tempest-VolumesAssistedSnapshotsTest-1202308672-project-member] [instance: 5d4909ea-396c-45ba-9ff5-acb8576150b3] Terminating instance [ 1962.647490] env[63279]: DEBUG nova.compute.manager [None req-3fabae08-e926-4119-bd9e-b65ca32a47fe tempest-FloatingIPsAssociationNegativeTestJSON-1305161251 tempest-FloatingIPsAssociationNegativeTestJSON-1305161251-project-member] [instance: 3f888d81-e73f-4486-bb64-849c873449bf] Start spawning the instance on the hypervisor. 
{{(pid=63279) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1962.679935] env[63279]: DEBUG oslo_concurrency.lockutils [None req-9711dcd5-32f1-47ed-9935-39b9be9d1117 tempest-ServerTagsTestJSON-1345092865 tempest-ServerTagsTestJSON-1345092865-project-member] Releasing lock "refresh_cache-df410051-d551-4a90-81f7-5630f5521a10" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1962.680466] env[63279]: DEBUG nova.compute.manager [None req-9711dcd5-32f1-47ed-9935-39b9be9d1117 tempest-ServerTagsTestJSON-1345092865 tempest-ServerTagsTestJSON-1345092865-project-member] [instance: df410051-d551-4a90-81f7-5630f5521a10] Instance network_info: |[{"id": "cd9181e2-70ee-4534-8313-5f40cf4f8f9c", "address": "fa:16:3e:50:ba:cb", "network": {"id": "c4d2f3a0-75cc-49b9-b577-c1c7bd90b05e", "bridge": "br-int", "label": "tempest-ServerTagsTestJSON-1636208118-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e1f262353ca64ee68a601c1269c1281d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "72781990-3cb3-42eb-9eb1-4040dedbf66f", "external-id": "cl2-zone-812", "segmentation_id": 812, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcd9181e2-70", "ovs_interfaceid": "cd9181e2-70ee-4534-8313-5f40cf4f8f9c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1962.680869] env[63279]: DEBUG oslo_concurrency.lockutils [req-41482820-5681-46b4-adb5-772d3cbb14b5 req-3c7d313b-9aa7-4340-8a7d-330ab6425ba5 service nova] Acquired lock "refresh_cache-df410051-d551-4a90-81f7-5630f5521a10" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1962.681105] env[63279]: DEBUG nova.network.neutron [req-41482820-5681-46b4-adb5-772d3cbb14b5 req-3c7d313b-9aa7-4340-8a7d-330ab6425ba5 service nova] [instance: df410051-d551-4a90-81f7-5630f5521a10] Refreshing network info cache for port cd9181e2-70ee-4534-8313-5f40cf4f8f9c {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1962.684331] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-9711dcd5-32f1-47ed-9935-39b9be9d1117 tempest-ServerTagsTestJSON-1345092865 tempest-ServerTagsTestJSON-1345092865-project-member] [instance: df410051-d551-4a90-81f7-5630f5521a10] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:50:ba:cb', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '72781990-3cb3-42eb-9eb1-4040dedbf66f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'cd9181e2-70ee-4534-8313-5f40cf4f8f9c', 'vif_model': 'vmxnet3'}] {{(pid=63279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1962.697924] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-9711dcd5-32f1-47ed-9935-39b9be9d1117 tempest-ServerTagsTestJSON-1345092865 tempest-ServerTagsTestJSON-1345092865-project-member] Creating folder: Project 
(e1f262353ca64ee68a601c1269c1281d). Parent ref: group-v427491. {{(pid=63279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1962.705321] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6cb9b1d4-7ae6-44e3-bea9-2654f30f2859 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1962.719452] env[63279]: DEBUG nova.virt.hardware [None req-3fabae08-e926-4119-bd9e-b65ca32a47fe tempest-FloatingIPsAssociationNegativeTestJSON-1305161251 tempest-FloatingIPsAssociationNegativeTestJSON-1305161251-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-13T17:30:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-13T17:30:01Z,direct_url=,disk_format='vmdk',id=30887889-e45b-4f67-8b3c-16216e594a90,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a935e86eee0a4c38adfe0367d2097a61',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-13T17:30:02Z,virtual_size=,visibility=), allow threads: False {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1962.719600] env[63279]: DEBUG nova.virt.hardware [None req-3fabae08-e926-4119-bd9e-b65ca32a47fe tempest-FloatingIPsAssociationNegativeTestJSON-1305161251 tempest-FloatingIPsAssociationNegativeTestJSON-1305161251-project-member] Flavor limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1962.719715] env[63279]: DEBUG nova.virt.hardware [None req-3fabae08-e926-4119-bd9e-b65ca32a47fe tempest-FloatingIPsAssociationNegativeTestJSON-1305161251 tempest-FloatingIPsAssociationNegativeTestJSON-1305161251-project-member] Image limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1962.720705] env[63279]: DEBUG nova.virt.hardware [None req-3fabae08-e926-4119-bd9e-b65ca32a47fe tempest-FloatingIPsAssociationNegativeTestJSON-1305161251 tempest-FloatingIPsAssociationNegativeTestJSON-1305161251-project-member] Flavor pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1962.720705] env[63279]: DEBUG nova.virt.hardware [None req-3fabae08-e926-4119-bd9e-b65ca32a47fe tempest-FloatingIPsAssociationNegativeTestJSON-1305161251 tempest-FloatingIPsAssociationNegativeTestJSON-1305161251-project-member] Image pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1962.720705] env[63279]: DEBUG nova.virt.hardware [None req-3fabae08-e926-4119-bd9e-b65ca32a47fe tempest-FloatingIPsAssociationNegativeTestJSON-1305161251 tempest-FloatingIPsAssociationNegativeTestJSON-1305161251-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1962.720705] env[63279]: DEBUG nova.virt.hardware [None req-3fabae08-e926-4119-bd9e-b65ca32a47fe tempest-FloatingIPsAssociationNegativeTestJSON-1305161251 tempest-FloatingIPsAssociationNegativeTestJSON-1305161251-project-member] Topology preferred 
VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1962.720705] env[63279]: DEBUG nova.virt.hardware [None req-3fabae08-e926-4119-bd9e-b65ca32a47fe tempest-FloatingIPsAssociationNegativeTestJSON-1305161251 tempest-FloatingIPsAssociationNegativeTestJSON-1305161251-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1962.720965] env[63279]: DEBUG nova.virt.hardware [None req-3fabae08-e926-4119-bd9e-b65ca32a47fe tempest-FloatingIPsAssociationNegativeTestJSON-1305161251 tempest-FloatingIPsAssociationNegativeTestJSON-1305161251-project-member] Got 1 possible topologies {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1962.721091] env[63279]: DEBUG nova.virt.hardware [None req-3fabae08-e926-4119-bd9e-b65ca32a47fe tempest-FloatingIPsAssociationNegativeTestJSON-1305161251 tempest-FloatingIPsAssociationNegativeTestJSON-1305161251-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1962.721415] env[63279]: DEBUG nova.virt.hardware [None req-3fabae08-e926-4119-bd9e-b65ca32a47fe tempest-FloatingIPsAssociationNegativeTestJSON-1305161251 tempest-FloatingIPsAssociationNegativeTestJSON-1305161251-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1962.725031] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eaf92005-3aca-4686-845c-6c01b5822a84 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1962.732079] env[63279]: INFO nova.virt.vmwareapi.vm_util [None req-9711dcd5-32f1-47ed-9935-39b9be9d1117 tempest-ServerTagsTestJSON-1345092865 tempest-ServerTagsTestJSON-1345092865-project-member] Created folder: Project (e1f262353ca64ee68a601c1269c1281d) in parent group-v427491. [ 1962.732327] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-9711dcd5-32f1-47ed-9935-39b9be9d1117 tempest-ServerTagsTestJSON-1345092865 tempest-ServerTagsTestJSON-1345092865-project-member] Creating folder: Instances. Parent ref: group-v427615. {{(pid=63279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1962.733444] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7fc1d169-09bd-462e-83f2-c3c65cec079e {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1962.744067] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8cd46fcd-5b42-41ff-a671-39867460d697 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1962.752827] env[63279]: INFO nova.virt.vmwareapi.vm_util [None req-9711dcd5-32f1-47ed-9935-39b9be9d1117 tempest-ServerTagsTestJSON-1345092865 tempest-ServerTagsTestJSON-1345092865-project-member] Created folder: Instances in parent group-v427615. 
[ 1962.753114] env[63279]: DEBUG oslo.service.loopingcall [None req-9711dcd5-32f1-47ed-9935-39b9be9d1117 tempest-ServerTagsTestJSON-1345092865 tempest-ServerTagsTestJSON-1345092865-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1962.753666] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: df410051-d551-4a90-81f7-5630f5521a10] Creating VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1962.753893] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a9908001-ae15-4dee-85aa-86f76dd6415e {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1962.783641] env[63279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1962.783641] env[63279]: value = "task-2087123" [ 1962.783641] env[63279]: _type = "Task" [ 1962.783641] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1962.794627] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087123, 'name': CreateVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1962.851023] env[63279]: DEBUG nova.scheduler.client.report [None req-db66a17d-ea76-4394-a0de-e69c5b562193 tempest-AttachInterfacesV270Test-1602135614 tempest-AttachInterfacesV270Test-1602135614-project-member] Inventory has not changed for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1962.914715] env[63279]: DEBUG nova.compute.manager [None req-b2797209-49d4-41bc-9939-7636d6375b08 tempest-VolumesAssistedSnapshotsTest-1202308672 tempest-VolumesAssistedSnapshotsTest-1202308672-project-member] [instance: 5d4909ea-396c-45ba-9ff5-acb8576150b3] Start destroying the instance on the hypervisor. 
{{(pid=63279) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1962.914715] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-b2797209-49d4-41bc-9939-7636d6375b08 tempest-VolumesAssistedSnapshotsTest-1202308672 tempest-VolumesAssistedSnapshotsTest-1202308672-project-member] [instance: 5d4909ea-396c-45ba-9ff5-acb8576150b3] Destroying instance {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1962.914715] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ea52936-db01-4e19-9429-08787fd7be6a {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1962.922671] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-b2797209-49d4-41bc-9939-7636d6375b08 tempest-VolumesAssistedSnapshotsTest-1202308672 tempest-VolumesAssistedSnapshotsTest-1202308672-project-member] [instance: 5d4909ea-396c-45ba-9ff5-acb8576150b3] Powering off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1962.922797] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1afd4678-41bc-4f48-8f84-985b1d8cb310 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1962.926050] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-67b75232-018a-4ecc-9a77-28f4b0247c05 tempest-ServerRescueTestJSONUnderV235-1989684258 tempest-ServerRescueTestJSONUnderV235-1989684258-project-member] [instance: 044335c7-ce3b-4b4a-b1dc-8b9acec538b4] Powering off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1962.926296] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d116ddc1-1bd4-41ba-b707-2a8cd96c8704 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1962.929211] env[63279]: DEBUG oslo_vmware.api [None req-b2797209-49d4-41bc-9939-7636d6375b08 tempest-VolumesAssistedSnapshotsTest-1202308672 tempest-VolumesAssistedSnapshotsTest-1202308672-project-member] Waiting for the task: (returnval){ [ 1962.929211] env[63279]: value = "task-2087124" [ 1962.929211] env[63279]: _type = "Task" [ 1962.929211] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1962.934147] env[63279]: DEBUG oslo_vmware.api [None req-67b75232-018a-4ecc-9a77-28f4b0247c05 tempest-ServerRescueTestJSONUnderV235-1989684258 tempest-ServerRescueTestJSONUnderV235-1989684258-project-member] Waiting for the task: (returnval){ [ 1962.934147] env[63279]: value = "task-2087125" [ 1962.934147] env[63279]: _type = "Task" [ 1962.934147] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1962.941226] env[63279]: DEBUG oslo_vmware.api [None req-b2797209-49d4-41bc-9939-7636d6375b08 tempest-VolumesAssistedSnapshotsTest-1202308672 tempest-VolumesAssistedSnapshotsTest-1202308672-project-member] Task: {'id': task-2087124, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1962.946166] env[63279]: DEBUG oslo_vmware.api [None req-67b75232-018a-4ecc-9a77-28f4b0247c05 tempest-ServerRescueTestJSONUnderV235-1989684258 tempest-ServerRescueTestJSONUnderV235-1989684258-project-member] Task: {'id': task-2087125, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1963.186810] env[63279]: DEBUG nova.compute.manager [req-5311616b-08f9-4a2e-8397-58e53229af5d req-d85a6ea4-9123-47dc-a5c9-8a871ab7d48a service nova] [instance: 3f888d81-e73f-4486-bb64-849c873449bf] Received event network-vif-plugged-77f48a61-90d3-4ff9-b672-52bd1900e2a2 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 1963.187050] env[63279]: DEBUG oslo_concurrency.lockutils [req-5311616b-08f9-4a2e-8397-58e53229af5d req-d85a6ea4-9123-47dc-a5c9-8a871ab7d48a service nova] Acquiring lock "3f888d81-e73f-4486-bb64-849c873449bf-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1963.187325] env[63279]: DEBUG oslo_concurrency.lockutils [req-5311616b-08f9-4a2e-8397-58e53229af5d req-d85a6ea4-9123-47dc-a5c9-8a871ab7d48a service nova] Lock "3f888d81-e73f-4486-bb64-849c873449bf-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1963.187446] env[63279]: DEBUG oslo_concurrency.lockutils [req-5311616b-08f9-4a2e-8397-58e53229af5d req-d85a6ea4-9123-47dc-a5c9-8a871ab7d48a service nova] Lock "3f888d81-e73f-4486-bb64-849c873449bf-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1963.187842] env[63279]: DEBUG nova.compute.manager [req-5311616b-08f9-4a2e-8397-58e53229af5d req-d85a6ea4-9123-47dc-a5c9-8a871ab7d48a service nova] [instance: 3f888d81-e73f-4486-bb64-849c873449bf] No waiting events found dispatching network-vif-plugged-77f48a61-90d3-4ff9-b672-52bd1900e2a2 {{(pid=63279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1963.188125] env[63279]: WARNING nova.compute.manager [req-5311616b-08f9-4a2e-8397-58e53229af5d req-d85a6ea4-9123-47dc-a5c9-8a871ab7d48a service nova] [instance: 3f888d81-e73f-4486-bb64-849c873449bf] Received unexpected event network-vif-plugged-77f48a61-90d3-4ff9-b672-52bd1900e2a2 for instance with vm_state building and task_state spawning. [ 1963.294405] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087123, 'name': CreateVM_Task} progress is 25%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1963.345515] env[63279]: DEBUG nova.network.neutron [None req-3fabae08-e926-4119-bd9e-b65ca32a47fe tempest-FloatingIPsAssociationNegativeTestJSON-1305161251 tempest-FloatingIPsAssociationNegativeTestJSON-1305161251-project-member] [instance: 3f888d81-e73f-4486-bb64-849c873449bf] Successfully updated port: 77f48a61-90d3-4ff9-b672-52bd1900e2a2 {{(pid=63279) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1963.356852] env[63279]: DEBUG oslo_concurrency.lockutils [None req-db66a17d-ea76-4394-a0de-e69c5b562193 tempest-AttachInterfacesV270Test-1602135614 tempest-AttachInterfacesV270Test-1602135614-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.746s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1963.357424] env[63279]: DEBUG nova.compute.manager [None req-db66a17d-ea76-4394-a0de-e69c5b562193 tempest-AttachInterfacesV270Test-1602135614 tempest-AttachInterfacesV270Test-1602135614-project-member] [instance: ba2d6111-d93d-4216-b641-864b542ea253] Start building networks asynchronously for instance. {{(pid=63279) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1963.361299] env[63279]: DEBUG oslo_concurrency.lockutils [None req-bd29b348-af39-4f7a-bdf6-c730291d8081 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 25.006s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1963.361918] env[63279]: DEBUG nova.objects.instance [None req-bd29b348-af39-4f7a-bdf6-c730291d8081 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Lazy-loading 'resources' on Instance uuid 977fa519-2db3-4ee5-981d-c46820a8c72e {{(pid=63279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1963.442172] env[63279]: DEBUG oslo_vmware.api [None req-b2797209-49d4-41bc-9939-7636d6375b08 tempest-VolumesAssistedSnapshotsTest-1202308672 tempest-VolumesAssistedSnapshotsTest-1202308672-project-member] Task: {'id': task-2087124, 'name': PowerOffVM_Task, 'duration_secs': 0.225794} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1963.442874] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-b2797209-49d4-41bc-9939-7636d6375b08 tempest-VolumesAssistedSnapshotsTest-1202308672 tempest-VolumesAssistedSnapshotsTest-1202308672-project-member] [instance: 5d4909ea-396c-45ba-9ff5-acb8576150b3] Powered off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1963.443100] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-b2797209-49d4-41bc-9939-7636d6375b08 tempest-VolumesAssistedSnapshotsTest-1202308672 tempest-VolumesAssistedSnapshotsTest-1202308672-project-member] [instance: 5d4909ea-396c-45ba-9ff5-acb8576150b3] Unregistering the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1963.443364] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-926916a1-c883-4273-ade0-9a3335b93c9f {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1963.448430] env[63279]: DEBUG oslo_vmware.api [None req-67b75232-018a-4ecc-9a77-28f4b0247c05 tempest-ServerRescueTestJSONUnderV235-1989684258 tempest-ServerRescueTestJSONUnderV235-1989684258-project-member] Task: {'id': task-2087125, 'name': PowerOffVM_Task, 'duration_secs': 0.207538} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1963.449035] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-67b75232-018a-4ecc-9a77-28f4b0247c05 tempest-ServerRescueTestJSONUnderV235-1989684258 tempest-ServerRescueTestJSONUnderV235-1989684258-project-member] [instance: 044335c7-ce3b-4b4a-b1dc-8b9acec538b4] Powered off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1963.450955] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7354451-3e0c-4925-af4a-640631dc497c {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1963.472624] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57130637-2d8e-49b4-8bed-bb271d7b1984 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1963.507287] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-67b75232-018a-4ecc-9a77-28f4b0247c05 tempest-ServerRescueTestJSONUnderV235-1989684258 tempest-ServerRescueTestJSONUnderV235-1989684258-project-member] [instance: 044335c7-ce3b-4b4a-b1dc-8b9acec538b4] Powering off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1963.507733] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7411986c-c673-4fe2-bbb4-089d6932045c {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1963.514064] env[63279]: DEBUG oslo_vmware.api [None req-67b75232-018a-4ecc-9a77-28f4b0247c05 tempest-ServerRescueTestJSONUnderV235-1989684258 tempest-ServerRescueTestJSONUnderV235-1989684258-project-member] Waiting for the task: (returnval){ [ 1963.514064] env[63279]: value = "task-2087127" [ 1963.514064] env[63279]: _type = "Task" [ 1963.514064] env[63279]: } to complete. 
{{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1963.525438] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-67b75232-018a-4ecc-9a77-28f4b0247c05 tempest-ServerRescueTestJSONUnderV235-1989684258 tempest-ServerRescueTestJSONUnderV235-1989684258-project-member] [instance: 044335c7-ce3b-4b4a-b1dc-8b9acec538b4] VM already powered off {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1963.525666] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-67b75232-018a-4ecc-9a77-28f4b0247c05 tempest-ServerRescueTestJSONUnderV235-1989684258 tempest-ServerRescueTestJSONUnderV235-1989684258-project-member] [instance: 044335c7-ce3b-4b4a-b1dc-8b9acec538b4] Processing image 30887889-e45b-4f67-8b3c-16216e594a90 {{(pid=63279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1963.525915] env[63279]: DEBUG oslo_concurrency.lockutils [None req-67b75232-018a-4ecc-9a77-28f4b0247c05 tempest-ServerRescueTestJSONUnderV235-1989684258 tempest-ServerRescueTestJSONUnderV235-1989684258-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1963.526093] env[63279]: DEBUG oslo_concurrency.lockutils [None req-67b75232-018a-4ecc-9a77-28f4b0247c05 tempest-ServerRescueTestJSONUnderV235-1989684258 tempest-ServerRescueTestJSONUnderV235-1989684258-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1963.526277] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-67b75232-018a-4ecc-9a77-28f4b0247c05 tempest-ServerRescueTestJSONUnderV235-1989684258 tempest-ServerRescueTestJSONUnderV235-1989684258-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1963.526524] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-27a28def-efab-44b1-a525-35933536b793 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1963.535264] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-67b75232-018a-4ecc-9a77-28f4b0247c05 tempest-ServerRescueTestJSONUnderV235-1989684258 tempest-ServerRescueTestJSONUnderV235-1989684258-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1963.535466] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-67b75232-018a-4ecc-9a77-28f4b0247c05 tempest-ServerRescueTestJSONUnderV235-1989684258 tempest-ServerRescueTestJSONUnderV235-1989684258-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1963.536205] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a28252f3-a570-49e3-a245-4e91663adf41 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1963.541464] env[63279]: DEBUG oslo_vmware.api [None req-67b75232-018a-4ecc-9a77-28f4b0247c05 tempest-ServerRescueTestJSONUnderV235-1989684258 tempest-ServerRescueTestJSONUnderV235-1989684258-project-member] Waiting for the task: (returnval){ [ 1963.541464] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52584c6b-5698-ca08-56b5-1c71e3b2c9c3" [ 1963.541464] env[63279]: _type = "Task" [ 1963.541464] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1963.549396] env[63279]: DEBUG oslo_vmware.api [None req-67b75232-018a-4ecc-9a77-28f4b0247c05 tempest-ServerRescueTestJSONUnderV235-1989684258 tempest-ServerRescueTestJSONUnderV235-1989684258-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52584c6b-5698-ca08-56b5-1c71e3b2c9c3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1963.563908] env[63279]: DEBUG nova.network.neutron [req-41482820-5681-46b4-adb5-772d3cbb14b5 req-3c7d313b-9aa7-4340-8a7d-330ab6425ba5 service nova] [instance: df410051-d551-4a90-81f7-5630f5521a10] Updated VIF entry in instance network info cache for port cd9181e2-70ee-4534-8313-5f40cf4f8f9c. {{(pid=63279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1963.564308] env[63279]: DEBUG nova.network.neutron [req-41482820-5681-46b4-adb5-772d3cbb14b5 req-3c7d313b-9aa7-4340-8a7d-330ab6425ba5 service nova] [instance: df410051-d551-4a90-81f7-5630f5521a10] Updating instance_info_cache with network_info: [{"id": "cd9181e2-70ee-4534-8313-5f40cf4f8f9c", "address": "fa:16:3e:50:ba:cb", "network": {"id": "c4d2f3a0-75cc-49b9-b577-c1c7bd90b05e", "bridge": "br-int", "label": "tempest-ServerTagsTestJSON-1636208118-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e1f262353ca64ee68a601c1269c1281d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "72781990-3cb3-42eb-9eb1-4040dedbf66f", "external-id": "cl2-zone-812", "segmentation_id": 812, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcd9181e2-70", "ovs_interfaceid": "cd9181e2-70ee-4534-8313-5f40cf4f8f9c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1963.586519] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-b2797209-49d4-41bc-9939-7636d6375b08 tempest-VolumesAssistedSnapshotsTest-1202308672 tempest-VolumesAssistedSnapshotsTest-1202308672-project-member] [instance: 5d4909ea-396c-45ba-9ff5-acb8576150b3] Unregistered the VM 
{{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1963.586755] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-b2797209-49d4-41bc-9939-7636d6375b08 tempest-VolumesAssistedSnapshotsTest-1202308672 tempest-VolumesAssistedSnapshotsTest-1202308672-project-member] [instance: 5d4909ea-396c-45ba-9ff5-acb8576150b3] Deleting contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1963.586943] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-b2797209-49d4-41bc-9939-7636d6375b08 tempest-VolumesAssistedSnapshotsTest-1202308672 tempest-VolumesAssistedSnapshotsTest-1202308672-project-member] Deleting the datastore file [datastore1] 5d4909ea-396c-45ba-9ff5-acb8576150b3 {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1963.587226] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-bc8a3a1d-237f-44bd-8450-033c8fed2a2b {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1963.593561] env[63279]: DEBUG oslo_vmware.api [None req-b2797209-49d4-41bc-9939-7636d6375b08 tempest-VolumesAssistedSnapshotsTest-1202308672 tempest-VolumesAssistedSnapshotsTest-1202308672-project-member] Waiting for the task: (returnval){ [ 1963.593561] env[63279]: value = "task-2087128" [ 1963.593561] env[63279]: _type = "Task" [ 1963.593561] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1963.602704] env[63279]: DEBUG oslo_vmware.api [None req-b2797209-49d4-41bc-9939-7636d6375b08 tempest-VolumesAssistedSnapshotsTest-1202308672 tempest-VolumesAssistedSnapshotsTest-1202308672-project-member] Task: {'id': task-2087128, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1963.795060] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087123, 'name': CreateVM_Task, 'duration_secs': 0.674098} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1963.795299] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: df410051-d551-4a90-81f7-5630f5521a10] Created VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1963.796068] env[63279]: DEBUG oslo_concurrency.lockutils [None req-9711dcd5-32f1-47ed-9935-39b9be9d1117 tempest-ServerTagsTestJSON-1345092865 tempest-ServerTagsTestJSON-1345092865-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1963.796315] env[63279]: DEBUG oslo_concurrency.lockutils [None req-9711dcd5-32f1-47ed-9935-39b9be9d1117 tempest-ServerTagsTestJSON-1345092865 tempest-ServerTagsTestJSON-1345092865-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1963.796699] env[63279]: DEBUG oslo_concurrency.lockutils [None req-9711dcd5-32f1-47ed-9935-39b9be9d1117 tempest-ServerTagsTestJSON-1345092865 tempest-ServerTagsTestJSON-1345092865-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1963.796958] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4a61ee5c-2fe8-4503-8a84-4a5775b1c9e9 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1963.801737] env[63279]: DEBUG oslo_vmware.api [None req-9711dcd5-32f1-47ed-9935-39b9be9d1117 tempest-ServerTagsTestJSON-1345092865 tempest-ServerTagsTestJSON-1345092865-project-member] Waiting for the task: (returnval){ [ 1963.801737] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52872750-5a5b-3041-9fa3-0c77f5e82345" [ 1963.801737] env[63279]: _type = "Task" [ 1963.801737] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1963.809589] env[63279]: DEBUG oslo_vmware.api [None req-9711dcd5-32f1-47ed-9935-39b9be9d1117 tempest-ServerTagsTestJSON-1345092865 tempest-ServerTagsTestJSON-1345092865-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52872750-5a5b-3041-9fa3-0c77f5e82345, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1963.851234] env[63279]: DEBUG oslo_concurrency.lockutils [None req-3fabae08-e926-4119-bd9e-b65ca32a47fe tempest-FloatingIPsAssociationNegativeTestJSON-1305161251 tempest-FloatingIPsAssociationNegativeTestJSON-1305161251-project-member] Acquiring lock "refresh_cache-3f888d81-e73f-4486-bb64-849c873449bf" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1963.851234] env[63279]: DEBUG oslo_concurrency.lockutils [None req-3fabae08-e926-4119-bd9e-b65ca32a47fe tempest-FloatingIPsAssociationNegativeTestJSON-1305161251 tempest-FloatingIPsAssociationNegativeTestJSON-1305161251-project-member] Acquired lock "refresh_cache-3f888d81-e73f-4486-bb64-849c873449bf" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1963.851234] env[63279]: DEBUG nova.network.neutron [None req-3fabae08-e926-4119-bd9e-b65ca32a47fe tempest-FloatingIPsAssociationNegativeTestJSON-1305161251 tempest-FloatingIPsAssociationNegativeTestJSON-1305161251-project-member] [instance: 3f888d81-e73f-4486-bb64-849c873449bf] Building network info cache for instance {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1963.864107] env[63279]: DEBUG nova.compute.utils [None req-db66a17d-ea76-4394-a0de-e69c5b562193 tempest-AttachInterfacesV270Test-1602135614 tempest-AttachInterfacesV270Test-1602135614-project-member] Using /dev/sd instead of None {{(pid=63279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1963.865538] env[63279]: DEBUG nova.compute.manager [None req-db66a17d-ea76-4394-a0de-e69c5b562193 tempest-AttachInterfacesV270Test-1602135614 tempest-AttachInterfacesV270Test-1602135614-project-member] [instance: ba2d6111-d93d-4216-b641-864b542ea253] Allocating IP information in the background. {{(pid=63279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1963.865861] env[63279]: DEBUG nova.network.neutron [None req-db66a17d-ea76-4394-a0de-e69c5b562193 tempest-AttachInterfacesV270Test-1602135614 tempest-AttachInterfacesV270Test-1602135614-project-member] [instance: ba2d6111-d93d-4216-b641-864b542ea253] allocate_for_instance() {{(pid=63279) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1963.921777] env[63279]: DEBUG nova.policy [None req-db66a17d-ea76-4394-a0de-e69c5b562193 tempest-AttachInterfacesV270Test-1602135614 tempest-AttachInterfacesV270Test-1602135614-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'fb8bd240a67546a39cb881f1f72e424d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a6b8a538f6414274b16aa6bb9452bd7f', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63279) authorize /opt/stack/nova/nova/policy.py:192}} [ 1964.054628] env[63279]: DEBUG oslo_vmware.api [None req-67b75232-018a-4ecc-9a77-28f4b0247c05 tempest-ServerRescueTestJSONUnderV235-1989684258 tempest-ServerRescueTestJSONUnderV235-1989684258-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52584c6b-5698-ca08-56b5-1c71e3b2c9c3, 'name': SearchDatastore_Task, 'duration_secs': 0.008732} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1964.056289] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-74e29fcd-1481-4ba1-878a-f7bff4070fd7 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1964.065022] env[63279]: DEBUG oslo_vmware.api [None req-67b75232-018a-4ecc-9a77-28f4b0247c05 tempest-ServerRescueTestJSONUnderV235-1989684258 tempest-ServerRescueTestJSONUnderV235-1989684258-project-member] Waiting for the task: (returnval){ [ 1964.065022] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]5217443e-c1dd-cbf1-192b-f6ffb5a710fd" [ 1964.065022] env[63279]: _type = "Task" [ 1964.065022] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1964.066829] env[63279]: DEBUG oslo_concurrency.lockutils [req-41482820-5681-46b4-adb5-772d3cbb14b5 req-3c7d313b-9aa7-4340-8a7d-330ab6425ba5 service nova] Releasing lock "refresh_cache-df410051-d551-4a90-81f7-5630f5521a10" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1964.070862] env[63279]: DEBUG oslo_vmware.api [None req-67b75232-018a-4ecc-9a77-28f4b0247c05 tempest-ServerRescueTestJSONUnderV235-1989684258 tempest-ServerRescueTestJSONUnderV235-1989684258-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]5217443e-c1dd-cbf1-192b-f6ffb5a710fd, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1964.105030] env[63279]: DEBUG oslo_vmware.api [None req-b2797209-49d4-41bc-9939-7636d6375b08 tempest-VolumesAssistedSnapshotsTest-1202308672 tempest-VolumesAssistedSnapshotsTest-1202308672-project-member] Task: {'id': task-2087128, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.138811} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1964.107954] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-b2797209-49d4-41bc-9939-7636d6375b08 tempest-VolumesAssistedSnapshotsTest-1202308672 tempest-VolumesAssistedSnapshotsTest-1202308672-project-member] Deleted the datastore file {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1964.108204] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-b2797209-49d4-41bc-9939-7636d6375b08 tempest-VolumesAssistedSnapshotsTest-1202308672 tempest-VolumesAssistedSnapshotsTest-1202308672-project-member] [instance: 5d4909ea-396c-45ba-9ff5-acb8576150b3] Deleted contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1964.108487] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-b2797209-49d4-41bc-9939-7636d6375b08 tempest-VolumesAssistedSnapshotsTest-1202308672 tempest-VolumesAssistedSnapshotsTest-1202308672-project-member] [instance: 5d4909ea-396c-45ba-9ff5-acb8576150b3] Instance destroyed {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1964.108734] env[63279]: INFO nova.compute.manager [None req-b2797209-49d4-41bc-9939-7636d6375b08 tempest-VolumesAssistedSnapshotsTest-1202308672 tempest-VolumesAssistedSnapshotsTest-1202308672-project-member] [instance: 5d4909ea-396c-45ba-9ff5-acb8576150b3] Took 1.20 seconds to destroy the instance on the hypervisor. 
[ 1964.109243] env[63279]: DEBUG oslo.service.loopingcall [None req-b2797209-49d4-41bc-9939-7636d6375b08 tempest-VolumesAssistedSnapshotsTest-1202308672 tempest-VolumesAssistedSnapshotsTest-1202308672-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1964.109519] env[63279]: DEBUG nova.compute.manager [-] [instance: 5d4909ea-396c-45ba-9ff5-acb8576150b3] Deallocating network for instance {{(pid=63279) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1964.110039] env[63279]: DEBUG nova.network.neutron [-] [instance: 5d4909ea-396c-45ba-9ff5-acb8576150b3] deallocate_for_instance() {{(pid=63279) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1964.245966] env[63279]: DEBUG nova.network.neutron [None req-db66a17d-ea76-4394-a0de-e69c5b562193 tempest-AttachInterfacesV270Test-1602135614 tempest-AttachInterfacesV270Test-1602135614-project-member] [instance: ba2d6111-d93d-4216-b641-864b542ea253] Successfully created port: 13f38e75-c220-4029-974f-96f6308c35c6 {{(pid=63279) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1964.313265] env[63279]: DEBUG oslo_vmware.api [None req-9711dcd5-32f1-47ed-9935-39b9be9d1117 tempest-ServerTagsTestJSON-1345092865 tempest-ServerTagsTestJSON-1345092865-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52872750-5a5b-3041-9fa3-0c77f5e82345, 'name': SearchDatastore_Task, 'duration_secs': 0.008861} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1964.313621] env[63279]: DEBUG oslo_concurrency.lockutils [None req-9711dcd5-32f1-47ed-9935-39b9be9d1117 tempest-ServerTagsTestJSON-1345092865 tempest-ServerTagsTestJSON-1345092865-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1964.313875] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-9711dcd5-32f1-47ed-9935-39b9be9d1117 tempest-ServerTagsTestJSON-1345092865 tempest-ServerTagsTestJSON-1345092865-project-member] [instance: df410051-d551-4a90-81f7-5630f5521a10] Processing image 30887889-e45b-4f67-8b3c-16216e594a90 {{(pid=63279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1964.314137] env[63279]: DEBUG oslo_concurrency.lockutils [None req-9711dcd5-32f1-47ed-9935-39b9be9d1117 tempest-ServerTagsTestJSON-1345092865 tempest-ServerTagsTestJSON-1345092865-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1964.371545] env[63279]: DEBUG nova.compute.manager [None req-db66a17d-ea76-4394-a0de-e69c5b562193 tempest-AttachInterfacesV270Test-1602135614 tempest-AttachInterfacesV270Test-1602135614-project-member] [instance: ba2d6111-d93d-4216-b641-864b542ea253] Start building block device mappings for instance.
{{(pid=63279) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1964.415804] env[63279]: DEBUG nova.network.neutron [None req-3fabae08-e926-4119-bd9e-b65ca32a47fe tempest-FloatingIPsAssociationNegativeTestJSON-1305161251 tempest-FloatingIPsAssociationNegativeTestJSON-1305161251-project-member] [instance: 3f888d81-e73f-4486-bb64-849c873449bf] Instance cache missing network info. {{(pid=63279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1964.469184] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2bc807e8-adc9-4051-89e1-1ae9e5eb57a7 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1964.478207] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ea71a6b-8fea-4d5f-abc8-e7da49ac0862 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1964.520183] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28584c69-6a72-4d5b-9ba4-6d34a620aedf {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1964.530172] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5649163e-99bc-49df-b497-58c26ef15c92 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1964.544194] env[63279]: DEBUG nova.compute.provider_tree [None req-bd29b348-af39-4f7a-bdf6-c730291d8081 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Updating inventory in ProviderTree for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1964.574627] env[63279]: DEBUG oslo_vmware.api [None req-67b75232-018a-4ecc-9a77-28f4b0247c05 tempest-ServerRescueTestJSONUnderV235-1989684258 tempest-ServerRescueTestJSONUnderV235-1989684258-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]5217443e-c1dd-cbf1-192b-f6ffb5a710fd, 'name': SearchDatastore_Task, 'duration_secs': 0.033406} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1964.574928] env[63279]: DEBUG oslo_concurrency.lockutils [None req-67b75232-018a-4ecc-9a77-28f4b0247c05 tempest-ServerRescueTestJSONUnderV235-1989684258 tempest-ServerRescueTestJSONUnderV235-1989684258-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1964.575212] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-67b75232-018a-4ecc-9a77-28f4b0247c05 tempest-ServerRescueTestJSONUnderV235-1989684258 tempest-ServerRescueTestJSONUnderV235-1989684258-project-member] Copying virtual disk from [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] 044335c7-ce3b-4b4a-b1dc-8b9acec538b4/30887889-e45b-4f67-8b3c-16216e594a90-rescue.vmdk. {{(pid=63279) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 1964.575520] env[63279]: DEBUG oslo_concurrency.lockutils [None req-9711dcd5-32f1-47ed-9935-39b9be9d1117 tempest-ServerTagsTestJSON-1345092865 tempest-ServerTagsTestJSON-1345092865-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1964.575709] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-9711dcd5-32f1-47ed-9935-39b9be9d1117 tempest-ServerTagsTestJSON-1345092865 tempest-ServerTagsTestJSON-1345092865-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1964.576165] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-670ba9fd-d4c9-4645-ae13-ef7f4af56a3d {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1964.578183] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ade8fc34-cf1a-4c21-a0b9-fdeb7917f2e6 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1964.584863] env[63279]: DEBUG oslo_vmware.api [None req-67b75232-018a-4ecc-9a77-28f4b0247c05 tempest-ServerRescueTestJSONUnderV235-1989684258 tempest-ServerRescueTestJSONUnderV235-1989684258-project-member] Waiting for the task: (returnval){ [ 1964.584863] env[63279]: value = "task-2087129" [ 1964.584863] env[63279]: _type = "Task" [ 1964.584863] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1964.590252] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-9711dcd5-32f1-47ed-9935-39b9be9d1117 tempest-ServerTagsTestJSON-1345092865 tempest-ServerTagsTestJSON-1345092865-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1964.590252] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-9711dcd5-32f1-47ed-9935-39b9be9d1117 tempest-ServerTagsTestJSON-1345092865 tempest-ServerTagsTestJSON-1345092865-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1964.591038] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fa70d62a-8a78-4f63-9414-257de1ef8ffd {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1964.597228] env[63279]: DEBUG oslo_vmware.api [None req-67b75232-018a-4ecc-9a77-28f4b0247c05 tempest-ServerRescueTestJSONUnderV235-1989684258 tempest-ServerRescueTestJSONUnderV235-1989684258-project-member] Task: {'id': task-2087129, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1964.600219] env[63279]: DEBUG oslo_vmware.api [None req-9711dcd5-32f1-47ed-9935-39b9be9d1117 tempest-ServerTagsTestJSON-1345092865 tempest-ServerTagsTestJSON-1345092865-project-member] Waiting for the task: (returnval){ [ 1964.600219] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]5266f9cf-4a3e-24d8-6315-88714b289e90" [ 1964.600219] env[63279]: _type = "Task" [ 1964.600219] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1964.608056] env[63279]: DEBUG oslo_vmware.api [None req-9711dcd5-32f1-47ed-9935-39b9be9d1117 tempest-ServerTagsTestJSON-1345092865 tempest-ServerTagsTestJSON-1345092865-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]5266f9cf-4a3e-24d8-6315-88714b289e90, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1964.749165] env[63279]: DEBUG nova.network.neutron [None req-3fabae08-e926-4119-bd9e-b65ca32a47fe tempest-FloatingIPsAssociationNegativeTestJSON-1305161251 tempest-FloatingIPsAssociationNegativeTestJSON-1305161251-project-member] [instance: 3f888d81-e73f-4486-bb64-849c873449bf] Updating instance_info_cache with network_info: [{"id": "77f48a61-90d3-4ff9-b672-52bd1900e2a2", "address": "fa:16:3e:cb:ca:1a", "network": {"id": "c4927c75-f2ae-4879-8ca5-ffbe46e14c4d", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-110838540-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ebd277b3520646d8b0cb2f260d42e447", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5e27fd35-1d7b-4358-92d5-4d34da27b992", "external-id": "nsx-vlan-transportzone-355", "segmentation_id": 355, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap77f48a61-90", "ovs_interfaceid": "77f48a61-90d3-4ff9-b672-52bd1900e2a2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1965.073906] env[63279]: ERROR nova.scheduler.client.report [None req-bd29b348-af39-4f7a-bdf6-c730291d8081 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] 
[req-12c58c94-6b9e-4b5a-b8b1-169f54ea754c] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 0ba7c625-a0fc-4d3c-b804-196d00f00137. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-12c58c94-6b9e-4b5a-b8b1-169f54ea754c"}]} [ 1965.095177] env[63279]: DEBUG nova.scheduler.client.report [None req-bd29b348-af39-4f7a-bdf6-c730291d8081 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Refreshing inventories for resource provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1965.103383] env[63279]: DEBUG oslo_vmware.api [None req-67b75232-018a-4ecc-9a77-28f4b0247c05 tempest-ServerRescueTestJSONUnderV235-1989684258 tempest-ServerRescueTestJSONUnderV235-1989684258-project-member] Task: {'id': task-2087129, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.481456} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1965.106930] env[63279]: INFO nova.virt.vmwareapi.ds_util [None req-67b75232-018a-4ecc-9a77-28f4b0247c05 tempest-ServerRescueTestJSONUnderV235-1989684258 tempest-ServerRescueTestJSONUnderV235-1989684258-project-member] Copied virtual disk from [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] 044335c7-ce3b-4b4a-b1dc-8b9acec538b4/30887889-e45b-4f67-8b3c-16216e594a90-rescue.vmdk. 
[ 1965.107990] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01e8f8c5-3d70-4b15-816d-f599fc05ec9e {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1965.111250] env[63279]: DEBUG nova.scheduler.client.report [None req-bd29b348-af39-4f7a-bdf6-c730291d8081 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Updating ProviderTree inventory for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1965.111465] env[63279]: DEBUG nova.compute.provider_tree [None req-bd29b348-af39-4f7a-bdf6-c730291d8081 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Updating inventory in ProviderTree for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1965.122542] env[63279]: DEBUG oslo_vmware.api [None req-9711dcd5-32f1-47ed-9935-39b9be9d1117 tempest-ServerTagsTestJSON-1345092865 tempest-ServerTagsTestJSON-1345092865-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]5266f9cf-4a3e-24d8-6315-88714b289e90, 'name': SearchDatastore_Task, 'duration_secs': 0.014399} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1965.143690] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-67b75232-018a-4ecc-9a77-28f4b0247c05 tempest-ServerRescueTestJSONUnderV235-1989684258 tempest-ServerRescueTestJSONUnderV235-1989684258-project-member] [instance: 044335c7-ce3b-4b4a-b1dc-8b9acec538b4] Reconfiguring VM instance instance-00000028 to attach disk [datastore1] 044335c7-ce3b-4b4a-b1dc-8b9acec538b4/30887889-e45b-4f67-8b3c-16216e594a90-rescue.vmdk or device None with type thin {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1965.144898] env[63279]: DEBUG nova.scheduler.client.report [None req-bd29b348-af39-4f7a-bdf6-c730291d8081 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Refreshing aggregate associations for resource provider 0ba7c625-a0fc-4d3c-b804-196d00f00137, aggregates: None {{(pid=63279) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1965.146988] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c3dbc091-89da-486d-8a3d-ecce25663fe9 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1965.149427] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-cb48e865-e607-4ea2-893c-f36055665689 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1965.168450] env[63279]: DEBUG oslo_vmware.api [None req-9711dcd5-32f1-47ed-9935-39b9be9d1117 tempest-ServerTagsTestJSON-1345092865 tempest-ServerTagsTestJSON-1345092865-project-member] Waiting for the task: (returnval){ [ 1965.168450] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]524f1f93-13c7-5d5d-e9d4-e1d743be75d7" [ 1965.168450] env[63279]: _type = "Task" [ 1965.168450] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1965.169927] env[63279]: DEBUG oslo_vmware.api [None req-67b75232-018a-4ecc-9a77-28f4b0247c05 tempest-ServerRescueTestJSONUnderV235-1989684258 tempest-ServerRescueTestJSONUnderV235-1989684258-project-member] Waiting for the task: (returnval){ [ 1965.169927] env[63279]: value = "task-2087130" [ 1965.169927] env[63279]: _type = "Task" [ 1965.169927] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1965.182365] env[63279]: DEBUG oslo_vmware.api [None req-9711dcd5-32f1-47ed-9935-39b9be9d1117 tempest-ServerTagsTestJSON-1345092865 tempest-ServerTagsTestJSON-1345092865-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]524f1f93-13c7-5d5d-e9d4-e1d743be75d7, 'name': SearchDatastore_Task, 'duration_secs': 0.010032} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1965.188455] env[63279]: DEBUG nova.scheduler.client.report [None req-bd29b348-af39-4f7a-bdf6-c730291d8081 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Refreshing trait associations for resource provider 0ba7c625-a0fc-4d3c-b804-196d00f00137, traits: COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK {{(pid=63279) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1965.191698] env[63279]: DEBUG oslo_concurrency.lockutils [None req-9711dcd5-32f1-47ed-9935-39b9be9d1117 tempest-ServerTagsTestJSON-1345092865 tempest-ServerTagsTestJSON-1345092865-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1965.191957] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-9711dcd5-32f1-47ed-9935-39b9be9d1117 tempest-ServerTagsTestJSON-1345092865 tempest-ServerTagsTestJSON-1345092865-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] df410051-d551-4a90-81f7-5630f5521a10/df410051-d551-4a90-81f7-5630f5521a10.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1965.192565] env[63279]: DEBUG oslo_vmware.api [None req-67b75232-018a-4ecc-9a77-28f4b0247c05 tempest-ServerRescueTestJSONUnderV235-1989684258 tempest-ServerRescueTestJSONUnderV235-1989684258-project-member] Task: {'id': task-2087130, 'name': ReconfigVM_Task} progress is 10%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1965.193342] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3fa30085-5353-47c1-b704-142b7db6478a {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1965.199961] env[63279]: DEBUG oslo_vmware.api [None req-9711dcd5-32f1-47ed-9935-39b9be9d1117 tempest-ServerTagsTestJSON-1345092865 tempest-ServerTagsTestJSON-1345092865-project-member] Waiting for the task: (returnval){ [ 1965.199961] env[63279]: value = "task-2087131" [ 1965.199961] env[63279]: _type = "Task" [ 1965.199961] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1965.208549] env[63279]: DEBUG oslo_vmware.api [None req-9711dcd5-32f1-47ed-9935-39b9be9d1117 tempest-ServerTagsTestJSON-1345092865 tempest-ServerTagsTestJSON-1345092865-project-member] Task: {'id': task-2087131, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1965.254114] env[63279]: DEBUG oslo_concurrency.lockutils [None req-3fabae08-e926-4119-bd9e-b65ca32a47fe tempest-FloatingIPsAssociationNegativeTestJSON-1305161251 tempest-FloatingIPsAssociationNegativeTestJSON-1305161251-project-member] Releasing lock "refresh_cache-3f888d81-e73f-4486-bb64-849c873449bf" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1965.254114] env[63279]: DEBUG nova.compute.manager [None req-3fabae08-e926-4119-bd9e-b65ca32a47fe tempest-FloatingIPsAssociationNegativeTestJSON-1305161251 tempest-FloatingIPsAssociationNegativeTestJSON-1305161251-project-member] [instance: 3f888d81-e73f-4486-bb64-849c873449bf] Instance network_info: |[{"id": "77f48a61-90d3-4ff9-b672-52bd1900e2a2", "address": "fa:16:3e:cb:ca:1a", "network": {"id": "c4927c75-f2ae-4879-8ca5-ffbe46e14c4d", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-110838540-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ebd277b3520646d8b0cb2f260d42e447", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5e27fd35-1d7b-4358-92d5-4d34da27b992", "external-id": "nsx-vlan-transportzone-355", "segmentation_id": 355, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap77f48a61-90", "ovs_interfaceid": "77f48a61-90d3-4ff9-b672-52bd1900e2a2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1965.254481] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-3fabae08-e926-4119-bd9e-b65ca32a47fe tempest-FloatingIPsAssociationNegativeTestJSON-1305161251 tempest-FloatingIPsAssociationNegativeTestJSON-1305161251-project-member] [instance: 3f888d81-e73f-4486-bb64-849c873449bf] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:cb:ca:1a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '5e27fd35-1d7b-4358-92d5-4d34da27b992', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '77f48a61-90d3-4ff9-b672-52bd1900e2a2', 'vif_model': 'vmxnet3'}] {{(pid=63279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1965.260688] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-3fabae08-e926-4119-bd9e-b65ca32a47fe tempest-FloatingIPsAssociationNegativeTestJSON-1305161251 tempest-FloatingIPsAssociationNegativeTestJSON-1305161251-project-member] Creating folder: Project (ebd277b3520646d8b0cb2f260d42e447). Parent ref: group-v427491. 
{{(pid=63279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1965.261090] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-472584d3-babd-42ca-830d-201619ead5ba {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1965.273924] env[63279]: INFO nova.virt.vmwareapi.vm_util [None req-3fabae08-e926-4119-bd9e-b65ca32a47fe tempest-FloatingIPsAssociationNegativeTestJSON-1305161251 tempest-FloatingIPsAssociationNegativeTestJSON-1305161251-project-member] Created folder: Project (ebd277b3520646d8b0cb2f260d42e447) in parent group-v427491. [ 1965.274150] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-3fabae08-e926-4119-bd9e-b65ca32a47fe tempest-FloatingIPsAssociationNegativeTestJSON-1305161251 tempest-FloatingIPsAssociationNegativeTestJSON-1305161251-project-member] Creating folder: Instances. Parent ref: group-v427618. {{(pid=63279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1965.274429] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e500e2f4-8da2-4986-8c86-62100ea7baea {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1965.284595] env[63279]: INFO nova.virt.vmwareapi.vm_util [None req-3fabae08-e926-4119-bd9e-b65ca32a47fe tempest-FloatingIPsAssociationNegativeTestJSON-1305161251 tempest-FloatingIPsAssociationNegativeTestJSON-1305161251-project-member] Created folder: Instances in parent group-v427618. [ 1965.284595] env[63279]: DEBUG oslo.service.loopingcall [None req-3fabae08-e926-4119-bd9e-b65ca32a47fe tempest-FloatingIPsAssociationNegativeTestJSON-1305161251 tempest-FloatingIPsAssociationNegativeTestJSON-1305161251-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1965.284851] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3f888d81-e73f-4486-bb64-849c873449bf] Creating VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1965.284851] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-24a9c9ba-9826-4060-bc18-274685101d30 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1965.307837] env[63279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1965.307837] env[63279]: value = "task-2087134" [ 1965.307837] env[63279]: _type = "Task" [ 1965.307837] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1965.317301] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087134, 'name': CreateVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1965.387205] env[63279]: DEBUG nova.compute.manager [None req-db66a17d-ea76-4394-a0de-e69c5b562193 tempest-AttachInterfacesV270Test-1602135614 tempest-AttachInterfacesV270Test-1602135614-project-member] [instance: ba2d6111-d93d-4216-b641-864b542ea253] Start spawning the instance on the hypervisor. 
{{(pid=63279) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1965.396154] env[63279]: DEBUG nova.compute.manager [req-e5e113a2-9b86-42a6-b45a-d3b5f33bf3fe req-20e74887-49bb-439f-b721-25abf84334fa service nova] [instance: 3f888d81-e73f-4486-bb64-849c873449bf] Received event network-changed-77f48a61-90d3-4ff9-b672-52bd1900e2a2 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 1965.396154] env[63279]: DEBUG nova.compute.manager [req-e5e113a2-9b86-42a6-b45a-d3b5f33bf3fe req-20e74887-49bb-439f-b721-25abf84334fa service nova] [instance: 3f888d81-e73f-4486-bb64-849c873449bf] Refreshing instance network info cache due to event network-changed-77f48a61-90d3-4ff9-b672-52bd1900e2a2. {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11655}} [ 1965.396154] env[63279]: DEBUG oslo_concurrency.lockutils [req-e5e113a2-9b86-42a6-b45a-d3b5f33bf3fe req-20e74887-49bb-439f-b721-25abf84334fa service nova] Acquiring lock "refresh_cache-3f888d81-e73f-4486-bb64-849c873449bf" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1965.397106] env[63279]: DEBUG oslo_concurrency.lockutils [req-e5e113a2-9b86-42a6-b45a-d3b5f33bf3fe req-20e74887-49bb-439f-b721-25abf84334fa service nova] Acquired lock "refresh_cache-3f888d81-e73f-4486-bb64-849c873449bf" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1965.397106] env[63279]: DEBUG nova.network.neutron [req-e5e113a2-9b86-42a6-b45a-d3b5f33bf3fe req-20e74887-49bb-439f-b721-25abf84334fa service nova] [instance: 3f888d81-e73f-4486-bb64-849c873449bf] Refreshing network info cache for port 77f48a61-90d3-4ff9-b672-52bd1900e2a2 {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1965.417022] env[63279]: DEBUG nova.virt.hardware [None req-db66a17d-ea76-4394-a0de-e69c5b562193 tempest-AttachInterfacesV270Test-1602135614 tempest-AttachInterfacesV270Test-1602135614-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-13T17:30:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-13T17:30:01Z,direct_url=<?>,disk_format='vmdk',id=30887889-e45b-4f67-8b3c-16216e594a90,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a935e86eee0a4c38adfe0367d2097a61',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2025-01-13T17:30:02Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1965.417313] env[63279]: DEBUG nova.virt.hardware [None req-db66a17d-ea76-4394-a0de-e69c5b562193 tempest-AttachInterfacesV270Test-1602135614 tempest-AttachInterfacesV270Test-1602135614-project-member] Flavor limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1965.417477] env[63279]: DEBUG nova.virt.hardware [None req-db66a17d-ea76-4394-a0de-e69c5b562193 tempest-AttachInterfacesV270Test-1602135614 tempest-AttachInterfacesV270Test-1602135614-project-member] Image limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints
/opt/stack/nova/nova/virt/hardware.py:356}} [ 1965.417702] env[63279]: DEBUG nova.virt.hardware [None req-db66a17d-ea76-4394-a0de-e69c5b562193 tempest-AttachInterfacesV270Test-1602135614 tempest-AttachInterfacesV270Test-1602135614-project-member] Flavor pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1965.417948] env[63279]: DEBUG nova.virt.hardware [None req-db66a17d-ea76-4394-a0de-e69c5b562193 tempest-AttachInterfacesV270Test-1602135614 tempest-AttachInterfacesV270Test-1602135614-project-member] Image pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1965.418149] env[63279]: DEBUG nova.virt.hardware [None req-db66a17d-ea76-4394-a0de-e69c5b562193 tempest-AttachInterfacesV270Test-1602135614 tempest-AttachInterfacesV270Test-1602135614-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1965.418457] env[63279]: DEBUG nova.virt.hardware [None req-db66a17d-ea76-4394-a0de-e69c5b562193 tempest-AttachInterfacesV270Test-1602135614 tempest-AttachInterfacesV270Test-1602135614-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1965.418544] env[63279]: DEBUG nova.virt.hardware [None req-db66a17d-ea76-4394-a0de-e69c5b562193 tempest-AttachInterfacesV270Test-1602135614 tempest-AttachInterfacesV270Test-1602135614-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1965.418795] env[63279]: DEBUG nova.virt.hardware [None req-db66a17d-ea76-4394-a0de-e69c5b562193 tempest-AttachInterfacesV270Test-1602135614 tempest-AttachInterfacesV270Test-1602135614-project-member] Got 1 possible topologies {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1965.418918] env[63279]: DEBUG nova.virt.hardware [None req-db66a17d-ea76-4394-a0de-e69c5b562193 tempest-AttachInterfacesV270Test-1602135614 tempest-AttachInterfacesV270Test-1602135614-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1965.419131] env[63279]: DEBUG nova.virt.hardware [None req-db66a17d-ea76-4394-a0de-e69c5b562193 tempest-AttachInterfacesV270Test-1602135614 tempest-AttachInterfacesV270Test-1602135614-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1965.420614] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18fa1676-0c2f-40db-854e-e2af042bb2a3 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1965.432712] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a937e64c-9da5-45c6-8b4d-903efcc09882 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1965.516930] env[63279]: DEBUG nova.network.neutron [-] [instance: 5d4909ea-396c-45ba-9ff5-acb8576150b3] Updating instance_info_cache with network_info: [] {{(pid=63279) 
update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1965.683691] env[63279]: DEBUG oslo_vmware.api [None req-67b75232-018a-4ecc-9a77-28f4b0247c05 tempest-ServerRescueTestJSONUnderV235-1989684258 tempest-ServerRescueTestJSONUnderV235-1989684258-project-member] Task: {'id': task-2087130, 'name': ReconfigVM_Task, 'duration_secs': 0.421975} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1965.684305] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-67b75232-018a-4ecc-9a77-28f4b0247c05 tempest-ServerRescueTestJSONUnderV235-1989684258 tempest-ServerRescueTestJSONUnderV235-1989684258-project-member] [instance: 044335c7-ce3b-4b4a-b1dc-8b9acec538b4] Reconfigured VM instance instance-00000028 to attach disk [datastore1] 044335c7-ce3b-4b4a-b1dc-8b9acec538b4/30887889-e45b-4f67-8b3c-16216e594a90-rescue.vmdk or device None with type thin {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1965.684930] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-379a961d-8de1-4f6c-80ca-d71e6443b3fc {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1965.719244] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9ccca47b-11af-4298-a31a-7c5b5e0e3960 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1965.735486] env[63279]: DEBUG oslo_vmware.api [None req-9711dcd5-32f1-47ed-9935-39b9be9d1117 tempest-ServerTagsTestJSON-1345092865 tempest-ServerTagsTestJSON-1345092865-project-member] Task: {'id': task-2087131, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.484509} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1965.736809] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-9711dcd5-32f1-47ed-9935-39b9be9d1117 tempest-ServerTagsTestJSON-1345092865 tempest-ServerTagsTestJSON-1345092865-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] df410051-d551-4a90-81f7-5630f5521a10/df410051-d551-4a90-81f7-5630f5521a10.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1965.737058] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-9711dcd5-32f1-47ed-9935-39b9be9d1117 tempest-ServerTagsTestJSON-1345092865 tempest-ServerTagsTestJSON-1345092865-project-member] [instance: df410051-d551-4a90-81f7-5630f5521a10] Extending root virtual disk to 1048576 {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1965.737361] env[63279]: DEBUG oslo_vmware.api [None req-67b75232-018a-4ecc-9a77-28f4b0247c05 tempest-ServerRescueTestJSONUnderV235-1989684258 tempest-ServerRescueTestJSONUnderV235-1989684258-project-member] Waiting for the task: (returnval){ [ 1965.737361] env[63279]: value = "task-2087135" [ 1965.737361] env[63279]: _type = "Task" [ 1965.737361] env[63279]: } to complete. 
{{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1965.737553] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-7a06b449-afad-468c-b948-c37ee886abb7 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1965.750023] env[63279]: DEBUG oslo_vmware.api [None req-67b75232-018a-4ecc-9a77-28f4b0247c05 tempest-ServerRescueTestJSONUnderV235-1989684258 tempest-ServerRescueTestJSONUnderV235-1989684258-project-member] Task: {'id': task-2087135, 'name': ReconfigVM_Task} progress is 10%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1965.751265] env[63279]: DEBUG oslo_vmware.api [None req-9711dcd5-32f1-47ed-9935-39b9be9d1117 tempest-ServerTagsTestJSON-1345092865 tempest-ServerTagsTestJSON-1345092865-project-member] Waiting for the task: (returnval){ [ 1965.751265] env[63279]: value = "task-2087136" [ 1965.751265] env[63279]: _type = "Task" [ 1965.751265] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1965.759638] env[63279]: DEBUG oslo_vmware.api [None req-9711dcd5-32f1-47ed-9935-39b9be9d1117 tempest-ServerTagsTestJSON-1345092865 tempest-ServerTagsTestJSON-1345092865-project-member] Task: {'id': task-2087136, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1965.817359] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087134, 'name': CreateVM_Task, 'duration_secs': 0.467334} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1965.820015] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3f888d81-e73f-4486-bb64-849c873449bf] Created VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1965.821051] env[63279]: DEBUG oslo_concurrency.lockutils [None req-3fabae08-e926-4119-bd9e-b65ca32a47fe tempest-FloatingIPsAssociationNegativeTestJSON-1305161251 tempest-FloatingIPsAssociationNegativeTestJSON-1305161251-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1965.821145] env[63279]: DEBUG oslo_concurrency.lockutils [None req-3fabae08-e926-4119-bd9e-b65ca32a47fe tempest-FloatingIPsAssociationNegativeTestJSON-1305161251 tempest-FloatingIPsAssociationNegativeTestJSON-1305161251-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1965.821429] env[63279]: DEBUG oslo_concurrency.lockutils [None req-3fabae08-e926-4119-bd9e-b65ca32a47fe tempest-FloatingIPsAssociationNegativeTestJSON-1305161251 tempest-FloatingIPsAssociationNegativeTestJSON-1305161251-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1965.821703] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-de0eae86-7989-4616-9265-b28800cea166 
{{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1965.826528] env[63279]: DEBUG oslo_vmware.api [None req-3fabae08-e926-4119-bd9e-b65ca32a47fe tempest-FloatingIPsAssociationNegativeTestJSON-1305161251 tempest-FloatingIPsAssociationNegativeTestJSON-1305161251-project-member] Waiting for the task: (returnval){ [ 1965.826528] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52233e35-f4f9-55e7-9038-473988ba0088" [ 1965.826528] env[63279]: _type = "Task" [ 1965.826528] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1965.836721] env[63279]: DEBUG oslo_vmware.api [None req-3fabae08-e926-4119-bd9e-b65ca32a47fe tempest-FloatingIPsAssociationNegativeTestJSON-1305161251 tempest-FloatingIPsAssociationNegativeTestJSON-1305161251-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52233e35-f4f9-55e7-9038-473988ba0088, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1965.858017] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba612718-415e-4f46-84e8-071b15f0b6ba {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1965.865244] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-333d1d8a-ca58-448c-9d78-8590b0dcfd68 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1965.897110] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55baafc7-a038-445b-a986-52d93dc06bed {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1965.907279] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab7f5fc4-a3c2-4816-a561-ce3395530c07 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1965.921089] env[63279]: DEBUG nova.compute.provider_tree [None req-bd29b348-af39-4f7a-bdf6-c730291d8081 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Inventory has not changed in ProviderTree for provider: 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1965.983020] env[63279]: DEBUG nova.compute.manager [req-7f48a6cb-f8e3-436b-a655-84067f3347ee req-afe3cc0e-fc0a-4264-ae43-1b214cb7edde service nova] [instance: ba2d6111-d93d-4216-b641-864b542ea253] Received event network-vif-plugged-13f38e75-c220-4029-974f-96f6308c35c6 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 1965.983020] env[63279]: DEBUG oslo_concurrency.lockutils [req-7f48a6cb-f8e3-436b-a655-84067f3347ee req-afe3cc0e-fc0a-4264-ae43-1b214cb7edde service nova] Acquiring lock "ba2d6111-d93d-4216-b641-864b542ea253-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1965.983020] env[63279]: DEBUG oslo_concurrency.lockutils [req-7f48a6cb-f8e3-436b-a655-84067f3347ee req-afe3cc0e-fc0a-4264-ae43-1b214cb7edde service nova] Lock
"ba2d6111-d93d-4216-b641-864b542ea253-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1965.983020] env[63279]: DEBUG oslo_concurrency.lockutils [req-7f48a6cb-f8e3-436b-a655-84067f3347ee req-afe3cc0e-fc0a-4264-ae43-1b214cb7edde service nova] Lock "ba2d6111-d93d-4216-b641-864b542ea253-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1965.983020] env[63279]: DEBUG nova.compute.manager [req-7f48a6cb-f8e3-436b-a655-84067f3347ee req-afe3cc0e-fc0a-4264-ae43-1b214cb7edde service nova] [instance: ba2d6111-d93d-4216-b641-864b542ea253] No waiting events found dispatching network-vif-plugged-13f38e75-c220-4029-974f-96f6308c35c6 {{(pid=63279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1965.983537] env[63279]: WARNING nova.compute.manager [req-7f48a6cb-f8e3-436b-a655-84067f3347ee req-afe3cc0e-fc0a-4264-ae43-1b214cb7edde service nova] [instance: ba2d6111-d93d-4216-b641-864b542ea253] Received unexpected event network-vif-plugged-13f38e75-c220-4029-974f-96f6308c35c6 for instance with vm_state building and task_state spawning. [ 1966.021412] env[63279]: INFO nova.compute.manager [-] [instance: 5d4909ea-396c-45ba-9ff5-acb8576150b3] Took 1.91 seconds to deallocate network for instance. [ 1966.115363] env[63279]: DEBUG nova.network.neutron [None req-db66a17d-ea76-4394-a0de-e69c5b562193 tempest-AttachInterfacesV270Test-1602135614 tempest-AttachInterfacesV270Test-1602135614-project-member] [instance: ba2d6111-d93d-4216-b641-864b542ea253] Successfully updated port: 13f38e75-c220-4029-974f-96f6308c35c6 {{(pid=63279) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1966.186538] env[63279]: DEBUG nova.network.neutron [req-e5e113a2-9b86-42a6-b45a-d3b5f33bf3fe req-20e74887-49bb-439f-b721-25abf84334fa service nova] [instance: 3f888d81-e73f-4486-bb64-849c873449bf] Updated VIF entry in instance network info cache for port 77f48a61-90d3-4ff9-b672-52bd1900e2a2. 
{{(pid=63279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1966.186968] env[63279]: DEBUG nova.network.neutron [req-e5e113a2-9b86-42a6-b45a-d3b5f33bf3fe req-20e74887-49bb-439f-b721-25abf84334fa service nova] [instance: 3f888d81-e73f-4486-bb64-849c873449bf] Updating instance_info_cache with network_info: [{"id": "77f48a61-90d3-4ff9-b672-52bd1900e2a2", "address": "fa:16:3e:cb:ca:1a", "network": {"id": "c4927c75-f2ae-4879-8ca5-ffbe46e14c4d", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-110838540-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ebd277b3520646d8b0cb2f260d42e447", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5e27fd35-1d7b-4358-92d5-4d34da27b992", "external-id": "nsx-vlan-transportzone-355", "segmentation_id": 355, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap77f48a61-90", "ovs_interfaceid": "77f48a61-90d3-4ff9-b672-52bd1900e2a2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1966.254023] env[63279]: DEBUG oslo_vmware.api [None req-67b75232-018a-4ecc-9a77-28f4b0247c05 tempest-ServerRescueTestJSONUnderV235-1989684258 tempest-ServerRescueTestJSONUnderV235-1989684258-project-member] Task: {'id': task-2087135, 'name': ReconfigVM_Task, 'duration_secs': 0.166679} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1966.254023] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-67b75232-018a-4ecc-9a77-28f4b0247c05 tempest-ServerRescueTestJSONUnderV235-1989684258 tempest-ServerRescueTestJSONUnderV235-1989684258-project-member] [instance: 044335c7-ce3b-4b4a-b1dc-8b9acec538b4] Powering on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1966.254023] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d23783ae-39d1-4e78-a372-faa1e1381692 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1966.263218] env[63279]: DEBUG oslo_vmware.api [None req-9711dcd5-32f1-47ed-9935-39b9be9d1117 tempest-ServerTagsTestJSON-1345092865 tempest-ServerTagsTestJSON-1345092865-project-member] Task: {'id': task-2087136, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1966.264914] env[63279]: DEBUG oslo_vmware.api [None req-67b75232-018a-4ecc-9a77-28f4b0247c05 tempest-ServerRescueTestJSONUnderV235-1989684258 tempest-ServerRescueTestJSONUnderV235-1989684258-project-member] Waiting for the task: (returnval){ [ 1966.264914] env[63279]: value = "task-2087137" [ 1966.264914] env[63279]: _type = "Task" [ 1966.264914] env[63279]: } to complete. 
{{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1966.273905] env[63279]: DEBUG oslo_vmware.api [None req-67b75232-018a-4ecc-9a77-28f4b0247c05 tempest-ServerRescueTestJSONUnderV235-1989684258 tempest-ServerRescueTestJSONUnderV235-1989684258-project-member] Task: {'id': task-2087137, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1966.339033] env[63279]: DEBUG oslo_vmware.api [None req-3fabae08-e926-4119-bd9e-b65ca32a47fe tempest-FloatingIPsAssociationNegativeTestJSON-1305161251 tempest-FloatingIPsAssociationNegativeTestJSON-1305161251-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52233e35-f4f9-55e7-9038-473988ba0088, 'name': SearchDatastore_Task, 'duration_secs': 0.009642} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1966.339998] env[63279]: DEBUG oslo_concurrency.lockutils [None req-3fabae08-e926-4119-bd9e-b65ca32a47fe tempest-FloatingIPsAssociationNegativeTestJSON-1305161251 tempest-FloatingIPsAssociationNegativeTestJSON-1305161251-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1966.339998] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-3fabae08-e926-4119-bd9e-b65ca32a47fe tempest-FloatingIPsAssociationNegativeTestJSON-1305161251 tempest-FloatingIPsAssociationNegativeTestJSON-1305161251-project-member] [instance: 3f888d81-e73f-4486-bb64-849c873449bf] Processing image 30887889-e45b-4f67-8b3c-16216e594a90 {{(pid=63279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1966.340774] env[63279]: DEBUG oslo_concurrency.lockutils [None req-3fabae08-e926-4119-bd9e-b65ca32a47fe tempest-FloatingIPsAssociationNegativeTestJSON-1305161251 tempest-FloatingIPsAssociationNegativeTestJSON-1305161251-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1966.340774] env[63279]: DEBUG oslo_concurrency.lockutils [None req-3fabae08-e926-4119-bd9e-b65ca32a47fe tempest-FloatingIPsAssociationNegativeTestJSON-1305161251 tempest-FloatingIPsAssociationNegativeTestJSON-1305161251-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1966.340774] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-3fabae08-e926-4119-bd9e-b65ca32a47fe tempest-FloatingIPsAssociationNegativeTestJSON-1305161251 tempest-FloatingIPsAssociationNegativeTestJSON-1305161251-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1966.340936] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-652d9539-a372-46ef-96d8-7d83495b14b1 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1966.350261] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None 
req-3fabae08-e926-4119-bd9e-b65ca32a47fe tempest-FloatingIPsAssociationNegativeTestJSON-1305161251 tempest-FloatingIPsAssociationNegativeTestJSON-1305161251-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1966.350496] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-3fabae08-e926-4119-bd9e-b65ca32a47fe tempest-FloatingIPsAssociationNegativeTestJSON-1305161251 tempest-FloatingIPsAssociationNegativeTestJSON-1305161251-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1966.351295] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2c5d51a6-5629-4998-9e38-b981025ef544 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1966.357249] env[63279]: DEBUG oslo_vmware.api [None req-3fabae08-e926-4119-bd9e-b65ca32a47fe tempest-FloatingIPsAssociationNegativeTestJSON-1305161251 tempest-FloatingIPsAssociationNegativeTestJSON-1305161251-project-member] Waiting for the task: (returnval){ [ 1966.357249] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]525064f0-8cef-d382-cf17-6d046e8bbd02" [ 1966.357249] env[63279]: _type = "Task" [ 1966.357249] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1966.365388] env[63279]: DEBUG oslo_vmware.api [None req-3fabae08-e926-4119-bd9e-b65ca32a47fe tempest-FloatingIPsAssociationNegativeTestJSON-1305161251 tempest-FloatingIPsAssociationNegativeTestJSON-1305161251-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]525064f0-8cef-d382-cf17-6d046e8bbd02, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1966.423894] env[63279]: DEBUG nova.scheduler.client.report [None req-bd29b348-af39-4f7a-bdf6-c730291d8081 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Inventory has not changed for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1966.528221] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b2797209-49d4-41bc-9939-7636d6375b08 tempest-VolumesAssistedSnapshotsTest-1202308672 tempest-VolumesAssistedSnapshotsTest-1202308672-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1966.618878] env[63279]: DEBUG oslo_concurrency.lockutils [None req-db66a17d-ea76-4394-a0de-e69c5b562193 tempest-AttachInterfacesV270Test-1602135614 tempest-AttachInterfacesV270Test-1602135614-project-member] Acquiring lock "refresh_cache-ba2d6111-d93d-4216-b641-864b542ea253" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1966.618878] env[63279]: DEBUG oslo_concurrency.lockutils [None req-db66a17d-ea76-4394-a0de-e69c5b562193 tempest-AttachInterfacesV270Test-1602135614 tempest-AttachInterfacesV270Test-1602135614-project-member] Acquired lock "refresh_cache-ba2d6111-d93d-4216-b641-864b542ea253" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1966.619087] env[63279]: DEBUG nova.network.neutron [None req-db66a17d-ea76-4394-a0de-e69c5b562193 tempest-AttachInterfacesV270Test-1602135614 tempest-AttachInterfacesV270Test-1602135614-project-member] [instance: ba2d6111-d93d-4216-b641-864b542ea253] Building network info cache for instance {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1966.690511] env[63279]: DEBUG oslo_concurrency.lockutils [req-e5e113a2-9b86-42a6-b45a-d3b5f33bf3fe req-20e74887-49bb-439f-b721-25abf84334fa service nova] Releasing lock "refresh_cache-3f888d81-e73f-4486-bb64-849c873449bf" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1966.690950] env[63279]: DEBUG nova.compute.manager [req-e5e113a2-9b86-42a6-b45a-d3b5f33bf3fe req-20e74887-49bb-439f-b721-25abf84334fa service nova] [instance: 5d4909ea-396c-45ba-9ff5-acb8576150b3] Received event network-vif-deleted-887ee4a3-0844-4571-84d8-486d7c21a7d2 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 1966.691163] env[63279]: INFO nova.compute.manager [req-e5e113a2-9b86-42a6-b45a-d3b5f33bf3fe req-20e74887-49bb-439f-b721-25abf84334fa service nova] [instance: 5d4909ea-396c-45ba-9ff5-acb8576150b3] Neutron deleted interface 887ee4a3-0844-4571-84d8-486d7c21a7d2; detaching it from the instance and deleting it from the info cache [ 1966.691370] env[63279]: DEBUG nova.network.neutron [req-e5e113a2-9b86-42a6-b45a-d3b5f33bf3fe 
req-20e74887-49bb-439f-b721-25abf84334fa service nova] [instance: 5d4909ea-396c-45ba-9ff5-acb8576150b3] Updating instance_info_cache with network_info: [] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1966.762982] env[63279]: DEBUG oslo_vmware.api [None req-9711dcd5-32f1-47ed-9935-39b9be9d1117 tempest-ServerTagsTestJSON-1345092865 tempest-ServerTagsTestJSON-1345092865-project-member] Task: {'id': task-2087136, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.998838} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1966.762982] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-9711dcd5-32f1-47ed-9935-39b9be9d1117 tempest-ServerTagsTestJSON-1345092865 tempest-ServerTagsTestJSON-1345092865-project-member] [instance: df410051-d551-4a90-81f7-5630f5521a10] Extended root virtual disk {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1966.763870] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e80ad65a-6b74-435f-8ea1-0f882a129d9d {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1966.774231] env[63279]: DEBUG oslo_vmware.api [None req-67b75232-018a-4ecc-9a77-28f4b0247c05 tempest-ServerRescueTestJSONUnderV235-1989684258 tempest-ServerRescueTestJSONUnderV235-1989684258-project-member] Task: {'id': task-2087137, 'name': PowerOnVM_Task, 'duration_secs': 0.395208} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1966.783430] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-67b75232-018a-4ecc-9a77-28f4b0247c05 tempest-ServerRescueTestJSONUnderV235-1989684258 tempest-ServerRescueTestJSONUnderV235-1989684258-project-member] [instance: 044335c7-ce3b-4b4a-b1dc-8b9acec538b4] Powered on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1966.794935] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-9711dcd5-32f1-47ed-9935-39b9be9d1117 tempest-ServerTagsTestJSON-1345092865 tempest-ServerTagsTestJSON-1345092865-project-member] [instance: df410051-d551-4a90-81f7-5630f5521a10] Reconfiguring VM instance instance-00000029 to attach disk [datastore1] df410051-d551-4a90-81f7-5630f5521a10/df410051-d551-4a90-81f7-5630f5521a10.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1966.796285] env[63279]: DEBUG nova.compute.manager [None req-67b75232-018a-4ecc-9a77-28f4b0247c05 tempest-ServerRescueTestJSONUnderV235-1989684258 tempest-ServerRescueTestJSONUnderV235-1989684258-project-member] [instance: 044335c7-ce3b-4b4a-b1dc-8b9acec538b4] Checking state {{(pid=63279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1966.796550] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-cbe4b9e0-f3d1-4e30-b348-92289ecc0c98 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1966.811418] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e194cd9-a5cc-4371-b99b-9f5fd568ddbe {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1966.821532] env[63279]: DEBUG oslo_vmware.api [None 
req-9711dcd5-32f1-47ed-9935-39b9be9d1117 tempest-ServerTagsTestJSON-1345092865 tempest-ServerTagsTestJSON-1345092865-project-member] Waiting for the task: (returnval){ [ 1966.821532] env[63279]: value = "task-2087138" [ 1966.821532] env[63279]: _type = "Task" [ 1966.821532] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1966.852161] env[63279]: DEBUG oslo_vmware.rw_handles [None req-2bc8ba72-0021-44dc-a431-f9b77e745975 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5299020e-2a87-4b32-3866-0e89e96f687e/disk-0.vmdk. {{(pid=63279) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1966.853054] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4194f74f-5622-4e8b-a2f8-b8eb28b9b17c {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1966.862440] env[63279]: DEBUG oslo_vmware.rw_handles [None req-2bc8ba72-0021-44dc-a431-f9b77e745975 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5299020e-2a87-4b32-3866-0e89e96f687e/disk-0.vmdk is in state: ready. {{(pid=63279) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1966.863490] env[63279]: ERROR oslo_vmware.rw_handles [None req-2bc8ba72-0021-44dc-a431-f9b77e745975 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5299020e-2a87-4b32-3866-0e89e96f687e/disk-0.vmdk due to incomplete transfer. [ 1966.863490] env[63279]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-3cca739d-6703-451c-ae52-c5e30cfe985c {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1966.870755] env[63279]: DEBUG oslo_vmware.api [None req-3fabae08-e926-4119-bd9e-b65ca32a47fe tempest-FloatingIPsAssociationNegativeTestJSON-1305161251 tempest-FloatingIPsAssociationNegativeTestJSON-1305161251-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]525064f0-8cef-d382-cf17-6d046e8bbd02, 'name': SearchDatastore_Task, 'duration_secs': 0.009101} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1966.872151] env[63279]: DEBUG oslo_vmware.rw_handles [None req-2bc8ba72-0021-44dc-a431-f9b77e745975 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5299020e-2a87-4b32-3866-0e89e96f687e/disk-0.vmdk. 
{{(pid=63279) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1966.872355] env[63279]: DEBUG nova.virt.vmwareapi.images [None req-2bc8ba72-0021-44dc-a431-f9b77e745975 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] [instance: acf95fad-316c-4605-9008-24d4d7c05892] Uploaded image c12b287c-ce13-4bd1-a83d-94dad39d2a94 to the Glance image server {{(pid=63279) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1966.874412] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-2bc8ba72-0021-44dc-a431-f9b77e745975 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] [instance: acf95fad-316c-4605-9008-24d4d7c05892] Destroying the VM {{(pid=63279) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1966.874650] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-20eb1b6f-62d5-4adf-bb4a-b2e7e916337d {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1966.876767] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-2e56cb26-72cc-4f9b-92db-a17b5428030c {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1966.881177] env[63279]: DEBUG oslo_vmware.api [None req-3fabae08-e926-4119-bd9e-b65ca32a47fe tempest-FloatingIPsAssociationNegativeTestJSON-1305161251 tempest-FloatingIPsAssociationNegativeTestJSON-1305161251-project-member] Waiting for the task: (returnval){ [ 1966.881177] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52caf1a3-5bcc-97dc-ca01-9eda346924a5" [ 1966.881177] env[63279]: _type = "Task" [ 1966.881177] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1966.882387] env[63279]: DEBUG oslo_vmware.api [None req-2bc8ba72-0021-44dc-a431-f9b77e745975 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] Waiting for the task: (returnval){ [ 1966.882387] env[63279]: value = "task-2087139" [ 1966.882387] env[63279]: _type = "Task" [ 1966.882387] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1966.893200] env[63279]: DEBUG oslo_vmware.api [None req-3fabae08-e926-4119-bd9e-b65ca32a47fe tempest-FloatingIPsAssociationNegativeTestJSON-1305161251 tempest-FloatingIPsAssociationNegativeTestJSON-1305161251-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52caf1a3-5bcc-97dc-ca01-9eda346924a5, 'name': SearchDatastore_Task, 'duration_secs': 0.008627} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1966.896208] env[63279]: DEBUG oslo_concurrency.lockutils [None req-3fabae08-e926-4119-bd9e-b65ca32a47fe tempest-FloatingIPsAssociationNegativeTestJSON-1305161251 tempest-FloatingIPsAssociationNegativeTestJSON-1305161251-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1966.896398] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-3fabae08-e926-4119-bd9e-b65ca32a47fe tempest-FloatingIPsAssociationNegativeTestJSON-1305161251 tempest-FloatingIPsAssociationNegativeTestJSON-1305161251-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] 3f888d81-e73f-4486-bb64-849c873449bf/3f888d81-e73f-4486-bb64-849c873449bf.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1966.897524] env[63279]: DEBUG oslo_vmware.api [None req-2bc8ba72-0021-44dc-a431-f9b77e745975 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] Task: {'id': task-2087139, 'name': Destroy_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1966.897524] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-93e7e129-708d-4fb9-a319-bd4c63d374be {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1966.903245] env[63279]: DEBUG oslo_vmware.api [None req-3fabae08-e926-4119-bd9e-b65ca32a47fe tempest-FloatingIPsAssociationNegativeTestJSON-1305161251 tempest-FloatingIPsAssociationNegativeTestJSON-1305161251-project-member] Waiting for the task: (returnval){ [ 1966.903245] env[63279]: value = "task-2087140" [ 1966.903245] env[63279]: _type = "Task" [ 1966.903245] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1966.910476] env[63279]: DEBUG oslo_vmware.api [None req-3fabae08-e926-4119-bd9e-b65ca32a47fe tempest-FloatingIPsAssociationNegativeTestJSON-1305161251 tempest-FloatingIPsAssociationNegativeTestJSON-1305161251-project-member] Task: {'id': task-2087140, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1966.929522] env[63279]: DEBUG oslo_concurrency.lockutils [None req-bd29b348-af39-4f7a-bdf6-c730291d8081 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 3.568s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1966.932267] env[63279]: DEBUG oslo_concurrency.lockutils [None req-a0e5192a-b319-4ffb-924f-54f949fdcc17 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 25.636s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1966.933813] env[63279]: INFO nova.compute.claims [None req-a0e5192a-b319-4ffb-924f-54f949fdcc17 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: ad435281-55a0-418a-8400-5c461a5c15ef] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1966.955372] env[63279]: INFO nova.scheduler.client.report [None req-bd29b348-af39-4f7a-bdf6-c730291d8081 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Deleted allocations for instance 977fa519-2db3-4ee5-981d-c46820a8c72e [ 1967.156919] env[63279]: DEBUG nova.network.neutron [None req-db66a17d-ea76-4394-a0de-e69c5b562193 tempest-AttachInterfacesV270Test-1602135614 tempest-AttachInterfacesV270Test-1602135614-project-member] [instance: ba2d6111-d93d-4216-b641-864b542ea253] Instance cache missing network info. {{(pid=63279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1967.194224] env[63279]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-5f5b2a80-8b2b-490e-9d99-58af3791880e {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1967.204387] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f93e856-fcb2-47fa-aed1-1ab27c51600a {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1967.244561] env[63279]: DEBUG nova.compute.manager [req-e5e113a2-9b86-42a6-b45a-d3b5f33bf3fe req-20e74887-49bb-439f-b721-25abf84334fa service nova] [instance: 5d4909ea-396c-45ba-9ff5-acb8576150b3] Detach interface failed, port_id=887ee4a3-0844-4571-84d8-486d7c21a7d2, reason: Instance 5d4909ea-396c-45ba-9ff5-acb8576150b3 could not be found. {{(pid=63279) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11484}} [ 1967.340711] env[63279]: DEBUG oslo_vmware.api [None req-9711dcd5-32f1-47ed-9935-39b9be9d1117 tempest-ServerTagsTestJSON-1345092865 tempest-ServerTagsTestJSON-1345092865-project-member] Task: {'id': task-2087138, 'name': ReconfigVM_Task, 'duration_secs': 0.348518} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1967.344990] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-9711dcd5-32f1-47ed-9935-39b9be9d1117 tempest-ServerTagsTestJSON-1345092865 tempest-ServerTagsTestJSON-1345092865-project-member] [instance: df410051-d551-4a90-81f7-5630f5521a10] Reconfigured VM instance instance-00000029 to attach disk [datastore1] df410051-d551-4a90-81f7-5630f5521a10/df410051-d551-4a90-81f7-5630f5521a10.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1967.346423] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-37d3b2bd-7a51-4833-baf2-3291b732cb7e {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1967.355007] env[63279]: DEBUG oslo_vmware.api [None req-9711dcd5-32f1-47ed-9935-39b9be9d1117 tempest-ServerTagsTestJSON-1345092865 tempest-ServerTagsTestJSON-1345092865-project-member] Waiting for the task: (returnval){ [ 1967.355007] env[63279]: value = "task-2087141" [ 1967.355007] env[63279]: _type = "Task" [ 1967.355007] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1967.373455] env[63279]: DEBUG oslo_vmware.api [None req-9711dcd5-32f1-47ed-9935-39b9be9d1117 tempest-ServerTagsTestJSON-1345092865 tempest-ServerTagsTestJSON-1345092865-project-member] Task: {'id': task-2087141, 'name': Rename_Task} progress is 6%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1967.392610] env[63279]: DEBUG nova.network.neutron [None req-db66a17d-ea76-4394-a0de-e69c5b562193 tempest-AttachInterfacesV270Test-1602135614 tempest-AttachInterfacesV270Test-1602135614-project-member] [instance: ba2d6111-d93d-4216-b641-864b542ea253] Updating instance_info_cache with network_info: [{"id": "13f38e75-c220-4029-974f-96f6308c35c6", "address": "fa:16:3e:47:2e:10", "network": {"id": "6592145f-9e2b-4195-81a7-8566426d72f6", "bridge": "br-int", "label": "tempest-AttachInterfacesV270Test-241408154-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a6b8a538f6414274b16aa6bb9452bd7f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4df917f7-847a-4c0e-b0e3-69a52e4a1554", "external-id": "cl2-zone-457", "segmentation_id": 457, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap13f38e75-c2", "ovs_interfaceid": "13f38e75-c220-4029-974f-96f6308c35c6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1967.397128] env[63279]: DEBUG oslo_vmware.api [None req-2bc8ba72-0021-44dc-a431-f9b77e745975 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] Task: {'id': task-2087139, 'name': Destroy_Task, 'duration_secs': 0.429444} completed 
successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1967.397438] env[63279]: INFO nova.virt.vmwareapi.vm_util [None req-2bc8ba72-0021-44dc-a431-f9b77e745975 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] [instance: acf95fad-316c-4605-9008-24d4d7c05892] Destroyed the VM [ 1967.397693] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-2bc8ba72-0021-44dc-a431-f9b77e745975 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] [instance: acf95fad-316c-4605-9008-24d4d7c05892] Deleting Snapshot of the VM instance {{(pid=63279) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1967.397947] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-8d8eed7b-14b8-4169-b80a-13a0f5033834 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1967.405038] env[63279]: DEBUG oslo_vmware.api [None req-2bc8ba72-0021-44dc-a431-f9b77e745975 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] Waiting for the task: (returnval){ [ 1967.405038] env[63279]: value = "task-2087142" [ 1967.405038] env[63279]: _type = "Task" [ 1967.405038] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1967.422429] env[63279]: DEBUG oslo_vmware.api [None req-2bc8ba72-0021-44dc-a431-f9b77e745975 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] Task: {'id': task-2087142, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1967.423021] env[63279]: DEBUG oslo_vmware.api [None req-3fabae08-e926-4119-bd9e-b65ca32a47fe tempest-FloatingIPsAssociationNegativeTestJSON-1305161251 tempest-FloatingIPsAssociationNegativeTestJSON-1305161251-project-member] Task: {'id': task-2087140, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.488214} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1967.423328] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-3fabae08-e926-4119-bd9e-b65ca32a47fe tempest-FloatingIPsAssociationNegativeTestJSON-1305161251 tempest-FloatingIPsAssociationNegativeTestJSON-1305161251-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] 3f888d81-e73f-4486-bb64-849c873449bf/3f888d81-e73f-4486-bb64-849c873449bf.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1967.423594] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-3fabae08-e926-4119-bd9e-b65ca32a47fe tempest-FloatingIPsAssociationNegativeTestJSON-1305161251 tempest-FloatingIPsAssociationNegativeTestJSON-1305161251-project-member] [instance: 3f888d81-e73f-4486-bb64-849c873449bf] Extending root virtual disk to 1048576 {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1967.423889] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c6e36ebb-2d8c-474e-aa1f-c479608d02bd {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1967.433493] env[63279]: DEBUG oslo_vmware.api [None req-3fabae08-e926-4119-bd9e-b65ca32a47fe tempest-FloatingIPsAssociationNegativeTestJSON-1305161251 tempest-FloatingIPsAssociationNegativeTestJSON-1305161251-project-member] Waiting for the task: (returnval){ [ 1967.433493] env[63279]: value = "task-2087143" [ 1967.433493] env[63279]: _type = "Task" [ 1967.433493] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1967.445088] env[63279]: DEBUG oslo_vmware.api [None req-3fabae08-e926-4119-bd9e-b65ca32a47fe tempest-FloatingIPsAssociationNegativeTestJSON-1305161251 tempest-FloatingIPsAssociationNegativeTestJSON-1305161251-project-member] Task: {'id': task-2087143, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1967.464433] env[63279]: DEBUG oslo_concurrency.lockutils [None req-bd29b348-af39-4f7a-bdf6-c730291d8081 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Lock "977fa519-2db3-4ee5-981d-c46820a8c72e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 32.865s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1967.865381] env[63279]: DEBUG oslo_vmware.api [None req-9711dcd5-32f1-47ed-9935-39b9be9d1117 tempest-ServerTagsTestJSON-1345092865 tempest-ServerTagsTestJSON-1345092865-project-member] Task: {'id': task-2087141, 'name': Rename_Task, 'duration_secs': 0.150199} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1967.865692] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-9711dcd5-32f1-47ed-9935-39b9be9d1117 tempest-ServerTagsTestJSON-1345092865 tempest-ServerTagsTestJSON-1345092865-project-member] [instance: df410051-d551-4a90-81f7-5630f5521a10] Powering on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1967.865960] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f4bb4d8e-9990-438a-a1b2-125b22bf4b80 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1967.873050] env[63279]: DEBUG oslo_vmware.api [None req-9711dcd5-32f1-47ed-9935-39b9be9d1117 tempest-ServerTagsTestJSON-1345092865 tempest-ServerTagsTestJSON-1345092865-project-member] Waiting for the task: (returnval){ [ 1967.873050] env[63279]: value = "task-2087144" [ 1967.873050] env[63279]: _type = "Task" [ 1967.873050] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1967.880345] env[63279]: DEBUG oslo_vmware.api [None req-9711dcd5-32f1-47ed-9935-39b9be9d1117 tempest-ServerTagsTestJSON-1345092865 tempest-ServerTagsTestJSON-1345092865-project-member] Task: {'id': task-2087144, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1967.898046] env[63279]: DEBUG oslo_concurrency.lockutils [None req-db66a17d-ea76-4394-a0de-e69c5b562193 tempest-AttachInterfacesV270Test-1602135614 tempest-AttachInterfacesV270Test-1602135614-project-member] Releasing lock "refresh_cache-ba2d6111-d93d-4216-b641-864b542ea253" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1967.898389] env[63279]: DEBUG nova.compute.manager [None req-db66a17d-ea76-4394-a0de-e69c5b562193 tempest-AttachInterfacesV270Test-1602135614 tempest-AttachInterfacesV270Test-1602135614-project-member] [instance: ba2d6111-d93d-4216-b641-864b542ea253] Instance network_info: |[{"id": "13f38e75-c220-4029-974f-96f6308c35c6", "address": "fa:16:3e:47:2e:10", "network": {"id": "6592145f-9e2b-4195-81a7-8566426d72f6", "bridge": "br-int", "label": "tempest-AttachInterfacesV270Test-241408154-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a6b8a538f6414274b16aa6bb9452bd7f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4df917f7-847a-4c0e-b0e3-69a52e4a1554", "external-id": "cl2-zone-457", "segmentation_id": 457, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap13f38e75-c2", "ovs_interfaceid": "13f38e75-c220-4029-974f-96f6308c35c6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1967.898843] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None 
req-db66a17d-ea76-4394-a0de-e69c5b562193 tempest-AttachInterfacesV270Test-1602135614 tempest-AttachInterfacesV270Test-1602135614-project-member] [instance: ba2d6111-d93d-4216-b641-864b542ea253] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:47:2e:10', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '4df917f7-847a-4c0e-b0e3-69a52e4a1554', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '13f38e75-c220-4029-974f-96f6308c35c6', 'vif_model': 'vmxnet3'}] {{(pid=63279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1967.906126] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-db66a17d-ea76-4394-a0de-e69c5b562193 tempest-AttachInterfacesV270Test-1602135614 tempest-AttachInterfacesV270Test-1602135614-project-member] Creating folder: Project (a6b8a538f6414274b16aa6bb9452bd7f). Parent ref: group-v427491. {{(pid=63279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1967.906428] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-1e934c78-c9cf-46d0-86a7-578b7a8ddd46 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1967.917578] env[63279]: DEBUG oslo_vmware.api [None req-2bc8ba72-0021-44dc-a431-f9b77e745975 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] Task: {'id': task-2087142, 'name': RemoveSnapshot_Task} progress is 30%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1967.919992] env[63279]: INFO nova.virt.vmwareapi.vm_util [None req-db66a17d-ea76-4394-a0de-e69c5b562193 tempest-AttachInterfacesV270Test-1602135614 tempest-AttachInterfacesV270Test-1602135614-project-member] Created folder: Project (a6b8a538f6414274b16aa6bb9452bd7f) in parent group-v427491. [ 1967.919992] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-db66a17d-ea76-4394-a0de-e69c5b562193 tempest-AttachInterfacesV270Test-1602135614 tempest-AttachInterfacesV270Test-1602135614-project-member] Creating folder: Instances. Parent ref: group-v427621. {{(pid=63279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1967.919992] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9ab9a253-2354-4de8-b6e4-f8802df2b67c {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1967.929123] env[63279]: INFO nova.virt.vmwareapi.vm_util [None req-db66a17d-ea76-4394-a0de-e69c5b562193 tempest-AttachInterfacesV270Test-1602135614 tempest-AttachInterfacesV270Test-1602135614-project-member] Created folder: Instances in parent group-v427621. [ 1967.929123] env[63279]: DEBUG oslo.service.loopingcall [None req-db66a17d-ea76-4394-a0de-e69c5b562193 tempest-AttachInterfacesV270Test-1602135614 tempest-AttachInterfacesV270Test-1602135614-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1967.929123] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ba2d6111-d93d-4216-b641-864b542ea253] Creating VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1967.929282] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b56c57bc-2531-4cde-a11b-5ab1f172ea4b {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1967.950838] env[63279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1967.950838] env[63279]: value = "task-2087147" [ 1967.950838] env[63279]: _type = "Task" [ 1967.950838] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1967.957587] env[63279]: DEBUG oslo_vmware.api [None req-3fabae08-e926-4119-bd9e-b65ca32a47fe tempest-FloatingIPsAssociationNegativeTestJSON-1305161251 tempest-FloatingIPsAssociationNegativeTestJSON-1305161251-project-member] Task: {'id': task-2087143, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.076593} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1967.957801] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-3fabae08-e926-4119-bd9e-b65ca32a47fe tempest-FloatingIPsAssociationNegativeTestJSON-1305161251 tempest-FloatingIPsAssociationNegativeTestJSON-1305161251-project-member] [instance: 3f888d81-e73f-4486-bb64-849c873449bf] Extended root virtual disk {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1967.958590] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fdf87ea1-8b19-4f2e-9cf7-64ea26d3149a {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1967.964028] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087147, 'name': CreateVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1967.985460] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-3fabae08-e926-4119-bd9e-b65ca32a47fe tempest-FloatingIPsAssociationNegativeTestJSON-1305161251 tempest-FloatingIPsAssociationNegativeTestJSON-1305161251-project-member] [instance: 3f888d81-e73f-4486-bb64-849c873449bf] Reconfiguring VM instance instance-0000002a to attach disk [datastore1] 3f888d81-e73f-4486-bb64-849c873449bf/3f888d81-e73f-4486-bb64-849c873449bf.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1967.989030] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3565010e-e9d6-4620-b7b7-3ee5b9039c76 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1968.008754] env[63279]: DEBUG oslo_vmware.api [None req-3fabae08-e926-4119-bd9e-b65ca32a47fe tempest-FloatingIPsAssociationNegativeTestJSON-1305161251 tempest-FloatingIPsAssociationNegativeTestJSON-1305161251-project-member] Waiting for the task: (returnval){ [ 1968.008754] env[63279]: value = "task-2087148" [ 1968.008754] env[63279]: _type = "Task" [ 1968.008754] env[63279]: } to complete. 
{{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1968.020100] env[63279]: DEBUG oslo_vmware.api [None req-3fabae08-e926-4119-bd9e-b65ca32a47fe tempest-FloatingIPsAssociationNegativeTestJSON-1305161251 tempest-FloatingIPsAssociationNegativeTestJSON-1305161251-project-member] Task: {'id': task-2087148, 'name': ReconfigVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1968.264868] env[63279]: DEBUG oslo_concurrency.lockutils [None req-17f8ab0f-73cf-4901-a75b-de05d0a35bf6 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Acquiring lock "7db0c32d-36a4-4452-bb07-06de0c93ab50" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1968.265262] env[63279]: DEBUG oslo_concurrency.lockutils [None req-17f8ab0f-73cf-4901-a75b-de05d0a35bf6 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Lock "7db0c32d-36a4-4452-bb07-06de0c93ab50" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1968.343657] env[63279]: DEBUG nova.compute.manager [req-8c432dce-5065-4704-bfc7-7361b94627cf req-63b8767b-ec15-411e-b34d-86ba953bee45 service nova] [instance: ba2d6111-d93d-4216-b641-864b542ea253] Received event network-changed-13f38e75-c220-4029-974f-96f6308c35c6 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 1968.343890] env[63279]: DEBUG nova.compute.manager [req-8c432dce-5065-4704-bfc7-7361b94627cf req-63b8767b-ec15-411e-b34d-86ba953bee45 service nova] [instance: ba2d6111-d93d-4216-b641-864b542ea253] Refreshing instance network info cache due to event network-changed-13f38e75-c220-4029-974f-96f6308c35c6. {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11655}} [ 1968.344215] env[63279]: DEBUG oslo_concurrency.lockutils [req-8c432dce-5065-4704-bfc7-7361b94627cf req-63b8767b-ec15-411e-b34d-86ba953bee45 service nova] Acquiring lock "refresh_cache-ba2d6111-d93d-4216-b641-864b542ea253" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1968.344441] env[63279]: DEBUG oslo_concurrency.lockutils [req-8c432dce-5065-4704-bfc7-7361b94627cf req-63b8767b-ec15-411e-b34d-86ba953bee45 service nova] Acquired lock "refresh_cache-ba2d6111-d93d-4216-b641-864b542ea253" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1968.344674] env[63279]: DEBUG nova.network.neutron [req-8c432dce-5065-4704-bfc7-7361b94627cf req-63b8767b-ec15-411e-b34d-86ba953bee45 service nova] [instance: ba2d6111-d93d-4216-b641-864b542ea253] Refreshing network info cache for port 13f38e75-c220-4029-974f-96f6308c35c6 {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1968.385458] env[63279]: DEBUG oslo_vmware.api [None req-9711dcd5-32f1-47ed-9935-39b9be9d1117 tempest-ServerTagsTestJSON-1345092865 tempest-ServerTagsTestJSON-1345092865-project-member] Task: {'id': task-2087144, 'name': PowerOnVM_Task} progress is 94%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1968.424709] env[63279]: DEBUG oslo_vmware.api [None req-2bc8ba72-0021-44dc-a431-f9b77e745975 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] Task: {'id': task-2087142, 'name': RemoveSnapshot_Task, 'duration_secs': 0.540723} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1968.425535] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-2bc8ba72-0021-44dc-a431-f9b77e745975 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] [instance: acf95fad-316c-4605-9008-24d4d7c05892] Deleted Snapshot of the VM instance {{(pid=63279) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1968.425762] env[63279]: INFO nova.compute.manager [None req-2bc8ba72-0021-44dc-a431-f9b77e745975 tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] [instance: acf95fad-316c-4605-9008-24d4d7c05892] Took 13.69 seconds to snapshot the instance on the hypervisor. [ 1968.429302] env[63279]: DEBUG nova.compute.manager [req-49828c34-060c-414e-aeab-00f71ae7f194 req-79bb4cfd-aec8-48e9-992d-e4a5a5a54ef2 service nova] [instance: 044335c7-ce3b-4b4a-b1dc-8b9acec538b4] Received event network-changed-8aa70697-1a19-4867-93bc-f264c16829ce {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 1968.429579] env[63279]: DEBUG nova.compute.manager [req-49828c34-060c-414e-aeab-00f71ae7f194 req-79bb4cfd-aec8-48e9-992d-e4a5a5a54ef2 service nova] [instance: 044335c7-ce3b-4b4a-b1dc-8b9acec538b4] Refreshing instance network info cache due to event network-changed-8aa70697-1a19-4867-93bc-f264c16829ce. {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11655}} [ 1968.429824] env[63279]: DEBUG oslo_concurrency.lockutils [req-49828c34-060c-414e-aeab-00f71ae7f194 req-79bb4cfd-aec8-48e9-992d-e4a5a5a54ef2 service nova] Acquiring lock "refresh_cache-044335c7-ce3b-4b4a-b1dc-8b9acec538b4" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1968.429978] env[63279]: DEBUG oslo_concurrency.lockutils [req-49828c34-060c-414e-aeab-00f71ae7f194 req-79bb4cfd-aec8-48e9-992d-e4a5a5a54ef2 service nova] Acquired lock "refresh_cache-044335c7-ce3b-4b4a-b1dc-8b9acec538b4" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1968.430170] env[63279]: DEBUG nova.network.neutron [req-49828c34-060c-414e-aeab-00f71ae7f194 req-79bb4cfd-aec8-48e9-992d-e4a5a5a54ef2 service nova] [instance: 044335c7-ce3b-4b4a-b1dc-8b9acec538b4] Refreshing network info cache for port 8aa70697-1a19-4867-93bc-f264c16829ce {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1968.463648] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087147, 'name': CreateVM_Task} progress is 99%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1968.518813] env[63279]: DEBUG oslo_vmware.api [None req-3fabae08-e926-4119-bd9e-b65ca32a47fe tempest-FloatingIPsAssociationNegativeTestJSON-1305161251 tempest-FloatingIPsAssociationNegativeTestJSON-1305161251-project-member] Task: {'id': task-2087148, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1968.524910] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbd6163a-93ab-49ed-9d59-f38846c0ab47 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1968.532851] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64069290-35a6-44ad-a53a-e8fde6cab60d {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1968.564206] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-adb979e0-c29d-412c-acb4-64442098a481 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1968.573137] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3a5b661-8545-4ac2-827b-a3aa04b152dd {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1968.587125] env[63279]: DEBUG nova.compute.provider_tree [None req-a0e5192a-b319-4ffb-924f-54f949fdcc17 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Inventory has not changed in ProviderTree for provider: 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1968.884103] env[63279]: DEBUG oslo_vmware.api [None req-9711dcd5-32f1-47ed-9935-39b9be9d1117 tempest-ServerTagsTestJSON-1345092865 tempest-ServerTagsTestJSON-1345092865-project-member] Task: {'id': task-2087144, 'name': PowerOnVM_Task, 'duration_secs': 0.635281} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1968.884418] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-9711dcd5-32f1-47ed-9935-39b9be9d1117 tempest-ServerTagsTestJSON-1345092865 tempest-ServerTagsTestJSON-1345092865-project-member] [instance: df410051-d551-4a90-81f7-5630f5521a10] Powered on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1968.884626] env[63279]: INFO nova.compute.manager [None req-9711dcd5-32f1-47ed-9935-39b9be9d1117 tempest-ServerTagsTestJSON-1345092865 tempest-ServerTagsTestJSON-1345092865-project-member] [instance: df410051-d551-4a90-81f7-5630f5521a10] Took 9.03 seconds to spawn the instance on the hypervisor. [ 1968.884808] env[63279]: DEBUG nova.compute.manager [None req-9711dcd5-32f1-47ed-9935-39b9be9d1117 tempest-ServerTagsTestJSON-1345092865 tempest-ServerTagsTestJSON-1345092865-project-member] [instance: df410051-d551-4a90-81f7-5630f5521a10] Checking state {{(pid=63279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1968.885620] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ac2f8f7-e4bd-441b-836e-f7db7bf2233d {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1968.963965] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087147, 'name': CreateVM_Task, 'duration_secs': 0.625978} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1968.964164] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ba2d6111-d93d-4216-b641-864b542ea253] Created VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1968.964836] env[63279]: DEBUG oslo_concurrency.lockutils [None req-db66a17d-ea76-4394-a0de-e69c5b562193 tempest-AttachInterfacesV270Test-1602135614 tempest-AttachInterfacesV270Test-1602135614-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1968.964992] env[63279]: DEBUG oslo_concurrency.lockutils [None req-db66a17d-ea76-4394-a0de-e69c5b562193 tempest-AttachInterfacesV270Test-1602135614 tempest-AttachInterfacesV270Test-1602135614-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1968.965325] env[63279]: DEBUG oslo_concurrency.lockutils [None req-db66a17d-ea76-4394-a0de-e69c5b562193 tempest-AttachInterfacesV270Test-1602135614 tempest-AttachInterfacesV270Test-1602135614-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1968.965845] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d1f69659-a930-490b-ba30-6c137c9ccd1c {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1968.971027] env[63279]: DEBUG oslo_vmware.api [None req-db66a17d-ea76-4394-a0de-e69c5b562193 tempest-AttachInterfacesV270Test-1602135614 tempest-AttachInterfacesV270Test-1602135614-project-member] Waiting for the task: (returnval){ [ 1968.971027] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]5234776f-da4a-8221-1a7c-29c8688064da" [ 1968.971027] env[63279]: _type = "Task" [ 1968.971027] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1968.983405] env[63279]: DEBUG oslo_vmware.api [None req-db66a17d-ea76-4394-a0de-e69c5b562193 tempest-AttachInterfacesV270Test-1602135614 tempest-AttachInterfacesV270Test-1602135614-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]5234776f-da4a-8221-1a7c-29c8688064da, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1969.020455] env[63279]: DEBUG oslo_vmware.api [None req-3fabae08-e926-4119-bd9e-b65ca32a47fe tempest-FloatingIPsAssociationNegativeTestJSON-1305161251 tempest-FloatingIPsAssociationNegativeTestJSON-1305161251-project-member] Task: {'id': task-2087148, 'name': ReconfigVM_Task, 'duration_secs': 0.536638} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1969.023072] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-3fabae08-e926-4119-bd9e-b65ca32a47fe tempest-FloatingIPsAssociationNegativeTestJSON-1305161251 tempest-FloatingIPsAssociationNegativeTestJSON-1305161251-project-member] [instance: 3f888d81-e73f-4486-bb64-849c873449bf] Reconfigured VM instance instance-0000002a to attach disk [datastore1] 3f888d81-e73f-4486-bb64-849c873449bf/3f888d81-e73f-4486-bb64-849c873449bf.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1969.023820] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-bcd0259d-8839-4a48-bf19-89914c7821b7 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1969.033619] env[63279]: DEBUG oslo_vmware.api [None req-3fabae08-e926-4119-bd9e-b65ca32a47fe tempest-FloatingIPsAssociationNegativeTestJSON-1305161251 tempest-FloatingIPsAssociationNegativeTestJSON-1305161251-project-member] Waiting for the task: (returnval){ [ 1969.033619] env[63279]: value = "task-2087149" [ 1969.033619] env[63279]: _type = "Task" [ 1969.033619] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1969.041994] env[63279]: DEBUG oslo_vmware.api [None req-3fabae08-e926-4119-bd9e-b65ca32a47fe tempest-FloatingIPsAssociationNegativeTestJSON-1305161251 tempest-FloatingIPsAssociationNegativeTestJSON-1305161251-project-member] Task: {'id': task-2087149, 'name': Rename_Task} progress is 5%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1969.092102] env[63279]: DEBUG nova.scheduler.client.report [None req-a0e5192a-b319-4ffb-924f-54f949fdcc17 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Inventory has not changed for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1969.159323] env[63279]: DEBUG nova.network.neutron [req-8c432dce-5065-4704-bfc7-7361b94627cf req-63b8767b-ec15-411e-b34d-86ba953bee45 service nova] [instance: ba2d6111-d93d-4216-b641-864b542ea253] Updated VIF entry in instance network info cache for port 13f38e75-c220-4029-974f-96f6308c35c6. 
{{(pid=63279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1969.159643] env[63279]: DEBUG nova.network.neutron [req-8c432dce-5065-4704-bfc7-7361b94627cf req-63b8767b-ec15-411e-b34d-86ba953bee45 service nova] [instance: ba2d6111-d93d-4216-b641-864b542ea253] Updating instance_info_cache with network_info: [{"id": "13f38e75-c220-4029-974f-96f6308c35c6", "address": "fa:16:3e:47:2e:10", "network": {"id": "6592145f-9e2b-4195-81a7-8566426d72f6", "bridge": "br-int", "label": "tempest-AttachInterfacesV270Test-241408154-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a6b8a538f6414274b16aa6bb9452bd7f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4df917f7-847a-4c0e-b0e3-69a52e4a1554", "external-id": "cl2-zone-457", "segmentation_id": 457, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap13f38e75-c2", "ovs_interfaceid": "13f38e75-c220-4029-974f-96f6308c35c6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1969.200433] env[63279]: DEBUG nova.network.neutron [req-49828c34-060c-414e-aeab-00f71ae7f194 req-79bb4cfd-aec8-48e9-992d-e4a5a5a54ef2 service nova] [instance: 044335c7-ce3b-4b4a-b1dc-8b9acec538b4] Updated VIF entry in instance network info cache for port 8aa70697-1a19-4867-93bc-f264c16829ce. 
{{(pid=63279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1969.200786] env[63279]: DEBUG nova.network.neutron [req-49828c34-060c-414e-aeab-00f71ae7f194 req-79bb4cfd-aec8-48e9-992d-e4a5a5a54ef2 service nova] [instance: 044335c7-ce3b-4b4a-b1dc-8b9acec538b4] Updating instance_info_cache with network_info: [{"id": "8aa70697-1a19-4867-93bc-f264c16829ce", "address": "fa:16:3e:27:56:a2", "network": {"id": "d065f991-0cec-4435-b5a1-ec50614f5486", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-780609743-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "2ea8d4af9e9b4df8930ef52450936dcf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3f9ffee1-f413-4f28-8bc4-3fb2cf299789", "external-id": "nsx-vlan-transportzone-599", "segmentation_id": 599, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8aa70697-1a", "ovs_interfaceid": "8aa70697-1a19-4867-93bc-f264c16829ce", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1969.408439] env[63279]: INFO nova.compute.manager [None req-9711dcd5-32f1-47ed-9935-39b9be9d1117 tempest-ServerTagsTestJSON-1345092865 tempest-ServerTagsTestJSON-1345092865-project-member] [instance: df410051-d551-4a90-81f7-5630f5521a10] Took 40.38 seconds to build instance. [ 1969.484020] env[63279]: DEBUG oslo_vmware.api [None req-db66a17d-ea76-4394-a0de-e69c5b562193 tempest-AttachInterfacesV270Test-1602135614 tempest-AttachInterfacesV270Test-1602135614-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]5234776f-da4a-8221-1a7c-29c8688064da, 'name': SearchDatastore_Task, 'duration_secs': 0.012282} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1969.484020] env[63279]: DEBUG oslo_concurrency.lockutils [None req-db66a17d-ea76-4394-a0de-e69c5b562193 tempest-AttachInterfacesV270Test-1602135614 tempest-AttachInterfacesV270Test-1602135614-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1969.484020] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-db66a17d-ea76-4394-a0de-e69c5b562193 tempest-AttachInterfacesV270Test-1602135614 tempest-AttachInterfacesV270Test-1602135614-project-member] [instance: ba2d6111-d93d-4216-b641-864b542ea253] Processing image 30887889-e45b-4f67-8b3c-16216e594a90 {{(pid=63279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1969.484020] env[63279]: DEBUG oslo_concurrency.lockutils [None req-db66a17d-ea76-4394-a0de-e69c5b562193 tempest-AttachInterfacesV270Test-1602135614 tempest-AttachInterfacesV270Test-1602135614-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1969.484292] env[63279]: DEBUG oslo_concurrency.lockutils [None req-db66a17d-ea76-4394-a0de-e69c5b562193 tempest-AttachInterfacesV270Test-1602135614 tempest-AttachInterfacesV270Test-1602135614-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1969.484292] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-db66a17d-ea76-4394-a0de-e69c5b562193 tempest-AttachInterfacesV270Test-1602135614 tempest-AttachInterfacesV270Test-1602135614-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1969.484292] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0997b59d-3c17-424c-8e87-ee9b64baab35 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1969.494170] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-db66a17d-ea76-4394-a0de-e69c5b562193 tempest-AttachInterfacesV270Test-1602135614 tempest-AttachInterfacesV270Test-1602135614-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1969.494170] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-db66a17d-ea76-4394-a0de-e69c5b562193 tempest-AttachInterfacesV270Test-1602135614 tempest-AttachInterfacesV270Test-1602135614-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1969.494170] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3e9288bd-51d8-4a80-84ca-c3ffd8e0ba15 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1969.502022] env[63279]: DEBUG oslo_vmware.api [None req-db66a17d-ea76-4394-a0de-e69c5b562193 tempest-AttachInterfacesV270Test-1602135614 tempest-AttachInterfacesV270Test-1602135614-project-member] Waiting for the task: (returnval){ [ 1969.502022] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]528053fa-a9ba-d1dd-7ce8-67600aad31f1" [ 1969.502022] env[63279]: _type = "Task" [ 1969.502022] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1969.507278] env[63279]: DEBUG oslo_vmware.api [None req-db66a17d-ea76-4394-a0de-e69c5b562193 tempest-AttachInterfacesV270Test-1602135614 tempest-AttachInterfacesV270Test-1602135614-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]528053fa-a9ba-d1dd-7ce8-67600aad31f1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1969.543618] env[63279]: DEBUG oslo_vmware.api [None req-3fabae08-e926-4119-bd9e-b65ca32a47fe tempest-FloatingIPsAssociationNegativeTestJSON-1305161251 tempest-FloatingIPsAssociationNegativeTestJSON-1305161251-project-member] Task: {'id': task-2087149, 'name': Rename_Task, 'duration_secs': 0.344367} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1969.544093] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-3fabae08-e926-4119-bd9e-b65ca32a47fe tempest-FloatingIPsAssociationNegativeTestJSON-1305161251 tempest-FloatingIPsAssociationNegativeTestJSON-1305161251-project-member] [instance: 3f888d81-e73f-4486-bb64-849c873449bf] Powering on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1969.544457] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-7a2f6d3d-bf26-411e-835b-094d82ae897b {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1969.550786] env[63279]: DEBUG oslo_vmware.api [None req-3fabae08-e926-4119-bd9e-b65ca32a47fe tempest-FloatingIPsAssociationNegativeTestJSON-1305161251 tempest-FloatingIPsAssociationNegativeTestJSON-1305161251-project-member] Waiting for the task: (returnval){ [ 1969.550786] env[63279]: value = "task-2087150" [ 1969.550786] env[63279]: _type = "Task" [ 1969.550786] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1969.559238] env[63279]: DEBUG oslo_vmware.api [None req-3fabae08-e926-4119-bd9e-b65ca32a47fe tempest-FloatingIPsAssociationNegativeTestJSON-1305161251 tempest-FloatingIPsAssociationNegativeTestJSON-1305161251-project-member] Task: {'id': task-2087150, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1969.601638] env[63279]: DEBUG oslo_concurrency.lockutils [None req-a0e5192a-b319-4ffb-924f-54f949fdcc17 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.669s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1969.602899] env[63279]: DEBUG nova.compute.manager [None req-a0e5192a-b319-4ffb-924f-54f949fdcc17 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: ad435281-55a0-418a-8400-5c461a5c15ef] Start building networks asynchronously for instance. {{(pid=63279) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1969.605758] env[63279]: DEBUG oslo_concurrency.lockutils [None req-d078f7b9-924c-4992-81c3-367c64f2edff tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 23.293s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1969.607503] env[63279]: INFO nova.compute.claims [None req-d078f7b9-924c-4992-81c3-367c64f2edff tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] [instance: 79032b2a-74f7-4c6d-8f71-f848fe372ba2] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1969.665500] env[63279]: DEBUG oslo_concurrency.lockutils [req-8c432dce-5065-4704-bfc7-7361b94627cf req-63b8767b-ec15-411e-b34d-86ba953bee45 service nova] Releasing lock "refresh_cache-ba2d6111-d93d-4216-b641-864b542ea253" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1969.665829] env[63279]: DEBUG nova.compute.manager [req-8c432dce-5065-4704-bfc7-7361b94627cf req-63b8767b-ec15-411e-b34d-86ba953bee45 service nova] [instance: 044335c7-ce3b-4b4a-b1dc-8b9acec538b4] Received event network-changed-8aa70697-1a19-4867-93bc-f264c16829ce {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 1969.666034] env[63279]: DEBUG nova.compute.manager [req-8c432dce-5065-4704-bfc7-7361b94627cf req-63b8767b-ec15-411e-b34d-86ba953bee45 service nova] [instance: 044335c7-ce3b-4b4a-b1dc-8b9acec538b4] Refreshing instance network info cache due to event network-changed-8aa70697-1a19-4867-93bc-f264c16829ce. 
{{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11655}} [ 1969.666245] env[63279]: DEBUG oslo_concurrency.lockutils [req-8c432dce-5065-4704-bfc7-7361b94627cf req-63b8767b-ec15-411e-b34d-86ba953bee45 service nova] Acquiring lock "refresh_cache-044335c7-ce3b-4b4a-b1dc-8b9acec538b4" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1969.702971] env[63279]: DEBUG oslo_concurrency.lockutils [req-49828c34-060c-414e-aeab-00f71ae7f194 req-79bb4cfd-aec8-48e9-992d-e4a5a5a54ef2 service nova] Releasing lock "refresh_cache-044335c7-ce3b-4b4a-b1dc-8b9acec538b4" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1969.703441] env[63279]: DEBUG oslo_concurrency.lockutils [req-8c432dce-5065-4704-bfc7-7361b94627cf req-63b8767b-ec15-411e-b34d-86ba953bee45 service nova] Acquired lock "refresh_cache-044335c7-ce3b-4b4a-b1dc-8b9acec538b4" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1969.703647] env[63279]: DEBUG nova.network.neutron [req-8c432dce-5065-4704-bfc7-7361b94627cf req-63b8767b-ec15-411e-b34d-86ba953bee45 service nova] [instance: 044335c7-ce3b-4b4a-b1dc-8b9acec538b4] Refreshing network info cache for port 8aa70697-1a19-4867-93bc-f264c16829ce {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1969.911087] env[63279]: DEBUG oslo_concurrency.lockutils [None req-9711dcd5-32f1-47ed-9935-39b9be9d1117 tempest-ServerTagsTestJSON-1345092865 tempest-ServerTagsTestJSON-1345092865-project-member] Lock "df410051-d551-4a90-81f7-5630f5521a10" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 106.142s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1970.014173] env[63279]: DEBUG oslo_vmware.api [None req-db66a17d-ea76-4394-a0de-e69c5b562193 tempest-AttachInterfacesV270Test-1602135614 tempest-AttachInterfacesV270Test-1602135614-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]528053fa-a9ba-d1dd-7ce8-67600aad31f1, 'name': SearchDatastore_Task, 'duration_secs': 0.009118} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1970.015048] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5f65d571-d21b-4a07-9df5-ca3385a2640b {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1970.020838] env[63279]: DEBUG oslo_vmware.api [None req-db66a17d-ea76-4394-a0de-e69c5b562193 tempest-AttachInterfacesV270Test-1602135614 tempest-AttachInterfacesV270Test-1602135614-project-member] Waiting for the task: (returnval){ [ 1970.020838] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]524f588b-7963-34dd-006e-8155319cf363" [ 1970.020838] env[63279]: _type = "Task" [ 1970.020838] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1970.030734] env[63279]: DEBUG oslo_vmware.api [None req-db66a17d-ea76-4394-a0de-e69c5b562193 tempest-AttachInterfacesV270Test-1602135614 tempest-AttachInterfacesV270Test-1602135614-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]524f588b-7963-34dd-006e-8155319cf363, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1970.064075] env[63279]: DEBUG oslo_vmware.api [None req-3fabae08-e926-4119-bd9e-b65ca32a47fe tempest-FloatingIPsAssociationNegativeTestJSON-1305161251 tempest-FloatingIPsAssociationNegativeTestJSON-1305161251-project-member] Task: {'id': task-2087150, 'name': PowerOnVM_Task} progress is 78%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1970.113048] env[63279]: DEBUG nova.compute.utils [None req-a0e5192a-b319-4ffb-924f-54f949fdcc17 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Using /dev/sd instead of None {{(pid=63279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1970.116178] env[63279]: DEBUG nova.compute.manager [None req-a0e5192a-b319-4ffb-924f-54f949fdcc17 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: ad435281-55a0-418a-8400-5c461a5c15ef] Allocating IP information in the background. {{(pid=63279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1970.116300] env[63279]: DEBUG nova.network.neutron [None req-a0e5192a-b319-4ffb-924f-54f949fdcc17 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: ad435281-55a0-418a-8400-5c461a5c15ef] allocate_for_instance() {{(pid=63279) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1970.160825] env[63279]: DEBUG nova.policy [None req-a0e5192a-b319-4ffb-924f-54f949fdcc17 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ab1648b445ab458d92404e3a5ddb8619', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7c98de1240f14b058e58f6a707096ef1', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63279) authorize /opt/stack/nova/nova/policy.py:192}} [ 1970.413795] env[63279]: DEBUG nova.compute.manager [None req-3b1863be-7a23-490d-9c3e-8eb295bf6202 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] [instance: 8ccb4293-927a-45ba-82e9-9f1b4d5985cc] Starting instance... {{(pid=63279) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1970.491128] env[63279]: DEBUG nova.network.neutron [None req-a0e5192a-b319-4ffb-924f-54f949fdcc17 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: ad435281-55a0-418a-8400-5c461a5c15ef] Successfully created port: 699a008e-52f8-4319-9e86-01acc5c037f9 {{(pid=63279) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1970.513239] env[63279]: DEBUG nova.network.neutron [req-8c432dce-5065-4704-bfc7-7361b94627cf req-63b8767b-ec15-411e-b34d-86ba953bee45 service nova] [instance: 044335c7-ce3b-4b4a-b1dc-8b9acec538b4] Updated VIF entry in instance network info cache for port 8aa70697-1a19-4867-93bc-f264c16829ce. 
{{(pid=63279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1970.513685] env[63279]: DEBUG nova.network.neutron [req-8c432dce-5065-4704-bfc7-7361b94627cf req-63b8767b-ec15-411e-b34d-86ba953bee45 service nova] [instance: 044335c7-ce3b-4b4a-b1dc-8b9acec538b4] Updating instance_info_cache with network_info: [{"id": "8aa70697-1a19-4867-93bc-f264c16829ce", "address": "fa:16:3e:27:56:a2", "network": {"id": "d065f991-0cec-4435-b5a1-ec50614f5486", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-780609743-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.173", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "2ea8d4af9e9b4df8930ef52450936dcf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3f9ffee1-f413-4f28-8bc4-3fb2cf299789", "external-id": "nsx-vlan-transportzone-599", "segmentation_id": 599, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8aa70697-1a", "ovs_interfaceid": "8aa70697-1a19-4867-93bc-f264c16829ce", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1970.534526] env[63279]: DEBUG oslo_vmware.api [None req-db66a17d-ea76-4394-a0de-e69c5b562193 tempest-AttachInterfacesV270Test-1602135614 tempest-AttachInterfacesV270Test-1602135614-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]524f588b-7963-34dd-006e-8155319cf363, 'name': SearchDatastore_Task, 'duration_secs': 0.009827} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1970.534810] env[63279]: DEBUG oslo_concurrency.lockutils [None req-db66a17d-ea76-4394-a0de-e69c5b562193 tempest-AttachInterfacesV270Test-1602135614 tempest-AttachInterfacesV270Test-1602135614-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1970.535081] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-db66a17d-ea76-4394-a0de-e69c5b562193 tempest-AttachInterfacesV270Test-1602135614 tempest-AttachInterfacesV270Test-1602135614-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] ba2d6111-d93d-4216-b641-864b542ea253/ba2d6111-d93d-4216-b641-864b542ea253.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1970.535346] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-99311db4-c552-428b-a4b3-fb63d6fdb700 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1970.541975] env[63279]: DEBUG oslo_vmware.api [None req-db66a17d-ea76-4394-a0de-e69c5b562193 tempest-AttachInterfacesV270Test-1602135614 tempest-AttachInterfacesV270Test-1602135614-project-member] Waiting for the task: (returnval){ [ 1970.541975] env[63279]: value = "task-2087151" [ 1970.541975] env[63279]: _type = "Task" [ 1970.541975] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1970.550308] env[63279]: DEBUG oslo_vmware.api [None req-db66a17d-ea76-4394-a0de-e69c5b562193 tempest-AttachInterfacesV270Test-1602135614 tempest-AttachInterfacesV270Test-1602135614-project-member] Task: {'id': task-2087151, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1970.560651] env[63279]: DEBUG oslo_vmware.api [None req-3fabae08-e926-4119-bd9e-b65ca32a47fe tempest-FloatingIPsAssociationNegativeTestJSON-1305161251 tempest-FloatingIPsAssociationNegativeTestJSON-1305161251-project-member] Task: {'id': task-2087150, 'name': PowerOnVM_Task, 'duration_secs': 0.92409} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1970.560920] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-3fabae08-e926-4119-bd9e-b65ca32a47fe tempest-FloatingIPsAssociationNegativeTestJSON-1305161251 tempest-FloatingIPsAssociationNegativeTestJSON-1305161251-project-member] [instance: 3f888d81-e73f-4486-bb64-849c873449bf] Powered on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1970.561147] env[63279]: INFO nova.compute.manager [None req-3fabae08-e926-4119-bd9e-b65ca32a47fe tempest-FloatingIPsAssociationNegativeTestJSON-1305161251 tempest-FloatingIPsAssociationNegativeTestJSON-1305161251-project-member] [instance: 3f888d81-e73f-4486-bb64-849c873449bf] Took 7.91 seconds to spawn the instance on the hypervisor. 
[ 1970.561331] env[63279]: DEBUG nova.compute.manager [None req-3fabae08-e926-4119-bd9e-b65ca32a47fe tempest-FloatingIPsAssociationNegativeTestJSON-1305161251 tempest-FloatingIPsAssociationNegativeTestJSON-1305161251-project-member] [instance: 3f888d81-e73f-4486-bb64-849c873449bf] Checking state {{(pid=63279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1970.562116] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d13d9e6-900f-4624-83a4-a3606c40feb2 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1970.616845] env[63279]: DEBUG nova.compute.manager [None req-a0e5192a-b319-4ffb-924f-54f949fdcc17 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: ad435281-55a0-418a-8400-5c461a5c15ef] Start building block device mappings for instance. {{(pid=63279) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1970.949190] env[63279]: DEBUG oslo_concurrency.lockutils [None req-3b1863be-7a23-490d-9c3e-8eb295bf6202 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1971.021393] env[63279]: DEBUG oslo_concurrency.lockutils [req-8c432dce-5065-4704-bfc7-7361b94627cf req-63b8767b-ec15-411e-b34d-86ba953bee45 service nova] Releasing lock "refresh_cache-044335c7-ce3b-4b4a-b1dc-8b9acec538b4" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1971.058103] env[63279]: DEBUG oslo_vmware.api [None req-db66a17d-ea76-4394-a0de-e69c5b562193 tempest-AttachInterfacesV270Test-1602135614 tempest-AttachInterfacesV270Test-1602135614-project-member] Task: {'id': task-2087151, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1971.082859] env[63279]: INFO nova.compute.manager [None req-3fabae08-e926-4119-bd9e-b65ca32a47fe tempest-FloatingIPsAssociationNegativeTestJSON-1305161251 tempest-FloatingIPsAssociationNegativeTestJSON-1305161251-project-member] [instance: 3f888d81-e73f-4486-bb64-849c873449bf] Took 38.40 seconds to build instance. 
[ 1971.238986] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f21d6f1-3fdc-4f87-9470-10ede44c4c36 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1971.250161] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6433c0b3-bd02-4ccb-ac4d-2b3836025e8e {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1971.281846] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-741ed5a4-8669-4cc7-839b-4307dfb8d7f9 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1971.290816] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a1a34b8-7f52-4666-b355-d550b18112bc {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1971.304177] env[63279]: DEBUG nova.compute.provider_tree [None req-d078f7b9-924c-4992-81c3-367c64f2edff tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] Inventory has not changed in ProviderTree for provider: 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1971.307287] env[63279]: DEBUG nova.compute.manager [req-107137af-5525-4908-a480-3cd3cb6207e7 req-f24a271c-8d50-4473-995c-85530d125b73 service nova] [instance: 044335c7-ce3b-4b4a-b1dc-8b9acec538b4] Received event network-changed-8aa70697-1a19-4867-93bc-f264c16829ce {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 1971.307477] env[63279]: DEBUG nova.compute.manager [req-107137af-5525-4908-a480-3cd3cb6207e7 req-f24a271c-8d50-4473-995c-85530d125b73 service nova] [instance: 044335c7-ce3b-4b4a-b1dc-8b9acec538b4] Refreshing instance network info cache due to event network-changed-8aa70697-1a19-4867-93bc-f264c16829ce. {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11655}} [ 1971.307690] env[63279]: DEBUG oslo_concurrency.lockutils [req-107137af-5525-4908-a480-3cd3cb6207e7 req-f24a271c-8d50-4473-995c-85530d125b73 service nova] Acquiring lock "refresh_cache-044335c7-ce3b-4b4a-b1dc-8b9acec538b4" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1971.307861] env[63279]: DEBUG oslo_concurrency.lockutils [req-107137af-5525-4908-a480-3cd3cb6207e7 req-f24a271c-8d50-4473-995c-85530d125b73 service nova] Acquired lock "refresh_cache-044335c7-ce3b-4b4a-b1dc-8b9acec538b4" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1971.308043] env[63279]: DEBUG nova.network.neutron [req-107137af-5525-4908-a480-3cd3cb6207e7 req-f24a271c-8d50-4473-995c-85530d125b73 service nova] [instance: 044335c7-ce3b-4b4a-b1dc-8b9acec538b4] Refreshing network info cache for port 8aa70697-1a19-4867-93bc-f264c16829ce {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1971.553065] env[63279]: DEBUG oslo_vmware.api [None req-db66a17d-ea76-4394-a0de-e69c5b562193 tempest-AttachInterfacesV270Test-1602135614 tempest-AttachInterfacesV270Test-1602135614-project-member] Task: {'id': task-2087151, 'name': CopyVirtualDisk_Task} progress is 100%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1971.585128] env[63279]: DEBUG oslo_concurrency.lockutils [None req-3fabae08-e926-4119-bd9e-b65ca32a47fe tempest-FloatingIPsAssociationNegativeTestJSON-1305161251 tempest-FloatingIPsAssociationNegativeTestJSON-1305161251-project-member] Lock "3f888d81-e73f-4486-bb64-849c873449bf" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 105.448s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1971.602296] env[63279]: DEBUG nova.compute.manager [req-a246f26c-a9bc-4937-bb9e-9990a21a6c0a req-2a03fec3-265a-4d6f-8221-8d7843a2f672 service nova] [instance: 044335c7-ce3b-4b4a-b1dc-8b9acec538b4] Received event network-changed-8aa70697-1a19-4867-93bc-f264c16829ce {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 1971.602296] env[63279]: DEBUG nova.compute.manager [req-a246f26c-a9bc-4937-bb9e-9990a21a6c0a req-2a03fec3-265a-4d6f-8221-8d7843a2f672 service nova] [instance: 044335c7-ce3b-4b4a-b1dc-8b9acec538b4] Refreshing instance network info cache due to event network-changed-8aa70697-1a19-4867-93bc-f264c16829ce. {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11655}} [ 1971.602296] env[63279]: DEBUG oslo_concurrency.lockutils [req-a246f26c-a9bc-4937-bb9e-9990a21a6c0a req-2a03fec3-265a-4d6f-8221-8d7843a2f672 service nova] Acquiring lock "refresh_cache-044335c7-ce3b-4b4a-b1dc-8b9acec538b4" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1971.632806] env[63279]: DEBUG nova.compute.manager [None req-a0e5192a-b319-4ffb-924f-54f949fdcc17 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: ad435281-55a0-418a-8400-5c461a5c15ef] Start spawning the instance on the hypervisor. 
{{(pid=63279) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1971.659971] env[63279]: DEBUG nova.virt.hardware [None req-a0e5192a-b319-4ffb-924f-54f949fdcc17 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-13T17:30:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-13T17:30:01Z,direct_url=,disk_format='vmdk',id=30887889-e45b-4f67-8b3c-16216e594a90,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a935e86eee0a4c38adfe0367d2097a61',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-13T17:30:02Z,virtual_size=,visibility=), allow threads: False {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1971.660078] env[63279]: DEBUG nova.virt.hardware [None req-a0e5192a-b319-4ffb-924f-54f949fdcc17 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Flavor limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1971.660232] env[63279]: DEBUG nova.virt.hardware [None req-a0e5192a-b319-4ffb-924f-54f949fdcc17 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Image limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1971.660427] env[63279]: DEBUG nova.virt.hardware [None req-a0e5192a-b319-4ffb-924f-54f949fdcc17 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Flavor pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1971.660573] env[63279]: DEBUG nova.virt.hardware [None req-a0e5192a-b319-4ffb-924f-54f949fdcc17 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Image pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1971.660843] env[63279]: DEBUG nova.virt.hardware [None req-a0e5192a-b319-4ffb-924f-54f949fdcc17 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1971.660928] env[63279]: DEBUG nova.virt.hardware [None req-a0e5192a-b319-4ffb-924f-54f949fdcc17 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1971.661120] env[63279]: DEBUG nova.virt.hardware [None req-a0e5192a-b319-4ffb-924f-54f949fdcc17 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1971.661310] env[63279]: DEBUG 
nova.virt.hardware [None req-a0e5192a-b319-4ffb-924f-54f949fdcc17 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Got 1 possible topologies {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1971.661480] env[63279]: DEBUG nova.virt.hardware [None req-a0e5192a-b319-4ffb-924f-54f949fdcc17 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1971.661656] env[63279]: DEBUG nova.virt.hardware [None req-a0e5192a-b319-4ffb-924f-54f949fdcc17 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1971.662656] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c85ce7f2-ff4a-4252-b4fa-703fde9d31e7 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1971.671692] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f36e12b-86de-4e0d-9619-7dcb5411847f {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1971.810324] env[63279]: DEBUG nova.scheduler.client.report [None req-d078f7b9-924c-4992-81c3-367c64f2edff tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] Inventory has not changed for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1971.940710] env[63279]: DEBUG nova.compute.manager [None req-1478564c-2854-499f-8d64-33efae3e3b9b tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] [instance: 5bb445d3-1b12-4a1b-ad2a-cbc929b13aee] Checking state {{(pid=63279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1971.944103] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-906eb968-8583-4bbf-bbc5-a47a54a1f2c1 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1972.053051] env[63279]: DEBUG oslo_vmware.api [None req-db66a17d-ea76-4394-a0de-e69c5b562193 tempest-AttachInterfacesV270Test-1602135614 tempest-AttachInterfacesV270Test-1602135614-project-member] Task: {'id': task-2087151, 'name': CopyVirtualDisk_Task} progress is 100%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1972.053898] env[63279]: DEBUG nova.network.neutron [req-107137af-5525-4908-a480-3cd3cb6207e7 req-f24a271c-8d50-4473-995c-85530d125b73 service nova] [instance: 044335c7-ce3b-4b4a-b1dc-8b9acec538b4] Updated VIF entry in instance network info cache for port 8aa70697-1a19-4867-93bc-f264c16829ce. {{(pid=63279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1972.054187] env[63279]: DEBUG nova.network.neutron [req-107137af-5525-4908-a480-3cd3cb6207e7 req-f24a271c-8d50-4473-995c-85530d125b73 service nova] [instance: 044335c7-ce3b-4b4a-b1dc-8b9acec538b4] Updating instance_info_cache with network_info: [{"id": "8aa70697-1a19-4867-93bc-f264c16829ce", "address": "fa:16:3e:27:56:a2", "network": {"id": "d065f991-0cec-4435-b5a1-ec50614f5486", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-780609743-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "2ea8d4af9e9b4df8930ef52450936dcf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3f9ffee1-f413-4f28-8bc4-3fb2cf299789", "external-id": "nsx-vlan-transportzone-599", "segmentation_id": 599, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8aa70697-1a", "ovs_interfaceid": "8aa70697-1a19-4867-93bc-f264c16829ce", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1972.090991] env[63279]: DEBUG nova.compute.manager [None req-0248352a-b141-4db1-ac2b-5125f47ee5eb tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] [instance: 64e92bfc-c0d0-4918-9ba2-45ffedbf7e39] Starting instance... {{(pid=63279) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1972.321218] env[63279]: DEBUG oslo_concurrency.lockutils [None req-d078f7b9-924c-4992-81c3-367c64f2edff tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.715s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1972.321863] env[63279]: DEBUG nova.compute.manager [None req-d078f7b9-924c-4992-81c3-367c64f2edff tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] [instance: 79032b2a-74f7-4c6d-8f71-f848fe372ba2] Start building networks asynchronously for instance. 
{{(pid=63279) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1972.328042] env[63279]: DEBUG oslo_concurrency.lockutils [None req-387411dd-d774-4ddb-8c67-e6827dd0c0cb tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 25.006s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1972.329729] env[63279]: INFO nova.compute.claims [None req-387411dd-d774-4ddb-8c67-e6827dd0c0cb tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] [instance: eca98392-98be-405b-b799-463ef9ee3dc8] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1972.457070] env[63279]: INFO nova.compute.manager [None req-1478564c-2854-499f-8d64-33efae3e3b9b tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] [instance: 5bb445d3-1b12-4a1b-ad2a-cbc929b13aee] instance snapshotting [ 1972.459993] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81b11fa1-f81a-4d3e-83bb-3ec7be3b9ac9 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1972.482168] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72a54169-2703-4641-b219-8be983d55541 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1972.553078] env[63279]: DEBUG oslo_vmware.api [None req-db66a17d-ea76-4394-a0de-e69c5b562193 tempest-AttachInterfacesV270Test-1602135614 tempest-AttachInterfacesV270Test-1602135614-project-member] Task: {'id': task-2087151, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.678227} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1972.553384] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-db66a17d-ea76-4394-a0de-e69c5b562193 tempest-AttachInterfacesV270Test-1602135614 tempest-AttachInterfacesV270Test-1602135614-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] ba2d6111-d93d-4216-b641-864b542ea253/ba2d6111-d93d-4216-b641-864b542ea253.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1972.553601] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-db66a17d-ea76-4394-a0de-e69c5b562193 tempest-AttachInterfacesV270Test-1602135614 tempest-AttachInterfacesV270Test-1602135614-project-member] [instance: ba2d6111-d93d-4216-b641-864b542ea253] Extending root virtual disk to 1048576 {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1972.554011] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-939ba185-d4f6-484a-81f0-f2e77a5a2b7e {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1972.556433] env[63279]: DEBUG oslo_concurrency.lockutils [req-107137af-5525-4908-a480-3cd3cb6207e7 req-f24a271c-8d50-4473-995c-85530d125b73 service nova] Releasing lock "refresh_cache-044335c7-ce3b-4b4a-b1dc-8b9acec538b4" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1972.556855] env[63279]: DEBUG oslo_concurrency.lockutils [req-a246f26c-a9bc-4937-bb9e-9990a21a6c0a req-2a03fec3-265a-4d6f-8221-8d7843a2f672 service nova] Acquired lock "refresh_cache-044335c7-ce3b-4b4a-b1dc-8b9acec538b4" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1972.559494] env[63279]: DEBUG nova.network.neutron [req-a246f26c-a9bc-4937-bb9e-9990a21a6c0a req-2a03fec3-265a-4d6f-8221-8d7843a2f672 service nova] [instance: 044335c7-ce3b-4b4a-b1dc-8b9acec538b4] Refreshing network info cache for port 8aa70697-1a19-4867-93bc-f264c16829ce {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1972.564134] env[63279]: DEBUG oslo_vmware.api [None req-db66a17d-ea76-4394-a0de-e69c5b562193 tempest-AttachInterfacesV270Test-1602135614 tempest-AttachInterfacesV270Test-1602135614-project-member] Waiting for the task: (returnval){ [ 1972.564134] env[63279]: value = "task-2087152" [ 1972.564134] env[63279]: _type = "Task" [ 1972.564134] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1972.575571] env[63279]: DEBUG oslo_vmware.api [None req-db66a17d-ea76-4394-a0de-e69c5b562193 tempest-AttachInterfacesV270Test-1602135614 tempest-AttachInterfacesV270Test-1602135614-project-member] Task: {'id': task-2087152, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1972.591927] env[63279]: DEBUG nova.network.neutron [None req-a0e5192a-b319-4ffb-924f-54f949fdcc17 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: ad435281-55a0-418a-8400-5c461a5c15ef] Successfully updated port: 699a008e-52f8-4319-9e86-01acc5c037f9 {{(pid=63279) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1972.612895] env[63279]: DEBUG oslo_concurrency.lockutils [None req-0248352a-b141-4db1-ac2b-5125f47ee5eb tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1972.836236] env[63279]: DEBUG nova.compute.utils [None req-d078f7b9-924c-4992-81c3-367c64f2edff tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] Using /dev/sd instead of None {{(pid=63279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1972.840048] env[63279]: DEBUG nova.compute.manager [None req-d078f7b9-924c-4992-81c3-367c64f2edff tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] [instance: 79032b2a-74f7-4c6d-8f71-f848fe372ba2] Allocating IP information in the background. {{(pid=63279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1972.840136] env[63279]: DEBUG nova.network.neutron [None req-d078f7b9-924c-4992-81c3-367c64f2edff tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] [instance: 79032b2a-74f7-4c6d-8f71-f848fe372ba2] allocate_for_instance() {{(pid=63279) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1972.889350] env[63279]: DEBUG nova.policy [None req-d078f7b9-924c-4992-81c3-367c64f2edff tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'dfe7035705504776b55a88c30e61a010', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ba92e8896d7a4605bec96ce7ee7d4a4d', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63279) authorize /opt/stack/nova/nova/policy.py:192}} [ 1972.993722] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-1478564c-2854-499f-8d64-33efae3e3b9b tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] [instance: 5bb445d3-1b12-4a1b-ad2a-cbc929b13aee] Creating Snapshot of the VM instance {{(pid=63279) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1972.994118] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-82592f12-b2da-4554-ab57-80e7b570a3ac {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1973.002559] env[63279]: DEBUG oslo_vmware.api [None req-1478564c-2854-499f-8d64-33efae3e3b9b tempest-ListImageFiltersTestJSON-386698447 
tempest-ListImageFiltersTestJSON-386698447-project-member] Waiting for the task: (returnval){ [ 1973.002559] env[63279]: value = "task-2087153" [ 1973.002559] env[63279]: _type = "Task" [ 1973.002559] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1973.011836] env[63279]: DEBUG oslo_vmware.api [None req-1478564c-2854-499f-8d64-33efae3e3b9b tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] Task: {'id': task-2087153, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1973.074521] env[63279]: DEBUG oslo_vmware.api [None req-db66a17d-ea76-4394-a0de-e69c5b562193 tempest-AttachInterfacesV270Test-1602135614 tempest-AttachInterfacesV270Test-1602135614-project-member] Task: {'id': task-2087152, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.246376} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1973.074521] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-db66a17d-ea76-4394-a0de-e69c5b562193 tempest-AttachInterfacesV270Test-1602135614 tempest-AttachInterfacesV270Test-1602135614-project-member] [instance: ba2d6111-d93d-4216-b641-864b542ea253] Extended root virtual disk {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1973.075094] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5629ca2d-bffd-47fe-8af6-79ec09aabee0 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1973.096826] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-db66a17d-ea76-4394-a0de-e69c5b562193 tempest-AttachInterfacesV270Test-1602135614 tempest-AttachInterfacesV270Test-1602135614-project-member] [instance: ba2d6111-d93d-4216-b641-864b542ea253] Reconfiguring VM instance instance-0000002b to attach disk [datastore1] ba2d6111-d93d-4216-b641-864b542ea253/ba2d6111-d93d-4216-b641-864b542ea253.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1973.097435] env[63279]: DEBUG oslo_concurrency.lockutils [None req-a0e5192a-b319-4ffb-924f-54f949fdcc17 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Acquiring lock "refresh_cache-ad435281-55a0-418a-8400-5c461a5c15ef" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1973.097988] env[63279]: DEBUG oslo_concurrency.lockutils [None req-a0e5192a-b319-4ffb-924f-54f949fdcc17 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Acquired lock "refresh_cache-ad435281-55a0-418a-8400-5c461a5c15ef" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1973.097988] env[63279]: DEBUG nova.network.neutron [None req-a0e5192a-b319-4ffb-924f-54f949fdcc17 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: ad435281-55a0-418a-8400-5c461a5c15ef] Building network info cache for instance {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1973.098765] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with 
opID=oslo.vmware-3877d0be-8627-4b9f-ba71-a73d37854ccd {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1973.120992] env[63279]: DEBUG oslo_vmware.api [None req-db66a17d-ea76-4394-a0de-e69c5b562193 tempest-AttachInterfacesV270Test-1602135614 tempest-AttachInterfacesV270Test-1602135614-project-member] Waiting for the task: (returnval){ [ 1973.120992] env[63279]: value = "task-2087154" [ 1973.120992] env[63279]: _type = "Task" [ 1973.120992] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1973.133428] env[63279]: DEBUG oslo_vmware.api [None req-db66a17d-ea76-4394-a0de-e69c5b562193 tempest-AttachInterfacesV270Test-1602135614 tempest-AttachInterfacesV270Test-1602135614-project-member] Task: {'id': task-2087154, 'name': ReconfigVM_Task} progress is 10%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1973.340710] env[63279]: DEBUG nova.compute.manager [None req-d078f7b9-924c-4992-81c3-367c64f2edff tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] [instance: 79032b2a-74f7-4c6d-8f71-f848fe372ba2] Start building block device mappings for instance. {{(pid=63279) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1973.364386] env[63279]: DEBUG nova.network.neutron [req-a246f26c-a9bc-4937-bb9e-9990a21a6c0a req-2a03fec3-265a-4d6f-8221-8d7843a2f672 service nova] [instance: 044335c7-ce3b-4b4a-b1dc-8b9acec538b4] Updated VIF entry in instance network info cache for port 8aa70697-1a19-4867-93bc-f264c16829ce. {{(pid=63279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1973.364957] env[63279]: DEBUG nova.network.neutron [req-a246f26c-a9bc-4937-bb9e-9990a21a6c0a req-2a03fec3-265a-4d6f-8221-8d7843a2f672 service nova] [instance: 044335c7-ce3b-4b4a-b1dc-8b9acec538b4] Updating instance_info_cache with network_info: [{"id": "8aa70697-1a19-4867-93bc-f264c16829ce", "address": "fa:16:3e:27:56:a2", "network": {"id": "d065f991-0cec-4435-b5a1-ec50614f5486", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-780609743-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "2ea8d4af9e9b4df8930ef52450936dcf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3f9ffee1-f413-4f28-8bc4-3fb2cf299789", "external-id": "nsx-vlan-transportzone-599", "segmentation_id": 599, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8aa70697-1a", "ovs_interfaceid": "8aa70697-1a19-4867-93bc-f264c16829ce", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1973.380329] env[63279]: DEBUG nova.network.neutron [None req-d078f7b9-924c-4992-81c3-367c64f2edff tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] [instance: 79032b2a-74f7-4c6d-8f71-f848fe372ba2] 
Successfully created port: 568079c6-5b60-4517-9e83-f526ccdaa586 {{(pid=63279) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1973.431810] env[63279]: DEBUG oslo_concurrency.lockutils [None req-e555eb2f-a20c-4248-9729-967c1ca13219 tempest-ServerRescueTestJSONUnderV235-1989684258 tempest-ServerRescueTestJSONUnderV235-1989684258-project-member] Acquiring lock "044335c7-ce3b-4b4a-b1dc-8b9acec538b4" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1973.432070] env[63279]: DEBUG oslo_concurrency.lockutils [None req-e555eb2f-a20c-4248-9729-967c1ca13219 tempest-ServerRescueTestJSONUnderV235-1989684258 tempest-ServerRescueTestJSONUnderV235-1989684258-project-member] Lock "044335c7-ce3b-4b4a-b1dc-8b9acec538b4" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1973.432278] env[63279]: DEBUG oslo_concurrency.lockutils [None req-e555eb2f-a20c-4248-9729-967c1ca13219 tempest-ServerRescueTestJSONUnderV235-1989684258 tempest-ServerRescueTestJSONUnderV235-1989684258-project-member] Acquiring lock "044335c7-ce3b-4b4a-b1dc-8b9acec538b4-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1973.432464] env[63279]: DEBUG oslo_concurrency.lockutils [None req-e555eb2f-a20c-4248-9729-967c1ca13219 tempest-ServerRescueTestJSONUnderV235-1989684258 tempest-ServerRescueTestJSONUnderV235-1989684258-project-member] Lock "044335c7-ce3b-4b4a-b1dc-8b9acec538b4-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1973.432634] env[63279]: DEBUG oslo_concurrency.lockutils [None req-e555eb2f-a20c-4248-9729-967c1ca13219 tempest-ServerRescueTestJSONUnderV235-1989684258 tempest-ServerRescueTestJSONUnderV235-1989684258-project-member] Lock "044335c7-ce3b-4b4a-b1dc-8b9acec538b4-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1973.436538] env[63279]: INFO nova.compute.manager [None req-e555eb2f-a20c-4248-9729-967c1ca13219 tempest-ServerRescueTestJSONUnderV235-1989684258 tempest-ServerRescueTestJSONUnderV235-1989684258-project-member] [instance: 044335c7-ce3b-4b4a-b1dc-8b9acec538b4] Terminating instance [ 1973.520055] env[63279]: DEBUG oslo_vmware.api [None req-1478564c-2854-499f-8d64-33efae3e3b9b tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] Task: {'id': task-2087153, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1973.632685] env[63279]: DEBUG oslo_vmware.api [None req-db66a17d-ea76-4394-a0de-e69c5b562193 tempest-AttachInterfacesV270Test-1602135614 tempest-AttachInterfacesV270Test-1602135614-project-member] Task: {'id': task-2087154, 'name': ReconfigVM_Task, 'duration_secs': 0.288528} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1973.635574] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-db66a17d-ea76-4394-a0de-e69c5b562193 tempest-AttachInterfacesV270Test-1602135614 tempest-AttachInterfacesV270Test-1602135614-project-member] [instance: ba2d6111-d93d-4216-b641-864b542ea253] Reconfigured VM instance instance-0000002b to attach disk [datastore1] ba2d6111-d93d-4216-b641-864b542ea253/ba2d6111-d93d-4216-b641-864b542ea253.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1973.636503] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b5ec4b8a-1ca7-460d-9426-60ff3aec1d3c {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1973.643744] env[63279]: DEBUG oslo_vmware.api [None req-db66a17d-ea76-4394-a0de-e69c5b562193 tempest-AttachInterfacesV270Test-1602135614 tempest-AttachInterfacesV270Test-1602135614-project-member] Waiting for the task: (returnval){ [ 1973.643744] env[63279]: value = "task-2087155" [ 1973.643744] env[63279]: _type = "Task" [ 1973.643744] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1973.653878] env[63279]: DEBUG oslo_vmware.api [None req-db66a17d-ea76-4394-a0de-e69c5b562193 tempest-AttachInterfacesV270Test-1602135614 tempest-AttachInterfacesV270Test-1602135614-project-member] Task: {'id': task-2087155, 'name': Rename_Task} progress is 5%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1973.654795] env[63279]: DEBUG nova.network.neutron [None req-a0e5192a-b319-4ffb-924f-54f949fdcc17 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: ad435281-55a0-418a-8400-5c461a5c15ef] Instance cache missing network info. 
{{(pid=63279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1973.837415] env[63279]: DEBUG nova.network.neutron [None req-a0e5192a-b319-4ffb-924f-54f949fdcc17 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: ad435281-55a0-418a-8400-5c461a5c15ef] Updating instance_info_cache with network_info: [{"id": "699a008e-52f8-4319-9e86-01acc5c037f9", "address": "fa:16:3e:cc:b8:c1", "network": {"id": "d673eabe-0173-4c69-9163-88f4d673ca51", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1798523777-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7c98de1240f14b058e58f6a707096ef1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a555680e-4721-4509-97e4-ced9dc17c13e", "external-id": "nsx-vlan-transportzone-4", "segmentation_id": 4, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap699a008e-52", "ovs_interfaceid": "699a008e-52f8-4319-9e86-01acc5c037f9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1973.868362] env[63279]: DEBUG oslo_concurrency.lockutils [req-a246f26c-a9bc-4937-bb9e-9990a21a6c0a req-2a03fec3-265a-4d6f-8221-8d7843a2f672 service nova] Releasing lock "refresh_cache-044335c7-ce3b-4b4a-b1dc-8b9acec538b4" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1973.922072] env[63279]: DEBUG nova.compute.manager [req-98254710-f29e-4603-b6c7-1ae1dae147e5 req-3989cfda-49d2-4988-8cba-b69581b8c73c service nova] [instance: ad435281-55a0-418a-8400-5c461a5c15ef] Received event network-vif-plugged-699a008e-52f8-4319-9e86-01acc5c037f9 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 1973.922256] env[63279]: DEBUG oslo_concurrency.lockutils [req-98254710-f29e-4603-b6c7-1ae1dae147e5 req-3989cfda-49d2-4988-8cba-b69581b8c73c service nova] Acquiring lock "ad435281-55a0-418a-8400-5c461a5c15ef-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1973.922414] env[63279]: DEBUG oslo_concurrency.lockutils [req-98254710-f29e-4603-b6c7-1ae1dae147e5 req-3989cfda-49d2-4988-8cba-b69581b8c73c service nova] Lock "ad435281-55a0-418a-8400-5c461a5c15ef-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1973.922651] env[63279]: DEBUG oslo_concurrency.lockutils [req-98254710-f29e-4603-b6c7-1ae1dae147e5 req-3989cfda-49d2-4988-8cba-b69581b8c73c service nova] Lock "ad435281-55a0-418a-8400-5c461a5c15ef-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63279) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1973.922898] env[63279]: DEBUG nova.compute.manager [req-98254710-f29e-4603-b6c7-1ae1dae147e5 req-3989cfda-49d2-4988-8cba-b69581b8c73c service nova] [instance: ad435281-55a0-418a-8400-5c461a5c15ef] No waiting events found dispatching network-vif-plugged-699a008e-52f8-4319-9e86-01acc5c037f9 {{(pid=63279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1973.923086] env[63279]: WARNING nova.compute.manager [req-98254710-f29e-4603-b6c7-1ae1dae147e5 req-3989cfda-49d2-4988-8cba-b69581b8c73c service nova] [instance: ad435281-55a0-418a-8400-5c461a5c15ef] Received unexpected event network-vif-plugged-699a008e-52f8-4319-9e86-01acc5c037f9 for instance with vm_state building and task_state spawning. [ 1973.923290] env[63279]: DEBUG nova.compute.manager [req-98254710-f29e-4603-b6c7-1ae1dae147e5 req-3989cfda-49d2-4988-8cba-b69581b8c73c service nova] [instance: ad435281-55a0-418a-8400-5c461a5c15ef] Received event network-changed-699a008e-52f8-4319-9e86-01acc5c037f9 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 1973.923399] env[63279]: DEBUG nova.compute.manager [req-98254710-f29e-4603-b6c7-1ae1dae147e5 req-3989cfda-49d2-4988-8cba-b69581b8c73c service nova] [instance: ad435281-55a0-418a-8400-5c461a5c15ef] Refreshing instance network info cache due to event network-changed-699a008e-52f8-4319-9e86-01acc5c037f9. {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11655}} [ 1973.923706] env[63279]: DEBUG oslo_concurrency.lockutils [req-98254710-f29e-4603-b6c7-1ae1dae147e5 req-3989cfda-49d2-4988-8cba-b69581b8c73c service nova] Acquiring lock "refresh_cache-ad435281-55a0-418a-8400-5c461a5c15ef" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1973.928398] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f18b21c4-32c6-4f3e-926e-96a9bb57f7c6 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1973.936616] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1555469d-d496-4f27-983f-2be8e6436970 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1973.940809] env[63279]: DEBUG nova.compute.manager [None req-e555eb2f-a20c-4248-9729-967c1ca13219 tempest-ServerRescueTestJSONUnderV235-1989684258 tempest-ServerRescueTestJSONUnderV235-1989684258-project-member] [instance: 044335c7-ce3b-4b4a-b1dc-8b9acec538b4] Start destroying the instance on the hypervisor. 
{{(pid=63279) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1973.941184] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-e555eb2f-a20c-4248-9729-967c1ca13219 tempest-ServerRescueTestJSONUnderV235-1989684258 tempest-ServerRescueTestJSONUnderV235-1989684258-project-member] [instance: 044335c7-ce3b-4b4a-b1dc-8b9acec538b4] Destroying instance {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1973.942344] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34281b11-aa72-4c56-8237-6d2ce29872bb {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1973.950824] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-e555eb2f-a20c-4248-9729-967c1ca13219 tempest-ServerRescueTestJSONUnderV235-1989684258 tempest-ServerRescueTestJSONUnderV235-1989684258-project-member] [instance: 044335c7-ce3b-4b4a-b1dc-8b9acec538b4] Powering off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1973.981601] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6811d674-4936-46b9-b61a-e4b73c0e799e {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1973.984624] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb0c994b-a867-487c-8a3a-1ab6dbb9f40d {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1973.995257] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a862d3c-6b75-4f3d-9597-e4c01d270ab8 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1974.000479] env[63279]: DEBUG oslo_vmware.api [None req-e555eb2f-a20c-4248-9729-967c1ca13219 tempest-ServerRescueTestJSONUnderV235-1989684258 tempest-ServerRescueTestJSONUnderV235-1989684258-project-member] Waiting for the task: (returnval){ [ 1974.000479] env[63279]: value = "task-2087156" [ 1974.000479] env[63279]: _type = "Task" [ 1974.000479] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1974.013680] env[63279]: DEBUG nova.compute.provider_tree [None req-387411dd-d774-4ddb-8c67-e6827dd0c0cb tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Inventory has not changed in ProviderTree for provider: 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1974.028649] env[63279]: DEBUG oslo_vmware.api [None req-e555eb2f-a20c-4248-9729-967c1ca13219 tempest-ServerRescueTestJSONUnderV235-1989684258 tempest-ServerRescueTestJSONUnderV235-1989684258-project-member] Task: {'id': task-2087156, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1974.030910] env[63279]: DEBUG oslo_vmware.api [None req-1478564c-2854-499f-8d64-33efae3e3b9b tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] Task: {'id': task-2087153, 'name': CreateSnapshot_Task, 'duration_secs': 0.541913} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1974.031179] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-1478564c-2854-499f-8d64-33efae3e3b9b tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] [instance: 5bb445d3-1b12-4a1b-ad2a-cbc929b13aee] Created Snapshot of the VM instance {{(pid=63279) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1974.032324] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b36e4b99-8e94-43d3-80ca-c74ada8f26cd {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1974.153439] env[63279]: DEBUG oslo_vmware.api [None req-db66a17d-ea76-4394-a0de-e69c5b562193 tempest-AttachInterfacesV270Test-1602135614 tempest-AttachInterfacesV270Test-1602135614-project-member] Task: {'id': task-2087155, 'name': Rename_Task, 'duration_secs': 0.298171} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1974.153737] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-db66a17d-ea76-4394-a0de-e69c5b562193 tempest-AttachInterfacesV270Test-1602135614 tempest-AttachInterfacesV270Test-1602135614-project-member] [instance: ba2d6111-d93d-4216-b641-864b542ea253] Powering on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1974.153990] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a275c7b6-44bb-47d4-9003-741dcf903bdb {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1974.160696] env[63279]: DEBUG oslo_vmware.api [None req-db66a17d-ea76-4394-a0de-e69c5b562193 tempest-AttachInterfacesV270Test-1602135614 tempest-AttachInterfacesV270Test-1602135614-project-member] Waiting for the task: (returnval){ [ 1974.160696] env[63279]: value = "task-2087157" [ 1974.160696] env[63279]: _type = "Task" [ 1974.160696] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1974.176313] env[63279]: DEBUG oslo_vmware.api [None req-db66a17d-ea76-4394-a0de-e69c5b562193 tempest-AttachInterfacesV270Test-1602135614 tempest-AttachInterfacesV270Test-1602135614-project-member] Task: {'id': task-2087157, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1974.340176] env[63279]: DEBUG oslo_concurrency.lockutils [None req-a0e5192a-b319-4ffb-924f-54f949fdcc17 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Releasing lock "refresh_cache-ad435281-55a0-418a-8400-5c461a5c15ef" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1974.340976] env[63279]: DEBUG nova.compute.manager [None req-a0e5192a-b319-4ffb-924f-54f949fdcc17 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: ad435281-55a0-418a-8400-5c461a5c15ef] Instance network_info: |[{"id": "699a008e-52f8-4319-9e86-01acc5c037f9", "address": "fa:16:3e:cc:b8:c1", "network": {"id": "d673eabe-0173-4c69-9163-88f4d673ca51", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1798523777-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7c98de1240f14b058e58f6a707096ef1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a555680e-4721-4509-97e4-ced9dc17c13e", "external-id": "nsx-vlan-transportzone-4", "segmentation_id": 4, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap699a008e-52", "ovs_interfaceid": "699a008e-52f8-4319-9e86-01acc5c037f9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1974.341279] env[63279]: DEBUG oslo_concurrency.lockutils [req-98254710-f29e-4603-b6c7-1ae1dae147e5 req-3989cfda-49d2-4988-8cba-b69581b8c73c service nova] Acquired lock "refresh_cache-ad435281-55a0-418a-8400-5c461a5c15ef" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1974.341470] env[63279]: DEBUG nova.network.neutron [req-98254710-f29e-4603-b6c7-1ae1dae147e5 req-3989cfda-49d2-4988-8cba-b69581b8c73c service nova] [instance: ad435281-55a0-418a-8400-5c461a5c15ef] Refreshing network info cache for port 699a008e-52f8-4319-9e86-01acc5c037f9 {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1974.342857] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-a0e5192a-b319-4ffb-924f-54f949fdcc17 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: ad435281-55a0-418a-8400-5c461a5c15ef] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:cc:b8:c1', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a555680e-4721-4509-97e4-ced9dc17c13e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '699a008e-52f8-4319-9e86-01acc5c037f9', 'vif_model': 'vmxnet3'}] {{(pid=63279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1974.351286] env[63279]: DEBUG oslo.service.loopingcall [None req-a0e5192a-b319-4ffb-924f-54f949fdcc17 tempest-ServerDiskConfigTestJSON-726033015 
tempest-ServerDiskConfigTestJSON-726033015-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1974.352556] env[63279]: DEBUG nova.compute.manager [None req-d078f7b9-924c-4992-81c3-367c64f2edff tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] [instance: 79032b2a-74f7-4c6d-8f71-f848fe372ba2] Start spawning the instance on the hypervisor. {{(pid=63279) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1974.355206] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ad435281-55a0-418a-8400-5c461a5c15ef] Creating VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1974.356432] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-466095f4-d6f8-4091-a711-ff9e4452dec3 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1974.381104] env[63279]: DEBUG nova.virt.hardware [None req-d078f7b9-924c-4992-81c3-367c64f2edff tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-13T17:30:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-13T17:30:01Z,direct_url=,disk_format='vmdk',id=30887889-e45b-4f67-8b3c-16216e594a90,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a935e86eee0a4c38adfe0367d2097a61',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-13T17:30:02Z,virtual_size=,visibility=), allow threads: False {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1974.381370] env[63279]: DEBUG nova.virt.hardware [None req-d078f7b9-924c-4992-81c3-367c64f2edff tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] Flavor limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1974.381852] env[63279]: DEBUG nova.virt.hardware [None req-d078f7b9-924c-4992-81c3-367c64f2edff tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] Image limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1974.381852] env[63279]: DEBUG nova.virt.hardware [None req-d078f7b9-924c-4992-81c3-367c64f2edff tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] Flavor pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1974.382019] env[63279]: DEBUG nova.virt.hardware [None req-d078f7b9-924c-4992-81c3-367c64f2edff tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] Image pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1974.382055] env[63279]: DEBUG nova.virt.hardware [None 
req-d078f7b9-924c-4992-81c3-367c64f2edff tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1974.382252] env[63279]: DEBUG nova.virt.hardware [None req-d078f7b9-924c-4992-81c3-367c64f2edff tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1974.382447] env[63279]: DEBUG nova.virt.hardware [None req-d078f7b9-924c-4992-81c3-367c64f2edff tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1974.382622] env[63279]: DEBUG nova.virt.hardware [None req-d078f7b9-924c-4992-81c3-367c64f2edff tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] Got 1 possible topologies {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1974.382790] env[63279]: DEBUG nova.virt.hardware [None req-d078f7b9-924c-4992-81c3-367c64f2edff tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1974.383127] env[63279]: DEBUG nova.virt.hardware [None req-d078f7b9-924c-4992-81c3-367c64f2edff tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1974.384447] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0c2340d-d4bb-4573-af14-5b30a91b43b4 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1974.388361] env[63279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1974.388361] env[63279]: value = "task-2087158" [ 1974.388361] env[63279]: _type = "Task" [ 1974.388361] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1974.396012] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5ea2f43-eaf6-4984-86ba-f02cbffd7127 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1974.403431] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087158, 'name': CreateVM_Task} progress is 6%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1974.513010] env[63279]: DEBUG oslo_vmware.api [None req-e555eb2f-a20c-4248-9729-967c1ca13219 tempest-ServerRescueTestJSONUnderV235-1989684258 tempest-ServerRescueTestJSONUnderV235-1989684258-project-member] Task: {'id': task-2087156, 'name': PowerOffVM_Task, 'duration_secs': 0.207621} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1974.513323] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-e555eb2f-a20c-4248-9729-967c1ca13219 tempest-ServerRescueTestJSONUnderV235-1989684258 tempest-ServerRescueTestJSONUnderV235-1989684258-project-member] [instance: 044335c7-ce3b-4b4a-b1dc-8b9acec538b4] Powered off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1974.513515] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-e555eb2f-a20c-4248-9729-967c1ca13219 tempest-ServerRescueTestJSONUnderV235-1989684258 tempest-ServerRescueTestJSONUnderV235-1989684258-project-member] [instance: 044335c7-ce3b-4b4a-b1dc-8b9acec538b4] Unregistering the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1974.513797] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b8d1026f-5b4f-423c-a970-dc6d64b1b3b9 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1974.523053] env[63279]: DEBUG nova.scheduler.client.report [None req-387411dd-d774-4ddb-8c67-e6827dd0c0cb tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Inventory has not changed for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1974.551650] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-1478564c-2854-499f-8d64-33efae3e3b9b tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] [instance: 5bb445d3-1b12-4a1b-ad2a-cbc929b13aee] Creating linked-clone VM from snapshot {{(pid=63279) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1974.552660] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-f37bd72d-7de7-48e0-9290-5ed1c886456e {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1974.560668] env[63279]: DEBUG oslo_vmware.api [None req-1478564c-2854-499f-8d64-33efae3e3b9b tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] Waiting for the task: (returnval){ [ 1974.560668] env[63279]: value = "task-2087160" [ 1974.560668] env[63279]: _type = "Task" [ 1974.560668] env[63279]: } to complete. 
{{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1974.569176] env[63279]: DEBUG oslo_vmware.api [None req-1478564c-2854-499f-8d64-33efae3e3b9b tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] Task: {'id': task-2087160, 'name': CloneVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1974.671359] env[63279]: DEBUG oslo_vmware.api [None req-db66a17d-ea76-4394-a0de-e69c5b562193 tempest-AttachInterfacesV270Test-1602135614 tempest-AttachInterfacesV270Test-1602135614-project-member] Task: {'id': task-2087157, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1974.672831] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-e555eb2f-a20c-4248-9729-967c1ca13219 tempest-ServerRescueTestJSONUnderV235-1989684258 tempest-ServerRescueTestJSONUnderV235-1989684258-project-member] [instance: 044335c7-ce3b-4b4a-b1dc-8b9acec538b4] Unregistered the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1974.673086] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-e555eb2f-a20c-4248-9729-967c1ca13219 tempest-ServerRescueTestJSONUnderV235-1989684258 tempest-ServerRescueTestJSONUnderV235-1989684258-project-member] [instance: 044335c7-ce3b-4b4a-b1dc-8b9acec538b4] Deleting contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1974.673368] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-e555eb2f-a20c-4248-9729-967c1ca13219 tempest-ServerRescueTestJSONUnderV235-1989684258 tempest-ServerRescueTestJSONUnderV235-1989684258-project-member] Deleting the datastore file [datastore1] 044335c7-ce3b-4b4a-b1dc-8b9acec538b4 {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1974.673623] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b435ba73-622c-421f-aac2-99e9e118ea4c {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1974.680384] env[63279]: DEBUG oslo_vmware.api [None req-e555eb2f-a20c-4248-9729-967c1ca13219 tempest-ServerRescueTestJSONUnderV235-1989684258 tempest-ServerRescueTestJSONUnderV235-1989684258-project-member] Waiting for the task: (returnval){ [ 1974.680384] env[63279]: value = "task-2087161" [ 1974.680384] env[63279]: _type = "Task" [ 1974.680384] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1974.693038] env[63279]: DEBUG oslo_vmware.api [None req-e555eb2f-a20c-4248-9729-967c1ca13219 tempest-ServerRescueTestJSONUnderV235-1989684258 tempest-ServerRescueTestJSONUnderV235-1989684258-project-member] Task: {'id': task-2087161, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1974.900158] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087158, 'name': CreateVM_Task} progress is 99%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1975.027954] env[63279]: DEBUG oslo_concurrency.lockutils [None req-387411dd-d774-4ddb-8c67-e6827dd0c0cb tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.701s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1975.028656] env[63279]: DEBUG nova.compute.manager [None req-387411dd-d774-4ddb-8c67-e6827dd0c0cb tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] [instance: eca98392-98be-405b-b799-463ef9ee3dc8] Start building networks asynchronously for instance. {{(pid=63279) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1975.033548] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 26.769s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1975.071714] env[63279]: DEBUG oslo_vmware.api [None req-1478564c-2854-499f-8d64-33efae3e3b9b tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] Task: {'id': task-2087160, 'name': CloneVM_Task} progress is 94%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1975.129481] env[63279]: DEBUG nova.network.neutron [req-98254710-f29e-4603-b6c7-1ae1dae147e5 req-3989cfda-49d2-4988-8cba-b69581b8c73c service nova] [instance: ad435281-55a0-418a-8400-5c461a5c15ef] Updated VIF entry in instance network info cache for port 699a008e-52f8-4319-9e86-01acc5c037f9. 
{{(pid=63279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1975.129481] env[63279]: DEBUG nova.network.neutron [req-98254710-f29e-4603-b6c7-1ae1dae147e5 req-3989cfda-49d2-4988-8cba-b69581b8c73c service nova] [instance: ad435281-55a0-418a-8400-5c461a5c15ef] Updating instance_info_cache with network_info: [{"id": "699a008e-52f8-4319-9e86-01acc5c037f9", "address": "fa:16:3e:cc:b8:c1", "network": {"id": "d673eabe-0173-4c69-9163-88f4d673ca51", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1798523777-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7c98de1240f14b058e58f6a707096ef1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a555680e-4721-4509-97e4-ced9dc17c13e", "external-id": "nsx-vlan-transportzone-4", "segmentation_id": 4, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap699a008e-52", "ovs_interfaceid": "699a008e-52f8-4319-9e86-01acc5c037f9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1975.172830] env[63279]: DEBUG oslo_vmware.api [None req-db66a17d-ea76-4394-a0de-e69c5b562193 tempest-AttachInterfacesV270Test-1602135614 tempest-AttachInterfacesV270Test-1602135614-project-member] Task: {'id': task-2087157, 'name': PowerOnVM_Task, 'duration_secs': 0.928241} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1975.173453] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-db66a17d-ea76-4394-a0de-e69c5b562193 tempest-AttachInterfacesV270Test-1602135614 tempest-AttachInterfacesV270Test-1602135614-project-member] [instance: ba2d6111-d93d-4216-b641-864b542ea253] Powered on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1975.173667] env[63279]: INFO nova.compute.manager [None req-db66a17d-ea76-4394-a0de-e69c5b562193 tempest-AttachInterfacesV270Test-1602135614 tempest-AttachInterfacesV270Test-1602135614-project-member] [instance: ba2d6111-d93d-4216-b641-864b542ea253] Took 9.79 seconds to spawn the instance on the hypervisor. 
[ 1975.174405] env[63279]: DEBUG nova.compute.manager [None req-db66a17d-ea76-4394-a0de-e69c5b562193 tempest-AttachInterfacesV270Test-1602135614 tempest-AttachInterfacesV270Test-1602135614-project-member] [instance: ba2d6111-d93d-4216-b641-864b542ea253] Checking state {{(pid=63279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1975.174640] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-975e3734-4df0-402b-a080-455f849dc98d {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1975.194021] env[63279]: DEBUG oslo_vmware.api [None req-e555eb2f-a20c-4248-9729-967c1ca13219 tempest-ServerRescueTestJSONUnderV235-1989684258 tempest-ServerRescueTestJSONUnderV235-1989684258-project-member] Task: {'id': task-2087161, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.184628} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1975.194021] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-e555eb2f-a20c-4248-9729-967c1ca13219 tempest-ServerRescueTestJSONUnderV235-1989684258 tempest-ServerRescueTestJSONUnderV235-1989684258-project-member] Deleted the datastore file {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1975.194021] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-e555eb2f-a20c-4248-9729-967c1ca13219 tempest-ServerRescueTestJSONUnderV235-1989684258 tempest-ServerRescueTestJSONUnderV235-1989684258-project-member] [instance: 044335c7-ce3b-4b4a-b1dc-8b9acec538b4] Deleted contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1975.194021] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-e555eb2f-a20c-4248-9729-967c1ca13219 tempest-ServerRescueTestJSONUnderV235-1989684258 tempest-ServerRescueTestJSONUnderV235-1989684258-project-member] [instance: 044335c7-ce3b-4b4a-b1dc-8b9acec538b4] Instance destroyed {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1975.194021] env[63279]: INFO nova.compute.manager [None req-e555eb2f-a20c-4248-9729-967c1ca13219 tempest-ServerRescueTestJSONUnderV235-1989684258 tempest-ServerRescueTestJSONUnderV235-1989684258-project-member] [instance: 044335c7-ce3b-4b4a-b1dc-8b9acec538b4] Took 1.25 seconds to destroy the instance on the hypervisor. [ 1975.194393] env[63279]: DEBUG oslo.service.loopingcall [None req-e555eb2f-a20c-4248-9729-967c1ca13219 tempest-ServerRescueTestJSONUnderV235-1989684258 tempest-ServerRescueTestJSONUnderV235-1989684258-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1975.194393] env[63279]: DEBUG nova.compute.manager [-] [instance: 044335c7-ce3b-4b4a-b1dc-8b9acec538b4] Deallocating network for instance {{(pid=63279) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1975.194393] env[63279]: DEBUG nova.network.neutron [-] [instance: 044335c7-ce3b-4b4a-b1dc-8b9acec538b4] deallocate_for_instance() {{(pid=63279) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1975.288219] env[63279]: DEBUG nova.network.neutron [None req-d078f7b9-924c-4992-81c3-367c64f2edff tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] [instance: 79032b2a-74f7-4c6d-8f71-f848fe372ba2] Successfully updated port: 568079c6-5b60-4517-9e83-f526ccdaa586 {{(pid=63279) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1975.401305] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087158, 'name': CreateVM_Task, 'duration_secs': 0.66311} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1975.401491] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ad435281-55a0-418a-8400-5c461a5c15ef] Created VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1975.402186] env[63279]: DEBUG oslo_concurrency.lockutils [None req-a0e5192a-b319-4ffb-924f-54f949fdcc17 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1975.402361] env[63279]: DEBUG oslo_concurrency.lockutils [None req-a0e5192a-b319-4ffb-924f-54f949fdcc17 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1975.402682] env[63279]: DEBUG oslo_concurrency.lockutils [None req-a0e5192a-b319-4ffb-924f-54f949fdcc17 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1975.402947] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-75a93349-a52b-457a-964d-a28ef6a98cfe {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1975.407903] env[63279]: DEBUG oslo_vmware.api [None req-a0e5192a-b319-4ffb-924f-54f949fdcc17 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Waiting for the task: (returnval){ [ 1975.407903] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52ec66f6-dd1c-4a32-27a2-c70daf834fd9" [ 1975.407903] env[63279]: _type = "Task" [ 1975.407903] env[63279]: } to complete. 
{{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1975.415615] env[63279]: DEBUG oslo_vmware.api [None req-a0e5192a-b319-4ffb-924f-54f949fdcc17 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52ec66f6-dd1c-4a32-27a2-c70daf834fd9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1975.537022] env[63279]: DEBUG nova.compute.utils [None req-387411dd-d774-4ddb-8c67-e6827dd0c0cb tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Using /dev/sd instead of None {{(pid=63279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1975.537022] env[63279]: DEBUG nova.compute.manager [None req-387411dd-d774-4ddb-8c67-e6827dd0c0cb tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] [instance: eca98392-98be-405b-b799-463ef9ee3dc8] Allocating IP information in the background. {{(pid=63279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1975.537022] env[63279]: DEBUG nova.network.neutron [None req-387411dd-d774-4ddb-8c67-e6827dd0c0cb tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] [instance: eca98392-98be-405b-b799-463ef9ee3dc8] allocate_for_instance() {{(pid=63279) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1975.576247] env[63279]: DEBUG oslo_vmware.api [None req-1478564c-2854-499f-8d64-33efae3e3b9b tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] Task: {'id': task-2087160, 'name': CloneVM_Task} progress is 94%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1975.619816] env[63279]: DEBUG nova.policy [None req-387411dd-d774-4ddb-8c67-e6827dd0c0cb tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6343b207f7294f5fa2a8111940083fb0', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b5b21bc5072e4945a19a782dd9561709', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63279) authorize /opt/stack/nova/nova/policy.py:192}} [ 1975.631246] env[63279]: DEBUG oslo_concurrency.lockutils [req-98254710-f29e-4603-b6c7-1ae1dae147e5 req-3989cfda-49d2-4988-8cba-b69581b8c73c service nova] Releasing lock "refresh_cache-ad435281-55a0-418a-8400-5c461a5c15ef" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1975.698860] env[63279]: INFO nova.compute.manager [None req-db66a17d-ea76-4394-a0de-e69c5b562193 tempest-AttachInterfacesV270Test-1602135614 tempest-AttachInterfacesV270Test-1602135614-project-member] [instance: ba2d6111-d93d-4216-b641-864b542ea253] Took 40.55 seconds to build instance. 
[ 1975.789320] env[63279]: DEBUG oslo_concurrency.lockutils [None req-d078f7b9-924c-4992-81c3-367c64f2edff tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] Acquiring lock "refresh_cache-79032b2a-74f7-4c6d-8f71-f848fe372ba2" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1975.790457] env[63279]: DEBUG oslo_concurrency.lockutils [None req-d078f7b9-924c-4992-81c3-367c64f2edff tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] Acquired lock "refresh_cache-79032b2a-74f7-4c6d-8f71-f848fe372ba2" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1975.790702] env[63279]: DEBUG nova.network.neutron [None req-d078f7b9-924c-4992-81c3-367c64f2edff tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] [instance: 79032b2a-74f7-4c6d-8f71-f848fe372ba2] Building network info cache for instance {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1975.921569] env[63279]: DEBUG oslo_vmware.api [None req-a0e5192a-b319-4ffb-924f-54f949fdcc17 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52ec66f6-dd1c-4a32-27a2-c70daf834fd9, 'name': SearchDatastore_Task, 'duration_secs': 0.009654} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1975.921886] env[63279]: DEBUG oslo_concurrency.lockutils [None req-a0e5192a-b319-4ffb-924f-54f949fdcc17 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1975.922134] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-a0e5192a-b319-4ffb-924f-54f949fdcc17 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: ad435281-55a0-418a-8400-5c461a5c15ef] Processing image 30887889-e45b-4f67-8b3c-16216e594a90 {{(pid=63279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1975.922524] env[63279]: DEBUG oslo_concurrency.lockutils [None req-a0e5192a-b319-4ffb-924f-54f949fdcc17 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1975.922681] env[63279]: DEBUG oslo_concurrency.lockutils [None req-a0e5192a-b319-4ffb-924f-54f949fdcc17 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1975.922865] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-a0e5192a-b319-4ffb-924f-54f949fdcc17 tempest-ServerDiskConfigTestJSON-726033015 
tempest-ServerDiskConfigTestJSON-726033015-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1975.923150] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7ea1c95d-95fd-4ca3-8bd6-9165e6734bd1 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1975.931697] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-a0e5192a-b319-4ffb-924f-54f949fdcc17 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1975.931883] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-a0e5192a-b319-4ffb-924f-54f949fdcc17 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1975.933025] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-572c863e-e5d8-4810-91dc-538657b3a8f1 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1975.938535] env[63279]: DEBUG oslo_vmware.api [None req-a0e5192a-b319-4ffb-924f-54f949fdcc17 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Waiting for the task: (returnval){ [ 1975.938535] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]523057fb-41a3-5c56-9fc6-d873d87b9ae8" [ 1975.938535] env[63279]: _type = "Task" [ 1975.938535] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1975.947531] env[63279]: DEBUG oslo_vmware.api [None req-a0e5192a-b319-4ffb-924f-54f949fdcc17 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]523057fb-41a3-5c56-9fc6-d873d87b9ae8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1976.003913] env[63279]: DEBUG nova.network.neutron [None req-387411dd-d774-4ddb-8c67-e6827dd0c0cb tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] [instance: eca98392-98be-405b-b799-463ef9ee3dc8] Successfully created port: d609f907-0c7e-4293-8b21-721e712e8dc2 {{(pid=63279) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1976.040531] env[63279]: DEBUG nova.compute.manager [None req-387411dd-d774-4ddb-8c67-e6827dd0c0cb tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] [instance: eca98392-98be-405b-b799-463ef9ee3dc8] Start building block device mappings for instance. {{(pid=63279) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1976.085507] env[63279]: DEBUG oslo_vmware.api [None req-1478564c-2854-499f-8d64-33efae3e3b9b tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] Task: {'id': task-2087160, 'name': CloneVM_Task} progress is 94%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1976.105033] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Instance 6388f912-ae70-4e8f-b8e4-ceb02e0f8a51 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1976.105033] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Instance 4a9088e0-2992-4b18-8be9-6bc70633369b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1976.105033] env[63279]: WARNING nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Instance fcc5a636-554f-424e-a604-a8e7bd7cf574 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1976.105033] env[63279]: WARNING nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Instance 5d4909ea-396c-45ba-9ff5-acb8576150b3 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1976.105033] env[63279]: WARNING nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Instance 1a55008a-4d8c-403d-a8f4-966aa7346f4c is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1976.105302] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Instance 5d4be656-defe-4332-b97e-e88b107ca4a1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1976.105302] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Instance 357f08c9-4de9-4b84-8384-6bf130872f40 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1976.105302] env[63279]: WARNING nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Instance ecedded1-7169-49a4-8a9e-2fe4086db986 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. 
[ 1976.105302] env[63279]: WARNING nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Instance ff2f355a-9687-4491-b243-6133e4b7b866 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1976.105431] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Instance 32dbef6d-d314-4fa6-972a-e7b1f22eb11d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1976.105431] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Instance 2f5e22f6-ba70-4848-965b-eb1553115323 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1976.105431] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Instance 5656c853-ac83-47be-83c4-979a9e87ab91 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=63279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1976.105431] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Instance c1ac4af5-b01e-4175-844f-7a67b2ef7526 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1976.105551] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Instance 5bb445d3-1b12-4a1b-ad2a-cbc929b13aee actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1976.105551] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Instance acf95fad-316c-4605-9008-24d4d7c05892 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1976.105551] env[63279]: WARNING nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Instance de543869-8ab1-40ed-8f6d-dc506c257843 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. 
[ 1976.105551] env[63279]: WARNING nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Instance 81103d53-99fe-4d1a-816f-7685c59c80ee is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1976.105551] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Instance f375b54b-f9de-4529-b752-52c240aed532 actively managed on this compute host and has allocations in placement: {'resources': {'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1976.105720] env[63279]: WARNING nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Instance e04f06de-da6a-4562-a50a-ff16bf3a006e is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1976.105720] env[63279]: WARNING nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Instance cf1b70af-335d-404b-bb4f-fe082dd6f450 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1976.105720] env[63279]: WARNING nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Instance c287072d-0ce9-4075-8895-0f64326ac303 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1976.105720] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Instance 044335c7-ce3b-4b4a-b1dc-8b9acec538b4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1976.105720] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Instance df410051-d551-4a90-81f7-5630f5521a10 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1976.105919] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Instance 3f888d81-e73f-4486-bb64-849c873449bf actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1976.105919] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Instance ba2d6111-d93d-4216-b641-864b542ea253 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1976.105919] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Instance ad435281-55a0-418a-8400-5c461a5c15ef actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1976.105919] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Instance 79032b2a-74f7-4c6d-8f71-f848fe372ba2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1976.106059] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Instance eca98392-98be-405b-b799-463ef9ee3dc8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1976.142171] env[63279]: DEBUG nova.compute.manager [req-04fafc10-fb05-4c1a-aa66-6cf166bbc361 req-877c4f6d-3690-49ed-a894-5ca48a0f7262 service nova] [instance: 044335c7-ce3b-4b4a-b1dc-8b9acec538b4] Received event network-vif-deleted-8aa70697-1a19-4867-93bc-f264c16829ce {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 1976.142404] env[63279]: INFO nova.compute.manager [req-04fafc10-fb05-4c1a-aa66-6cf166bbc361 req-877c4f6d-3690-49ed-a894-5ca48a0f7262 service nova] [instance: 044335c7-ce3b-4b4a-b1dc-8b9acec538b4] Neutron deleted interface 8aa70697-1a19-4867-93bc-f264c16829ce; detaching it from the instance and deleting it from the info cache [ 1976.142614] env[63279]: DEBUG nova.network.neutron [req-04fafc10-fb05-4c1a-aa66-6cf166bbc361 req-877c4f6d-3690-49ed-a894-5ca48a0f7262 service nova] [instance: 044335c7-ce3b-4b4a-b1dc-8b9acec538b4] Updating instance_info_cache with network_info: [] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1976.202063] env[63279]: DEBUG oslo_concurrency.lockutils [None req-db66a17d-ea76-4394-a0de-e69c5b562193 tempest-AttachInterfacesV270Test-1602135614 tempest-AttachInterfacesV270Test-1602135614-project-member] Lock "ba2d6111-d93d-4216-b641-864b542ea253" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 109.506s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1976.261079] env[63279]: DEBUG nova.network.neutron [-] [instance: 044335c7-ce3b-4b4a-b1dc-8b9acec538b4] Updating instance_info_cache with network_info: [] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1976.343741] env[63279]: DEBUG nova.network.neutron [None req-d078f7b9-924c-4992-81c3-367c64f2edff tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] [instance: 79032b2a-74f7-4c6d-8f71-f848fe372ba2] Instance cache missing network info. 
{{(pid=63279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1976.360302] env[63279]: DEBUG nova.compute.manager [req-40c86bd5-e154-4420-9e7c-9c831d05343c req-1ff96358-0c13-4e8b-a433-2d81136d8efa service nova] [instance: 3f888d81-e73f-4486-bb64-849c873449bf] Received event network-changed-77f48a61-90d3-4ff9-b672-52bd1900e2a2 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 1976.364017] env[63279]: DEBUG nova.compute.manager [req-40c86bd5-e154-4420-9e7c-9c831d05343c req-1ff96358-0c13-4e8b-a433-2d81136d8efa service nova] [instance: 3f888d81-e73f-4486-bb64-849c873449bf] Refreshing instance network info cache due to event network-changed-77f48a61-90d3-4ff9-b672-52bd1900e2a2. {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11655}} [ 1976.364017] env[63279]: DEBUG oslo_concurrency.lockutils [req-40c86bd5-e154-4420-9e7c-9c831d05343c req-1ff96358-0c13-4e8b-a433-2d81136d8efa service nova] Acquiring lock "refresh_cache-3f888d81-e73f-4486-bb64-849c873449bf" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1976.364017] env[63279]: DEBUG oslo_concurrency.lockutils [req-40c86bd5-e154-4420-9e7c-9c831d05343c req-1ff96358-0c13-4e8b-a433-2d81136d8efa service nova] Acquired lock "refresh_cache-3f888d81-e73f-4486-bb64-849c873449bf" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1976.364017] env[63279]: DEBUG nova.network.neutron [req-40c86bd5-e154-4420-9e7c-9c831d05343c req-1ff96358-0c13-4e8b-a433-2d81136d8efa service nova] [instance: 3f888d81-e73f-4486-bb64-849c873449bf] Refreshing network info cache for port 77f48a61-90d3-4ff9-b672-52bd1900e2a2 {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1976.450989] env[63279]: DEBUG oslo_vmware.api [None req-a0e5192a-b319-4ffb-924f-54f949fdcc17 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]523057fb-41a3-5c56-9fc6-d873d87b9ae8, 'name': SearchDatastore_Task, 'duration_secs': 0.009609} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1976.451808] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5244696f-f538-4c9f-92d9-4e921c0a2743 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1976.458057] env[63279]: DEBUG oslo_vmware.api [None req-a0e5192a-b319-4ffb-924f-54f949fdcc17 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Waiting for the task: (returnval){ [ 1976.458057] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]520a6c60-9dfc-83c5-65b5-e5f5cf04cad3" [ 1976.458057] env[63279]: _type = "Task" [ 1976.458057] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1976.466403] env[63279]: DEBUG oslo_vmware.api [None req-a0e5192a-b319-4ffb-924f-54f949fdcc17 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]520a6c60-9dfc-83c5-65b5-e5f5cf04cad3, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1976.556587] env[63279]: DEBUG nova.network.neutron [None req-d078f7b9-924c-4992-81c3-367c64f2edff tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] [instance: 79032b2a-74f7-4c6d-8f71-f848fe372ba2] Updating instance_info_cache with network_info: [{"id": "568079c6-5b60-4517-9e83-f526ccdaa586", "address": "fa:16:3e:e1:18:dd", "network": {"id": "948d327b-554a-4c1d-a483-9a067d60f6bc", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1383523654-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ba92e8896d7a4605bec96ce7ee7d4a4d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "471f65a5-21ea-45e3-a722-4e204ed65673", "external-id": "nsx-vlan-transportzone-139", "segmentation_id": 139, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap568079c6-5b", "ovs_interfaceid": "568079c6-5b60-4517-9e83-f526ccdaa586", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1976.585534] env[63279]: DEBUG oslo_vmware.api [None req-1478564c-2854-499f-8d64-33efae3e3b9b tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] Task: {'id': task-2087160, 'name': CloneVM_Task, 'duration_secs': 1.895994} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1976.585840] env[63279]: INFO nova.virt.vmwareapi.vmops [None req-1478564c-2854-499f-8d64-33efae3e3b9b tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] [instance: 5bb445d3-1b12-4a1b-ad2a-cbc929b13aee] Created linked-clone VM from snapshot [ 1976.588060] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed0e9b94-61d7-4a31-8925-143218e8e27e {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1976.599498] env[63279]: DEBUG nova.virt.vmwareapi.images [None req-1478564c-2854-499f-8d64-33efae3e3b9b tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] [instance: 5bb445d3-1b12-4a1b-ad2a-cbc929b13aee] Uploading image e646b39f-4fee-487a-bfeb-9b7128ee4e8b {{(pid=63279) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1976.609750] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Instance 32789822-cb54-43e7-beae-b5ed3002f4ad has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1976.629052] env[63279]: DEBUG oslo_vmware.rw_handles [None req-1478564c-2854-499f-8d64-33efae3e3b9b tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1976.629052] env[63279]: value = "vm-427626" [ 1976.629052] env[63279]: _type = "VirtualMachine" [ 1976.629052] env[63279]: }. {{(pid=63279) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1976.629723] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-725ad922-0f60-478a-9693-2b8f11a691c5 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1976.637870] env[63279]: DEBUG oslo_vmware.rw_handles [None req-1478564c-2854-499f-8d64-33efae3e3b9b tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] Lease: (returnval){ [ 1976.637870] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52303c4f-5d0e-b18e-f729-482dfd6edd1b" [ 1976.637870] env[63279]: _type = "HttpNfcLease" [ 1976.637870] env[63279]: } obtained for exporting VM: (result){ [ 1976.637870] env[63279]: value = "vm-427626" [ 1976.637870] env[63279]: _type = "VirtualMachine" [ 1976.637870] env[63279]: }. {{(pid=63279) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1976.638440] env[63279]: DEBUG oslo_vmware.api [None req-1478564c-2854-499f-8d64-33efae3e3b9b tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] Waiting for the lease: (returnval){ [ 1976.638440] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52303c4f-5d0e-b18e-f729-482dfd6edd1b" [ 1976.638440] env[63279]: _type = "HttpNfcLease" [ 1976.638440] env[63279]: } to be ready. {{(pid=63279) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1976.647347] env[63279]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1976.647347] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52303c4f-5d0e-b18e-f729-482dfd6edd1b" [ 1976.647347] env[63279]: _type = "HttpNfcLease" [ 1976.647347] env[63279]: } is initializing. {{(pid=63279) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1976.647537] env[63279]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-a8f40185-63a9-4aaa-a805-9d10d633a9a5 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1976.655639] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8cc163b-2c3a-4893-8788-0256679d71c2 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1976.689041] env[63279]: DEBUG nova.compute.manager [req-04fafc10-fb05-4c1a-aa66-6cf166bbc361 req-877c4f6d-3690-49ed-a894-5ca48a0f7262 service nova] [instance: 044335c7-ce3b-4b4a-b1dc-8b9acec538b4] Detach interface failed, port_id=8aa70697-1a19-4867-93bc-f264c16829ce, reason: Instance 044335c7-ce3b-4b4a-b1dc-8b9acec538b4 could not be found. 
{{(pid=63279) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11484}} [ 1976.706309] env[63279]: DEBUG nova.compute.manager [None req-efbb3008-c956-4d6e-b972-b10e6dc25335 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] [instance: f6a5d157-d58c-4c7e-b6e8-f2dc7aaebd9b] Starting instance... {{(pid=63279) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1976.763884] env[63279]: INFO nova.compute.manager [-] [instance: 044335c7-ce3b-4b4a-b1dc-8b9acec538b4] Took 1.57 seconds to deallocate network for instance. [ 1976.969256] env[63279]: DEBUG oslo_vmware.api [None req-a0e5192a-b319-4ffb-924f-54f949fdcc17 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]520a6c60-9dfc-83c5-65b5-e5f5cf04cad3, 'name': SearchDatastore_Task, 'duration_secs': 0.015587} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1976.969527] env[63279]: DEBUG oslo_concurrency.lockutils [None req-a0e5192a-b319-4ffb-924f-54f949fdcc17 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1976.969778] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-a0e5192a-b319-4ffb-924f-54f949fdcc17 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] ad435281-55a0-418a-8400-5c461a5c15ef/ad435281-55a0-418a-8400-5c461a5c15ef.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1976.970046] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-329d84e2-95cd-4353-8c92-17c4ceb026e2 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1976.976810] env[63279]: DEBUG oslo_vmware.api [None req-a0e5192a-b319-4ffb-924f-54f949fdcc17 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Waiting for the task: (returnval){ [ 1976.976810] env[63279]: value = "task-2087163" [ 1976.976810] env[63279]: _type = "Task" [ 1976.976810] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1976.984710] env[63279]: DEBUG oslo_vmware.api [None req-a0e5192a-b319-4ffb-924f-54f949fdcc17 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Task: {'id': task-2087163, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1977.050758] env[63279]: DEBUG nova.compute.manager [None req-387411dd-d774-4ddb-8c67-e6827dd0c0cb tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] [instance: eca98392-98be-405b-b799-463ef9ee3dc8] Start spawning the instance on the hypervisor. 
{{(pid=63279) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1977.058890] env[63279]: DEBUG oslo_concurrency.lockutils [None req-d078f7b9-924c-4992-81c3-367c64f2edff tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] Releasing lock "refresh_cache-79032b2a-74f7-4c6d-8f71-f848fe372ba2" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1977.059185] env[63279]: DEBUG nova.compute.manager [None req-d078f7b9-924c-4992-81c3-367c64f2edff tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] [instance: 79032b2a-74f7-4c6d-8f71-f848fe372ba2] Instance network_info: |[{"id": "568079c6-5b60-4517-9e83-f526ccdaa586", "address": "fa:16:3e:e1:18:dd", "network": {"id": "948d327b-554a-4c1d-a483-9a067d60f6bc", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1383523654-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ba92e8896d7a4605bec96ce7ee7d4a4d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "471f65a5-21ea-45e3-a722-4e204ed65673", "external-id": "nsx-vlan-transportzone-139", "segmentation_id": 139, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap568079c6-5b", "ovs_interfaceid": "568079c6-5b60-4517-9e83-f526ccdaa586", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1977.060329] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-d078f7b9-924c-4992-81c3-367c64f2edff tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] [instance: 79032b2a-74f7-4c6d-8f71-f848fe372ba2] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e1:18:dd', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '471f65a5-21ea-45e3-a722-4e204ed65673', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '568079c6-5b60-4517-9e83-f526ccdaa586', 'vif_model': 'vmxnet3'}] {{(pid=63279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1977.069018] env[63279]: DEBUG oslo.service.loopingcall [None req-d078f7b9-924c-4992-81c3-367c64f2edff tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1977.069018] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 79032b2a-74f7-4c6d-8f71-f848fe372ba2] Creating VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1977.069920] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f87ac2a5-ca23-4d6d-b765-195a54ac091c {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1977.091017] env[63279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1977.091017] env[63279]: value = "task-2087164" [ 1977.091017] env[63279]: _type = "Task" [ 1977.091017] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1977.092543] env[63279]: DEBUG nova.virt.hardware [None req-387411dd-d774-4ddb-8c67-e6827dd0c0cb tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-13T17:30:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-13T17:30:01Z,direct_url=,disk_format='vmdk',id=30887889-e45b-4f67-8b3c-16216e594a90,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a935e86eee0a4c38adfe0367d2097a61',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-13T17:30:02Z,virtual_size=,visibility=), allow threads: False {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1977.092776] env[63279]: DEBUG nova.virt.hardware [None req-387411dd-d774-4ddb-8c67-e6827dd0c0cb tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Flavor limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1977.092929] env[63279]: DEBUG nova.virt.hardware [None req-387411dd-d774-4ddb-8c67-e6827dd0c0cb tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Image limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1977.093129] env[63279]: DEBUG nova.virt.hardware [None req-387411dd-d774-4ddb-8c67-e6827dd0c0cb tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Flavor pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1977.093280] env[63279]: DEBUG nova.virt.hardware [None req-387411dd-d774-4ddb-8c67-e6827dd0c0cb tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Image pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1977.093434] env[63279]: DEBUG nova.virt.hardware [None req-387411dd-d774-4ddb-8c67-e6827dd0c0cb tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63279) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1977.093638] env[63279]: DEBUG nova.virt.hardware [None req-387411dd-d774-4ddb-8c67-e6827dd0c0cb tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1977.093798] env[63279]: DEBUG nova.virt.hardware [None req-387411dd-d774-4ddb-8c67-e6827dd0c0cb tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1977.093966] env[63279]: DEBUG nova.virt.hardware [None req-387411dd-d774-4ddb-8c67-e6827dd0c0cb tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Got 1 possible topologies {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1977.094159] env[63279]: DEBUG nova.virt.hardware [None req-387411dd-d774-4ddb-8c67-e6827dd0c0cb tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1977.094337] env[63279]: DEBUG nova.virt.hardware [None req-387411dd-d774-4ddb-8c67-e6827dd0c0cb tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1977.095169] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40af45f8-610e-4062-bb06-722fa35e9c36 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1977.110232] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e272f877-d9ac-4f0f-b490-eb9197023819 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1977.115918] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087164, 'name': CreateVM_Task} progress is 6%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1977.116596] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Instance 1b2ca21b-feea-4fc1-9ddc-99f144e4241a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1977.145757] env[63279]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1977.145757] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52303c4f-5d0e-b18e-f729-482dfd6edd1b" [ 1977.145757] env[63279]: _type = "HttpNfcLease" [ 1977.145757] env[63279]: } is ready. 
{{(pid=63279) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1977.146875] env[63279]: DEBUG nova.network.neutron [req-40c86bd5-e154-4420-9e7c-9c831d05343c req-1ff96358-0c13-4e8b-a433-2d81136d8efa service nova] [instance: 3f888d81-e73f-4486-bb64-849c873449bf] Updated VIF entry in instance network info cache for port 77f48a61-90d3-4ff9-b672-52bd1900e2a2. {{(pid=63279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1977.147210] env[63279]: DEBUG nova.network.neutron [req-40c86bd5-e154-4420-9e7c-9c831d05343c req-1ff96358-0c13-4e8b-a433-2d81136d8efa service nova] [instance: 3f888d81-e73f-4486-bb64-849c873449bf] Updating instance_info_cache with network_info: [{"id": "77f48a61-90d3-4ff9-b672-52bd1900e2a2", "address": "fa:16:3e:cb:ca:1a", "network": {"id": "c4927c75-f2ae-4879-8ca5-ffbe46e14c4d", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-110838540-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.225", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ebd277b3520646d8b0cb2f260d42e447", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5e27fd35-1d7b-4358-92d5-4d34da27b992", "external-id": "nsx-vlan-transportzone-355", "segmentation_id": 355, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap77f48a61-90", "ovs_interfaceid": "77f48a61-90d3-4ff9-b672-52bd1900e2a2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1977.148516] env[63279]: DEBUG oslo_vmware.rw_handles [None req-1478564c-2854-499f-8d64-33efae3e3b9b tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1977.148516] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52303c4f-5d0e-b18e-f729-482dfd6edd1b" [ 1977.148516] env[63279]: _type = "HttpNfcLease" [ 1977.148516] env[63279]: }. {{(pid=63279) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1977.149895] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc88f229-1d1d-4ddd-b5e0-db908d639c74 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1977.159908] env[63279]: DEBUG oslo_vmware.rw_handles [None req-1478564c-2854-499f-8d64-33efae3e3b9b tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5271999c-8cdf-856a-306f-3ec19d4812a6/disk-0.vmdk from lease info. 
{{(pid=63279) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1977.160078] env[63279]: DEBUG oslo_vmware.rw_handles [None req-1478564c-2854-499f-8d64-33efae3e3b9b tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5271999c-8cdf-856a-306f-3ec19d4812a6/disk-0.vmdk for reading. {{(pid=63279) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1977.237432] env[63279]: DEBUG oslo_concurrency.lockutils [None req-efbb3008-c956-4d6e-b972-b10e6dc25335 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1977.270986] env[63279]: DEBUG oslo_concurrency.lockutils [None req-e555eb2f-a20c-4248-9729-967c1ca13219 tempest-ServerRescueTestJSONUnderV235-1989684258 tempest-ServerRescueTestJSONUnderV235-1989684258-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1977.273438] env[63279]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-63c990f7-479d-43f0-8213-ad07712c9c65 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1977.486759] env[63279]: DEBUG oslo_vmware.api [None req-a0e5192a-b319-4ffb-924f-54f949fdcc17 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Task: {'id': task-2087163, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.484625} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1977.487048] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-a0e5192a-b319-4ffb-924f-54f949fdcc17 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] ad435281-55a0-418a-8400-5c461a5c15ef/ad435281-55a0-418a-8400-5c461a5c15ef.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1977.487275] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-a0e5192a-b319-4ffb-924f-54f949fdcc17 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: ad435281-55a0-418a-8400-5c461a5c15ef] Extending root virtual disk to 1048576 {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1977.487533] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-3ac9dcb5-4d59-433c-adab-df81bd917899 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1977.493951] env[63279]: DEBUG oslo_concurrency.lockutils [None req-5f650a7e-7af0-4df1-a9de-9807540e6cc3 tempest-AttachInterfacesV270Test-1602135614 tempest-AttachInterfacesV270Test-1602135614-project-member] Acquiring lock "interface-ba2d6111-d93d-4216-b641-864b542ea253-None" by "nova.compute.manager.ComputeManager.attach_interface.<locals>.do_attach_interface" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1977.494441] env[63279]: DEBUG oslo_concurrency.lockutils [None req-5f650a7e-7af0-4df1-a9de-9807540e6cc3 tempest-AttachInterfacesV270Test-1602135614 tempest-AttachInterfacesV270Test-1602135614-project-member] Lock "interface-ba2d6111-d93d-4216-b641-864b542ea253-None" acquired by "nova.compute.manager.ComputeManager.attach_interface.<locals>.do_attach_interface" :: waited 0.001s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1977.495071] env[63279]: DEBUG nova.objects.instance [None req-5f650a7e-7af0-4df1-a9de-9807540e6cc3 tempest-AttachInterfacesV270Test-1602135614 tempest-AttachInterfacesV270Test-1602135614-project-member] Lazy-loading 'flavor' on Instance uuid ba2d6111-d93d-4216-b641-864b542ea253 {{(pid=63279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1977.500186] env[63279]: DEBUG oslo_vmware.api [None req-a0e5192a-b319-4ffb-924f-54f949fdcc17 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Waiting for the task: (returnval){ [ 1977.500186] env[63279]: value = "task-2087165" [ 1977.500186] env[63279]: _type = "Task" [ 1977.500186] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1977.510349] env[63279]: DEBUG oslo_vmware.api [None req-a0e5192a-b319-4ffb-924f-54f949fdcc17 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Task: {'id': task-2087165, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1977.605281] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087164, 'name': CreateVM_Task, 'duration_secs': 0.507961} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1977.605281] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 79032b2a-74f7-4c6d-8f71-f848fe372ba2] Created VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1977.605741] env[63279]: DEBUG oslo_concurrency.lockutils [None req-d078f7b9-924c-4992-81c3-367c64f2edff tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1977.605964] env[63279]: DEBUG oslo_concurrency.lockutils [None req-d078f7b9-924c-4992-81c3-367c64f2edff tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1977.606413] env[63279]: DEBUG oslo_concurrency.lockutils [None req-d078f7b9-924c-4992-81c3-367c64f2edff tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1977.606753] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-34ab0771-30b7-4555-963b-ddc9b6b226d9 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1977.612907] env[63279]: DEBUG oslo_vmware.api [None req-d078f7b9-924c-4992-81c3-367c64f2edff tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] Waiting for the task: (returnval){ [ 1977.612907] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]526afaa9-1a03-b518-4f13-55ae3668c19f" [ 1977.612907] env[63279]: _type = "Task" [ 1977.612907] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1977.619255] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Instance fb124cfa-24b4-4712-b8cc-c87df5d6231b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1977.626577] env[63279]: DEBUG oslo_vmware.api [None req-d078f7b9-924c-4992-81c3-367c64f2edff tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]526afaa9-1a03-b518-4f13-55ae3668c19f, 'name': SearchDatastore_Task, 'duration_secs': 0.008745} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1977.627481] env[63279]: DEBUG oslo_concurrency.lockutils [None req-d078f7b9-924c-4992-81c3-367c64f2edff tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1977.627896] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-d078f7b9-924c-4992-81c3-367c64f2edff tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] [instance: 79032b2a-74f7-4c6d-8f71-f848fe372ba2] Processing image 30887889-e45b-4f67-8b3c-16216e594a90 {{(pid=63279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1977.628290] env[63279]: DEBUG oslo_concurrency.lockutils [None req-d078f7b9-924c-4992-81c3-367c64f2edff tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1977.628581] env[63279]: DEBUG oslo_concurrency.lockutils [None req-d078f7b9-924c-4992-81c3-367c64f2edff tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1977.628952] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-d078f7b9-924c-4992-81c3-367c64f2edff tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1977.629393] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b27095cf-8318-4b80-8929-fbba771a5337 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1977.641184] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-d078f7b9-924c-4992-81c3-367c64f2edff tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1977.641489] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-d078f7b9-924c-4992-81c3-367c64f2edff tempest-FloatingIPsAssociationTestJSON-1108307968 
tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1977.642927] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-269c7ca3-6eea-40cd-ab4d-5aca380c3691 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1977.651120] env[63279]: DEBUG oslo_concurrency.lockutils [req-40c86bd5-e154-4420-9e7c-9c831d05343c req-1ff96358-0c13-4e8b-a433-2d81136d8efa service nova] Releasing lock "refresh_cache-3f888d81-e73f-4486-bb64-849c873449bf" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1977.651465] env[63279]: DEBUG nova.compute.manager [req-40c86bd5-e154-4420-9e7c-9c831d05343c req-1ff96358-0c13-4e8b-a433-2d81136d8efa service nova] [instance: 79032b2a-74f7-4c6d-8f71-f848fe372ba2] Received event network-vif-plugged-568079c6-5b60-4517-9e83-f526ccdaa586 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 1977.651732] env[63279]: DEBUG oslo_concurrency.lockutils [req-40c86bd5-e154-4420-9e7c-9c831d05343c req-1ff96358-0c13-4e8b-a433-2d81136d8efa service nova] Acquiring lock "79032b2a-74f7-4c6d-8f71-f848fe372ba2-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1977.652302] env[63279]: DEBUG oslo_concurrency.lockutils [req-40c86bd5-e154-4420-9e7c-9c831d05343c req-1ff96358-0c13-4e8b-a433-2d81136d8efa service nova] Lock "79032b2a-74f7-4c6d-8f71-f848fe372ba2-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1977.652809] env[63279]: DEBUG oslo_concurrency.lockutils [req-40c86bd5-e154-4420-9e7c-9c831d05343c req-1ff96358-0c13-4e8b-a433-2d81136d8efa service nova] Lock "79032b2a-74f7-4c6d-8f71-f848fe372ba2-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.001s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1977.653311] env[63279]: DEBUG nova.compute.manager [req-40c86bd5-e154-4420-9e7c-9c831d05343c req-1ff96358-0c13-4e8b-a433-2d81136d8efa service nova] [instance: 79032b2a-74f7-4c6d-8f71-f848fe372ba2] No waiting events found dispatching network-vif-plugged-568079c6-5b60-4517-9e83-f526ccdaa586 {{(pid=63279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1977.653564] env[63279]: WARNING nova.compute.manager [req-40c86bd5-e154-4420-9e7c-9c831d05343c req-1ff96358-0c13-4e8b-a433-2d81136d8efa service nova] [instance: 79032b2a-74f7-4c6d-8f71-f848fe372ba2] Received unexpected event network-vif-plugged-568079c6-5b60-4517-9e83-f526ccdaa586 for instance with vm_state building and task_state spawning. 
[ 1977.653802] env[63279]: DEBUG nova.compute.manager [req-40c86bd5-e154-4420-9e7c-9c831d05343c req-1ff96358-0c13-4e8b-a433-2d81136d8efa service nova] [instance: 79032b2a-74f7-4c6d-8f71-f848fe372ba2] Received event network-changed-568079c6-5b60-4517-9e83-f526ccdaa586 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 1977.654035] env[63279]: DEBUG nova.compute.manager [req-40c86bd5-e154-4420-9e7c-9c831d05343c req-1ff96358-0c13-4e8b-a433-2d81136d8efa service nova] [instance: 79032b2a-74f7-4c6d-8f71-f848fe372ba2] Refreshing instance network info cache due to event network-changed-568079c6-5b60-4517-9e83-f526ccdaa586. {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11655}} [ 1977.654312] env[63279]: DEBUG oslo_concurrency.lockutils [req-40c86bd5-e154-4420-9e7c-9c831d05343c req-1ff96358-0c13-4e8b-a433-2d81136d8efa service nova] Acquiring lock "refresh_cache-79032b2a-74f7-4c6d-8f71-f848fe372ba2" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1977.654514] env[63279]: DEBUG oslo_concurrency.lockutils [req-40c86bd5-e154-4420-9e7c-9c831d05343c req-1ff96358-0c13-4e8b-a433-2d81136d8efa service nova] Acquired lock "refresh_cache-79032b2a-74f7-4c6d-8f71-f848fe372ba2" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1977.655353] env[63279]: DEBUG nova.network.neutron [req-40c86bd5-e154-4420-9e7c-9c831d05343c req-1ff96358-0c13-4e8b-a433-2d81136d8efa service nova] [instance: 79032b2a-74f7-4c6d-8f71-f848fe372ba2] Refreshing network info cache for port 568079c6-5b60-4517-9e83-f526ccdaa586 {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1977.660607] env[63279]: DEBUG oslo_vmware.api [None req-d078f7b9-924c-4992-81c3-367c64f2edff tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] Waiting for the task: (returnval){ [ 1977.660607] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]520ac99a-46a1-6725-603d-1fc2508568a9" [ 1977.660607] env[63279]: _type = "Task" [ 1977.660607] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1977.672183] env[63279]: DEBUG oslo_vmware.api [None req-d078f7b9-924c-4992-81c3-367c64f2edff tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]520ac99a-46a1-6725-603d-1fc2508568a9, 'name': SearchDatastore_Task, 'duration_secs': 0.008815} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1977.673580] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-45bdb3b8-2b28-4fd9-ba8a-3505e6c585fa {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1977.680544] env[63279]: DEBUG oslo_vmware.api [None req-d078f7b9-924c-4992-81c3-367c64f2edff tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] Waiting for the task: (returnval){ [ 1977.680544] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52942d78-69bc-6b01-b916-1ffa0afc1613" [ 1977.680544] env[63279]: _type = "Task" [ 1977.680544] env[63279]: } to complete. 
{{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1977.692188] env[63279]: DEBUG oslo_vmware.api [None req-d078f7b9-924c-4992-81c3-367c64f2edff tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52942d78-69bc-6b01-b916-1ffa0afc1613, 'name': SearchDatastore_Task, 'duration_secs': 0.008694} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1977.692481] env[63279]: DEBUG oslo_concurrency.lockutils [None req-d078f7b9-924c-4992-81c3-367c64f2edff tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1977.692837] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-d078f7b9-924c-4992-81c3-367c64f2edff tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] 79032b2a-74f7-4c6d-8f71-f848fe372ba2/79032b2a-74f7-4c6d-8f71-f848fe372ba2.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1977.693237] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c2ae6f4c-4041-4e1b-bbee-53330fcf6546 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1977.700154] env[63279]: DEBUG oslo_vmware.api [None req-d078f7b9-924c-4992-81c3-367c64f2edff tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] Waiting for the task: (returnval){ [ 1977.700154] env[63279]: value = "task-2087166" [ 1977.700154] env[63279]: _type = "Task" [ 1977.700154] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1977.708578] env[63279]: DEBUG oslo_vmware.api [None req-d078f7b9-924c-4992-81c3-367c64f2edff tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] Task: {'id': task-2087166, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1978.001397] env[63279]: DEBUG nova.objects.instance [None req-5f650a7e-7af0-4df1-a9de-9807540e6cc3 tempest-AttachInterfacesV270Test-1602135614 tempest-AttachInterfacesV270Test-1602135614-project-member] Lazy-loading 'pci_requests' on Instance uuid ba2d6111-d93d-4216-b641-864b542ea253 {{(pid=63279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1978.016562] env[63279]: DEBUG oslo_vmware.api [None req-a0e5192a-b319-4ffb-924f-54f949fdcc17 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Task: {'id': task-2087165, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.078481} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1978.016562] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-a0e5192a-b319-4ffb-924f-54f949fdcc17 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: ad435281-55a0-418a-8400-5c461a5c15ef] Extended root virtual disk {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1978.016562] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb649ab4-140e-402a-80d4-02d57a687d06 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1978.021242] env[63279]: DEBUG nova.network.neutron [None req-387411dd-d774-4ddb-8c67-e6827dd0c0cb tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] [instance: eca98392-98be-405b-b799-463ef9ee3dc8] Successfully updated port: d609f907-0c7e-4293-8b21-721e712e8dc2 {{(pid=63279) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1978.043414] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-a0e5192a-b319-4ffb-924f-54f949fdcc17 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: ad435281-55a0-418a-8400-5c461a5c15ef] Reconfiguring VM instance instance-0000002d to attach disk [datastore1] ad435281-55a0-418a-8400-5c461a5c15ef/ad435281-55a0-418a-8400-5c461a5c15ef.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1978.044310] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5ba31a9f-2897-4a19-87ee-9cf8d903199d {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1978.067129] env[63279]: DEBUG oslo_vmware.api [None req-a0e5192a-b319-4ffb-924f-54f949fdcc17 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Waiting for the task: (returnval){ [ 1978.067129] env[63279]: value = "task-2087167" [ 1978.067129] env[63279]: _type = "Task" [ 1978.067129] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1978.077139] env[63279]: DEBUG oslo_vmware.api [None req-a0e5192a-b319-4ffb-924f-54f949fdcc17 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Task: {'id': task-2087167, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1978.128757] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Instance 58392790-b297-4894-8d81-e5cbda69872b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1978.214564] env[63279]: DEBUG oslo_vmware.api [None req-d078f7b9-924c-4992-81c3-367c64f2edff tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] Task: {'id': task-2087166, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.461756} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1978.214846] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-d078f7b9-924c-4992-81c3-367c64f2edff tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] 79032b2a-74f7-4c6d-8f71-f848fe372ba2/79032b2a-74f7-4c6d-8f71-f848fe372ba2.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1978.215195] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-d078f7b9-924c-4992-81c3-367c64f2edff tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] [instance: 79032b2a-74f7-4c6d-8f71-f848fe372ba2] Extending root virtual disk to 1048576 {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1978.215442] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-00badbae-06ba-44c1-97e0-4e12936dc6a6 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1978.223877] env[63279]: DEBUG oslo_vmware.api [None req-d078f7b9-924c-4992-81c3-367c64f2edff tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] Waiting for the task: (returnval){ [ 1978.223877] env[63279]: value = "task-2087168" [ 1978.223877] env[63279]: _type = "Task" [ 1978.223877] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1978.233176] env[63279]: DEBUG oslo_vmware.api [None req-d078f7b9-924c-4992-81c3-367c64f2edff tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] Task: {'id': task-2087168, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1978.455741] env[63279]: DEBUG nova.network.neutron [req-40c86bd5-e154-4420-9e7c-9c831d05343c req-1ff96358-0c13-4e8b-a433-2d81136d8efa service nova] [instance: 79032b2a-74f7-4c6d-8f71-f848fe372ba2] Updated VIF entry in instance network info cache for port 568079c6-5b60-4517-9e83-f526ccdaa586. 
{{(pid=63279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1978.456108] env[63279]: DEBUG nova.network.neutron [req-40c86bd5-e154-4420-9e7c-9c831d05343c req-1ff96358-0c13-4e8b-a433-2d81136d8efa service nova] [instance: 79032b2a-74f7-4c6d-8f71-f848fe372ba2] Updating instance_info_cache with network_info: [{"id": "568079c6-5b60-4517-9e83-f526ccdaa586", "address": "fa:16:3e:e1:18:dd", "network": {"id": "948d327b-554a-4c1d-a483-9a067d60f6bc", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1383523654-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ba92e8896d7a4605bec96ce7ee7d4a4d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "471f65a5-21ea-45e3-a722-4e204ed65673", "external-id": "nsx-vlan-transportzone-139", "segmentation_id": 139, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap568079c6-5b", "ovs_interfaceid": "568079c6-5b60-4517-9e83-f526ccdaa586", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1978.509579] env[63279]: DEBUG nova.objects.base [None req-5f650a7e-7af0-4df1-a9de-9807540e6cc3 tempest-AttachInterfacesV270Test-1602135614 tempest-AttachInterfacesV270Test-1602135614-project-member] Object Instance lazy-loaded attributes: flavor,pci_requests {{(pid=63279) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1978.509815] env[63279]: DEBUG nova.network.neutron [None req-5f650a7e-7af0-4df1-a9de-9807540e6cc3 tempest-AttachInterfacesV270Test-1602135614 tempest-AttachInterfacesV270Test-1602135614-project-member] [instance: ba2d6111-d93d-4216-b641-864b542ea253] allocate_for_instance() {{(pid=63279) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1978.523460] env[63279]: DEBUG oslo_concurrency.lockutils [None req-387411dd-d774-4ddb-8c67-e6827dd0c0cb tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Acquiring lock "refresh_cache-eca98392-98be-405b-b799-463ef9ee3dc8" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1978.523609] env[63279]: DEBUG oslo_concurrency.lockutils [None req-387411dd-d774-4ddb-8c67-e6827dd0c0cb tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Acquired lock "refresh_cache-eca98392-98be-405b-b799-463ef9ee3dc8" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1978.523750] env[63279]: DEBUG nova.network.neutron [None req-387411dd-d774-4ddb-8c67-e6827dd0c0cb tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] [instance: eca98392-98be-405b-b799-463ef9ee3dc8] Building network info cache for instance {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1978.577043] env[63279]: DEBUG oslo_vmware.api [None req-a0e5192a-b319-4ffb-924f-54f949fdcc17 
tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Task: {'id': task-2087167, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1978.618898] env[63279]: DEBUG oslo_concurrency.lockutils [None req-5f650a7e-7af0-4df1-a9de-9807540e6cc3 tempest-AttachInterfacesV270Test-1602135614 tempest-AttachInterfacesV270Test-1602135614-project-member] Lock "interface-ba2d6111-d93d-4216-b641-864b542ea253-None" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 1.124s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1978.632125] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Instance 8ccb4293-927a-45ba-82e9-9f1b4d5985cc has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1978.733804] env[63279]: DEBUG oslo_vmware.api [None req-d078f7b9-924c-4992-81c3-367c64f2edff tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] Task: {'id': task-2087168, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.214985} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1978.734446] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-d078f7b9-924c-4992-81c3-367c64f2edff tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] [instance: 79032b2a-74f7-4c6d-8f71-f848fe372ba2] Extended root virtual disk {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1978.735094] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d170325-9383-46bf-84b2-85dce587efc2 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1978.757210] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-d078f7b9-924c-4992-81c3-367c64f2edff tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] [instance: 79032b2a-74f7-4c6d-8f71-f848fe372ba2] Reconfiguring VM instance instance-0000002e to attach disk [datastore1] 79032b2a-74f7-4c6d-8f71-f848fe372ba2/79032b2a-74f7-4c6d-8f71-f848fe372ba2.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1978.757606] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-20f6be67-80c2-4f26-8dc4-604802c79aab {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1978.778104] env[63279]: DEBUG oslo_vmware.api [None req-d078f7b9-924c-4992-81c3-367c64f2edff tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] Waiting for the task: (returnval){ [ 1978.778104] env[63279]: value = "task-2087169" [ 1978.778104] env[63279]: _type = "Task" [ 1978.778104] env[63279]: } to complete. 
{{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1978.789500] env[63279]: DEBUG oslo_vmware.api [None req-d078f7b9-924c-4992-81c3-367c64f2edff tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] Task: {'id': task-2087169, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1978.959804] env[63279]: DEBUG oslo_concurrency.lockutils [req-40c86bd5-e154-4420-9e7c-9c831d05343c req-1ff96358-0c13-4e8b-a433-2d81136d8efa service nova] Releasing lock "refresh_cache-79032b2a-74f7-4c6d-8f71-f848fe372ba2" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1979.066129] env[63279]: DEBUG nova.network.neutron [None req-387411dd-d774-4ddb-8c67-e6827dd0c0cb tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] [instance: eca98392-98be-405b-b799-463ef9ee3dc8] Instance cache missing network info. {{(pid=63279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1979.077718] env[63279]: DEBUG oslo_vmware.api [None req-a0e5192a-b319-4ffb-924f-54f949fdcc17 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Task: {'id': task-2087167, 'name': ReconfigVM_Task, 'duration_secs': 0.762182} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1979.078119] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-a0e5192a-b319-4ffb-924f-54f949fdcc17 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: ad435281-55a0-418a-8400-5c461a5c15ef] Reconfigured VM instance instance-0000002d to attach disk [datastore1] ad435281-55a0-418a-8400-5c461a5c15ef/ad435281-55a0-418a-8400-5c461a5c15ef.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1979.078839] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-8b5f8522-06fe-41b2-bb80-6b5735423d66 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1979.085709] env[63279]: DEBUG oslo_vmware.api [None req-a0e5192a-b319-4ffb-924f-54f949fdcc17 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Waiting for the task: (returnval){ [ 1979.085709] env[63279]: value = "task-2087170" [ 1979.085709] env[63279]: _type = "Task" [ 1979.085709] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1979.093686] env[63279]: DEBUG oslo_vmware.api [None req-a0e5192a-b319-4ffb-924f-54f949fdcc17 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Task: {'id': task-2087170, 'name': Rename_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1979.134999] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Instance 64e92bfc-c0d0-4918-9ba2-45ffedbf7e39 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. 
Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1979.237329] env[63279]: DEBUG nova.network.neutron [None req-387411dd-d774-4ddb-8c67-e6827dd0c0cb tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] [instance: eca98392-98be-405b-b799-463ef9ee3dc8] Updating instance_info_cache with network_info: [{"id": "d609f907-0c7e-4293-8b21-721e712e8dc2", "address": "fa:16:3e:0e:eb:3d", "network": {"id": "e0e614b7-de4b-485e-8824-582faae8febd", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1102025073-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b5b21bc5072e4945a19a782dd9561709", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "41278529-8bd2-44a1-97c8-03967faa3ff7", "external-id": "nsx-vlan-transportzone-749", "segmentation_id": 749, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd609f907-0c", "ovs_interfaceid": "d609f907-0c7e-4293-8b21-721e712e8dc2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1979.288794] env[63279]: DEBUG oslo_vmware.api [None req-d078f7b9-924c-4992-81c3-367c64f2edff tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] Task: {'id': task-2087169, 'name': ReconfigVM_Task, 'duration_secs': 0.325178} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1979.289078] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-d078f7b9-924c-4992-81c3-367c64f2edff tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] [instance: 79032b2a-74f7-4c6d-8f71-f848fe372ba2] Reconfigured VM instance instance-0000002e to attach disk [datastore1] 79032b2a-74f7-4c6d-8f71-f848fe372ba2/79032b2a-74f7-4c6d-8f71-f848fe372ba2.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1979.289714] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-41deaeab-fb38-4ca5-9206-b7ec90ec7d8a {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1979.296659] env[63279]: DEBUG oslo_vmware.api [None req-d078f7b9-924c-4992-81c3-367c64f2edff tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] Waiting for the task: (returnval){ [ 1979.296659] env[63279]: value = "task-2087171" [ 1979.296659] env[63279]: _type = "Task" [ 1979.296659] env[63279]: } to complete. 
{{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1979.304464] env[63279]: DEBUG oslo_vmware.api [None req-d078f7b9-924c-4992-81c3-367c64f2edff tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] Task: {'id': task-2087171, 'name': Rename_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1979.596677] env[63279]: DEBUG oslo_vmware.api [None req-a0e5192a-b319-4ffb-924f-54f949fdcc17 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Task: {'id': task-2087170, 'name': Rename_Task, 'duration_secs': 0.333276} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1979.596677] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-a0e5192a-b319-4ffb-924f-54f949fdcc17 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: ad435281-55a0-418a-8400-5c461a5c15ef] Powering on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1979.596677] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-7a8c83cb-9e08-4416-bb0a-8a091425cfb0 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1979.603264] env[63279]: DEBUG oslo_vmware.api [None req-a0e5192a-b319-4ffb-924f-54f949fdcc17 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Waiting for the task: (returnval){ [ 1979.603264] env[63279]: value = "task-2087172" [ 1979.603264] env[63279]: _type = "Task" [ 1979.603264] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1979.611137] env[63279]: DEBUG oslo_vmware.api [None req-a0e5192a-b319-4ffb-924f-54f949fdcc17 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Task: {'id': task-2087172, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1979.638501] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Instance f6a5d157-d58c-4c7e-b6e8-f2dc7aaebd9b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1979.741061] env[63279]: DEBUG oslo_concurrency.lockutils [None req-387411dd-d774-4ddb-8c67-e6827dd0c0cb tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Releasing lock "refresh_cache-eca98392-98be-405b-b799-463ef9ee3dc8" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1979.741061] env[63279]: DEBUG nova.compute.manager [None req-387411dd-d774-4ddb-8c67-e6827dd0c0cb tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] [instance: eca98392-98be-405b-b799-463ef9ee3dc8] Instance network_info: |[{"id": "d609f907-0c7e-4293-8b21-721e712e8dc2", "address": "fa:16:3e:0e:eb:3d", "network": {"id": "e0e614b7-de4b-485e-8824-582faae8febd", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1102025073-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b5b21bc5072e4945a19a782dd9561709", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "41278529-8bd2-44a1-97c8-03967faa3ff7", "external-id": "nsx-vlan-transportzone-749", "segmentation_id": 749, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd609f907-0c", "ovs_interfaceid": "d609f907-0c7e-4293-8b21-721e712e8dc2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1979.741783] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-387411dd-d774-4ddb-8c67-e6827dd0c0cb tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] [instance: eca98392-98be-405b-b799-463ef9ee3dc8] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:0e:eb:3d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '41278529-8bd2-44a1-97c8-03967faa3ff7', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd609f907-0c7e-4293-8b21-721e712e8dc2', 'vif_model': 'vmxnet3'}] {{(pid=63279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1979.750510] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-387411dd-d774-4ddb-8c67-e6827dd0c0cb tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Creating folder: Project (b5b21bc5072e4945a19a782dd9561709). Parent ref: group-v427491. 
{{(pid=63279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1979.750823] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-498a76e0-8b32-474e-8ff6-9db4cd5182f9 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1979.762116] env[63279]: INFO nova.virt.vmwareapi.vm_util [None req-387411dd-d774-4ddb-8c67-e6827dd0c0cb tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Created folder: Project (b5b21bc5072e4945a19a782dd9561709) in parent group-v427491. [ 1979.762328] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-387411dd-d774-4ddb-8c67-e6827dd0c0cb tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Creating folder: Instances. Parent ref: group-v427628. {{(pid=63279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1979.762582] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-40ca84a4-2330-4753-b84c-b2a968137a0c {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1979.771872] env[63279]: INFO nova.virt.vmwareapi.vm_util [None req-387411dd-d774-4ddb-8c67-e6827dd0c0cb tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Created folder: Instances in parent group-v427628. [ 1979.772137] env[63279]: DEBUG oslo.service.loopingcall [None req-387411dd-d774-4ddb-8c67-e6827dd0c0cb tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1979.772338] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: eca98392-98be-405b-b799-463ef9ee3dc8] Creating VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1979.772557] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f2d8560c-34c4-423c-b976-dbda2d7089a1 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1979.792490] env[63279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1979.792490] env[63279]: value = "task-2087175" [ 1979.792490] env[63279]: _type = "Task" [ 1979.792490] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1979.800668] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087175, 'name': CreateVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1979.809280] env[63279]: DEBUG oslo_vmware.api [None req-d078f7b9-924c-4992-81c3-367c64f2edff tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] Task: {'id': task-2087171, 'name': Rename_Task, 'duration_secs': 0.137691} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1979.809553] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-d078f7b9-924c-4992-81c3-367c64f2edff tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] [instance: 79032b2a-74f7-4c6d-8f71-f848fe372ba2] Powering on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1979.809810] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9abe3723-cd37-4adb-8491-269ee0036dbf {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1979.815347] env[63279]: DEBUG oslo_vmware.api [None req-d078f7b9-924c-4992-81c3-367c64f2edff tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] Waiting for the task: (returnval){ [ 1979.815347] env[63279]: value = "task-2087176" [ 1979.815347] env[63279]: _type = "Task" [ 1979.815347] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1979.823914] env[63279]: DEBUG oslo_vmware.api [None req-d078f7b9-924c-4992-81c3-367c64f2edff tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] Task: {'id': task-2087176, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1979.938604] env[63279]: DEBUG nova.compute.manager [req-c98ae075-4cd0-4ae5-84a7-00d2700f7ec5 req-60f5ce1c-1f7c-4bfb-8ef3-66a0eded5f7d service nova] [instance: 3f888d81-e73f-4486-bb64-849c873449bf] Received event network-changed-77f48a61-90d3-4ff9-b672-52bd1900e2a2 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 1979.938771] env[63279]: DEBUG nova.compute.manager [req-c98ae075-4cd0-4ae5-84a7-00d2700f7ec5 req-60f5ce1c-1f7c-4bfb-8ef3-66a0eded5f7d service nova] [instance: 3f888d81-e73f-4486-bb64-849c873449bf] Refreshing instance network info cache due to event network-changed-77f48a61-90d3-4ff9-b672-52bd1900e2a2. 
{{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11655}} [ 1979.939030] env[63279]: DEBUG oslo_concurrency.lockutils [req-c98ae075-4cd0-4ae5-84a7-00d2700f7ec5 req-60f5ce1c-1f7c-4bfb-8ef3-66a0eded5f7d service nova] Acquiring lock "refresh_cache-3f888d81-e73f-4486-bb64-849c873449bf" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1979.939206] env[63279]: DEBUG oslo_concurrency.lockutils [req-c98ae075-4cd0-4ae5-84a7-00d2700f7ec5 req-60f5ce1c-1f7c-4bfb-8ef3-66a0eded5f7d service nova] Acquired lock "refresh_cache-3f888d81-e73f-4486-bb64-849c873449bf" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1979.939379] env[63279]: DEBUG nova.network.neutron [req-c98ae075-4cd0-4ae5-84a7-00d2700f7ec5 req-60f5ce1c-1f7c-4bfb-8ef3-66a0eded5f7d service nova] [instance: 3f888d81-e73f-4486-bb64-849c873449bf] Refreshing network info cache for port 77f48a61-90d3-4ff9-b672-52bd1900e2a2 {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1980.114152] env[63279]: DEBUG oslo_vmware.api [None req-a0e5192a-b319-4ffb-924f-54f949fdcc17 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Task: {'id': task-2087172, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1980.142389] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Instance ff9701ed-d545-44b4-911a-c4d809d0a771 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1980.303044] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087175, 'name': CreateVM_Task} progress is 25%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1980.326782] env[63279]: DEBUG oslo_vmware.api [None req-d078f7b9-924c-4992-81c3-367c64f2edff tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] Task: {'id': task-2087176, 'name': PowerOnVM_Task} progress is 89%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1980.621257] env[63279]: DEBUG oslo_vmware.api [None req-a0e5192a-b319-4ffb-924f-54f949fdcc17 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Task: {'id': task-2087172, 'name': PowerOnVM_Task, 'duration_secs': 0.730728} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1980.624609] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-a0e5192a-b319-4ffb-924f-54f949fdcc17 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: ad435281-55a0-418a-8400-5c461a5c15ef] Powered on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1980.624887] env[63279]: INFO nova.compute.manager [None req-a0e5192a-b319-4ffb-924f-54f949fdcc17 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: ad435281-55a0-418a-8400-5c461a5c15ef] Took 8.99 seconds to spawn the instance on the hypervisor. [ 1980.625144] env[63279]: DEBUG nova.compute.manager [None req-a0e5192a-b319-4ffb-924f-54f949fdcc17 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: ad435281-55a0-418a-8400-5c461a5c15ef] Checking state {{(pid=63279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1980.626583] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2bb6bd30-f6bc-4d1d-8c7d-04c58c14e204 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1980.646323] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Instance 0e12ab9b-a701-4e0f-9d96-939090f50494 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 2, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1980.756283] env[63279]: DEBUG nova.network.neutron [req-c98ae075-4cd0-4ae5-84a7-00d2700f7ec5 req-60f5ce1c-1f7c-4bfb-8ef3-66a0eded5f7d service nova] [instance: 3f888d81-e73f-4486-bb64-849c873449bf] Updated VIF entry in instance network info cache for port 77f48a61-90d3-4ff9-b672-52bd1900e2a2. 
{{(pid=63279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1980.756651] env[63279]: DEBUG nova.network.neutron [req-c98ae075-4cd0-4ae5-84a7-00d2700f7ec5 req-60f5ce1c-1f7c-4bfb-8ef3-66a0eded5f7d service nova] [instance: 3f888d81-e73f-4486-bb64-849c873449bf] Updating instance_info_cache with network_info: [{"id": "77f48a61-90d3-4ff9-b672-52bd1900e2a2", "address": "fa:16:3e:cb:ca:1a", "network": {"id": "c4927c75-f2ae-4879-8ca5-ffbe46e14c4d", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-110838540-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ebd277b3520646d8b0cb2f260d42e447", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5e27fd35-1d7b-4358-92d5-4d34da27b992", "external-id": "nsx-vlan-transportzone-355", "segmentation_id": 355, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap77f48a61-90", "ovs_interfaceid": "77f48a61-90d3-4ff9-b672-52bd1900e2a2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1980.803945] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087175, 'name': CreateVM_Task, 'duration_secs': 0.602102} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1980.804084] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: eca98392-98be-405b-b799-463ef9ee3dc8] Created VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1980.804779] env[63279]: DEBUG oslo_concurrency.lockutils [None req-387411dd-d774-4ddb-8c67-e6827dd0c0cb tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1980.804941] env[63279]: DEBUG oslo_concurrency.lockutils [None req-387411dd-d774-4ddb-8c67-e6827dd0c0cb tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1980.805272] env[63279]: DEBUG oslo_concurrency.lockutils [None req-387411dd-d774-4ddb-8c67-e6827dd0c0cb tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1980.805522] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-149352d4-2107-4d23-8418-eacc4801bb67 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1980.810087] env[63279]: DEBUG oslo_vmware.api [None req-387411dd-d774-4ddb-8c67-e6827dd0c0cb tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Waiting for the task: (returnval){ [ 1980.810087] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]5250652a-92e8-925b-585e-597bd48ff2fc" [ 1980.810087] env[63279]: _type = "Task" [ 1980.810087] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1980.817961] env[63279]: DEBUG oslo_vmware.api [None req-387411dd-d774-4ddb-8c67-e6827dd0c0cb tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]5250652a-92e8-925b-585e-597bd48ff2fc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1980.825086] env[63279]: DEBUG oslo_vmware.api [None req-d078f7b9-924c-4992-81c3-367c64f2edff tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] Task: {'id': task-2087176, 'name': PowerOnVM_Task, 'duration_secs': 0.592848} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1980.825577] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-d078f7b9-924c-4992-81c3-367c64f2edff tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] [instance: 79032b2a-74f7-4c6d-8f71-f848fe372ba2] Powered on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1980.825577] env[63279]: INFO nova.compute.manager [None req-d078f7b9-924c-4992-81c3-367c64f2edff tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] [instance: 79032b2a-74f7-4c6d-8f71-f848fe372ba2] Took 6.47 seconds to spawn the instance on the hypervisor. [ 1980.825672] env[63279]: DEBUG nova.compute.manager [None req-d078f7b9-924c-4992-81c3-367c64f2edff tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] [instance: 79032b2a-74f7-4c6d-8f71-f848fe372ba2] Checking state {{(pid=63279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1980.826412] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-334248bd-f15a-4029-9dc7-c8544a140460 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1980.920876] env[63279]: DEBUG oslo_concurrency.lockutils [None req-5a6f349c-4703-44a2-807a-eea47f310c9f tempest-ServerTagsTestJSON-1345092865 tempest-ServerTagsTestJSON-1345092865-project-member] Acquiring lock "df410051-d551-4a90-81f7-5630f5521a10" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1980.921176] env[63279]: DEBUG oslo_concurrency.lockutils [None req-5a6f349c-4703-44a2-807a-eea47f310c9f tempest-ServerTagsTestJSON-1345092865 tempest-ServerTagsTestJSON-1345092865-project-member] Lock "df410051-d551-4a90-81f7-5630f5521a10" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1980.921387] env[63279]: DEBUG oslo_concurrency.lockutils [None req-5a6f349c-4703-44a2-807a-eea47f310c9f tempest-ServerTagsTestJSON-1345092865 tempest-ServerTagsTestJSON-1345092865-project-member] Acquiring lock "df410051-d551-4a90-81f7-5630f5521a10-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1980.921575] env[63279]: DEBUG oslo_concurrency.lockutils [None req-5a6f349c-4703-44a2-807a-eea47f310c9f tempest-ServerTagsTestJSON-1345092865 tempest-ServerTagsTestJSON-1345092865-project-member] Lock "df410051-d551-4a90-81f7-5630f5521a10-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1980.921750] env[63279]: DEBUG oslo_concurrency.lockutils [None req-5a6f349c-4703-44a2-807a-eea47f310c9f tempest-ServerTagsTestJSON-1345092865 tempest-ServerTagsTestJSON-1345092865-project-member] Lock "df410051-d551-4a90-81f7-5630f5521a10-events" "released" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1980.923956] env[63279]: INFO nova.compute.manager [None req-5a6f349c-4703-44a2-807a-eea47f310c9f tempest-ServerTagsTestJSON-1345092865 tempest-ServerTagsTestJSON-1345092865-project-member] [instance: df410051-d551-4a90-81f7-5630f5521a10] Terminating instance [ 1980.985432] env[63279]: DEBUG oslo_concurrency.lockutils [None req-0d5f8539-9b0d-46c6-a3f8-4965417b3f66 tempest-AttachInterfacesV270Test-1602135614 tempest-AttachInterfacesV270Test-1602135614-project-member] Acquiring lock "ba2d6111-d93d-4216-b641-864b542ea253" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1980.985720] env[63279]: DEBUG oslo_concurrency.lockutils [None req-0d5f8539-9b0d-46c6-a3f8-4965417b3f66 tempest-AttachInterfacesV270Test-1602135614 tempest-AttachInterfacesV270Test-1602135614-project-member] Lock "ba2d6111-d93d-4216-b641-864b542ea253" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1980.985938] env[63279]: DEBUG oslo_concurrency.lockutils [None req-0d5f8539-9b0d-46c6-a3f8-4965417b3f66 tempest-AttachInterfacesV270Test-1602135614 tempest-AttachInterfacesV270Test-1602135614-project-member] Acquiring lock "ba2d6111-d93d-4216-b641-864b542ea253-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1980.986143] env[63279]: DEBUG oslo_concurrency.lockutils [None req-0d5f8539-9b0d-46c6-a3f8-4965417b3f66 tempest-AttachInterfacesV270Test-1602135614 tempest-AttachInterfacesV270Test-1602135614-project-member] Lock "ba2d6111-d93d-4216-b641-864b542ea253-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1980.986350] env[63279]: DEBUG oslo_concurrency.lockutils [None req-0d5f8539-9b0d-46c6-a3f8-4965417b3f66 tempest-AttachInterfacesV270Test-1602135614 tempest-AttachInterfacesV270Test-1602135614-project-member] Lock "ba2d6111-d93d-4216-b641-864b542ea253-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1980.988559] env[63279]: INFO nova.compute.manager [None req-0d5f8539-9b0d-46c6-a3f8-4965417b3f66 tempest-AttachInterfacesV270Test-1602135614 tempest-AttachInterfacesV270Test-1602135614-project-member] [instance: ba2d6111-d93d-4216-b641-864b542ea253] Terminating instance [ 1981.145130] env[63279]: INFO nova.compute.manager [None req-a0e5192a-b319-4ffb-924f-54f949fdcc17 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: ad435281-55a0-418a-8400-5c461a5c15ef] Took 39.87 seconds to build instance. 
[ 1981.151154] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Instance 5b5f87cb-cf35-418f-b5bd-b953524a285c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1981.260077] env[63279]: DEBUG oslo_concurrency.lockutils [req-c98ae075-4cd0-4ae5-84a7-00d2700f7ec5 req-60f5ce1c-1f7c-4bfb-8ef3-66a0eded5f7d service nova] Releasing lock "refresh_cache-3f888d81-e73f-4486-bb64-849c873449bf" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1981.260454] env[63279]: DEBUG nova.compute.manager [req-c98ae075-4cd0-4ae5-84a7-00d2700f7ec5 req-60f5ce1c-1f7c-4bfb-8ef3-66a0eded5f7d service nova] [instance: eca98392-98be-405b-b799-463ef9ee3dc8] Received event network-vif-plugged-d609f907-0c7e-4293-8b21-721e712e8dc2 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 1981.260661] env[63279]: DEBUG oslo_concurrency.lockutils [req-c98ae075-4cd0-4ae5-84a7-00d2700f7ec5 req-60f5ce1c-1f7c-4bfb-8ef3-66a0eded5f7d service nova] Acquiring lock "eca98392-98be-405b-b799-463ef9ee3dc8-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1981.260873] env[63279]: DEBUG oslo_concurrency.lockutils [req-c98ae075-4cd0-4ae5-84a7-00d2700f7ec5 req-60f5ce1c-1f7c-4bfb-8ef3-66a0eded5f7d service nova] Lock "eca98392-98be-405b-b799-463ef9ee3dc8-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1981.261133] env[63279]: DEBUG oslo_concurrency.lockutils [req-c98ae075-4cd0-4ae5-84a7-00d2700f7ec5 req-60f5ce1c-1f7c-4bfb-8ef3-66a0eded5f7d service nova] Lock "eca98392-98be-405b-b799-463ef9ee3dc8-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1981.261315] env[63279]: DEBUG nova.compute.manager [req-c98ae075-4cd0-4ae5-84a7-00d2700f7ec5 req-60f5ce1c-1f7c-4bfb-8ef3-66a0eded5f7d service nova] [instance: eca98392-98be-405b-b799-463ef9ee3dc8] No waiting events found dispatching network-vif-plugged-d609f907-0c7e-4293-8b21-721e712e8dc2 {{(pid=63279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1981.261486] env[63279]: WARNING nova.compute.manager [req-c98ae075-4cd0-4ae5-84a7-00d2700f7ec5 req-60f5ce1c-1f7c-4bfb-8ef3-66a0eded5f7d service nova] [instance: eca98392-98be-405b-b799-463ef9ee3dc8] Received unexpected event network-vif-plugged-d609f907-0c7e-4293-8b21-721e712e8dc2 for instance with vm_state building and task_state spawning. 
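Aside: the Acquiring/Acquired/Releasing lock lines above (for example the "refresh_cache-<instance uuid>" and image-cache datastore locks) show oslo.concurrency named locks serializing work on a shared resource. A minimal sketch of that pattern follows, assuming an internal (in-process) lock; refresh_network_cache() and fetcher() are hypothetical stand-ins for illustration, not Nova's actual code.

    # Sketch only: named-lock pattern behind the lockutils entries in the log.
    from oslo_concurrency import lockutils

    def refresh_network_cache(instance_uuid, fetcher):
        # Serialize cache refreshes per instance, like the log's
        # refresh_cache-<uuid> locks: concurrent callers for the same
        # instance wait here instead of racing on the cache entry.
        with lockutils.lock("refresh_cache-%s" % instance_uuid):
            return fetcher(instance_uuid)

The "waited N.NNNs" / "held N.NNNs" figures in the log are the time a caller spent blocked on such a lock and the time it held it before release.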
[ 1981.261652] env[63279]: DEBUG nova.compute.manager [req-c98ae075-4cd0-4ae5-84a7-00d2700f7ec5 req-60f5ce1c-1f7c-4bfb-8ef3-66a0eded5f7d service nova] [instance: eca98392-98be-405b-b799-463ef9ee3dc8] Received event network-changed-d609f907-0c7e-4293-8b21-721e712e8dc2 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 1981.261808] env[63279]: DEBUG nova.compute.manager [req-c98ae075-4cd0-4ae5-84a7-00d2700f7ec5 req-60f5ce1c-1f7c-4bfb-8ef3-66a0eded5f7d service nova] [instance: eca98392-98be-405b-b799-463ef9ee3dc8] Refreshing instance network info cache due to event network-changed-d609f907-0c7e-4293-8b21-721e712e8dc2. {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11655}} [ 1981.261997] env[63279]: DEBUG oslo_concurrency.lockutils [req-c98ae075-4cd0-4ae5-84a7-00d2700f7ec5 req-60f5ce1c-1f7c-4bfb-8ef3-66a0eded5f7d service nova] Acquiring lock "refresh_cache-eca98392-98be-405b-b799-463ef9ee3dc8" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1981.262153] env[63279]: DEBUG oslo_concurrency.lockutils [req-c98ae075-4cd0-4ae5-84a7-00d2700f7ec5 req-60f5ce1c-1f7c-4bfb-8ef3-66a0eded5f7d service nova] Acquired lock "refresh_cache-eca98392-98be-405b-b799-463ef9ee3dc8" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1981.262316] env[63279]: DEBUG nova.network.neutron [req-c98ae075-4cd0-4ae5-84a7-00d2700f7ec5 req-60f5ce1c-1f7c-4bfb-8ef3-66a0eded5f7d service nova] [instance: eca98392-98be-405b-b799-463ef9ee3dc8] Refreshing network info cache for port d609f907-0c7e-4293-8b21-721e712e8dc2 {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1981.322691] env[63279]: DEBUG oslo_vmware.api [None req-387411dd-d774-4ddb-8c67-e6827dd0c0cb tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]5250652a-92e8-925b-585e-597bd48ff2fc, 'name': SearchDatastore_Task, 'duration_secs': 0.010399} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1981.322883] env[63279]: DEBUG oslo_concurrency.lockutils [None req-387411dd-d774-4ddb-8c67-e6827dd0c0cb tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1981.323126] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-387411dd-d774-4ddb-8c67-e6827dd0c0cb tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] [instance: eca98392-98be-405b-b799-463ef9ee3dc8] Processing image 30887889-e45b-4f67-8b3c-16216e594a90 {{(pid=63279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1981.323374] env[63279]: DEBUG oslo_concurrency.lockutils [None req-387411dd-d774-4ddb-8c67-e6827dd0c0cb tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1981.323529] env[63279]: DEBUG oslo_concurrency.lockutils [None req-387411dd-d774-4ddb-8c67-e6827dd0c0cb tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1981.323712] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-387411dd-d774-4ddb-8c67-e6827dd0c0cb tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1981.323992] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9b515cdd-fe92-4ce9-ad5a-03c5b117cb9b {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1981.332628] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-387411dd-d774-4ddb-8c67-e6827dd0c0cb tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1981.332814] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-387411dd-d774-4ddb-8c67-e6827dd0c0cb tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1981.333558] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1cd0d688-4b97-4393-81b2-7045aef3ca79 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1981.342966] env[63279]: INFO nova.compute.manager [None req-d078f7b9-924c-4992-81c3-367c64f2edff tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] [instance: 79032b2a-74f7-4c6d-8f71-f848fe372ba2] Took 35.08 seconds to build instance. [ 1981.345693] env[63279]: DEBUG oslo_vmware.api [None req-387411dd-d774-4ddb-8c67-e6827dd0c0cb tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Waiting for the task: (returnval){ [ 1981.345693] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]5287c4ff-a33a-08d5-905a-76db4d0f4bbb" [ 1981.345693] env[63279]: _type = "Task" [ 1981.345693] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1981.355668] env[63279]: DEBUG oslo_vmware.api [None req-387411dd-d774-4ddb-8c67-e6827dd0c0cb tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]5287c4ff-a33a-08d5-905a-76db4d0f4bbb, 'name': SearchDatastore_Task, 'duration_secs': 0.008795} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1981.356801] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-09bd50bc-5542-4869-9175-06c7624864f6 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1981.362575] env[63279]: DEBUG oslo_vmware.api [None req-387411dd-d774-4ddb-8c67-e6827dd0c0cb tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Waiting for the task: (returnval){ [ 1981.362575] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52fc9806-03b6-9d1b-32d4-bf57334c628f" [ 1981.362575] env[63279]: _type = "Task" [ 1981.362575] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1981.370554] env[63279]: DEBUG oslo_vmware.api [None req-387411dd-d774-4ddb-8c67-e6827dd0c0cb tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52fc9806-03b6-9d1b-32d4-bf57334c628f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1981.427876] env[63279]: DEBUG nova.compute.manager [None req-5a6f349c-4703-44a2-807a-eea47f310c9f tempest-ServerTagsTestJSON-1345092865 tempest-ServerTagsTestJSON-1345092865-project-member] [instance: df410051-d551-4a90-81f7-5630f5521a10] Start destroying the instance on the hypervisor. 
{{(pid=63279) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1981.428216] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-5a6f349c-4703-44a2-807a-eea47f310c9f tempest-ServerTagsTestJSON-1345092865 tempest-ServerTagsTestJSON-1345092865-project-member] [instance: df410051-d551-4a90-81f7-5630f5521a10] Destroying instance {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1981.429096] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57844eba-862f-403f-8e7b-53f34146b97c {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1981.437379] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-5a6f349c-4703-44a2-807a-eea47f310c9f tempest-ServerTagsTestJSON-1345092865 tempest-ServerTagsTestJSON-1345092865-project-member] [instance: df410051-d551-4a90-81f7-5630f5521a10] Powering off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1981.437623] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-59f49d07-0e1d-42fa-8831-ffb9fb1e1b6a {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1981.443667] env[63279]: DEBUG oslo_vmware.api [None req-5a6f349c-4703-44a2-807a-eea47f310c9f tempest-ServerTagsTestJSON-1345092865 tempest-ServerTagsTestJSON-1345092865-project-member] Waiting for the task: (returnval){ [ 1981.443667] env[63279]: value = "task-2087177" [ 1981.443667] env[63279]: _type = "Task" [ 1981.443667] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1981.451690] env[63279]: DEBUG oslo_vmware.api [None req-5a6f349c-4703-44a2-807a-eea47f310c9f tempest-ServerTagsTestJSON-1345092865 tempest-ServerTagsTestJSON-1345092865-project-member] Task: {'id': task-2087177, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1981.492204] env[63279]: DEBUG nova.compute.manager [None req-0d5f8539-9b0d-46c6-a3f8-4965417b3f66 tempest-AttachInterfacesV270Test-1602135614 tempest-AttachInterfacesV270Test-1602135614-project-member] [instance: ba2d6111-d93d-4216-b641-864b542ea253] Start destroying the instance on the hypervisor. 
{{(pid=63279) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1981.492501] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-0d5f8539-9b0d-46c6-a3f8-4965417b3f66 tempest-AttachInterfacesV270Test-1602135614 tempest-AttachInterfacesV270Test-1602135614-project-member] [instance: ba2d6111-d93d-4216-b641-864b542ea253] Destroying instance {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1981.493466] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93127c13-5333-40d1-a9c6-6b4501339114 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1981.502028] env[63279]: DEBUG oslo_concurrency.lockutils [None req-2f246f26-19c3-4ded-af34-51018ed72035 tempest-FloatingIPsAssociationNegativeTestJSON-1305161251 tempest-FloatingIPsAssociationNegativeTestJSON-1305161251-project-member] Acquiring lock "3f888d81-e73f-4486-bb64-849c873449bf" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1981.502028] env[63279]: DEBUG oslo_concurrency.lockutils [None req-2f246f26-19c3-4ded-af34-51018ed72035 tempest-FloatingIPsAssociationNegativeTestJSON-1305161251 tempest-FloatingIPsAssociationNegativeTestJSON-1305161251-project-member] Lock "3f888d81-e73f-4486-bb64-849c873449bf" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1981.502229] env[63279]: DEBUG oslo_concurrency.lockutils [None req-2f246f26-19c3-4ded-af34-51018ed72035 tempest-FloatingIPsAssociationNegativeTestJSON-1305161251 tempest-FloatingIPsAssociationNegativeTestJSON-1305161251-project-member] Acquiring lock "3f888d81-e73f-4486-bb64-849c873449bf-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1981.502292] env[63279]: DEBUG oslo_concurrency.lockutils [None req-2f246f26-19c3-4ded-af34-51018ed72035 tempest-FloatingIPsAssociationNegativeTestJSON-1305161251 tempest-FloatingIPsAssociationNegativeTestJSON-1305161251-project-member] Lock "3f888d81-e73f-4486-bb64-849c873449bf-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1981.502544] env[63279]: DEBUG oslo_concurrency.lockutils [None req-2f246f26-19c3-4ded-af34-51018ed72035 tempest-FloatingIPsAssociationNegativeTestJSON-1305161251 tempest-FloatingIPsAssociationNegativeTestJSON-1305161251-project-member] Lock "3f888d81-e73f-4486-bb64-849c873449bf-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1981.504423] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-0d5f8539-9b0d-46c6-a3f8-4965417b3f66 tempest-AttachInterfacesV270Test-1602135614 tempest-AttachInterfacesV270Test-1602135614-project-member] [instance: ba2d6111-d93d-4216-b641-864b542ea253] Powering off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 
1981.504423] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e92bbeb1-7662-4488-9bde-323c99a10f6a {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1981.507051] env[63279]: INFO nova.compute.manager [None req-2f246f26-19c3-4ded-af34-51018ed72035 tempest-FloatingIPsAssociationNegativeTestJSON-1305161251 tempest-FloatingIPsAssociationNegativeTestJSON-1305161251-project-member] [instance: 3f888d81-e73f-4486-bb64-849c873449bf] Terminating instance [ 1981.513142] env[63279]: DEBUG oslo_vmware.api [None req-0d5f8539-9b0d-46c6-a3f8-4965417b3f66 tempest-AttachInterfacesV270Test-1602135614 tempest-AttachInterfacesV270Test-1602135614-project-member] Waiting for the task: (returnval){ [ 1981.513142] env[63279]: value = "task-2087178" [ 1981.513142] env[63279]: _type = "Task" [ 1981.513142] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1981.523733] env[63279]: DEBUG oslo_vmware.api [None req-0d5f8539-9b0d-46c6-a3f8-4965417b3f66 tempest-AttachInterfacesV270Test-1602135614 tempest-AttachInterfacesV270Test-1602135614-project-member] Task: {'id': task-2087178, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1981.649060] env[63279]: DEBUG oslo_concurrency.lockutils [None req-a0e5192a-b319-4ffb-924f-54f949fdcc17 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Lock "ad435281-55a0-418a-8400-5c461a5c15ef" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 108.239s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1981.653496] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Instance 7db0c32d-36a4-4452-bb07-06de0c93ab50 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1981.654314] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Total usable vcpus: 48, total allocated vcpus: 18 {{(pid=63279) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1981.654430] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=4032MB phys_disk=200GB used_disk=17GB total_vcpus=48 used_vcpus=18 pci_stats=[] {{(pid=63279) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1981.845336] env[63279]: DEBUG oslo_concurrency.lockutils [None req-d078f7b9-924c-4992-81c3-367c64f2edff tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] Lock "79032b2a-74f7-4c6d-8f71-f848fe372ba2" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 101.764s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1981.876274] env[63279]: DEBUG oslo_vmware.api [None req-387411dd-d774-4ddb-8c67-e6827dd0c0cb tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52fc9806-03b6-9d1b-32d4-bf57334c628f, 'name': SearchDatastore_Task, 'duration_secs': 0.00878} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1981.879460] env[63279]: DEBUG oslo_concurrency.lockutils [None req-387411dd-d774-4ddb-8c67-e6827dd0c0cb tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1981.880365] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-387411dd-d774-4ddb-8c67-e6827dd0c0cb tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] eca98392-98be-405b-b799-463ef9ee3dc8/eca98392-98be-405b-b799-463ef9ee3dc8.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1981.880365] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-108cb5b6-7b1b-4332-8da3-f3d6f32713b9 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1981.893929] env[63279]: DEBUG oslo_vmware.api [None req-387411dd-d774-4ddb-8c67-e6827dd0c0cb tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Waiting for the task: (returnval){ [ 1981.893929] env[63279]: value = "task-2087179" [ 1981.893929] env[63279]: _type = "Task" [ 1981.893929] env[63279]: } to complete. 
{{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1981.904646] env[63279]: DEBUG oslo_vmware.api [None req-387411dd-d774-4ddb-8c67-e6827dd0c0cb tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Task: {'id': task-2087179, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1981.955950] env[63279]: DEBUG oslo_vmware.api [None req-5a6f349c-4703-44a2-807a-eea47f310c9f tempest-ServerTagsTestJSON-1345092865 tempest-ServerTagsTestJSON-1345092865-project-member] Task: {'id': task-2087177, 'name': PowerOffVM_Task, 'duration_secs': 0.268464} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1981.958459] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-5a6f349c-4703-44a2-807a-eea47f310c9f tempest-ServerTagsTestJSON-1345092865 tempest-ServerTagsTestJSON-1345092865-project-member] [instance: df410051-d551-4a90-81f7-5630f5521a10] Powered off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1981.958656] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-5a6f349c-4703-44a2-807a-eea47f310c9f tempest-ServerTagsTestJSON-1345092865 tempest-ServerTagsTestJSON-1345092865-project-member] [instance: df410051-d551-4a90-81f7-5630f5521a10] Unregistering the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1981.958946] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-605714fe-aeb7-47e0-9dc1-8823789ea9e3 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1982.011641] env[63279]: DEBUG nova.compute.manager [None req-2f246f26-19c3-4ded-af34-51018ed72035 tempest-FloatingIPsAssociationNegativeTestJSON-1305161251 tempest-FloatingIPsAssociationNegativeTestJSON-1305161251-project-member] [instance: 3f888d81-e73f-4486-bb64-849c873449bf] Start destroying the instance on the hypervisor. {{(pid=63279) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1982.011866] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-2f246f26-19c3-4ded-af34-51018ed72035 tempest-FloatingIPsAssociationNegativeTestJSON-1305161251 tempest-FloatingIPsAssociationNegativeTestJSON-1305161251-project-member] [instance: 3f888d81-e73f-4486-bb64-849c873449bf] Destroying instance {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1982.015416] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-549b9cfc-0e80-45d2-a0c9-c3fa3a29191d {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1982.029955] env[63279]: DEBUG oslo_vmware.api [None req-0d5f8539-9b0d-46c6-a3f8-4965417b3f66 tempest-AttachInterfacesV270Test-1602135614 tempest-AttachInterfacesV270Test-1602135614-project-member] Task: {'id': task-2087178, 'name': PowerOffVM_Task, 'duration_secs': 0.217801} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1982.030246] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-2f246f26-19c3-4ded-af34-51018ed72035 tempest-FloatingIPsAssociationNegativeTestJSON-1305161251 tempest-FloatingIPsAssociationNegativeTestJSON-1305161251-project-member] [instance: 3f888d81-e73f-4486-bb64-849c873449bf] Powering off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1982.030485] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-0d5f8539-9b0d-46c6-a3f8-4965417b3f66 tempest-AttachInterfacesV270Test-1602135614 tempest-AttachInterfacesV270Test-1602135614-project-member] [instance: ba2d6111-d93d-4216-b641-864b542ea253] Powered off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1982.030663] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-0d5f8539-9b0d-46c6-a3f8-4965417b3f66 tempest-AttachInterfacesV270Test-1602135614 tempest-AttachInterfacesV270Test-1602135614-project-member] [instance: ba2d6111-d93d-4216-b641-864b542ea253] Unregistering the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1982.030892] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a0740428-9263-4ea5-83c2-6443a4aa4adc {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1982.034543] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-82ceb9a1-bca1-471c-8b91-47ac058da63c {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1982.041724] env[63279]: DEBUG oslo_vmware.api [None req-2f246f26-19c3-4ded-af34-51018ed72035 tempest-FloatingIPsAssociationNegativeTestJSON-1305161251 tempest-FloatingIPsAssociationNegativeTestJSON-1305161251-project-member] Waiting for the task: (returnval){ [ 1982.041724] env[63279]: value = "task-2087181" [ 1982.041724] env[63279]: _type = "Task" [ 1982.041724] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1982.052057] env[63279]: DEBUG oslo_vmware.api [None req-2f246f26-19c3-4ded-af34-51018ed72035 tempest-FloatingIPsAssociationNegativeTestJSON-1305161251 tempest-FloatingIPsAssociationNegativeTestJSON-1305161251-project-member] Task: {'id': task-2087181, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1982.116236] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-5a6f349c-4703-44a2-807a-eea47f310c9f tempest-ServerTagsTestJSON-1345092865 tempest-ServerTagsTestJSON-1345092865-project-member] [instance: df410051-d551-4a90-81f7-5630f5521a10] Unregistered the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1982.116695] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-5a6f349c-4703-44a2-807a-eea47f310c9f tempest-ServerTagsTestJSON-1345092865 tempest-ServerTagsTestJSON-1345092865-project-member] [instance: df410051-d551-4a90-81f7-5630f5521a10] Deleting contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1982.116892] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-5a6f349c-4703-44a2-807a-eea47f310c9f tempest-ServerTagsTestJSON-1345092865 tempest-ServerTagsTestJSON-1345092865-project-member] Deleting the datastore file [datastore1] df410051-d551-4a90-81f7-5630f5521a10 {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1982.117567] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d81b20ed-04c9-492f-a054-b0e5ae5aa3f3 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1982.125123] env[63279]: DEBUG oslo_vmware.api [None req-5a6f349c-4703-44a2-807a-eea47f310c9f tempest-ServerTagsTestJSON-1345092865 tempest-ServerTagsTestJSON-1345092865-project-member] Waiting for the task: (returnval){ [ 1982.125123] env[63279]: value = "task-2087183" [ 1982.125123] env[63279]: _type = "Task" [ 1982.125123] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1982.137491] env[63279]: DEBUG oslo_vmware.api [None req-5a6f349c-4703-44a2-807a-eea47f310c9f tempest-ServerTagsTestJSON-1345092865 tempest-ServerTagsTestJSON-1345092865-project-member] Task: {'id': task-2087183, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1982.141196] env[63279]: DEBUG nova.network.neutron [req-c98ae075-4cd0-4ae5-84a7-00d2700f7ec5 req-60f5ce1c-1f7c-4bfb-8ef3-66a0eded5f7d service nova] [instance: eca98392-98be-405b-b799-463ef9ee3dc8] Updated VIF entry in instance network info cache for port d609f907-0c7e-4293-8b21-721e712e8dc2. 
{{(pid=63279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1982.141615] env[63279]: DEBUG nova.network.neutron [req-c98ae075-4cd0-4ae5-84a7-00d2700f7ec5 req-60f5ce1c-1f7c-4bfb-8ef3-66a0eded5f7d service nova] [instance: eca98392-98be-405b-b799-463ef9ee3dc8] Updating instance_info_cache with network_info: [{"id": "d609f907-0c7e-4293-8b21-721e712e8dc2", "address": "fa:16:3e:0e:eb:3d", "network": {"id": "e0e614b7-de4b-485e-8824-582faae8febd", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1102025073-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b5b21bc5072e4945a19a782dd9561709", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "41278529-8bd2-44a1-97c8-03967faa3ff7", "external-id": "nsx-vlan-transportzone-749", "segmentation_id": 749, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd609f907-0c", "ovs_interfaceid": "d609f907-0c7e-4293-8b21-721e712e8dc2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1982.151559] env[63279]: DEBUG nova.compute.manager [None req-38cbe7fd-87b8-43d7-9843-9491e5a539a0 tempest-InstanceActionsV221TestJSON-1895476841 tempest-InstanceActionsV221TestJSON-1895476841-project-member] [instance: ff9701ed-d545-44b4-911a-c4d809d0a771] Starting instance... 
{{(pid=63279) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1982.226054] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-0d5f8539-9b0d-46c6-a3f8-4965417b3f66 tempest-AttachInterfacesV270Test-1602135614 tempest-AttachInterfacesV270Test-1602135614-project-member] [instance: ba2d6111-d93d-4216-b641-864b542ea253] Unregistered the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1982.226339] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-0d5f8539-9b0d-46c6-a3f8-4965417b3f66 tempest-AttachInterfacesV270Test-1602135614 tempest-AttachInterfacesV270Test-1602135614-project-member] [instance: ba2d6111-d93d-4216-b641-864b542ea253] Deleting contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1982.226601] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-0d5f8539-9b0d-46c6-a3f8-4965417b3f66 tempest-AttachInterfacesV270Test-1602135614 tempest-AttachInterfacesV270Test-1602135614-project-member] Deleting the datastore file [datastore1] ba2d6111-d93d-4216-b641-864b542ea253 {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1982.228045] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-19795eda-c3ad-4174-9046-9c653088843a {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1982.231166] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-088d1999-395f-44f4-885e-bc811c7fc133 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1982.239069] env[63279]: DEBUG oslo_vmware.api [None req-0d5f8539-9b0d-46c6-a3f8-4965417b3f66 tempest-AttachInterfacesV270Test-1602135614 tempest-AttachInterfacesV270Test-1602135614-project-member] Waiting for the task: (returnval){ [ 1982.239069] env[63279]: value = "task-2087184" [ 1982.239069] env[63279]: _type = "Task" [ 1982.239069] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1982.245408] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-289dc7f5-c244-4aa5-af97-494f469ee6d3 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1982.255158] env[63279]: DEBUG oslo_vmware.api [None req-0d5f8539-9b0d-46c6-a3f8-4965417b3f66 tempest-AttachInterfacesV270Test-1602135614 tempest-AttachInterfacesV270Test-1602135614-project-member] Task: {'id': task-2087184, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1982.287149] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-331d9ac7-7bbd-4c1f-a4e4-1965da487001 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1982.295767] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7dd4e70-42dd-4044-a5e6-533840f9dcc9 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1982.310194] env[63279]: DEBUG nova.compute.provider_tree [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Inventory has not changed in ProviderTree for provider: 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1982.350852] env[63279]: DEBUG nova.compute.manager [None req-27d050e2-e902-4611-87fd-a6b52583e82d tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] [instance: 0e12ab9b-a701-4e0f-9d96-939090f50494] Starting instance... {{(pid=63279) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1982.403867] env[63279]: DEBUG oslo_vmware.api [None req-387411dd-d774-4ddb-8c67-e6827dd0c0cb tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Task: {'id': task-2087179, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.506491} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1982.404924] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-387411dd-d774-4ddb-8c67-e6827dd0c0cb tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] eca98392-98be-405b-b799-463ef9ee3dc8/eca98392-98be-405b-b799-463ef9ee3dc8.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1982.405186] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-387411dd-d774-4ddb-8c67-e6827dd0c0cb tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] [instance: eca98392-98be-405b-b799-463ef9ee3dc8] Extending root virtual disk to 1048576 {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1982.405461] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-8bba4186-0cda-4d46-a91c-3084ca418f1c {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1982.412808] env[63279]: DEBUG oslo_vmware.api [None req-387411dd-d774-4ddb-8c67-e6827dd0c0cb tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Waiting for the task: (returnval){ [ 1982.412808] env[63279]: value = "task-2087185" [ 1982.412808] env[63279]: _type = "Task" [ 1982.412808] env[63279]: } to complete. 
{{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1982.423525] env[63279]: DEBUG oslo_vmware.api [None req-387411dd-d774-4ddb-8c67-e6827dd0c0cb tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Task: {'id': task-2087185, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1982.551724] env[63279]: DEBUG oslo_vmware.api [None req-2f246f26-19c3-4ded-af34-51018ed72035 tempest-FloatingIPsAssociationNegativeTestJSON-1305161251 tempest-FloatingIPsAssociationNegativeTestJSON-1305161251-project-member] Task: {'id': task-2087181, 'name': PowerOffVM_Task, 'duration_secs': 0.204277} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1982.552254] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-2f246f26-19c3-4ded-af34-51018ed72035 tempest-FloatingIPsAssociationNegativeTestJSON-1305161251 tempest-FloatingIPsAssociationNegativeTestJSON-1305161251-project-member] [instance: 3f888d81-e73f-4486-bb64-849c873449bf] Powered off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1982.552483] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-2f246f26-19c3-4ded-af34-51018ed72035 tempest-FloatingIPsAssociationNegativeTestJSON-1305161251 tempest-FloatingIPsAssociationNegativeTestJSON-1305161251-project-member] [instance: 3f888d81-e73f-4486-bb64-849c873449bf] Unregistering the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1982.552751] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-235f4286-8c3e-444a-9bb5-168912e7a139 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1982.635053] env[63279]: DEBUG oslo_vmware.api [None req-5a6f349c-4703-44a2-807a-eea47f310c9f tempest-ServerTagsTestJSON-1345092865 tempest-ServerTagsTestJSON-1345092865-project-member] Task: {'id': task-2087183, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1982.648868] env[63279]: DEBUG oslo_concurrency.lockutils [req-c98ae075-4cd0-4ae5-84a7-00d2700f7ec5 req-60f5ce1c-1f7c-4bfb-8ef3-66a0eded5f7d service nova] Releasing lock "refresh_cache-eca98392-98be-405b-b799-463ef9ee3dc8" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1982.674650] env[63279]: DEBUG oslo_concurrency.lockutils [None req-38cbe7fd-87b8-43d7-9843-9491e5a539a0 tempest-InstanceActionsV221TestJSON-1895476841 tempest-InstanceActionsV221TestJSON-1895476841-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1982.751695] env[63279]: DEBUG oslo_vmware.api [None req-0d5f8539-9b0d-46c6-a3f8-4965417b3f66 tempest-AttachInterfacesV270Test-1602135614 tempest-AttachInterfacesV270Test-1602135614-project-member] Task: {'id': task-2087184, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.467539} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1982.752769] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-0d5f8539-9b0d-46c6-a3f8-4965417b3f66 tempest-AttachInterfacesV270Test-1602135614 tempest-AttachInterfacesV270Test-1602135614-project-member] Deleted the datastore file {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1982.752769] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-0d5f8539-9b0d-46c6-a3f8-4965417b3f66 tempest-AttachInterfacesV270Test-1602135614 tempest-AttachInterfacesV270Test-1602135614-project-member] [instance: ba2d6111-d93d-4216-b641-864b542ea253] Deleted contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1982.752769] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-0d5f8539-9b0d-46c6-a3f8-4965417b3f66 tempest-AttachInterfacesV270Test-1602135614 tempest-AttachInterfacesV270Test-1602135614-project-member] [instance: ba2d6111-d93d-4216-b641-864b542ea253] Instance destroyed {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1982.752769] env[63279]: INFO nova.compute.manager [None req-0d5f8539-9b0d-46c6-a3f8-4965417b3f66 tempest-AttachInterfacesV270Test-1602135614 tempest-AttachInterfacesV270Test-1602135614-project-member] [instance: ba2d6111-d93d-4216-b641-864b542ea253] Took 1.26 seconds to destroy the instance on the hypervisor. [ 1982.753063] env[63279]: DEBUG oslo.service.loopingcall [None req-0d5f8539-9b0d-46c6-a3f8-4965417b3f66 tempest-AttachInterfacesV270Test-1602135614 tempest-AttachInterfacesV270Test-1602135614-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1982.753165] env[63279]: DEBUG nova.compute.manager [-] [instance: ba2d6111-d93d-4216-b641-864b542ea253] Deallocating network for instance {{(pid=63279) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1982.753261] env[63279]: DEBUG nova.network.neutron [-] [instance: ba2d6111-d93d-4216-b641-864b542ea253] deallocate_for_instance() {{(pid=63279) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1982.810791] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-2f246f26-19c3-4ded-af34-51018ed72035 tempest-FloatingIPsAssociationNegativeTestJSON-1305161251 tempest-FloatingIPsAssociationNegativeTestJSON-1305161251-project-member] [instance: 3f888d81-e73f-4486-bb64-849c873449bf] Unregistered the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1982.810791] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-2f246f26-19c3-4ded-af34-51018ed72035 tempest-FloatingIPsAssociationNegativeTestJSON-1305161251 tempest-FloatingIPsAssociationNegativeTestJSON-1305161251-project-member] [instance: 3f888d81-e73f-4486-bb64-849c873449bf] Deleting contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1982.810791] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-2f246f26-19c3-4ded-af34-51018ed72035 tempest-FloatingIPsAssociationNegativeTestJSON-1305161251 tempest-FloatingIPsAssociationNegativeTestJSON-1305161251-project-member] Deleting the datastore file [datastore1] 3f888d81-e73f-4486-bb64-849c873449bf {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1982.810791] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3bcc0c3b-c04c-4fa1-ba35-4df26ebfe6a3 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1982.813634] env[63279]: DEBUG nova.scheduler.client.report [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Inventory has not changed for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1982.824014] env[63279]: DEBUG oslo_vmware.api [None req-2f246f26-19c3-4ded-af34-51018ed72035 tempest-FloatingIPsAssociationNegativeTestJSON-1305161251 tempest-FloatingIPsAssociationNegativeTestJSON-1305161251-project-member] Waiting for the task: (returnval){ [ 1982.824014] env[63279]: value = "task-2087187" [ 1982.824014] env[63279]: _type = "Task" [ 1982.824014] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1982.835163] env[63279]: DEBUG oslo_vmware.api [None req-2f246f26-19c3-4ded-af34-51018ed72035 tempest-FloatingIPsAssociationNegativeTestJSON-1305161251 tempest-FloatingIPsAssociationNegativeTestJSON-1305161251-project-member] Task: {'id': task-2087187, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1982.878285] env[63279]: DEBUG oslo_concurrency.lockutils [None req-27d050e2-e902-4611-87fd-a6b52583e82d tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1982.925236] env[63279]: DEBUG oslo_vmware.api [None req-387411dd-d774-4ddb-8c67-e6827dd0c0cb tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Task: {'id': task-2087185, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.261664} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1982.925529] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-387411dd-d774-4ddb-8c67-e6827dd0c0cb tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] [instance: eca98392-98be-405b-b799-463ef9ee3dc8] Extended root virtual disk {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1982.926354] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee8eb089-feae-4f13-bf1f-d6e3cefe288a {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1982.956541] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-387411dd-d774-4ddb-8c67-e6827dd0c0cb tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] [instance: eca98392-98be-405b-b799-463ef9ee3dc8] Reconfiguring VM instance instance-0000002f to attach disk [datastore1] eca98392-98be-405b-b799-463ef9ee3dc8/eca98392-98be-405b-b799-463ef9ee3dc8.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1982.956936] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8377daa7-0dd0-4c0f-976d-0d7633a00e1e {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1982.979047] env[63279]: DEBUG nova.compute.manager [req-d486d124-9418-4d27-9910-d524b3d32adb req-e8ecb5ce-9910-4fb7-98d8-666701a554e1 service nova] [instance: 357f08c9-4de9-4b84-8384-6bf130872f40] Received event network-changed-4160b9e6-5e90-458c-bb0f-afc6be383dc1 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 1982.979047] env[63279]: DEBUG nova.compute.manager [req-d486d124-9418-4d27-9910-d524b3d32adb req-e8ecb5ce-9910-4fb7-98d8-666701a554e1 service nova] [instance: 357f08c9-4de9-4b84-8384-6bf130872f40] Refreshing instance network info cache due to event network-changed-4160b9e6-5e90-458c-bb0f-afc6be383dc1. 
{{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11655}} [ 1982.979425] env[63279]: DEBUG oslo_concurrency.lockutils [req-d486d124-9418-4d27-9910-d524b3d32adb req-e8ecb5ce-9910-4fb7-98d8-666701a554e1 service nova] Acquiring lock "refresh_cache-357f08c9-4de9-4b84-8384-6bf130872f40" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1982.979425] env[63279]: DEBUG oslo_concurrency.lockutils [req-d486d124-9418-4d27-9910-d524b3d32adb req-e8ecb5ce-9910-4fb7-98d8-666701a554e1 service nova] Acquired lock "refresh_cache-357f08c9-4de9-4b84-8384-6bf130872f40" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1982.979591] env[63279]: DEBUG nova.network.neutron [req-d486d124-9418-4d27-9910-d524b3d32adb req-e8ecb5ce-9910-4fb7-98d8-666701a554e1 service nova] [instance: 357f08c9-4de9-4b84-8384-6bf130872f40] Refreshing network info cache for port 4160b9e6-5e90-458c-bb0f-afc6be383dc1 {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1982.987428] env[63279]: DEBUG oslo_vmware.api [None req-387411dd-d774-4ddb-8c67-e6827dd0c0cb tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Waiting for the task: (returnval){ [ 1982.987428] env[63279]: value = "task-2087188" [ 1982.987428] env[63279]: _type = "Task" [ 1982.987428] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1982.998132] env[63279]: DEBUG oslo_vmware.api [None req-387411dd-d774-4ddb-8c67-e6827dd0c0cb tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Task: {'id': task-2087188, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1983.139721] env[63279]: DEBUG oslo_vmware.api [None req-5a6f349c-4703-44a2-807a-eea47f310c9f tempest-ServerTagsTestJSON-1345092865 tempest-ServerTagsTestJSON-1345092865-project-member] Task: {'id': task-2087183, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.577314} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1983.140187] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-5a6f349c-4703-44a2-807a-eea47f310c9f tempest-ServerTagsTestJSON-1345092865 tempest-ServerTagsTestJSON-1345092865-project-member] Deleted the datastore file {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1983.140452] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-5a6f349c-4703-44a2-807a-eea47f310c9f tempest-ServerTagsTestJSON-1345092865 tempest-ServerTagsTestJSON-1345092865-project-member] [instance: df410051-d551-4a90-81f7-5630f5521a10] Deleted contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1983.140705] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-5a6f349c-4703-44a2-807a-eea47f310c9f tempest-ServerTagsTestJSON-1345092865 tempest-ServerTagsTestJSON-1345092865-project-member] [instance: df410051-d551-4a90-81f7-5630f5521a10] Instance destroyed {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1983.141174] env[63279]: INFO nova.compute.manager [None req-5a6f349c-4703-44a2-807a-eea47f310c9f tempest-ServerTagsTestJSON-1345092865 tempest-ServerTagsTestJSON-1345092865-project-member] [instance: df410051-d551-4a90-81f7-5630f5521a10] Took 1.71 seconds to destroy the instance on the hypervisor. [ 1983.141610] env[63279]: DEBUG oslo.service.loopingcall [None req-5a6f349c-4703-44a2-807a-eea47f310c9f tempest-ServerTagsTestJSON-1345092865 tempest-ServerTagsTestJSON-1345092865-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1983.142442] env[63279]: DEBUG nova.compute.manager [-] [instance: df410051-d551-4a90-81f7-5630f5521a10] Deallocating network for instance {{(pid=63279) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1983.142597] env[63279]: DEBUG nova.network.neutron [-] [instance: df410051-d551-4a90-81f7-5630f5521a10] deallocate_for_instance() {{(pid=63279) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1983.319023] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=63279) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1983.319833] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 8.286s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1983.320368] env[63279]: DEBUG oslo_concurrency.lockutils [None req-ccac180c-f8b1-4fd5-a765-713ff5939c3d tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 32.884s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1983.323749] env[63279]: INFO nova.compute.claims [None req-ccac180c-f8b1-4fd5-a765-713ff5939c3d tempest-ServersNegativeTestJSON-1533210711 
tempest-ServersNegativeTestJSON-1533210711-project-member] [instance: 32789822-cb54-43e7-beae-b5ed3002f4ad] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1983.348761] env[63279]: DEBUG oslo_vmware.api [None req-2f246f26-19c3-4ded-af34-51018ed72035 tempest-FloatingIPsAssociationNegativeTestJSON-1305161251 tempest-FloatingIPsAssociationNegativeTestJSON-1305161251-project-member] Task: {'id': task-2087187, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.135393} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1983.349202] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-2f246f26-19c3-4ded-af34-51018ed72035 tempest-FloatingIPsAssociationNegativeTestJSON-1305161251 tempest-FloatingIPsAssociationNegativeTestJSON-1305161251-project-member] Deleted the datastore file {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1983.349520] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-2f246f26-19c3-4ded-af34-51018ed72035 tempest-FloatingIPsAssociationNegativeTestJSON-1305161251 tempest-FloatingIPsAssociationNegativeTestJSON-1305161251-project-member] [instance: 3f888d81-e73f-4486-bb64-849c873449bf] Deleted contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1983.349790] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-2f246f26-19c3-4ded-af34-51018ed72035 tempest-FloatingIPsAssociationNegativeTestJSON-1305161251 tempest-FloatingIPsAssociationNegativeTestJSON-1305161251-project-member] [instance: 3f888d81-e73f-4486-bb64-849c873449bf] Instance destroyed {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1983.350206] env[63279]: INFO nova.compute.manager [None req-2f246f26-19c3-4ded-af34-51018ed72035 tempest-FloatingIPsAssociationNegativeTestJSON-1305161251 tempest-FloatingIPsAssociationNegativeTestJSON-1305161251-project-member] [instance: 3f888d81-e73f-4486-bb64-849c873449bf] Took 1.34 seconds to destroy the instance on the hypervisor. [ 1983.350594] env[63279]: DEBUG oslo.service.loopingcall [None req-2f246f26-19c3-4ded-af34-51018ed72035 tempest-FloatingIPsAssociationNegativeTestJSON-1305161251 tempest-FloatingIPsAssociationNegativeTestJSON-1305161251-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1983.350908] env[63279]: DEBUG nova.compute.manager [-] [instance: 3f888d81-e73f-4486-bb64-849c873449bf] Deallocating network for instance {{(pid=63279) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1983.351065] env[63279]: DEBUG nova.network.neutron [-] [instance: 3f888d81-e73f-4486-bb64-849c873449bf] deallocate_for_instance() {{(pid=63279) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1983.497053] env[63279]: DEBUG oslo_vmware.api [None req-387411dd-d774-4ddb-8c67-e6827dd0c0cb tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Task: {'id': task-2087188, 'name': ReconfigVM_Task, 'duration_secs': 0.41014} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1983.497355] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-387411dd-d774-4ddb-8c67-e6827dd0c0cb tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] [instance: eca98392-98be-405b-b799-463ef9ee3dc8] Reconfigured VM instance instance-0000002f to attach disk [datastore1] eca98392-98be-405b-b799-463ef9ee3dc8/eca98392-98be-405b-b799-463ef9ee3dc8.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1983.498015] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-fb125d40-8f13-4450-9d29-290309cebbcb {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1983.505462] env[63279]: DEBUG oslo_vmware.api [None req-387411dd-d774-4ddb-8c67-e6827dd0c0cb tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Waiting for the task: (returnval){ [ 1983.505462] env[63279]: value = "task-2087189" [ 1983.505462] env[63279]: _type = "Task" [ 1983.505462] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1983.513626] env[63279]: DEBUG oslo_vmware.api [None req-387411dd-d774-4ddb-8c67-e6827dd0c0cb tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Task: {'id': task-2087189, 'name': Rename_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1983.622339] env[63279]: DEBUG nova.network.neutron [-] [instance: ba2d6111-d93d-4216-b641-864b542ea253] Updating instance_info_cache with network_info: [] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1983.903142] env[63279]: INFO nova.compute.manager [None req-73b648a6-1547-48f2-b4d9-3d262b420193 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: ad435281-55a0-418a-8400-5c461a5c15ef] Rebuilding instance [ 1983.951629] env[63279]: DEBUG nova.compute.manager [None req-73b648a6-1547-48f2-b4d9-3d262b420193 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: ad435281-55a0-418a-8400-5c461a5c15ef] Checking state {{(pid=63279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1983.952735] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30e31fd3-000f-416a-b945-790591d75c44 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1984.016285] env[63279]: DEBUG oslo_vmware.api [None req-387411dd-d774-4ddb-8c67-e6827dd0c0cb tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Task: {'id': task-2087189, 'name': Rename_Task, 'duration_secs': 0.218608} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1984.016601] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-387411dd-d774-4ddb-8c67-e6827dd0c0cb tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] [instance: eca98392-98be-405b-b799-463ef9ee3dc8] Powering on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1984.016827] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-083094d8-e522-4459-aee7-38a17b181b41 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1984.023571] env[63279]: DEBUG oslo_vmware.api [None req-387411dd-d774-4ddb-8c67-e6827dd0c0cb tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Waiting for the task: (returnval){ [ 1984.023571] env[63279]: value = "task-2087190" [ 1984.023571] env[63279]: _type = "Task" [ 1984.023571] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1984.031890] env[63279]: DEBUG oslo_vmware.api [None req-387411dd-d774-4ddb-8c67-e6827dd0c0cb tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Task: {'id': task-2087190, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1984.111941] env[63279]: DEBUG nova.network.neutron [req-d486d124-9418-4d27-9910-d524b3d32adb req-e8ecb5ce-9910-4fb7-98d8-666701a554e1 service nova] [instance: 357f08c9-4de9-4b84-8384-6bf130872f40] Updated VIF entry in instance network info cache for port 4160b9e6-5e90-458c-bb0f-afc6be383dc1. 
{{(pid=63279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1984.112354] env[63279]: DEBUG nova.network.neutron [req-d486d124-9418-4d27-9910-d524b3d32adb req-e8ecb5ce-9910-4fb7-98d8-666701a554e1 service nova] [instance: 357f08c9-4de9-4b84-8384-6bf130872f40] Updating instance_info_cache with network_info: [{"id": "4160b9e6-5e90-458c-bb0f-afc6be383dc1", "address": "fa:16:3e:0b:30:a6", "network": {"id": "948d327b-554a-4c1d-a483-9a067d60f6bc", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1383523654-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ba92e8896d7a4605bec96ce7ee7d4a4d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "471f65a5-21ea-45e3-a722-4e204ed65673", "external-id": "nsx-vlan-transportzone-139", "segmentation_id": 139, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4160b9e6-5e", "ovs_interfaceid": "4160b9e6-5e90-458c-bb0f-afc6be383dc1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1984.125031] env[63279]: DEBUG nova.network.neutron [-] [instance: df410051-d551-4a90-81f7-5630f5521a10] Updating instance_info_cache with network_info: [] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1984.128761] env[63279]: INFO nova.compute.manager [-] [instance: ba2d6111-d93d-4216-b641-864b542ea253] Took 1.37 seconds to deallocate network for instance. [ 1984.255463] env[63279]: DEBUG nova.network.neutron [-] [instance: 3f888d81-e73f-4486-bb64-849c873449bf] Updating instance_info_cache with network_info: [] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1984.331131] env[63279]: DEBUG nova.compute.manager [req-efbda5c4-0383-4386-9b79-dd263960617d req-bf83f4bf-551c-4328-ba8b-b38dd056aa63 service nova] [instance: 3f888d81-e73f-4486-bb64-849c873449bf] Received event network-vif-deleted-77f48a61-90d3-4ff9-b672-52bd1900e2a2 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 1984.538647] env[63279]: DEBUG oslo_vmware.api [None req-387411dd-d774-4ddb-8c67-e6827dd0c0cb tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Task: {'id': task-2087190, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1984.615052] env[63279]: DEBUG oslo_concurrency.lockutils [req-d486d124-9418-4d27-9910-d524b3d32adb req-e8ecb5ce-9910-4fb7-98d8-666701a554e1 service nova] Releasing lock "refresh_cache-357f08c9-4de9-4b84-8384-6bf130872f40" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1984.631064] env[63279]: INFO nova.compute.manager [-] [instance: df410051-d551-4a90-81f7-5630f5521a10] Took 1.49 seconds to deallocate network for instance. 
[ 1984.635781] env[63279]: DEBUG oslo_concurrency.lockutils [None req-0d5f8539-9b0d-46c6-a3f8-4965417b3f66 tempest-AttachInterfacesV270Test-1602135614 tempest-AttachInterfacesV270Test-1602135614-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1984.758704] env[63279]: INFO nova.compute.manager [-] [instance: 3f888d81-e73f-4486-bb64-849c873449bf] Took 1.41 seconds to deallocate network for instance. [ 1984.933081] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6afdb2cd-7a3c-4ebf-831f-5f4b4ae3f76e {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1984.941946] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7cda8a34-f08f-4a4c-9129-dd53f66ba797 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1984.975748] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-73b648a6-1547-48f2-b4d9-3d262b420193 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: ad435281-55a0-418a-8400-5c461a5c15ef] Powering off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1984.976195] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a80738da-0c25-42b2-bf84-1dc72da3a09c {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1984.978393] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67ab523c-0062-4bbc-880d-50ef28068db7 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1984.987045] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9edfd676-ae9f-4d65-813c-bf04adb33a97 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1984.991183] env[63279]: DEBUG oslo_vmware.api [None req-73b648a6-1547-48f2-b4d9-3d262b420193 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Waiting for the task: (returnval){ [ 1984.991183] env[63279]: value = "task-2087191" [ 1984.991183] env[63279]: _type = "Task" [ 1984.991183] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1985.002565] env[63279]: DEBUG nova.compute.provider_tree [None req-ccac180c-f8b1-4fd5-a765-713ff5939c3d tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Inventory has not changed in ProviderTree for provider: 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1985.009189] env[63279]: DEBUG oslo_vmware.api [None req-73b648a6-1547-48f2-b4d9-3d262b420193 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Task: {'id': task-2087191, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1985.035757] env[63279]: DEBUG oslo_vmware.api [None req-387411dd-d774-4ddb-8c67-e6827dd0c0cb tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Task: {'id': task-2087190, 'name': PowerOnVM_Task, 'duration_secs': 0.733849} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1985.036156] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-387411dd-d774-4ddb-8c67-e6827dd0c0cb tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] [instance: eca98392-98be-405b-b799-463ef9ee3dc8] Powered on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1985.036404] env[63279]: INFO nova.compute.manager [None req-387411dd-d774-4ddb-8c67-e6827dd0c0cb tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] [instance: eca98392-98be-405b-b799-463ef9ee3dc8] Took 7.99 seconds to spawn the instance on the hypervisor. [ 1985.036717] env[63279]: DEBUG nova.compute.manager [None req-387411dd-d774-4ddb-8c67-e6827dd0c0cb tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] [instance: eca98392-98be-405b-b799-463ef9ee3dc8] Checking state {{(pid=63279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1985.037499] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3876b4ab-5835-4dc4-9840-805291a08c32 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1985.146303] env[63279]: DEBUG oslo_concurrency.lockutils [None req-5a6f349c-4703-44a2-807a-eea47f310c9f tempest-ServerTagsTestJSON-1345092865 tempest-ServerTagsTestJSON-1345092865-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1985.267867] env[63279]: DEBUG oslo_concurrency.lockutils [None req-2f246f26-19c3-4ded-af34-51018ed72035 tempest-FloatingIPsAssociationNegativeTestJSON-1305161251 tempest-FloatingIPsAssociationNegativeTestJSON-1305161251-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1985.292647] env[63279]: DEBUG nova.compute.manager [req-f8a55dae-2bfa-4353-ac4d-8681f364bb30 req-18a181bf-171a-45de-9f84-fc425162bd7e service nova] [instance: ba2d6111-d93d-4216-b641-864b542ea253] Received event network-vif-deleted-13f38e75-c220-4029-974f-96f6308c35c6 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 1985.292866] env[63279]: DEBUG nova.compute.manager [req-f8a55dae-2bfa-4353-ac4d-8681f364bb30 req-18a181bf-171a-45de-9f84-fc425162bd7e service nova] [instance: df410051-d551-4a90-81f7-5630f5521a10] Received event network-vif-deleted-cd9181e2-70ee-4534-8313-5f40cf4f8f9c {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 1985.293051] env[63279]: DEBUG nova.compute.manager [req-f8a55dae-2bfa-4353-ac4d-8681f364bb30 req-18a181bf-171a-45de-9f84-fc425162bd7e service nova] [instance: 
357f08c9-4de9-4b84-8384-6bf130872f40] Received event network-changed-4160b9e6-5e90-458c-bb0f-afc6be383dc1 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 1985.293249] env[63279]: DEBUG nova.compute.manager [req-f8a55dae-2bfa-4353-ac4d-8681f364bb30 req-18a181bf-171a-45de-9f84-fc425162bd7e service nova] [instance: 357f08c9-4de9-4b84-8384-6bf130872f40] Refreshing instance network info cache due to event network-changed-4160b9e6-5e90-458c-bb0f-afc6be383dc1. {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11655}} [ 1985.293426] env[63279]: DEBUG oslo_concurrency.lockutils [req-f8a55dae-2bfa-4353-ac4d-8681f364bb30 req-18a181bf-171a-45de-9f84-fc425162bd7e service nova] Acquiring lock "refresh_cache-357f08c9-4de9-4b84-8384-6bf130872f40" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1985.293566] env[63279]: DEBUG oslo_concurrency.lockutils [req-f8a55dae-2bfa-4353-ac4d-8681f364bb30 req-18a181bf-171a-45de-9f84-fc425162bd7e service nova] Acquired lock "refresh_cache-357f08c9-4de9-4b84-8384-6bf130872f40" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1985.293722] env[63279]: DEBUG nova.network.neutron [req-f8a55dae-2bfa-4353-ac4d-8681f364bb30 req-18a181bf-171a-45de-9f84-fc425162bd7e service nova] [instance: 357f08c9-4de9-4b84-8384-6bf130872f40] Refreshing network info cache for port 4160b9e6-5e90-458c-bb0f-afc6be383dc1 {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1985.501703] env[63279]: DEBUG oslo_vmware.api [None req-73b648a6-1547-48f2-b4d9-3d262b420193 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Task: {'id': task-2087191, 'name': PowerOffVM_Task, 'duration_secs': 0.183733} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1985.501996] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-73b648a6-1547-48f2-b4d9-3d262b420193 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: ad435281-55a0-418a-8400-5c461a5c15ef] Powered off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1985.502256] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-73b648a6-1547-48f2-b4d9-3d262b420193 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: ad435281-55a0-418a-8400-5c461a5c15ef] Destroying instance {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1985.503078] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc024db3-9bcb-4ca4-b459-54e7ce753b55 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1985.506435] env[63279]: DEBUG nova.scheduler.client.report [None req-ccac180c-f8b1-4fd5-a765-713ff5939c3d tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Inventory has not changed for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1985.516055] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-73b648a6-1547-48f2-b4d9-3d262b420193 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: ad435281-55a0-418a-8400-5c461a5c15ef] Unregistering the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1985.516367] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-71e77fca-a528-43aa-8db6-3b920ff55167 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1985.557123] env[63279]: INFO nova.compute.manager [None req-387411dd-d774-4ddb-8c67-e6827dd0c0cb tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] [instance: eca98392-98be-405b-b799-463ef9ee3dc8] Took 38.26 seconds to build instance. 
[ 1985.852213] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-73b648a6-1547-48f2-b4d9-3d262b420193 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: ad435281-55a0-418a-8400-5c461a5c15ef] Unregistered the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1985.852457] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-73b648a6-1547-48f2-b4d9-3d262b420193 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: ad435281-55a0-418a-8400-5c461a5c15ef] Deleting contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1985.852699] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-73b648a6-1547-48f2-b4d9-3d262b420193 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Deleting the datastore file [datastore1] ad435281-55a0-418a-8400-5c461a5c15ef {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1985.855236] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-89b08163-c127-4ed4-b47f-816856834a9f {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1985.863862] env[63279]: DEBUG oslo_vmware.api [None req-73b648a6-1547-48f2-b4d9-3d262b420193 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Waiting for the task: (returnval){ [ 1985.863862] env[63279]: value = "task-2087193" [ 1985.863862] env[63279]: _type = "Task" [ 1985.863862] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1985.871550] env[63279]: DEBUG oslo_vmware.api [None req-73b648a6-1547-48f2-b4d9-3d262b420193 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Task: {'id': task-2087193, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1986.012223] env[63279]: DEBUG oslo_concurrency.lockutils [None req-ccac180c-f8b1-4fd5-a765-713ff5939c3d tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.692s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1986.016022] env[63279]: DEBUG nova.compute.manager [None req-ccac180c-f8b1-4fd5-a765-713ff5939c3d tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] [instance: 32789822-cb54-43e7-beae-b5ed3002f4ad] Start building networks asynchronously for instance. 
{{(pid=63279) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1986.016022] env[63279]: DEBUG oslo_concurrency.lockutils [None req-54fd8829-eaaf-408f-9e32-c8514ef3c821 tempest-InstanceActionsTestJSON-1583999850 tempest-InstanceActionsTestJSON-1583999850-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 35.249s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1986.016909] env[63279]: DEBUG oslo_concurrency.lockutils [None req-54fd8829-eaaf-408f-9e32-c8514ef3c821 tempest-InstanceActionsTestJSON-1583999850 tempest-InstanceActionsTestJSON-1583999850-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1986.019185] env[63279]: DEBUG oslo_concurrency.lockutils [None req-983fb64c-2d77-4322-9ea8-58ba6eceb42b tempest-DeleteServersAdminTestJSON-778262859 tempest-DeleteServersAdminTestJSON-778262859-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 34.052s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1986.020216] env[63279]: DEBUG oslo_concurrency.lockutils [None req-983fb64c-2d77-4322-9ea8-58ba6eceb42b tempest-DeleteServersAdminTestJSON-778262859 tempest-DeleteServersAdminTestJSON-778262859-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1986.021589] env[63279]: DEBUG oslo_concurrency.lockutils [None req-5b55e395-c1a6-4f2a-ad2f-483781b8bb67 tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 32.657s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1986.021883] env[63279]: DEBUG oslo_concurrency.lockutils [None req-5b55e395-c1a6-4f2a-ad2f-483781b8bb67 tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1986.027380] env[63279]: DEBUG oslo_concurrency.lockutils [None req-fef68aa8-4469-4e71-b0f4-6afc3e4409aa tempest-ServersTestManualDisk-725019117 tempest-ServersTestManualDisk-725019117-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 32.658s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1986.027601] env[63279]: DEBUG oslo_concurrency.lockutils [None req-fef68aa8-4469-4e71-b0f4-6afc3e4409aa tempest-ServersTestManualDisk-725019117 tempest-ServersTestManualDisk-725019117-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} 
[ 1986.032472] env[63279]: DEBUG oslo_concurrency.lockutils [None req-cd3a3546-110f-4a9d-aad2-eb6e674d439f tempest-AttachInterfacesUnderV243Test-18748603 tempest-AttachInterfacesUnderV243Test-18748603-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 30.990s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1986.032472] env[63279]: DEBUG oslo_concurrency.lockutils [None req-cd3a3546-110f-4a9d-aad2-eb6e674d439f tempest-AttachInterfacesUnderV243Test-18748603 tempest-AttachInterfacesUnderV243Test-18748603-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.001s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1986.033539] env[63279]: DEBUG oslo_concurrency.lockutils [None req-2fef03c7-ebe2-4875-b420-6389c982d497 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 30.472s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1986.035089] env[63279]: INFO nova.compute.claims [None req-2fef03c7-ebe2-4875-b420-6389c982d497 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] [instance: 1b2ca21b-feea-4fc1-9ddc-99f144e4241a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1986.059785] env[63279]: DEBUG oslo_concurrency.lockutils [None req-387411dd-d774-4ddb-8c67-e6827dd0c0cb tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Lock "eca98392-98be-405b-b799-463ef9ee3dc8" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 96.722s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1986.082343] env[63279]: INFO nova.scheduler.client.report [None req-5b55e395-c1a6-4f2a-ad2f-483781b8bb67 tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] Deleted allocations for instance 1a55008a-4d8c-403d-a8f4-966aa7346f4c [ 1986.087052] env[63279]: INFO nova.scheduler.client.report [None req-fef68aa8-4469-4e71-b0f4-6afc3e4409aa tempest-ServersTestManualDisk-725019117 tempest-ServersTestManualDisk-725019117-project-member] Deleted allocations for instance ecedded1-7169-49a4-8a9e-2fe4086db986 [ 1986.107572] env[63279]: INFO nova.scheduler.client.report [None req-54fd8829-eaaf-408f-9e32-c8514ef3c821 tempest-InstanceActionsTestJSON-1583999850 tempest-InstanceActionsTestJSON-1583999850-project-member] Deleted allocations for instance de543869-8ab1-40ed-8f6d-dc506c257843 [ 1986.116903] env[63279]: INFO nova.scheduler.client.report [None req-cd3a3546-110f-4a9d-aad2-eb6e674d439f tempest-AttachInterfacesUnderV243Test-18748603 tempest-AttachInterfacesUnderV243Test-18748603-project-member] Deleted allocations for instance fcc5a636-554f-424e-a604-a8e7bd7cf574 [ 1986.133278] env[63279]: INFO nova.scheduler.client.report [None req-983fb64c-2d77-4322-9ea8-58ba6eceb42b tempest-DeleteServersAdminTestJSON-778262859 tempest-DeleteServersAdminTestJSON-778262859-project-member] Deleted allocations for instance 81103d53-99fe-4d1a-816f-7685c59c80ee [ 1986.218501] 
env[63279]: DEBUG nova.network.neutron [req-f8a55dae-2bfa-4353-ac4d-8681f364bb30 req-18a181bf-171a-45de-9f84-fc425162bd7e service nova] [instance: 357f08c9-4de9-4b84-8384-6bf130872f40] Updated VIF entry in instance network info cache for port 4160b9e6-5e90-458c-bb0f-afc6be383dc1. {{(pid=63279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1986.218906] env[63279]: DEBUG nova.network.neutron [req-f8a55dae-2bfa-4353-ac4d-8681f364bb30 req-18a181bf-171a-45de-9f84-fc425162bd7e service nova] [instance: 357f08c9-4de9-4b84-8384-6bf130872f40] Updating instance_info_cache with network_info: [{"id": "4160b9e6-5e90-458c-bb0f-afc6be383dc1", "address": "fa:16:3e:0b:30:a6", "network": {"id": "948d327b-554a-4c1d-a483-9a067d60f6bc", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1383523654-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ba92e8896d7a4605bec96ce7ee7d4a4d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "471f65a5-21ea-45e3-a722-4e204ed65673", "external-id": "nsx-vlan-transportzone-139", "segmentation_id": 139, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4160b9e6-5e", "ovs_interfaceid": "4160b9e6-5e90-458c-bb0f-afc6be383dc1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1986.372848] env[63279]: DEBUG oslo_vmware.api [None req-73b648a6-1547-48f2-b4d9-3d262b420193 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Task: {'id': task-2087193, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.337593} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1986.373278] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-73b648a6-1547-48f2-b4d9-3d262b420193 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Deleted the datastore file {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1986.373502] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-73b648a6-1547-48f2-b4d9-3d262b420193 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: ad435281-55a0-418a-8400-5c461a5c15ef] Deleted contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1986.373566] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-73b648a6-1547-48f2-b4d9-3d262b420193 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: ad435281-55a0-418a-8400-5c461a5c15ef] Instance destroyed {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1986.542645] env[63279]: DEBUG nova.compute.utils [None req-ccac180c-f8b1-4fd5-a765-713ff5939c3d tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Using /dev/sd instead of None {{(pid=63279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1986.544475] env[63279]: DEBUG nova.compute.manager [None req-ccac180c-f8b1-4fd5-a765-713ff5939c3d tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] [instance: 32789822-cb54-43e7-beae-b5ed3002f4ad] Allocating IP information in the background. {{(pid=63279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1986.544635] env[63279]: DEBUG nova.network.neutron [None req-ccac180c-f8b1-4fd5-a765-713ff5939c3d tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] [instance: 32789822-cb54-43e7-beae-b5ed3002f4ad] allocate_for_instance() {{(pid=63279) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1986.562414] env[63279]: DEBUG nova.compute.manager [None req-968ae4c0-2363-46b4-8c8d-1bd213d554cd tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] [instance: 5b5f87cb-cf35-418f-b5bd-b953524a285c] Starting instance... 
{{(pid=63279) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1986.596950] env[63279]: DEBUG oslo_concurrency.lockutils [None req-fef68aa8-4469-4e71-b0f4-6afc3e4409aa tempest-ServersTestManualDisk-725019117 tempest-ServersTestManualDisk-725019117-project-member] Lock "ecedded1-7169-49a4-8a9e-2fe4086db986" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 37.067s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1986.600502] env[63279]: DEBUG oslo_concurrency.lockutils [None req-5b55e395-c1a6-4f2a-ad2f-483781b8bb67 tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] Lock "1a55008a-4d8c-403d-a8f4-966aa7346f4c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 38.034s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1986.604049] env[63279]: DEBUG nova.policy [None req-ccac180c-f8b1-4fd5-a765-713ff5939c3d tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a8e9485721a642f6b056e27f42c73958', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '0fd8bdc7d8664db698236f56d82adcf0', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63279) authorize /opt/stack/nova/nova/policy.py:192}} [ 1986.618920] env[63279]: DEBUG oslo_concurrency.lockutils [None req-54fd8829-eaaf-408f-9e32-c8514ef3c821 tempest-InstanceActionsTestJSON-1583999850 tempest-InstanceActionsTestJSON-1583999850-project-member] Lock "de543869-8ab1-40ed-8f6d-dc506c257843" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 40.353s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1986.623883] env[63279]: DEBUG oslo_concurrency.lockutils [None req-cd3a3546-110f-4a9d-aad2-eb6e674d439f tempest-AttachInterfacesUnderV243Test-18748603 tempest-AttachInterfacesUnderV243Test-18748603-project-member] Lock "fcc5a636-554f-424e-a604-a8e7bd7cf574" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 35.944s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1986.645750] env[63279]: DEBUG oslo_concurrency.lockutils [None req-983fb64c-2d77-4322-9ea8-58ba6eceb42b tempest-DeleteServersAdminTestJSON-778262859 tempest-DeleteServersAdminTestJSON-778262859-project-member] Lock "81103d53-99fe-4d1a-816f-7685c59c80ee" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 38.606s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1986.723261] env[63279]: DEBUG oslo_concurrency.lockutils [req-f8a55dae-2bfa-4353-ac4d-8681f364bb30 req-18a181bf-171a-45de-9f84-fc425162bd7e service nova] Releasing lock "refresh_cache-357f08c9-4de9-4b84-8384-6bf130872f40" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1986.723261] 
env[63279]: DEBUG nova.compute.manager [req-f8a55dae-2bfa-4353-ac4d-8681f364bb30 req-18a181bf-171a-45de-9f84-fc425162bd7e service nova] [instance: 79032b2a-74f7-4c6d-8f71-f848fe372ba2] Received event network-changed-568079c6-5b60-4517-9e83-f526ccdaa586 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 1986.723261] env[63279]: DEBUG nova.compute.manager [req-f8a55dae-2bfa-4353-ac4d-8681f364bb30 req-18a181bf-171a-45de-9f84-fc425162bd7e service nova] [instance: 79032b2a-74f7-4c6d-8f71-f848fe372ba2] Refreshing instance network info cache due to event network-changed-568079c6-5b60-4517-9e83-f526ccdaa586. {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11655}} [ 1986.723261] env[63279]: DEBUG oslo_concurrency.lockutils [req-f8a55dae-2bfa-4353-ac4d-8681f364bb30 req-18a181bf-171a-45de-9f84-fc425162bd7e service nova] Acquiring lock "refresh_cache-79032b2a-74f7-4c6d-8f71-f848fe372ba2" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1986.723501] env[63279]: DEBUG oslo_concurrency.lockutils [req-f8a55dae-2bfa-4353-ac4d-8681f364bb30 req-18a181bf-171a-45de-9f84-fc425162bd7e service nova] Acquired lock "refresh_cache-79032b2a-74f7-4c6d-8f71-f848fe372ba2" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1986.723614] env[63279]: DEBUG nova.network.neutron [req-f8a55dae-2bfa-4353-ac4d-8681f364bb30 req-18a181bf-171a-45de-9f84-fc425162bd7e service nova] [instance: 79032b2a-74f7-4c6d-8f71-f848fe372ba2] Refreshing network info cache for port 568079c6-5b60-4517-9e83-f526ccdaa586 {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1986.801819] env[63279]: WARNING oslo_messaging._drivers.amqpdriver [None req-0d7ac8a8-a0e2-4644-bc68-36c40b80e766 tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] Number of call queues is 11, greater than warning threshold: 10. There could be a leak. Increasing threshold to: 20 [ 1987.049824] env[63279]: DEBUG nova.compute.manager [None req-ccac180c-f8b1-4fd5-a765-713ff5939c3d tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] [instance: 32789822-cb54-43e7-beae-b5ed3002f4ad] Start building block device mappings for instance. 
{{(pid=63279) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1987.095459] env[63279]: DEBUG oslo_concurrency.lockutils [None req-968ae4c0-2363-46b4-8c8d-1bd213d554cd tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1987.134261] env[63279]: DEBUG nova.network.neutron [None req-ccac180c-f8b1-4fd5-a765-713ff5939c3d tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] [instance: 32789822-cb54-43e7-beae-b5ed3002f4ad] Successfully created port: 271bdcf3-0171-47b9-8e8f-e190604053fd {{(pid=63279) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1987.303905] env[63279]: DEBUG oslo_concurrency.lockutils [None req-0d7ac8a8-a0e2-4644-bc68-36c40b80e766 tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] Acquiring lock "79032b2a-74f7-4c6d-8f71-f848fe372ba2" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1987.304194] env[63279]: DEBUG oslo_concurrency.lockutils [None req-0d7ac8a8-a0e2-4644-bc68-36c40b80e766 tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] Lock "79032b2a-74f7-4c6d-8f71-f848fe372ba2" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1987.304475] env[63279]: DEBUG oslo_concurrency.lockutils [None req-0d7ac8a8-a0e2-4644-bc68-36c40b80e766 tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] Acquiring lock "79032b2a-74f7-4c6d-8f71-f848fe372ba2-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1987.304625] env[63279]: DEBUG oslo_concurrency.lockutils [None req-0d7ac8a8-a0e2-4644-bc68-36c40b80e766 tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] Lock "79032b2a-74f7-4c6d-8f71-f848fe372ba2-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1987.304801] env[63279]: DEBUG oslo_concurrency.lockutils [None req-0d7ac8a8-a0e2-4644-bc68-36c40b80e766 tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] Lock "79032b2a-74f7-4c6d-8f71-f848fe372ba2-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1987.309838] env[63279]: INFO nova.compute.manager [None req-0d7ac8a8-a0e2-4644-bc68-36c40b80e766 tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] [instance: 79032b2a-74f7-4c6d-8f71-f848fe372ba2] 
Terminating instance [ 1987.428220] env[63279]: DEBUG nova.virt.hardware [None req-73b648a6-1547-48f2-b4d9-3d262b420193 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-13T17:30:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-13T17:30:01Z,direct_url=,disk_format='vmdk',id=30887889-e45b-4f67-8b3c-16216e594a90,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a935e86eee0a4c38adfe0367d2097a61',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-13T17:30:02Z,virtual_size=,visibility=), allow threads: False {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1987.428560] env[63279]: DEBUG nova.virt.hardware [None req-73b648a6-1547-48f2-b4d9-3d262b420193 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Flavor limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1987.428682] env[63279]: DEBUG nova.virt.hardware [None req-73b648a6-1547-48f2-b4d9-3d262b420193 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Image limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1987.429080] env[63279]: DEBUG nova.virt.hardware [None req-73b648a6-1547-48f2-b4d9-3d262b420193 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Flavor pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1987.429534] env[63279]: DEBUG nova.virt.hardware [None req-73b648a6-1547-48f2-b4d9-3d262b420193 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Image pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1987.429704] env[63279]: DEBUG nova.virt.hardware [None req-73b648a6-1547-48f2-b4d9-3d262b420193 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1987.430125] env[63279]: DEBUG nova.virt.hardware [None req-73b648a6-1547-48f2-b4d9-3d262b420193 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1987.430340] env[63279]: DEBUG nova.virt.hardware [None req-73b648a6-1547-48f2-b4d9-3d262b420193 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1987.430995] env[63279]: DEBUG nova.virt.hardware [None 
req-73b648a6-1547-48f2-b4d9-3d262b420193 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Got 1 possible topologies {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1987.430995] env[63279]: DEBUG nova.virt.hardware [None req-73b648a6-1547-48f2-b4d9-3d262b420193 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1987.431152] env[63279]: DEBUG nova.virt.hardware [None req-73b648a6-1547-48f2-b4d9-3d262b420193 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1987.432072] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa45e732-da2a-4d9e-82e0-3d149eff20d5 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1987.445285] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cbc4c7d5-30ad-4632-91e6-2f6f6b0cb8f0 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1987.461584] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-73b648a6-1547-48f2-b4d9-3d262b420193 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: ad435281-55a0-418a-8400-5c461a5c15ef] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:cc:b8:c1', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a555680e-4721-4509-97e4-ced9dc17c13e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '699a008e-52f8-4319-9e86-01acc5c037f9', 'vif_model': 'vmxnet3'}] {{(pid=63279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1987.470816] env[63279]: DEBUG oslo.service.loopingcall [None req-73b648a6-1547-48f2-b4d9-3d262b420193 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1987.474928] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ad435281-55a0-418a-8400-5c461a5c15ef] Creating VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1987.475406] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b530faf2-57f5-4c19-8be9-15407dcbc8e0 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1987.502295] env[63279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1987.502295] env[63279]: value = "task-2087194" [ 1987.502295] env[63279]: _type = "Task" [ 1987.502295] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1987.510628] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087194, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1987.647320] env[63279]: DEBUG nova.compute.manager [req-9ddce6fa-6905-42b0-9c87-4491654b7763 req-5030ee6f-eea5-4d84-9916-53cb925d0302 service nova] [instance: 79032b2a-74f7-4c6d-8f71-f848fe372ba2] Received event network-changed-568079c6-5b60-4517-9e83-f526ccdaa586 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 1987.647521] env[63279]: DEBUG nova.compute.manager [req-9ddce6fa-6905-42b0-9c87-4491654b7763 req-5030ee6f-eea5-4d84-9916-53cb925d0302 service nova] [instance: 79032b2a-74f7-4c6d-8f71-f848fe372ba2] Refreshing instance network info cache due to event network-changed-568079c6-5b60-4517-9e83-f526ccdaa586. {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11655}} [ 1987.647718] env[63279]: DEBUG oslo_concurrency.lockutils [req-9ddce6fa-6905-42b0-9c87-4491654b7763 req-5030ee6f-eea5-4d84-9916-53cb925d0302 service nova] Acquiring lock "refresh_cache-79032b2a-74f7-4c6d-8f71-f848fe372ba2" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1987.686793] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0bf9a27f-1c69-433d-9cbc-4fb7c4cdc579 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1987.694862] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b336781-834f-482a-9d69-1e8717214d11 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1987.731602] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e0b9650-dbee-4d10-9233-cc62583e13a9 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1987.739890] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6cb01552-31eb-49b0-8af1-99d25e9c9ed6 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1987.754877] env[63279]: DEBUG nova.compute.provider_tree [None req-2fef03c7-ebe2-4875-b420-6389c982d497 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Updating inventory in ProviderTree for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1987.815214] env[63279]: DEBUG nova.compute.manager [None req-0d7ac8a8-a0e2-4644-bc68-36c40b80e766 tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] [instance: 79032b2a-74f7-4c6d-8f71-f848fe372ba2] Start destroying the instance on the hypervisor. 
{{(pid=63279) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1987.815476] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-0d7ac8a8-a0e2-4644-bc68-36c40b80e766 tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] [instance: 79032b2a-74f7-4c6d-8f71-f848fe372ba2] Destroying instance {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1987.816614] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a1011a7-7d7b-447e-97d2-3add9f92f1a9 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1987.825969] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-0d7ac8a8-a0e2-4644-bc68-36c40b80e766 tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] [instance: 79032b2a-74f7-4c6d-8f71-f848fe372ba2] Powering off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1987.825969] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-bbb34a0e-88bb-4f05-b8c2-5d4fd7aad23b {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1987.833188] env[63279]: DEBUG oslo_vmware.api [None req-0d7ac8a8-a0e2-4644-bc68-36c40b80e766 tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] Waiting for the task: (returnval){ [ 1987.833188] env[63279]: value = "task-2087195" [ 1987.833188] env[63279]: _type = "Task" [ 1987.833188] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1987.841569] env[63279]: DEBUG oslo_vmware.api [None req-0d7ac8a8-a0e2-4644-bc68-36c40b80e766 tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] Task: {'id': task-2087195, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1987.886494] env[63279]: DEBUG nova.network.neutron [req-f8a55dae-2bfa-4353-ac4d-8681f364bb30 req-18a181bf-171a-45de-9f84-fc425162bd7e service nova] [instance: 79032b2a-74f7-4c6d-8f71-f848fe372ba2] Updated VIF entry in instance network info cache for port 568079c6-5b60-4517-9e83-f526ccdaa586. 
{{(pid=63279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1987.886880] env[63279]: DEBUG nova.network.neutron [req-f8a55dae-2bfa-4353-ac4d-8681f364bb30 req-18a181bf-171a-45de-9f84-fc425162bd7e service nova] [instance: 79032b2a-74f7-4c6d-8f71-f848fe372ba2] Updating instance_info_cache with network_info: [{"id": "568079c6-5b60-4517-9e83-f526ccdaa586", "address": "fa:16:3e:e1:18:dd", "network": {"id": "948d327b-554a-4c1d-a483-9a067d60f6bc", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1383523654-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ba92e8896d7a4605bec96ce7ee7d4a4d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "471f65a5-21ea-45e3-a722-4e204ed65673", "external-id": "nsx-vlan-transportzone-139", "segmentation_id": 139, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap568079c6-5b", "ovs_interfaceid": "568079c6-5b60-4517-9e83-f526ccdaa586", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1988.015157] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087194, 'name': CreateVM_Task, 'duration_secs': 0.446376} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1988.015394] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ad435281-55a0-418a-8400-5c461a5c15ef] Created VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1988.017212] env[63279]: DEBUG oslo_concurrency.lockutils [None req-73b648a6-1547-48f2-b4d9-3d262b420193 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1988.017628] env[63279]: DEBUG oslo_concurrency.lockutils [None req-73b648a6-1547-48f2-b4d9-3d262b420193 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1988.018274] env[63279]: DEBUG oslo_concurrency.lockutils [None req-73b648a6-1547-48f2-b4d9-3d262b420193 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1988.018686] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-62526935-c947-472c-a09a-0ddf26a6effd {{(pid=63279) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1988.024360] env[63279]: DEBUG oslo_vmware.api [None req-73b648a6-1547-48f2-b4d9-3d262b420193 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Waiting for the task: (returnval){ [ 1988.024360] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52f02eda-ed1a-14ed-3dea-b023767b2f40" [ 1988.024360] env[63279]: _type = "Task" [ 1988.024360] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1988.041401] env[63279]: DEBUG oslo_vmware.api [None req-73b648a6-1547-48f2-b4d9-3d262b420193 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52f02eda-ed1a-14ed-3dea-b023767b2f40, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1988.062916] env[63279]: DEBUG nova.compute.manager [None req-ccac180c-f8b1-4fd5-a765-713ff5939c3d tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] [instance: 32789822-cb54-43e7-beae-b5ed3002f4ad] Start spawning the instance on the hypervisor. {{(pid=63279) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1988.097578] env[63279]: DEBUG nova.virt.hardware [None req-ccac180c-f8b1-4fd5-a765-713ff5939c3d tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-13T17:30:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-13T17:30:01Z,direct_url=,disk_format='vmdk',id=30887889-e45b-4f67-8b3c-16216e594a90,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a935e86eee0a4c38adfe0367d2097a61',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-13T17:30:02Z,virtual_size=,visibility=), allow threads: False {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1988.099954] env[63279]: DEBUG nova.virt.hardware [None req-ccac180c-f8b1-4fd5-a765-713ff5939c3d tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Flavor limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1988.099954] env[63279]: DEBUG nova.virt.hardware [None req-ccac180c-f8b1-4fd5-a765-713ff5939c3d tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Image limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1988.099954] env[63279]: DEBUG nova.virt.hardware [None req-ccac180c-f8b1-4fd5-a765-713ff5939c3d tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Flavor pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1988.099954] env[63279]: DEBUG nova.virt.hardware [None req-ccac180c-f8b1-4fd5-a765-713ff5939c3d 
tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Image pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1988.099954] env[63279]: DEBUG nova.virt.hardware [None req-ccac180c-f8b1-4fd5-a765-713ff5939c3d tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1988.100259] env[63279]: DEBUG nova.virt.hardware [None req-ccac180c-f8b1-4fd5-a765-713ff5939c3d tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1988.100259] env[63279]: DEBUG nova.virt.hardware [None req-ccac180c-f8b1-4fd5-a765-713ff5939c3d tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1988.100259] env[63279]: DEBUG nova.virt.hardware [None req-ccac180c-f8b1-4fd5-a765-713ff5939c3d tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Got 1 possible topologies {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1988.100259] env[63279]: DEBUG nova.virt.hardware [None req-ccac180c-f8b1-4fd5-a765-713ff5939c3d tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1988.100259] env[63279]: DEBUG nova.virt.hardware [None req-ccac180c-f8b1-4fd5-a765-713ff5939c3d tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1988.101258] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-898c939a-a7d1-4743-8261-8399b82ecf7c {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1988.111707] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3687dd2-7c89-4b14-a36e-6425e6b20c6d {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1988.181657] env[63279]: DEBUG nova.compute.manager [req-746d70eb-f74c-40d7-8287-e8d93f938985 req-48584421-9976-410c-9e5d-725591fa0d69 service nova] [instance: eca98392-98be-405b-b799-463ef9ee3dc8] Received event network-changed-d609f907-0c7e-4293-8b21-721e712e8dc2 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 1988.181885] env[63279]: DEBUG nova.compute.manager [req-746d70eb-f74c-40d7-8287-e8d93f938985 req-48584421-9976-410c-9e5d-725591fa0d69 service nova] [instance: eca98392-98be-405b-b799-463ef9ee3dc8] Refreshing instance network info cache due to event 
network-changed-d609f907-0c7e-4293-8b21-721e712e8dc2. {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11655}} [ 1988.182571] env[63279]: DEBUG oslo_concurrency.lockutils [req-746d70eb-f74c-40d7-8287-e8d93f938985 req-48584421-9976-410c-9e5d-725591fa0d69 service nova] Acquiring lock "refresh_cache-eca98392-98be-405b-b799-463ef9ee3dc8" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1988.182831] env[63279]: DEBUG oslo_concurrency.lockutils [req-746d70eb-f74c-40d7-8287-e8d93f938985 req-48584421-9976-410c-9e5d-725591fa0d69 service nova] Acquired lock "refresh_cache-eca98392-98be-405b-b799-463ef9ee3dc8" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1988.183088] env[63279]: DEBUG nova.network.neutron [req-746d70eb-f74c-40d7-8287-e8d93f938985 req-48584421-9976-410c-9e5d-725591fa0d69 service nova] [instance: eca98392-98be-405b-b799-463ef9ee3dc8] Refreshing network info cache for port d609f907-0c7e-4293-8b21-721e712e8dc2 {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1988.281582] env[63279]: ERROR nova.scheduler.client.report [None req-2fef03c7-ebe2-4875-b420-6389c982d497 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] [req-a3cc7724-ad09-4f8a-a496-893e068239be] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 0ba7c625-a0fc-4d3c-b804-196d00f00137. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-a3cc7724-ad09-4f8a-a496-893e068239be"}]} [ 1988.303281] env[63279]: DEBUG nova.scheduler.client.report [None req-2fef03c7-ebe2-4875-b420-6389c982d497 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Refreshing inventories for resource provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1988.322664] env[63279]: DEBUG nova.scheduler.client.report [None req-2fef03c7-ebe2-4875-b420-6389c982d497 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Updating ProviderTree inventory for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1988.323018] env[63279]: DEBUG nova.compute.provider_tree [None req-2fef03c7-ebe2-4875-b420-6389c982d497 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Updating inventory in ProviderTree for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1988.345866] env[63279]: DEBUG oslo_vmware.api [None req-0d7ac8a8-a0e2-4644-bc68-36c40b80e766 tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] Task: {'id': task-2087195, 'name': PowerOffVM_Task, 'duration_secs': 0.224968} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1988.347588] env[63279]: DEBUG nova.scheduler.client.report [None req-2fef03c7-ebe2-4875-b420-6389c982d497 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Refreshing aggregate associations for resource provider 0ba7c625-a0fc-4d3c-b804-196d00f00137, aggregates: None {{(pid=63279) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1988.350288] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-0d7ac8a8-a0e2-4644-bc68-36c40b80e766 tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] [instance: 79032b2a-74f7-4c6d-8f71-f848fe372ba2] Powered off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1988.351609] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-0d7ac8a8-a0e2-4644-bc68-36c40b80e766 tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] [instance: 79032b2a-74f7-4c6d-8f71-f848fe372ba2] Unregistering the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1988.351914] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b4f7496f-be5e-47d5-9a71-6db24a49f75d {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1988.373295] env[63279]: DEBUG nova.scheduler.client.report [None req-2fef03c7-ebe2-4875-b420-6389c982d497 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Refreshing trait associations for resource provider 0ba7c625-a0fc-4d3c-b804-196d00f00137, traits: COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK {{(pid=63279) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1988.391301] env[63279]: DEBUG oslo_concurrency.lockutils [req-f8a55dae-2bfa-4353-ac4d-8681f364bb30 req-18a181bf-171a-45de-9f84-fc425162bd7e service nova] Releasing lock "refresh_cache-79032b2a-74f7-4c6d-8f71-f848fe372ba2" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1988.392252] env[63279]: DEBUG oslo_concurrency.lockutils [req-9ddce6fa-6905-42b0-9c87-4491654b7763 req-5030ee6f-eea5-4d84-9916-53cb925d0302 service nova] Acquired lock "refresh_cache-79032b2a-74f7-4c6d-8f71-f848fe372ba2" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1988.392487] env[63279]: DEBUG nova.network.neutron [req-9ddce6fa-6905-42b0-9c87-4491654b7763 req-5030ee6f-eea5-4d84-9916-53cb925d0302 service nova] [instance: 79032b2a-74f7-4c6d-8f71-f848fe372ba2] Refreshing network info cache for port 568079c6-5b60-4517-9e83-f526ccdaa586 {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1988.490109] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-0d7ac8a8-a0e2-4644-bc68-36c40b80e766 tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] [instance: 79032b2a-74f7-4c6d-8f71-f848fe372ba2] Unregistered the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1988.494545] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-0d7ac8a8-a0e2-4644-bc68-36c40b80e766 
tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] [instance: 79032b2a-74f7-4c6d-8f71-f848fe372ba2] Deleting contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1988.494935] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-0d7ac8a8-a0e2-4644-bc68-36c40b80e766 tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] Deleting the datastore file [datastore1] 79032b2a-74f7-4c6d-8f71-f848fe372ba2 {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1988.495374] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d35746a4-3c80-4000-9b11-f76beceacb63 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1988.503520] env[63279]: DEBUG oslo_vmware.api [None req-0d7ac8a8-a0e2-4644-bc68-36c40b80e766 tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] Waiting for the task: (returnval){ [ 1988.503520] env[63279]: value = "task-2087197" [ 1988.503520] env[63279]: _type = "Task" [ 1988.503520] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1988.516921] env[63279]: DEBUG oslo_vmware.api [None req-0d7ac8a8-a0e2-4644-bc68-36c40b80e766 tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] Task: {'id': task-2087197, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1988.551052] env[63279]: DEBUG oslo_vmware.api [None req-73b648a6-1547-48f2-b4d9-3d262b420193 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52f02eda-ed1a-14ed-3dea-b023767b2f40, 'name': SearchDatastore_Task, 'duration_secs': 0.014607} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1988.552298] env[63279]: DEBUG oslo_concurrency.lockutils [None req-73b648a6-1547-48f2-b4d9-3d262b420193 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1988.555025] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-73b648a6-1547-48f2-b4d9-3d262b420193 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: ad435281-55a0-418a-8400-5c461a5c15ef] Processing image 30887889-e45b-4f67-8b3c-16216e594a90 {{(pid=63279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1988.555025] env[63279]: DEBUG oslo_concurrency.lockutils [None req-73b648a6-1547-48f2-b4d9-3d262b420193 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1988.555025] env[63279]: DEBUG oslo_concurrency.lockutils [None req-73b648a6-1547-48f2-b4d9-3d262b420193 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1988.555025] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-73b648a6-1547-48f2-b4d9-3d262b420193 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1988.555025] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6fcb38b1-c872-4b77-8b43-ad1b5b39ea73 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1988.571347] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-73b648a6-1547-48f2-b4d9-3d262b420193 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1988.571642] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-73b648a6-1547-48f2-b4d9-3d262b420193 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1988.572977] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ac0a39a0-ec11-4aae-8563-58cf04569c14 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1988.580745] env[63279]: DEBUG oslo_vmware.api [None req-73b648a6-1547-48f2-b4d9-3d262b420193 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Waiting for the task: (returnval){ [ 1988.580745] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]527f7cf4-b6c7-f99a-1c07-3fdd3bd53d95" [ 1988.580745] env[63279]: _type = "Task" [ 1988.580745] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1988.599010] env[63279]: DEBUG oslo_vmware.api [None req-73b648a6-1547-48f2-b4d9-3d262b420193 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]527f7cf4-b6c7-f99a-1c07-3fdd3bd53d95, 'name': SearchDatastore_Task, 'duration_secs': 0.009483} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1988.600411] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b223d626-17e2-4a63-9746-0fba9c22ffb8 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1988.608128] env[63279]: DEBUG oslo_vmware.api [None req-73b648a6-1547-48f2-b4d9-3d262b420193 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Waiting for the task: (returnval){ [ 1988.608128] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52883f3b-52f7-9693-93fb-a73073340164" [ 1988.608128] env[63279]: _type = "Task" [ 1988.608128] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1988.626293] env[63279]: DEBUG oslo_vmware.api [None req-73b648a6-1547-48f2-b4d9-3d262b420193 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52883f3b-52f7-9693-93fb-a73073340164, 'name': SearchDatastore_Task, 'duration_secs': 0.00932} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1988.626573] env[63279]: DEBUG oslo_concurrency.lockutils [None req-73b648a6-1547-48f2-b4d9-3d262b420193 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1988.626853] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-73b648a6-1547-48f2-b4d9-3d262b420193 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] ad435281-55a0-418a-8400-5c461a5c15ef/ad435281-55a0-418a-8400-5c461a5c15ef.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1988.627157] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3f449f6d-bf66-444a-be77-2faf634165ab {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1988.633667] env[63279]: DEBUG oslo_vmware.api [None req-73b648a6-1547-48f2-b4d9-3d262b420193 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Waiting for the task: (returnval){ [ 1988.633667] env[63279]: value = "task-2087198" [ 1988.633667] env[63279]: _type = "Task" [ 1988.633667] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1988.642117] env[63279]: DEBUG oslo_vmware.api [None req-73b648a6-1547-48f2-b4d9-3d262b420193 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Task: {'id': task-2087198, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1988.656638] env[63279]: DEBUG oslo_vmware.rw_handles [None req-1478564c-2854-499f-8d64-33efae3e3b9b tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5271999c-8cdf-856a-306f-3ec19d4812a6/disk-0.vmdk. {{(pid=63279) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1988.656997] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03e14c62-c32b-44d2-b3ea-02c67e1094a3 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1988.669046] env[63279]: DEBUG oslo_vmware.rw_handles [None req-1478564c-2854-499f-8d64-33efae3e3b9b tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5271999c-8cdf-856a-306f-3ec19d4812a6/disk-0.vmdk is in state: ready. 
{{(pid=63279) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1988.669046] env[63279]: ERROR oslo_vmware.rw_handles [None req-1478564c-2854-499f-8d64-33efae3e3b9b tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5271999c-8cdf-856a-306f-3ec19d4812a6/disk-0.vmdk due to incomplete transfer. [ 1988.669046] env[63279]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-16f43825-6191-4be7-a2a8-7d314f2203ca {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1988.676885] env[63279]: DEBUG oslo_vmware.rw_handles [None req-1478564c-2854-499f-8d64-33efae3e3b9b tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5271999c-8cdf-856a-306f-3ec19d4812a6/disk-0.vmdk. {{(pid=63279) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1988.677125] env[63279]: DEBUG nova.virt.vmwareapi.images [None req-1478564c-2854-499f-8d64-33efae3e3b9b tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] [instance: 5bb445d3-1b12-4a1b-ad2a-cbc929b13aee] Uploaded image e646b39f-4fee-487a-bfeb-9b7128ee4e8b to the Glance image server {{(pid=63279) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1988.679860] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-1478564c-2854-499f-8d64-33efae3e3b9b tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] [instance: 5bb445d3-1b12-4a1b-ad2a-cbc929b13aee] Destroying the VM {{(pid=63279) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1988.682913] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-fb356563-e7f2-410c-a390-5c7263c83257 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1988.689352] env[63279]: DEBUG oslo_vmware.api [None req-1478564c-2854-499f-8d64-33efae3e3b9b tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] Waiting for the task: (returnval){ [ 1988.689352] env[63279]: value = "task-2087199" [ 1988.689352] env[63279]: _type = "Task" [ 1988.689352] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1988.698235] env[63279]: DEBUG oslo_vmware.api [None req-1478564c-2854-499f-8d64-33efae3e3b9b tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] Task: {'id': task-2087199, 'name': Destroy_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1989.018097] env[63279]: DEBUG oslo_vmware.api [None req-0d7ac8a8-a0e2-4644-bc68-36c40b80e766 tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] Task: {'id': task-2087197, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.182059} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1989.019937] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-0d7ac8a8-a0e2-4644-bc68-36c40b80e766 tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] Deleted the datastore file {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1989.019937] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-0d7ac8a8-a0e2-4644-bc68-36c40b80e766 tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] [instance: 79032b2a-74f7-4c6d-8f71-f848fe372ba2] Deleted contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1989.019937] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-0d7ac8a8-a0e2-4644-bc68-36c40b80e766 tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] [instance: 79032b2a-74f7-4c6d-8f71-f848fe372ba2] Instance destroyed {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1989.020271] env[63279]: INFO nova.compute.manager [None req-0d7ac8a8-a0e2-4644-bc68-36c40b80e766 tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] [instance: 79032b2a-74f7-4c6d-8f71-f848fe372ba2] Took 1.20 seconds to destroy the instance on the hypervisor. [ 1989.020354] env[63279]: DEBUG oslo.service.loopingcall [None req-0d7ac8a8-a0e2-4644-bc68-36c40b80e766 tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1989.021301] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-caaec631-ab3d-48f7-8f8f-6d20183a22c7 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1989.025306] env[63279]: DEBUG nova.compute.manager [-] [instance: 79032b2a-74f7-4c6d-8f71-f848fe372ba2] Deallocating network for instance {{(pid=63279) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1989.025306] env[63279]: DEBUG nova.network.neutron [-] [instance: 79032b2a-74f7-4c6d-8f71-f848fe372ba2] deallocate_for_instance() {{(pid=63279) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1989.035100] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6601b747-87ff-4040-8475-2350731c5f0b {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1989.075751] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5205fe07-775a-431e-9b35-ba0f3b338fc6 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1989.087879] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f4e4d76-b1f4-4b18-ba80-5f067029ecc0 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1989.103220] env[63279]: DEBUG nova.compute.provider_tree [None req-2fef03c7-ebe2-4875-b420-6389c982d497 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Updating inventory in ProviderTree for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1989.150266] env[63279]: DEBUG oslo_vmware.api [None req-73b648a6-1547-48f2-b4d9-3d262b420193 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Task: {'id': task-2087198, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1989.198867] env[63279]: DEBUG oslo_vmware.api [None req-1478564c-2854-499f-8d64-33efae3e3b9b tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] Task: {'id': task-2087199, 'name': Destroy_Task} progress is 100%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1989.617247] env[63279]: DEBUG nova.network.neutron [req-746d70eb-f74c-40d7-8287-e8d93f938985 req-48584421-9976-410c-9e5d-725591fa0d69 service nova] [instance: eca98392-98be-405b-b799-463ef9ee3dc8] Updated VIF entry in instance network info cache for port d609f907-0c7e-4293-8b21-721e712e8dc2. 
{{(pid=63279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1989.621481] env[63279]: DEBUG nova.network.neutron [req-746d70eb-f74c-40d7-8287-e8d93f938985 req-48584421-9976-410c-9e5d-725591fa0d69 service nova] [instance: eca98392-98be-405b-b799-463ef9ee3dc8] Updating instance_info_cache with network_info: [{"id": "d609f907-0c7e-4293-8b21-721e712e8dc2", "address": "fa:16:3e:0e:eb:3d", "network": {"id": "e0e614b7-de4b-485e-8824-582faae8febd", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1102025073-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.184", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b5b21bc5072e4945a19a782dd9561709", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "41278529-8bd2-44a1-97c8-03967faa3ff7", "external-id": "nsx-vlan-transportzone-749", "segmentation_id": 749, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd609f907-0c", "ovs_interfaceid": "d609f907-0c7e-4293-8b21-721e712e8dc2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1989.635499] env[63279]: DEBUG nova.network.neutron [req-9ddce6fa-6905-42b0-9c87-4491654b7763 req-5030ee6f-eea5-4d84-9916-53cb925d0302 service nova] [instance: 79032b2a-74f7-4c6d-8f71-f848fe372ba2] Updated VIF entry in instance network info cache for port 568079c6-5b60-4517-9e83-f526ccdaa586. 
{{(pid=63279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1989.635855] env[63279]: DEBUG nova.network.neutron [req-9ddce6fa-6905-42b0-9c87-4491654b7763 req-5030ee6f-eea5-4d84-9916-53cb925d0302 service nova] [instance: 79032b2a-74f7-4c6d-8f71-f848fe372ba2] Updating instance_info_cache with network_info: [{"id": "568079c6-5b60-4517-9e83-f526ccdaa586", "address": "fa:16:3e:e1:18:dd", "network": {"id": "948d327b-554a-4c1d-a483-9a067d60f6bc", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1383523654-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ba92e8896d7a4605bec96ce7ee7d4a4d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "471f65a5-21ea-45e3-a722-4e204ed65673", "external-id": "nsx-vlan-transportzone-139", "segmentation_id": 139, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap568079c6-5b", "ovs_interfaceid": "568079c6-5b60-4517-9e83-f526ccdaa586", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1989.648705] env[63279]: DEBUG oslo_vmware.api [None req-73b648a6-1547-48f2-b4d9-3d262b420193 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Task: {'id': task-2087198, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.553765} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1989.648802] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-73b648a6-1547-48f2-b4d9-3d262b420193 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] ad435281-55a0-418a-8400-5c461a5c15ef/ad435281-55a0-418a-8400-5c461a5c15ef.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1989.649206] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-73b648a6-1547-48f2-b4d9-3d262b420193 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: ad435281-55a0-418a-8400-5c461a5c15ef] Extending root virtual disk to 1048576 {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1989.649705] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f1855931-9865-4b60-8f50-a9d05154d09c {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1989.654549] env[63279]: DEBUG nova.scheduler.client.report [None req-2fef03c7-ebe2-4875-b420-6389c982d497 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Updated inventory for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 with generation 73 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1989.654792] env[63279]: DEBUG nova.compute.provider_tree [None req-2fef03c7-ebe2-4875-b420-6389c982d497 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Updating resource provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 generation from 73 to 74 during operation: update_inventory {{(pid=63279) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1989.654980] env[63279]: DEBUG nova.compute.provider_tree [None req-2fef03c7-ebe2-4875-b420-6389c982d497 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Updating inventory in ProviderTree for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1989.665777] env[63279]: DEBUG oslo_vmware.api [None req-73b648a6-1547-48f2-b4d9-3d262b420193 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Waiting for the task: (returnval){ [ 1989.665777] env[63279]: value = "task-2087200" [ 1989.665777] env[63279]: _type = "Task" 
[ 1989.665777] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1989.677693] env[63279]: DEBUG oslo_vmware.api [None req-73b648a6-1547-48f2-b4d9-3d262b420193 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Task: {'id': task-2087200, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1989.698993] env[63279]: DEBUG oslo_vmware.api [None req-1478564c-2854-499f-8d64-33efae3e3b9b tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] Task: {'id': task-2087199, 'name': Destroy_Task, 'duration_secs': 0.510655} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1989.699316] env[63279]: INFO nova.virt.vmwareapi.vm_util [None req-1478564c-2854-499f-8d64-33efae3e3b9b tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] [instance: 5bb445d3-1b12-4a1b-ad2a-cbc929b13aee] Destroyed the VM [ 1989.699569] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-1478564c-2854-499f-8d64-33efae3e3b9b tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] [instance: 5bb445d3-1b12-4a1b-ad2a-cbc929b13aee] Deleting Snapshot of the VM instance {{(pid=63279) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1989.699820] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-85a6199a-4e72-40d2-a406-e1534bb775aa {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1989.705967] env[63279]: DEBUG oslo_vmware.api [None req-1478564c-2854-499f-8d64-33efae3e3b9b tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] Waiting for the task: (returnval){ [ 1989.705967] env[63279]: value = "task-2087201" [ 1989.705967] env[63279]: _type = "Task" [ 1989.705967] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1989.714953] env[63279]: DEBUG oslo_vmware.api [None req-1478564c-2854-499f-8d64-33efae3e3b9b tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] Task: {'id': task-2087201, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1989.809317] env[63279]: DEBUG nova.network.neutron [None req-ccac180c-f8b1-4fd5-a765-713ff5939c3d tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] [instance: 32789822-cb54-43e7-beae-b5ed3002f4ad] Successfully updated port: 271bdcf3-0171-47b9-8e8f-e190604053fd {{(pid=63279) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1989.940073] env[63279]: DEBUG nova.compute.manager [req-c4784ecd-f326-4a77-b85e-1410d9c966aa req-163b8f57-998f-440e-b1ad-568244acd65b service nova] [instance: 357f08c9-4de9-4b84-8384-6bf130872f40] Received event network-changed-4160b9e6-5e90-458c-bb0f-afc6be383dc1 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 1989.940333] env[63279]: DEBUG nova.compute.manager [req-c4784ecd-f326-4a77-b85e-1410d9c966aa req-163b8f57-998f-440e-b1ad-568244acd65b service nova] [instance: 357f08c9-4de9-4b84-8384-6bf130872f40] Refreshing instance network info cache due to event network-changed-4160b9e6-5e90-458c-bb0f-afc6be383dc1. {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11655}} [ 1989.940539] env[63279]: DEBUG oslo_concurrency.lockutils [req-c4784ecd-f326-4a77-b85e-1410d9c966aa req-163b8f57-998f-440e-b1ad-568244acd65b service nova] Acquiring lock "refresh_cache-357f08c9-4de9-4b84-8384-6bf130872f40" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1989.940861] env[63279]: DEBUG oslo_concurrency.lockutils [req-c4784ecd-f326-4a77-b85e-1410d9c966aa req-163b8f57-998f-440e-b1ad-568244acd65b service nova] Acquired lock "refresh_cache-357f08c9-4de9-4b84-8384-6bf130872f40" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1989.940861] env[63279]: DEBUG nova.network.neutron [req-c4784ecd-f326-4a77-b85e-1410d9c966aa req-163b8f57-998f-440e-b1ad-568244acd65b service nova] [instance: 357f08c9-4de9-4b84-8384-6bf130872f40] Refreshing network info cache for port 4160b9e6-5e90-458c-bb0f-afc6be383dc1 {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1990.123961] env[63279]: DEBUG oslo_concurrency.lockutils [req-746d70eb-f74c-40d7-8287-e8d93f938985 req-48584421-9976-410c-9e5d-725591fa0d69 service nova] Releasing lock "refresh_cache-eca98392-98be-405b-b799-463ef9ee3dc8" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1990.138662] env[63279]: DEBUG oslo_concurrency.lockutils [req-9ddce6fa-6905-42b0-9c87-4491654b7763 req-5030ee6f-eea5-4d84-9916-53cb925d0302 service nova] Releasing lock "refresh_cache-79032b2a-74f7-4c6d-8f71-f848fe372ba2" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1990.162039] env[63279]: DEBUG oslo_concurrency.lockutils [None req-2fef03c7-ebe2-4875-b420-6389c982d497 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 4.128s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1990.162976] env[63279]: DEBUG nova.compute.manager [None req-2fef03c7-ebe2-4875-b420-6389c982d497 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] [instance: 
1b2ca21b-feea-4fc1-9ddc-99f144e4241a] Start building networks asynchronously for instance. {{(pid=63279) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1990.166800] env[63279]: DEBUG oslo_concurrency.lockutils [None req-6f8ada2f-3ee5-41f1-9418-e9beb222761d tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 34.476s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1990.167059] env[63279]: DEBUG oslo_concurrency.lockutils [None req-6f8ada2f-3ee5-41f1-9418-e9beb222761d tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1990.170549] env[63279]: DEBUG oslo_concurrency.lockutils [None req-2fef03c7-ebe2-4875-b420-6389c982d497 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 31.973s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1990.172264] env[63279]: INFO nova.compute.claims [None req-2fef03c7-ebe2-4875-b420-6389c982d497 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] [instance: fb124cfa-24b4-4712-b8cc-c87df5d6231b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1990.185461] env[63279]: DEBUG oslo_vmware.api [None req-73b648a6-1547-48f2-b4d9-3d262b420193 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Task: {'id': task-2087200, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.058048} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1990.185723] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-73b648a6-1547-48f2-b4d9-3d262b420193 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: ad435281-55a0-418a-8400-5c461a5c15ef] Extended root virtual disk {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1990.187199] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a3c0212-6312-4721-b884-af963d87d631 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1990.218535] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-73b648a6-1547-48f2-b4d9-3d262b420193 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: ad435281-55a0-418a-8400-5c461a5c15ef] Reconfiguring VM instance instance-0000002d to attach disk [datastore1] ad435281-55a0-418a-8400-5c461a5c15ef/ad435281-55a0-418a-8400-5c461a5c15ef.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1990.221157] env[63279]: INFO nova.scheduler.client.report [None req-6f8ada2f-3ee5-41f1-9418-e9beb222761d tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Deleted allocations for instance ff2f355a-9687-4491-b243-6133e4b7b866 [ 1990.225020] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d4d0cdda-d845-4514-aaaa-864a7def6d9d {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1990.256241] env[63279]: DEBUG oslo_vmware.api [None req-1478564c-2854-499f-8d64-33efae3e3b9b tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] Task: {'id': task-2087201, 'name': RemoveSnapshot_Task, 'duration_secs': 0.480154} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1990.257483] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-1478564c-2854-499f-8d64-33efae3e3b9b tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] [instance: 5bb445d3-1b12-4a1b-ad2a-cbc929b13aee] Deleted Snapshot of the VM instance {{(pid=63279) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1990.257717] env[63279]: INFO nova.compute.manager [None req-1478564c-2854-499f-8d64-33efae3e3b9b tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] [instance: 5bb445d3-1b12-4a1b-ad2a-cbc929b13aee] Took 17.80 seconds to snapshot the instance on the hypervisor. [ 1990.260018] env[63279]: DEBUG oslo_vmware.api [None req-73b648a6-1547-48f2-b4d9-3d262b420193 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Waiting for the task: (returnval){ [ 1990.260018] env[63279]: value = "task-2087202" [ 1990.260018] env[63279]: _type = "Task" [ 1990.260018] env[63279]: } to complete. 
{{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1990.269148] env[63279]: DEBUG oslo_vmware.api [None req-73b648a6-1547-48f2-b4d9-3d262b420193 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Task: {'id': task-2087202, 'name': ReconfigVM_Task} progress is 10%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1990.317126] env[63279]: DEBUG oslo_concurrency.lockutils [None req-ccac180c-f8b1-4fd5-a765-713ff5939c3d tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Acquiring lock "refresh_cache-32789822-cb54-43e7-beae-b5ed3002f4ad" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1990.317126] env[63279]: DEBUG oslo_concurrency.lockutils [None req-ccac180c-f8b1-4fd5-a765-713ff5939c3d tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Acquired lock "refresh_cache-32789822-cb54-43e7-beae-b5ed3002f4ad" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1990.317126] env[63279]: DEBUG nova.network.neutron [None req-ccac180c-f8b1-4fd5-a765-713ff5939c3d tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] [instance: 32789822-cb54-43e7-beae-b5ed3002f4ad] Building network info cache for instance {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1990.426374] env[63279]: DEBUG nova.network.neutron [-] [instance: 79032b2a-74f7-4c6d-8f71-f848fe372ba2] Updating instance_info_cache with network_info: [] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1990.682215] env[63279]: DEBUG nova.compute.utils [None req-2fef03c7-ebe2-4875-b420-6389c982d497 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Using /dev/sd instead of None {{(pid=63279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1990.682215] env[63279]: DEBUG nova.compute.manager [None req-2fef03c7-ebe2-4875-b420-6389c982d497 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] [instance: 1b2ca21b-feea-4fc1-9ddc-99f144e4241a] Allocating IP information in the background. 
{{(pid=63279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1990.682215] env[63279]: DEBUG nova.network.neutron [None req-2fef03c7-ebe2-4875-b420-6389c982d497 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] [instance: 1b2ca21b-feea-4fc1-9ddc-99f144e4241a] allocate_for_instance() {{(pid=63279) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1990.756043] env[63279]: DEBUG oslo_concurrency.lockutils [None req-6f8ada2f-3ee5-41f1-9418-e9beb222761d tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Lock "ff2f355a-9687-4491-b243-6133e4b7b866" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 38.588s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1990.768319] env[63279]: DEBUG nova.policy [None req-2fef03c7-ebe2-4875-b420-6389c982d497 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '37e32e1b25d1432aadda8ab95694aca0', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '70c8735ebd5740c4b8b4d0cf8635da71', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63279) authorize /opt/stack/nova/nova/policy.py:192}} [ 1990.780146] env[63279]: DEBUG oslo_vmware.api [None req-73b648a6-1547-48f2-b4d9-3d262b420193 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Task: {'id': task-2087202, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1990.881851] env[63279]: DEBUG nova.network.neutron [None req-ccac180c-f8b1-4fd5-a765-713ff5939c3d tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] [instance: 32789822-cb54-43e7-beae-b5ed3002f4ad] Instance cache missing network info. {{(pid=63279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1990.932723] env[63279]: INFO nova.compute.manager [-] [instance: 79032b2a-74f7-4c6d-8f71-f848fe372ba2] Took 1.91 seconds to deallocate network for instance. [ 1990.953715] env[63279]: DEBUG nova.compute.manager [req-d81b7ef9-1b89-4197-87ce-5369b21e8941 req-4af6bafa-b5ef-4e8c-b179-ffe1d766db26 service nova] [instance: 79032b2a-74f7-4c6d-8f71-f848fe372ba2] Received event network-vif-deleted-568079c6-5b60-4517-9e83-f526ccdaa586 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 1991.031242] env[63279]: DEBUG nova.network.neutron [req-c4784ecd-f326-4a77-b85e-1410d9c966aa req-163b8f57-998f-440e-b1ad-568244acd65b service nova] [instance: 357f08c9-4de9-4b84-8384-6bf130872f40] Updated VIF entry in instance network info cache for port 4160b9e6-5e90-458c-bb0f-afc6be383dc1. 
{{(pid=63279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1991.031242] env[63279]: DEBUG nova.network.neutron [req-c4784ecd-f326-4a77-b85e-1410d9c966aa req-163b8f57-998f-440e-b1ad-568244acd65b service nova] [instance: 357f08c9-4de9-4b84-8384-6bf130872f40] Updating instance_info_cache with network_info: [{"id": "4160b9e6-5e90-458c-bb0f-afc6be383dc1", "address": "fa:16:3e:0b:30:a6", "network": {"id": "948d327b-554a-4c1d-a483-9a067d60f6bc", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1383523654-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ba92e8896d7a4605bec96ce7ee7d4a4d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "471f65a5-21ea-45e3-a722-4e204ed65673", "external-id": "nsx-vlan-transportzone-139", "segmentation_id": 139, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4160b9e6-5e", "ovs_interfaceid": "4160b9e6-5e90-458c-bb0f-afc6be383dc1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1991.191621] env[63279]: DEBUG nova.compute.manager [None req-2fef03c7-ebe2-4875-b420-6389c982d497 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] [instance: 1b2ca21b-feea-4fc1-9ddc-99f144e4241a] Start building block device mappings for instance. {{(pid=63279) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1991.275491] env[63279]: DEBUG oslo_vmware.api [None req-73b648a6-1547-48f2-b4d9-3d262b420193 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Task: {'id': task-2087202, 'name': ReconfigVM_Task, 'duration_secs': 0.691038} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1991.276549] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-73b648a6-1547-48f2-b4d9-3d262b420193 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: ad435281-55a0-418a-8400-5c461a5c15ef] Reconfigured VM instance instance-0000002d to attach disk [datastore1] ad435281-55a0-418a-8400-5c461a5c15ef/ad435281-55a0-418a-8400-5c461a5c15ef.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1991.280582] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c2004610-110c-4c95-800c-0afe8e385212 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1991.290949] env[63279]: DEBUG oslo_vmware.api [None req-73b648a6-1547-48f2-b4d9-3d262b420193 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Waiting for the task: (returnval){ [ 1991.290949] env[63279]: value = "task-2087203" [ 1991.290949] env[63279]: _type = "Task" [ 1991.290949] env[63279]: } to complete. 
{{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1991.299553] env[63279]: DEBUG oslo_vmware.api [None req-73b648a6-1547-48f2-b4d9-3d262b420193 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Task: {'id': task-2087203, 'name': Rename_Task} progress is 5%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1991.440512] env[63279]: DEBUG oslo_concurrency.lockutils [None req-0d7ac8a8-a0e2-4644-bc68-36c40b80e766 tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1991.535614] env[63279]: DEBUG oslo_concurrency.lockutils [req-c4784ecd-f326-4a77-b85e-1410d9c966aa req-163b8f57-998f-440e-b1ad-568244acd65b service nova] Releasing lock "refresh_cache-357f08c9-4de9-4b84-8384-6bf130872f40" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1991.535871] env[63279]: DEBUG nova.compute.manager [req-c4784ecd-f326-4a77-b85e-1410d9c966aa req-163b8f57-998f-440e-b1ad-568244acd65b service nova] [instance: 32789822-cb54-43e7-beae-b5ed3002f4ad] Received event network-vif-plugged-271bdcf3-0171-47b9-8e8f-e190604053fd {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 1991.536099] env[63279]: DEBUG oslo_concurrency.lockutils [req-c4784ecd-f326-4a77-b85e-1410d9c966aa req-163b8f57-998f-440e-b1ad-568244acd65b service nova] Acquiring lock "32789822-cb54-43e7-beae-b5ed3002f4ad-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1991.536368] env[63279]: DEBUG oslo_concurrency.lockutils [req-c4784ecd-f326-4a77-b85e-1410d9c966aa req-163b8f57-998f-440e-b1ad-568244acd65b service nova] Lock "32789822-cb54-43e7-beae-b5ed3002f4ad-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1991.536550] env[63279]: DEBUG oslo_concurrency.lockutils [req-c4784ecd-f326-4a77-b85e-1410d9c966aa req-163b8f57-998f-440e-b1ad-568244acd65b service nova] Lock "32789822-cb54-43e7-beae-b5ed3002f4ad-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1991.536787] env[63279]: DEBUG nova.compute.manager [req-c4784ecd-f326-4a77-b85e-1410d9c966aa req-163b8f57-998f-440e-b1ad-568244acd65b service nova] [instance: 32789822-cb54-43e7-beae-b5ed3002f4ad] No waiting events found dispatching network-vif-plugged-271bdcf3-0171-47b9-8e8f-e190604053fd {{(pid=63279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1991.536989] env[63279]: WARNING nova.compute.manager [req-c4784ecd-f326-4a77-b85e-1410d9c966aa req-163b8f57-998f-440e-b1ad-568244acd65b service nova] [instance: 32789822-cb54-43e7-beae-b5ed3002f4ad] Received unexpected event network-vif-plugged-271bdcf3-0171-47b9-8e8f-e190604053fd for instance with vm_state building and task_state spawning. 
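Editor's note, a short aside on the Placement exchange visible earlier in this excerpt: the ERROR from nova.scheduler.client.report (req-a3cc7724-ad09-4f8a-a496-893e068239be, "resource provider generation conflict", code placement.concurrent_update) followed by "Refreshing inventories for resource provider ..." and "Updating resource provider ... generation from 73 to 74" shows Placement's optimistic-concurrency scheme: every inventory PUT carries the provider generation the client last saw, a stale generation is rejected with 409, and the client re-reads the provider and retries. Below is a minimal, hedged sketch of that loop, not nova's actual code (which lives in nova/scheduler/client/report.py); the endpoint, token and microversion header are placeholder assumptions.

import requests

PLACEMENT = "http://placement.example:8778"        # assumed endpoint
HEADERS = {
    "X-Auth-Token": "REPLACE_WITH_TOKEN",          # assumed credentials
    "OpenStack-API-Version": "placement 1.26",     # assumed microversion
}


def put_inventories(rp_uuid, inventories, generation, retries=3):
    """PUT the full inventory set, re-reading the generation on a 409 conflict."""
    url = f"{PLACEMENT}/resource_providers/{rp_uuid}/inventories"
    for _ in range(retries):
        body = {"resource_provider_generation": generation,
                "inventories": inventories}
        resp = requests.put(url, json=body, headers=HEADERS)
        if resp.status_code != 409:
            resp.raise_for_status()
            # On success the response echoes the provider's new generation.
            return resp.json()["resource_provider_generation"]
        errors = resp.json().get("errors", [])
        if not any(e.get("code") == "placement.concurrent_update" for e in errors):
            resp.raise_for_status()
        # Someone else bumped the provider generation; refresh it and retry,
        # mirroring the "Refreshing inventories for resource provider" lines above.
        current = requests.get(url, headers=HEADERS).json()
        generation = current["resource_provider_generation"]
    raise RuntimeError("inventory update still conflicting after %d attempts" % retries)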
[ 1991.570336] env[63279]: DEBUG nova.network.neutron [None req-ccac180c-f8b1-4fd5-a765-713ff5939c3d tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] [instance: 32789822-cb54-43e7-beae-b5ed3002f4ad] Updating instance_info_cache with network_info: [{"id": "271bdcf3-0171-47b9-8e8f-e190604053fd", "address": "fa:16:3e:0e:9e:e4", "network": {"id": "a7b83e75-3b16-41db-9395-90dead128e80", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-756195345-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0fd8bdc7d8664db698236f56d82adcf0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "53955f0e-c162-4cef-8bd5-335b369c36b6", "external-id": "nsx-vlan-transportzone-623", "segmentation_id": 623, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap271bdcf3-01", "ovs_interfaceid": "271bdcf3-0171-47b9-8e8f-e190604053fd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1991.731985] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31b9e800-31d8-4c69-a75c-c8288e2b7406 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1991.740172] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8b00ce1-da15-4f07-805f-12c026d7949c {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1991.778768] env[63279]: DEBUG nova.network.neutron [None req-2fef03c7-ebe2-4875-b420-6389c982d497 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] [instance: 1b2ca21b-feea-4fc1-9ddc-99f144e4241a] Successfully created port: a55a63d0-c628-4d90-b3dc-d8f0bf564e95 {{(pid=63279) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1991.782392] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32805d24-8aa7-43e0-8366-dcdfcac292d3 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1991.791669] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c382ebed-316a-4cd9-b66d-1aeab081af55 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1991.804046] env[63279]: DEBUG oslo_vmware.api [None req-73b648a6-1547-48f2-b4d9-3d262b420193 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Task: {'id': task-2087203, 'name': Rename_Task, 'duration_secs': 0.178164} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1991.814507] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-73b648a6-1547-48f2-b4d9-3d262b420193 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: ad435281-55a0-418a-8400-5c461a5c15ef] Powering on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1991.815339] env[63279]: DEBUG nova.compute.provider_tree [None req-2fef03c7-ebe2-4875-b420-6389c982d497 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Inventory has not changed in ProviderTree for provider: 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1991.816682] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-edb2c8b3-6d30-45e3-abe7-975b094062ff {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1991.824854] env[63279]: DEBUG oslo_vmware.api [None req-73b648a6-1547-48f2-b4d9-3d262b420193 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Waiting for the task: (returnval){ [ 1991.824854] env[63279]: value = "task-2087204" [ 1991.824854] env[63279]: _type = "Task" [ 1991.824854] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1991.833643] env[63279]: DEBUG oslo_vmware.api [None req-73b648a6-1547-48f2-b4d9-3d262b420193 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Task: {'id': task-2087204, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1992.076018] env[63279]: DEBUG oslo_concurrency.lockutils [None req-ccac180c-f8b1-4fd5-a765-713ff5939c3d tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Releasing lock "refresh_cache-32789822-cb54-43e7-beae-b5ed3002f4ad" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1992.076018] env[63279]: DEBUG nova.compute.manager [None req-ccac180c-f8b1-4fd5-a765-713ff5939c3d tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] [instance: 32789822-cb54-43e7-beae-b5ed3002f4ad] Instance network_info: |[{"id": "271bdcf3-0171-47b9-8e8f-e190604053fd", "address": "fa:16:3e:0e:9e:e4", "network": {"id": "a7b83e75-3b16-41db-9395-90dead128e80", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-756195345-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0fd8bdc7d8664db698236f56d82adcf0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "53955f0e-c162-4cef-8bd5-335b369c36b6", "external-id": "nsx-vlan-transportzone-623", "segmentation_id": 623, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap271bdcf3-01", "ovs_interfaceid": "271bdcf3-0171-47b9-8e8f-e190604053fd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1992.076358] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-ccac180c-f8b1-4fd5-a765-713ff5939c3d tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] [instance: 32789822-cb54-43e7-beae-b5ed3002f4ad] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:0e:9e:e4', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '53955f0e-c162-4cef-8bd5-335b369c36b6', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '271bdcf3-0171-47b9-8e8f-e190604053fd', 'vif_model': 'vmxnet3'}] {{(pid=63279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1992.086761] env[63279]: DEBUG oslo.service.loopingcall [None req-ccac180c-f8b1-4fd5-a765-713ff5939c3d tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1992.087723] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 32789822-cb54-43e7-beae-b5ed3002f4ad] Creating VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1992.088129] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-50345468-0e40-4b4e-ae37-eedad370a682 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1992.111296] env[63279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1992.111296] env[63279]: value = "task-2087205" [ 1992.111296] env[63279]: _type = "Task" [ 1992.111296] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1992.121351] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087205, 'name': CreateVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1992.206133] env[63279]: DEBUG nova.compute.manager [None req-2fef03c7-ebe2-4875-b420-6389c982d497 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] [instance: 1b2ca21b-feea-4fc1-9ddc-99f144e4241a] Start spawning the instance on the hypervisor. {{(pid=63279) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1992.243033] env[63279]: DEBUG nova.virt.hardware [None req-2fef03c7-ebe2-4875-b420-6389c982d497 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-13T17:30:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-13T17:30:01Z,direct_url=,disk_format='vmdk',id=30887889-e45b-4f67-8b3c-16216e594a90,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a935e86eee0a4c38adfe0367d2097a61',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-13T17:30:02Z,virtual_size=,visibility=), allow threads: False {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1992.243350] env[63279]: DEBUG nova.virt.hardware [None req-2fef03c7-ebe2-4875-b420-6389c982d497 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Flavor limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1992.243518] env[63279]: DEBUG nova.virt.hardware [None req-2fef03c7-ebe2-4875-b420-6389c982d497 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Image limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1992.243707] env[63279]: DEBUG nova.virt.hardware [None req-2fef03c7-ebe2-4875-b420-6389c982d497 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Flavor pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1992.243859] env[63279]: DEBUG nova.virt.hardware [None 
req-2fef03c7-ebe2-4875-b420-6389c982d497 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Image pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1992.244025] env[63279]: DEBUG nova.virt.hardware [None req-2fef03c7-ebe2-4875-b420-6389c982d497 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1992.244249] env[63279]: DEBUG nova.virt.hardware [None req-2fef03c7-ebe2-4875-b420-6389c982d497 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1992.244416] env[63279]: DEBUG nova.virt.hardware [None req-2fef03c7-ebe2-4875-b420-6389c982d497 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1992.244689] env[63279]: DEBUG nova.virt.hardware [None req-2fef03c7-ebe2-4875-b420-6389c982d497 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Got 1 possible topologies {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1992.244759] env[63279]: DEBUG nova.virt.hardware [None req-2fef03c7-ebe2-4875-b420-6389c982d497 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1992.244917] env[63279]: DEBUG nova.virt.hardware [None req-2fef03c7-ebe2-4875-b420-6389c982d497 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1992.245849] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a45c854a-0f50-472d-a11f-b622f94d600e {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1992.255432] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c47a13cc-667a-4f95-a36f-10b8bf06c5c3 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1992.320460] env[63279]: DEBUG nova.scheduler.client.report [None req-2fef03c7-ebe2-4875-b420-6389c982d497 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Inventory has not changed for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 
'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1992.337540] env[63279]: DEBUG oslo_vmware.api [None req-73b648a6-1547-48f2-b4d9-3d262b420193 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Task: {'id': task-2087204, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1992.600368] env[63279]: DEBUG nova.compute.manager [req-29db22f8-afae-4654-bddd-2e12f6bdb928 req-43876836-ae86-409b-844d-0127da125e1e service nova] [instance: 32789822-cb54-43e7-beae-b5ed3002f4ad] Received event network-changed-271bdcf3-0171-47b9-8e8f-e190604053fd {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 1992.600643] env[63279]: DEBUG nova.compute.manager [req-29db22f8-afae-4654-bddd-2e12f6bdb928 req-43876836-ae86-409b-844d-0127da125e1e service nova] [instance: 32789822-cb54-43e7-beae-b5ed3002f4ad] Refreshing instance network info cache due to event network-changed-271bdcf3-0171-47b9-8e8f-e190604053fd. {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11655}} [ 1992.600837] env[63279]: DEBUG oslo_concurrency.lockutils [req-29db22f8-afae-4654-bddd-2e12f6bdb928 req-43876836-ae86-409b-844d-0127da125e1e service nova] Acquiring lock "refresh_cache-32789822-cb54-43e7-beae-b5ed3002f4ad" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1992.600984] env[63279]: DEBUG oslo_concurrency.lockutils [req-29db22f8-afae-4654-bddd-2e12f6bdb928 req-43876836-ae86-409b-844d-0127da125e1e service nova] Acquired lock "refresh_cache-32789822-cb54-43e7-beae-b5ed3002f4ad" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1992.601434] env[63279]: DEBUG nova.network.neutron [req-29db22f8-afae-4654-bddd-2e12f6bdb928 req-43876836-ae86-409b-844d-0127da125e1e service nova] [instance: 32789822-cb54-43e7-beae-b5ed3002f4ad] Refreshing network info cache for port 271bdcf3-0171-47b9-8e8f-e190604053fd {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1992.624621] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087205, 'name': CreateVM_Task} progress is 25%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1992.829064] env[63279]: DEBUG oslo_concurrency.lockutils [None req-2fef03c7-ebe2-4875-b420-6389c982d497 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.658s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1992.829641] env[63279]: DEBUG nova.compute.manager [None req-2fef03c7-ebe2-4875-b420-6389c982d497 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] [instance: fb124cfa-24b4-4712-b8cc-c87df5d6231b] Start building networks asynchronously for instance. 
{{(pid=63279) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1992.834688] env[63279]: DEBUG oslo_concurrency.lockutils [None req-c642d793-e6da-4184-9ed0-9d696e5f426f tempest-ServerMetadataNegativeTestJSON-1632547526 tempest-ServerMetadataNegativeTestJSON-1632547526-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 33.931s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1992.835481] env[63279]: DEBUG oslo_concurrency.lockutils [None req-c642d793-e6da-4184-9ed0-9d696e5f426f tempest-ServerMetadataNegativeTestJSON-1632547526 tempest-ServerMetadataNegativeTestJSON-1632547526-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.001s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1992.838118] env[63279]: DEBUG oslo_concurrency.lockutils [None req-c68279eb-6991-4d9c-8f4c-d9227ffe7480 tempest-ServersNegativeTestMultiTenantJSON-98060902 tempest-ServersNegativeTestMultiTenantJSON-98060902-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 33.298s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1992.838760] env[63279]: DEBUG oslo_concurrency.lockutils [None req-c68279eb-6991-4d9c-8f4c-d9227ffe7480 tempest-ServersNegativeTestMultiTenantJSON-98060902 tempest-ServersNegativeTestMultiTenantJSON-98060902-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1992.840499] env[63279]: DEBUG oslo_concurrency.lockutils [None req-71c39255-559f-4f75-b101-e68493303b43 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 32.707s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1992.842421] env[63279]: INFO nova.compute.claims [None req-71c39255-559f-4f75-b101-e68493303b43 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] [instance: 58392790-b297-4894-8d81-e5cbda69872b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1992.862648] env[63279]: DEBUG oslo_vmware.api [None req-73b648a6-1547-48f2-b4d9-3d262b420193 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Task: {'id': task-2087204, 'name': PowerOnVM_Task, 'duration_secs': 0.924764} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1992.862938] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-73b648a6-1547-48f2-b4d9-3d262b420193 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: ad435281-55a0-418a-8400-5c461a5c15ef] Powered on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1992.863166] env[63279]: DEBUG nova.compute.manager [None req-73b648a6-1547-48f2-b4d9-3d262b420193 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: ad435281-55a0-418a-8400-5c461a5c15ef] Checking state {{(pid=63279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1992.864807] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2676e34f-8acc-43b2-b76d-62ddbe86f090 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1992.889923] env[63279]: INFO nova.scheduler.client.report [None req-c642d793-e6da-4184-9ed0-9d696e5f426f tempest-ServerMetadataNegativeTestJSON-1632547526 tempest-ServerMetadataNegativeTestJSON-1632547526-project-member] Deleted allocations for instance e04f06de-da6a-4562-a50a-ff16bf3a006e [ 1992.894832] env[63279]: INFO nova.scheduler.client.report [None req-c68279eb-6991-4d9c-8f4c-d9227ffe7480 tempest-ServersNegativeTestMultiTenantJSON-98060902 tempest-ServersNegativeTestMultiTenantJSON-98060902-project-member] Deleted allocations for instance cf1b70af-335d-404b-bb4f-fe082dd6f450 [ 1992.997585] env[63279]: DEBUG oslo_concurrency.lockutils [None req-8106d6b8-905a-4c2b-bdd4-1d1f32b348cc tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] Acquiring lock "79032b2a-74f7-4c6d-8f71-f848fe372ba2" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1993.063270] env[63279]: DEBUG oslo_concurrency.lockutils [None req-edb6b700-e848-4325-a4d2-78dc8c4a27f3 tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Acquiring lock "5d4be656-defe-4332-b97e-e88b107ca4a1" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1993.063535] env[63279]: DEBUG oslo_concurrency.lockutils [None req-edb6b700-e848-4325-a4d2-78dc8c4a27f3 tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Lock "5d4be656-defe-4332-b97e-e88b107ca4a1" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1993.063753] env[63279]: DEBUG oslo_concurrency.lockutils [None req-edb6b700-e848-4325-a4d2-78dc8c4a27f3 tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Acquiring lock "5d4be656-defe-4332-b97e-e88b107ca4a1-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1993.063942] 
env[63279]: DEBUG oslo_concurrency.lockutils [None req-edb6b700-e848-4325-a4d2-78dc8c4a27f3 tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Lock "5d4be656-defe-4332-b97e-e88b107ca4a1-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1993.066439] env[63279]: DEBUG oslo_concurrency.lockutils [None req-edb6b700-e848-4325-a4d2-78dc8c4a27f3 tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Lock "5d4be656-defe-4332-b97e-e88b107ca4a1-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1993.069151] env[63279]: INFO nova.compute.manager [None req-edb6b700-e848-4325-a4d2-78dc8c4a27f3 tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] [instance: 5d4be656-defe-4332-b97e-e88b107ca4a1] Terminating instance [ 1993.123176] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087205, 'name': CreateVM_Task} progress is 25%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1993.357246] env[63279]: DEBUG nova.compute.utils [None req-2fef03c7-ebe2-4875-b420-6389c982d497 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Using /dev/sd instead of None {{(pid=63279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1993.361044] env[63279]: DEBUG nova.compute.manager [None req-2fef03c7-ebe2-4875-b420-6389c982d497 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] [instance: fb124cfa-24b4-4712-b8cc-c87df5d6231b] Allocating IP information in the background. 
{{(pid=63279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1993.361216] env[63279]: DEBUG nova.network.neutron [None req-2fef03c7-ebe2-4875-b420-6389c982d497 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] [instance: fb124cfa-24b4-4712-b8cc-c87df5d6231b] allocate_for_instance() {{(pid=63279) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1993.384632] env[63279]: DEBUG oslo_concurrency.lockutils [None req-73b648a6-1547-48f2-b4d9-3d262b420193 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1993.415592] env[63279]: DEBUG oslo_concurrency.lockutils [None req-c642d793-e6da-4184-9ed0-9d696e5f426f tempest-ServerMetadataNegativeTestJSON-1632547526 tempest-ServerMetadataNegativeTestJSON-1632547526-project-member] Lock "e04f06de-da6a-4562-a50a-ff16bf3a006e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 38.269s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1993.417662] env[63279]: DEBUG oslo_concurrency.lockutils [None req-c68279eb-6991-4d9c-8f4c-d9227ffe7480 tempest-ServersNegativeTestMultiTenantJSON-98060902 tempest-ServersNegativeTestMultiTenantJSON-98060902-project-member] Lock "cf1b70af-335d-404b-bb4f-fe082dd6f450" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 37.632s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1993.534435] env[63279]: DEBUG nova.policy [None req-2fef03c7-ebe2-4875-b420-6389c982d497 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '37e32e1b25d1432aadda8ab95694aca0', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '70c8735ebd5740c4b8b4d0cf8635da71', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63279) authorize /opt/stack/nova/nova/policy.py:192}} [ 1993.573630] env[63279]: DEBUG nova.compute.manager [None req-edb6b700-e848-4325-a4d2-78dc8c4a27f3 tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] [instance: 5d4be656-defe-4332-b97e-e88b107ca4a1] Start destroying the instance on the hypervisor. 
{{(pid=63279) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1993.573901] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-edb6b700-e848-4325-a4d2-78dc8c4a27f3 tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] [instance: 5d4be656-defe-4332-b97e-e88b107ca4a1] Destroying instance {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1993.574857] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7813c608-7215-4ceb-8ffd-5bef451c6f35 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1993.584583] env[63279]: DEBUG nova.network.neutron [req-29db22f8-afae-4654-bddd-2e12f6bdb928 req-43876836-ae86-409b-844d-0127da125e1e service nova] [instance: 32789822-cb54-43e7-beae-b5ed3002f4ad] Updated VIF entry in instance network info cache for port 271bdcf3-0171-47b9-8e8f-e190604053fd. {{(pid=63279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1993.584942] env[63279]: DEBUG nova.network.neutron [req-29db22f8-afae-4654-bddd-2e12f6bdb928 req-43876836-ae86-409b-844d-0127da125e1e service nova] [instance: 32789822-cb54-43e7-beae-b5ed3002f4ad] Updating instance_info_cache with network_info: [{"id": "271bdcf3-0171-47b9-8e8f-e190604053fd", "address": "fa:16:3e:0e:9e:e4", "network": {"id": "a7b83e75-3b16-41db-9395-90dead128e80", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-756195345-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0fd8bdc7d8664db698236f56d82adcf0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "53955f0e-c162-4cef-8bd5-335b369c36b6", "external-id": "nsx-vlan-transportzone-623", "segmentation_id": 623, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap271bdcf3-01", "ovs_interfaceid": "271bdcf3-0171-47b9-8e8f-e190604053fd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1993.591956] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-edb6b700-e848-4325-a4d2-78dc8c4a27f3 tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] [instance: 5d4be656-defe-4332-b97e-e88b107ca4a1] Powering off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1993.593253] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5cbabb6e-f034-4bee-918e-16ae04db3c79 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1993.603370] env[63279]: DEBUG oslo_vmware.api [None req-edb6b700-e848-4325-a4d2-78dc8c4a27f3 tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Waiting for the task: (returnval){ [ 1993.603370] env[63279]: value = "task-2087206" [ 1993.603370] 
env[63279]: _type = "Task" [ 1993.603370] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1993.614795] env[63279]: DEBUG oslo_vmware.api [None req-edb6b700-e848-4325-a4d2-78dc8c4a27f3 tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Task: {'id': task-2087206, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1993.625239] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087205, 'name': CreateVM_Task} progress is 25%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1993.866077] env[63279]: DEBUG nova.compute.manager [None req-2fef03c7-ebe2-4875-b420-6389c982d497 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] [instance: fb124cfa-24b4-4712-b8cc-c87df5d6231b] Start building block device mappings for instance. {{(pid=63279) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1994.091214] env[63279]: DEBUG oslo_concurrency.lockutils [req-29db22f8-afae-4654-bddd-2e12f6bdb928 req-43876836-ae86-409b-844d-0127da125e1e service nova] Releasing lock "refresh_cache-32789822-cb54-43e7-beae-b5ed3002f4ad" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1994.091214] env[63279]: DEBUG nova.compute.manager [req-29db22f8-afae-4654-bddd-2e12f6bdb928 req-43876836-ae86-409b-844d-0127da125e1e service nova] [instance: 357f08c9-4de9-4b84-8384-6bf130872f40] Received event network-changed-4160b9e6-5e90-458c-bb0f-afc6be383dc1 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 1994.091360] env[63279]: DEBUG nova.compute.manager [req-29db22f8-afae-4654-bddd-2e12f6bdb928 req-43876836-ae86-409b-844d-0127da125e1e service nova] [instance: 357f08c9-4de9-4b84-8384-6bf130872f40] Refreshing instance network info cache due to event network-changed-4160b9e6-5e90-458c-bb0f-afc6be383dc1. {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11655}} [ 1994.091572] env[63279]: DEBUG oslo_concurrency.lockutils [req-29db22f8-afae-4654-bddd-2e12f6bdb928 req-43876836-ae86-409b-844d-0127da125e1e service nova] Acquiring lock "refresh_cache-357f08c9-4de9-4b84-8384-6bf130872f40" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1994.091713] env[63279]: DEBUG oslo_concurrency.lockutils [req-29db22f8-afae-4654-bddd-2e12f6bdb928 req-43876836-ae86-409b-844d-0127da125e1e service nova] Acquired lock "refresh_cache-357f08c9-4de9-4b84-8384-6bf130872f40" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1994.092405] env[63279]: DEBUG nova.network.neutron [req-29db22f8-afae-4654-bddd-2e12f6bdb928 req-43876836-ae86-409b-844d-0127da125e1e service nova] [instance: 357f08c9-4de9-4b84-8384-6bf130872f40] Refreshing network info cache for port 4160b9e6-5e90-458c-bb0f-afc6be383dc1 {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1994.118634] env[63279]: DEBUG oslo_vmware.api [None req-edb6b700-e848-4325-a4d2-78dc8c4a27f3 tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Task: {'id': task-2087206, 'name': PowerOffVM_Task, 'duration_secs': 0.212142} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1994.125308] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-edb6b700-e848-4325-a4d2-78dc8c4a27f3 tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] [instance: 5d4be656-defe-4332-b97e-e88b107ca4a1] Powered off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1994.126748] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-edb6b700-e848-4325-a4d2-78dc8c4a27f3 tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] [instance: 5d4be656-defe-4332-b97e-e88b107ca4a1] Unregistering the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1994.126748] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-010443e0-41a7-4281-8e5b-6895b09f11aa {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1994.133289] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087205, 'name': CreateVM_Task} progress is 25%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1994.439950] env[63279]: DEBUG nova.network.neutron [None req-2fef03c7-ebe2-4875-b420-6389c982d497 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] [instance: fb124cfa-24b4-4712-b8cc-c87df5d6231b] Successfully created port: 5fc8a732-f9cf-4494-b984-31c593d7106b {{(pid=63279) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1994.464696] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7796b1c-df0e-4acf-b22c-de188683a6e5 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1994.473019] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d62506d4-92fa-45f0-8904-fdf6172f1b75 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1994.511100] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07582f86-54bd-4601-a879-97144911d36a {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1994.520713] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1475eb08-f228-4a91-8b89-fb499298093f {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1994.539331] env[63279]: DEBUG nova.compute.provider_tree [None req-71c39255-559f-4f75-b101-e68493303b43 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Inventory has not changed in ProviderTree for provider: 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1994.635143] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087205, 'name': CreateVM_Task} progress is 25%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1994.889499] env[63279]: DEBUG nova.compute.manager [None req-2fef03c7-ebe2-4875-b420-6389c982d497 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] [instance: fb124cfa-24b4-4712-b8cc-c87df5d6231b] Start spawning the instance on the hypervisor. {{(pid=63279) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1994.920730] env[63279]: DEBUG nova.virt.hardware [None req-2fef03c7-ebe2-4875-b420-6389c982d497 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-13T17:30:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-13T17:30:01Z,direct_url=,disk_format='vmdk',id=30887889-e45b-4f67-8b3c-16216e594a90,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a935e86eee0a4c38adfe0367d2097a61',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-13T17:30:02Z,virtual_size=,visibility=), allow threads: False {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1994.920998] env[63279]: DEBUG nova.virt.hardware [None req-2fef03c7-ebe2-4875-b420-6389c982d497 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Flavor limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1994.921186] env[63279]: DEBUG nova.virt.hardware [None req-2fef03c7-ebe2-4875-b420-6389c982d497 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Image limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1994.921376] env[63279]: DEBUG nova.virt.hardware [None req-2fef03c7-ebe2-4875-b420-6389c982d497 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Flavor pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1994.921528] env[63279]: DEBUG nova.virt.hardware [None req-2fef03c7-ebe2-4875-b420-6389c982d497 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Image pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1994.921680] env[63279]: DEBUG nova.virt.hardware [None req-2fef03c7-ebe2-4875-b420-6389c982d497 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1994.922161] env[63279]: DEBUG nova.virt.hardware [None req-2fef03c7-ebe2-4875-b420-6389c982d497 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 
1994.922407] env[63279]: DEBUG nova.virt.hardware [None req-2fef03c7-ebe2-4875-b420-6389c982d497 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1994.922594] env[63279]: DEBUG nova.virt.hardware [None req-2fef03c7-ebe2-4875-b420-6389c982d497 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Got 1 possible topologies {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1994.923162] env[63279]: DEBUG nova.virt.hardware [None req-2fef03c7-ebe2-4875-b420-6389c982d497 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1994.923162] env[63279]: DEBUG nova.virt.hardware [None req-2fef03c7-ebe2-4875-b420-6389c982d497 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1994.923810] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5211b5bf-1f14-48d8-80d8-f40b28b6a6da {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1994.932556] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c69fbb7-7c36-4570-81a4-2fb1c64ca062 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1995.045568] env[63279]: DEBUG nova.scheduler.client.report [None req-71c39255-559f-4f75-b101-e68493303b43 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Inventory has not changed for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1995.131263] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087205, 'name': CreateVM_Task} progress is 25%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1995.266915] env[63279]: DEBUG nova.network.neutron [req-29db22f8-afae-4654-bddd-2e12f6bdb928 req-43876836-ae86-409b-844d-0127da125e1e service nova] [instance: 357f08c9-4de9-4b84-8384-6bf130872f40] Updated VIF entry in instance network info cache for port 4160b9e6-5e90-458c-bb0f-afc6be383dc1. 
{{(pid=63279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1995.266915] env[63279]: DEBUG nova.network.neutron [req-29db22f8-afae-4654-bddd-2e12f6bdb928 req-43876836-ae86-409b-844d-0127da125e1e service nova] [instance: 357f08c9-4de9-4b84-8384-6bf130872f40] Updating instance_info_cache with network_info: [{"id": "4160b9e6-5e90-458c-bb0f-afc6be383dc1", "address": "fa:16:3e:0b:30:a6", "network": {"id": "948d327b-554a-4c1d-a483-9a067d60f6bc", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1383523654-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ba92e8896d7a4605bec96ce7ee7d4a4d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "471f65a5-21ea-45e3-a722-4e204ed65673", "external-id": "nsx-vlan-transportzone-139", "segmentation_id": 139, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4160b9e6-5e", "ovs_interfaceid": "4160b9e6-5e90-458c-bb0f-afc6be383dc1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1995.555025] env[63279]: DEBUG oslo_concurrency.lockutils [None req-71c39255-559f-4f75-b101-e68493303b43 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.711s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1995.555025] env[63279]: DEBUG nova.compute.manager [None req-71c39255-559f-4f75-b101-e68493303b43 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] [instance: 58392790-b297-4894-8d81-e5cbda69872b] Start building networks asynchronously for instance. 
{{(pid=63279) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1995.556481] env[63279]: DEBUG oslo_concurrency.lockutils [None req-38ca3fe5-945f-4d2c-98c3-74f4e2b459ed tempest-ServerAddressesNegativeTestJSON-377386487 tempest-ServerAddressesNegativeTestJSON-377386487-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 34.115s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1995.556841] env[63279]: DEBUG oslo_concurrency.lockutils [None req-38ca3fe5-945f-4d2c-98c3-74f4e2b459ed tempest-ServerAddressesNegativeTestJSON-377386487 tempest-ServerAddressesNegativeTestJSON-377386487-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1995.559941] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b2797209-49d4-41bc-9939-7636d6375b08 tempest-VolumesAssistedSnapshotsTest-1202308672 tempest-VolumesAssistedSnapshotsTest-1202308672-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 29.032s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1995.560339] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b2797209-49d4-41bc-9939-7636d6375b08 tempest-VolumesAssistedSnapshotsTest-1202308672 tempest-VolumesAssistedSnapshotsTest-1202308672-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1995.562258] env[63279]: DEBUG oslo_concurrency.lockutils [None req-3b1863be-7a23-490d-9c3e-8eb295bf6202 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 24.613s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1995.564058] env[63279]: INFO nova.compute.claims [None req-3b1863be-7a23-490d-9c3e-8eb295bf6202 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] [instance: 8ccb4293-927a-45ba-82e9-9f1b4d5985cc] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1995.608238] env[63279]: INFO nova.scheduler.client.report [None req-b2797209-49d4-41bc-9939-7636d6375b08 tempest-VolumesAssistedSnapshotsTest-1202308672 tempest-VolumesAssistedSnapshotsTest-1202308672-project-member] Deleted allocations for instance 5d4909ea-396c-45ba-9ff5-acb8576150b3 [ 1995.610351] env[63279]: INFO nova.scheduler.client.report [None req-38ca3fe5-945f-4d2c-98c3-74f4e2b459ed tempest-ServerAddressesNegativeTestJSON-377386487 tempest-ServerAddressesNegativeTestJSON-377386487-project-member] Deleted allocations for instance c287072d-0ce9-4075-8895-0f64326ac303 [ 1995.630953] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087205, 'name': CreateVM_Task} progress is 25%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1995.659100] env[63279]: DEBUG oslo_concurrency.lockutils [None req-2774b0bb-ce28-4633-a70c-66ac2aee4c3a tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Acquiring lock "ad435281-55a0-418a-8400-5c461a5c15ef" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1995.659429] env[63279]: DEBUG oslo_concurrency.lockutils [None req-2774b0bb-ce28-4633-a70c-66ac2aee4c3a tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Lock "ad435281-55a0-418a-8400-5c461a5c15ef" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1995.659684] env[63279]: DEBUG oslo_concurrency.lockutils [None req-2774b0bb-ce28-4633-a70c-66ac2aee4c3a tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Acquiring lock "ad435281-55a0-418a-8400-5c461a5c15ef-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1995.659948] env[63279]: DEBUG oslo_concurrency.lockutils [None req-2774b0bb-ce28-4633-a70c-66ac2aee4c3a tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Lock "ad435281-55a0-418a-8400-5c461a5c15ef-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1995.660176] env[63279]: DEBUG oslo_concurrency.lockutils [None req-2774b0bb-ce28-4633-a70c-66ac2aee4c3a tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Lock "ad435281-55a0-418a-8400-5c461a5c15ef-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1995.662653] env[63279]: INFO nova.compute.manager [None req-2774b0bb-ce28-4633-a70c-66ac2aee4c3a tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: ad435281-55a0-418a-8400-5c461a5c15ef] Terminating instance [ 1995.771077] env[63279]: DEBUG oslo_concurrency.lockutils [req-29db22f8-afae-4654-bddd-2e12f6bdb928 req-43876836-ae86-409b-844d-0127da125e1e service nova] Releasing lock "refresh_cache-357f08c9-4de9-4b84-8384-6bf130872f40" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1996.070119] env[63279]: DEBUG nova.compute.utils [None req-71c39255-559f-4f75-b101-e68493303b43 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Using /dev/sd instead of None {{(pid=63279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1996.075171] env[63279]: DEBUG nova.compute.manager [None req-71c39255-559f-4f75-b101-e68493303b43 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] [instance: 
58392790-b297-4894-8d81-e5cbda69872b] Allocating IP information in the background. {{(pid=63279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1996.075171] env[63279]: DEBUG nova.network.neutron [None req-71c39255-559f-4f75-b101-e68493303b43 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] [instance: 58392790-b297-4894-8d81-e5cbda69872b] allocate_for_instance() {{(pid=63279) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1996.124970] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b2797209-49d4-41bc-9939-7636d6375b08 tempest-VolumesAssistedSnapshotsTest-1202308672 tempest-VolumesAssistedSnapshotsTest-1202308672-project-member] Lock "5d4909ea-396c-45ba-9ff5-acb8576150b3" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 33.718s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1996.129047] env[63279]: DEBUG oslo_concurrency.lockutils [None req-38ca3fe5-945f-4d2c-98c3-74f4e2b459ed tempest-ServerAddressesNegativeTestJSON-377386487 tempest-ServerAddressesNegativeTestJSON-377386487-project-member] Lock "c287072d-0ce9-4075-8895-0f64326ac303" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 38.291s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1996.140572] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087205, 'name': CreateVM_Task} progress is 25%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1996.165872] env[63279]: DEBUG nova.compute.manager [None req-2774b0bb-ce28-4633-a70c-66ac2aee4c3a tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: ad435281-55a0-418a-8400-5c461a5c15ef] Start destroying the instance on the hypervisor. 
{{(pid=63279) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1996.166103] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-2774b0bb-ce28-4633-a70c-66ac2aee4c3a tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: ad435281-55a0-418a-8400-5c461a5c15ef] Destroying instance {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1996.167142] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dfa6f5b1-f3ca-43db-9428-1910e922f4c6 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1996.178316] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-2774b0bb-ce28-4633-a70c-66ac2aee4c3a tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: ad435281-55a0-418a-8400-5c461a5c15ef] Powering off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1996.178641] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f3fcf16a-4a2d-4082-9395-335644c8bee0 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1996.186079] env[63279]: DEBUG oslo_vmware.api [None req-2774b0bb-ce28-4633-a70c-66ac2aee4c3a tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Waiting for the task: (returnval){ [ 1996.186079] env[63279]: value = "task-2087208" [ 1996.186079] env[63279]: _type = "Task" [ 1996.186079] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1996.199380] env[63279]: DEBUG oslo_vmware.api [None req-2774b0bb-ce28-4633-a70c-66ac2aee4c3a tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Task: {'id': task-2087208, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1996.236264] env[63279]: DEBUG nova.policy [None req-71c39255-559f-4f75-b101-e68493303b43 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8c9e365ca4f941f19cd9e82676a52acc', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3f6c6f65521a440fb80278bbff2d0ed0', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63279) authorize /opt/stack/nova/nova/policy.py:192}} [ 1996.577494] env[63279]: DEBUG nova.compute.manager [None req-71c39255-559f-4f75-b101-e68493303b43 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] [instance: 58392790-b297-4894-8d81-e5cbda69872b] Start building block device mappings for instance. {{(pid=63279) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1996.645619] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087205, 'name': CreateVM_Task} progress is 25%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1996.701237] env[63279]: DEBUG oslo_vmware.api [None req-2774b0bb-ce28-4633-a70c-66ac2aee4c3a tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Task: {'id': task-2087208, 'name': PowerOffVM_Task, 'duration_secs': 0.253861} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1996.701237] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-2774b0bb-ce28-4633-a70c-66ac2aee4c3a tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: ad435281-55a0-418a-8400-5c461a5c15ef] Powered off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1996.701237] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-2774b0bb-ce28-4633-a70c-66ac2aee4c3a tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: ad435281-55a0-418a-8400-5c461a5c15ef] Unregistering the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1996.701237] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-095b1621-7b91-476f-b54b-d6d2ee329967 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1997.032628] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3289533-5a87-407c-b879-f5f68d097d44 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1997.042650] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-505351f3-3eb1-4801-8e9d-40bdc5edc562 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1997.083574] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98d22b7a-faca-4927-91a5-12553a90461a {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1997.097679] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99914ce6-7f31-4596-a01d-ef4c341b7a38 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1997.113042] env[63279]: DEBUG nova.compute.provider_tree [None req-3b1863be-7a23-490d-9c3e-8eb295bf6202 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Inventory has not changed in ProviderTree for provider: 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1997.140984] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087205, 'name': CreateVM_Task} progress is 25%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1997.234013] env[63279]: DEBUG oslo_concurrency.lockutils [None req-80ef8ca7-1c6d-429c-800c-69ee59053ed2 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Acquiring lock "b981ac83-6c23-4d44-bd28-12da30d746bd" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1997.235944] env[63279]: DEBUG oslo_concurrency.lockutils [None req-80ef8ca7-1c6d-429c-800c-69ee59053ed2 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Lock "b981ac83-6c23-4d44-bd28-12da30d746bd" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1997.418444] env[63279]: DEBUG nova.network.neutron [None req-71c39255-559f-4f75-b101-e68493303b43 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] [instance: 58392790-b297-4894-8d81-e5cbda69872b] Successfully created port: 23e2695e-f865-4e3f-9f26-d5bd599cf889 {{(pid=63279) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1997.594660] env[63279]: DEBUG nova.compute.manager [None req-71c39255-559f-4f75-b101-e68493303b43 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] [instance: 58392790-b297-4894-8d81-e5cbda69872b] Start spawning the instance on the hypervisor. {{(pid=63279) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1997.617200] env[63279]: DEBUG nova.scheduler.client.report [None req-3b1863be-7a23-490d-9c3e-8eb295bf6202 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Inventory has not changed for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1997.629031] env[63279]: DEBUG nova.virt.hardware [None req-71c39255-559f-4f75-b101-e68493303b43 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-13T17:30:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta 
ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-13T17:30:01Z,direct_url=,disk_format='vmdk',id=30887889-e45b-4f67-8b3c-16216e594a90,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a935e86eee0a4c38adfe0367d2097a61',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-13T17:30:02Z,virtual_size=,visibility=), allow threads: False {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1997.629374] env[63279]: DEBUG nova.virt.hardware [None req-71c39255-559f-4f75-b101-e68493303b43 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Flavor limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1997.629552] env[63279]: DEBUG nova.virt.hardware [None req-71c39255-559f-4f75-b101-e68493303b43 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Image limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1997.629831] env[63279]: DEBUG nova.virt.hardware [None req-71c39255-559f-4f75-b101-e68493303b43 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Flavor pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1997.629983] env[63279]: DEBUG nova.virt.hardware [None req-71c39255-559f-4f75-b101-e68493303b43 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Image pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1997.630185] env[63279]: DEBUG nova.virt.hardware [None req-71c39255-559f-4f75-b101-e68493303b43 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1997.630469] env[63279]: DEBUG nova.virt.hardware [None req-71c39255-559f-4f75-b101-e68493303b43 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1997.630696] env[63279]: DEBUG nova.virt.hardware [None req-71c39255-559f-4f75-b101-e68493303b43 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1997.630890] env[63279]: DEBUG nova.virt.hardware [None req-71c39255-559f-4f75-b101-e68493303b43 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Got 1 possible topologies {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1997.631085] env[63279]: DEBUG nova.virt.hardware [None req-71c39255-559f-4f75-b101-e68493303b43 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1997.631276] env[63279]: DEBUG nova.virt.hardware 
[None req-71c39255-559f-4f75-b101-e68493303b43 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1997.632723] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4fa0e9f8-9517-4be2-9fab-83b25cccf6f2 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1997.649481] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087205, 'name': CreateVM_Task} progress is 25%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1997.651917] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15559cc0-bbda-4ebd-ab83-7a802056c326 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1998.128030] env[63279]: DEBUG oslo_concurrency.lockutils [None req-3b1863be-7a23-490d-9c3e-8eb295bf6202 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.563s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1998.128030] env[63279]: DEBUG nova.compute.manager [None req-3b1863be-7a23-490d-9c3e-8eb295bf6202 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] [instance: 8ccb4293-927a-45ba-82e9-9f1b4d5985cc] Start building networks asynchronously for instance. {{(pid=63279) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1998.131763] env[63279]: DEBUG oslo_concurrency.lockutils [None req-0248352a-b141-4db1-ac2b-5125f47ee5eb tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 25.519s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1998.135987] env[63279]: INFO nova.compute.claims [None req-0248352a-b141-4db1-ac2b-5125f47ee5eb tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] [instance: 64e92bfc-c0d0-4918-9ba2-45ffedbf7e39] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1998.151020] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087205, 'name': CreateVM_Task} progress is 25%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1998.595539] env[63279]: DEBUG oslo_concurrency.lockutils [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] Acquiring lock "795560b4-ccdc-4012-8130-042dcb94085f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1998.595539] env[63279]: DEBUG oslo_concurrency.lockutils [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] Lock "795560b4-ccdc-4012-8130-042dcb94085f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1998.628091] env[63279]: DEBUG oslo_concurrency.lockutils [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] Acquiring lock "cd00cb0e-30e5-4a0c-8612-ea92e5e32edd" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1998.628531] env[63279]: DEBUG oslo_concurrency.lockutils [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] Lock "cd00cb0e-30e5-4a0c-8612-ea92e5e32edd" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1998.642080] env[63279]: DEBUG nova.compute.utils [None req-3b1863be-7a23-490d-9c3e-8eb295bf6202 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Using /dev/sd instead of None {{(pid=63279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1998.645386] env[63279]: DEBUG nova.compute.manager [None req-3b1863be-7a23-490d-9c3e-8eb295bf6202 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] [instance: 8ccb4293-927a-45ba-82e9-9f1b4d5985cc] Allocating IP information in the background. {{(pid=63279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1998.645944] env[63279]: DEBUG nova.network.neutron [None req-3b1863be-7a23-490d-9c3e-8eb295bf6202 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] [instance: 8ccb4293-927a-45ba-82e9-9f1b4d5985cc] allocate_for_instance() {{(pid=63279) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1998.661128] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087205, 'name': CreateVM_Task} progress is 25%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1998.672029] env[63279]: DEBUG oslo_concurrency.lockutils [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] Acquiring lock "fd9b1666-8e06-4ed0-9187-05a40e136a1d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1998.672029] env[63279]: DEBUG oslo_concurrency.lockutils [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] Lock "fd9b1666-8e06-4ed0-9187-05a40e136a1d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1998.817025] env[63279]: DEBUG nova.policy [None req-3b1863be-7a23-490d-9c3e-8eb295bf6202 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'bfdf4a1d3ff3404fbc0bb9d3cc75a6dd', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '651b8183237a4e6dbef36aa2fb419f1b', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63279) authorize /opt/stack/nova/nova/policy.py:192}} [ 1999.146017] env[63279]: DEBUG nova.compute.manager [None req-3b1863be-7a23-490d-9c3e-8eb295bf6202 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] [instance: 8ccb4293-927a-45ba-82e9-9f1b4d5985cc] Start building block device mappings for instance. {{(pid=63279) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1999.167962] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087205, 'name': CreateVM_Task} progress is 25%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1999.674144] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087205, 'name': CreateVM_Task} progress is 25%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1999.750327] env[63279]: DEBUG nova.network.neutron [None req-3b1863be-7a23-490d-9c3e-8eb295bf6202 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] [instance: 8ccb4293-927a-45ba-82e9-9f1b4d5985cc] Successfully created port: c2c5d3a4-c909-4508-97e5-c87eee8b7d50 {{(pid=63279) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1999.774173] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6a2fbe4-5098-420e-b77c-37e42cceb341 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1999.782731] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8a9f49f-ae72-4573-8c86-222c41af6ce2 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1999.820364] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47e18172-cd5e-40c9-a411-dc3d6e6e40a3 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1999.833944] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-341cd4ae-fe68-4651-9445-146455835196 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1999.849293] env[63279]: DEBUG nova.compute.provider_tree [None req-0248352a-b141-4db1-ac2b-5125f47ee5eb tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Inventory has not changed in ProviderTree for provider: 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2000.167869] env[63279]: DEBUG nova.compute.manager [None req-3b1863be-7a23-490d-9c3e-8eb295bf6202 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] [instance: 8ccb4293-927a-45ba-82e9-9f1b4d5985cc] Start spawning the instance on the hypervisor. {{(pid=63279) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 2000.169892] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087205, 'name': CreateVM_Task} progress is 25%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2000.192295] env[63279]: DEBUG nova.virt.hardware [None req-3b1863be-7a23-490d-9c3e-8eb295bf6202 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-13T17:30:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-13T17:30:01Z,direct_url=,disk_format='vmdk',id=30887889-e45b-4f67-8b3c-16216e594a90,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a935e86eee0a4c38adfe0367d2097a61',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-13T17:30:02Z,virtual_size=,visibility=), allow threads: False {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2000.192554] env[63279]: DEBUG nova.virt.hardware [None req-3b1863be-7a23-490d-9c3e-8eb295bf6202 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Flavor limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2000.192712] env[63279]: DEBUG nova.virt.hardware [None req-3b1863be-7a23-490d-9c3e-8eb295bf6202 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Image limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2000.192898] env[63279]: DEBUG nova.virt.hardware [None req-3b1863be-7a23-490d-9c3e-8eb295bf6202 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Flavor pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2000.193159] env[63279]: DEBUG nova.virt.hardware [None req-3b1863be-7a23-490d-9c3e-8eb295bf6202 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Image pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2000.193374] env[63279]: DEBUG nova.virt.hardware [None req-3b1863be-7a23-490d-9c3e-8eb295bf6202 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2000.193556] env[63279]: DEBUG nova.virt.hardware [None req-3b1863be-7a23-490d-9c3e-8eb295bf6202 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 2000.193883] env[63279]: DEBUG nova.virt.hardware [None req-3b1863be-7a23-490d-9c3e-8eb295bf6202 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Build topologies for 1 
vcpu(s) 1:1:1 {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 2000.193883] env[63279]: DEBUG nova.virt.hardware [None req-3b1863be-7a23-490d-9c3e-8eb295bf6202 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Got 1 possible topologies {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2000.194094] env[63279]: DEBUG nova.virt.hardware [None req-3b1863be-7a23-490d-9c3e-8eb295bf6202 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2000.194501] env[63279]: DEBUG nova.virt.hardware [None req-3b1863be-7a23-490d-9c3e-8eb295bf6202 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2000.195246] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10abe068-092e-43c8-af11-660d1ac11da3 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2000.204038] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-860d1ab9-5b5c-43b4-a325-6abd249b0e68 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2000.356285] env[63279]: DEBUG nova.scheduler.client.report [None req-0248352a-b141-4db1-ac2b-5125f47ee5eb tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Inventory has not changed for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2000.388631] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-2774b0bb-ce28-4633-a70c-66ac2aee4c3a tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: ad435281-55a0-418a-8400-5c461a5c15ef] Unregistered the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2000.388631] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-2774b0bb-ce28-4633-a70c-66ac2aee4c3a tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: ad435281-55a0-418a-8400-5c461a5c15ef] Deleting contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2000.388631] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-2774b0bb-ce28-4633-a70c-66ac2aee4c3a tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Deleting the datastore file [datastore1] ad435281-55a0-418a-8400-5c461a5c15ef {{(pid=63279) file_delete 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2000.389121] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1d082ea5-22dc-47e4-81cd-f67f3942530f {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2000.396834] env[63279]: DEBUG oslo_vmware.api [None req-2774b0bb-ce28-4633-a70c-66ac2aee4c3a tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Waiting for the task: (returnval){ [ 2000.396834] env[63279]: value = "task-2087210" [ 2000.396834] env[63279]: _type = "Task" [ 2000.396834] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2000.413378] env[63279]: DEBUG oslo_vmware.api [None req-2774b0bb-ce28-4633-a70c-66ac2aee4c3a tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Task: {'id': task-2087210, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2000.450762] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-edb6b700-e848-4325-a4d2-78dc8c4a27f3 tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] [instance: 5d4be656-defe-4332-b97e-e88b107ca4a1] Unregistered the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2000.451026] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-edb6b700-e848-4325-a4d2-78dc8c4a27f3 tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] [instance: 5d4be656-defe-4332-b97e-e88b107ca4a1] Deleting contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2000.451217] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-edb6b700-e848-4325-a4d2-78dc8c4a27f3 tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Deleting the datastore file [datastore1] 5d4be656-defe-4332-b97e-e88b107ca4a1 {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2000.453146] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d45068f2-1de0-4312-9f59-dcb64ac5a2e4 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2000.461408] env[63279]: DEBUG oslo_vmware.api [None req-edb6b700-e848-4325-a4d2-78dc8c4a27f3 tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Waiting for the task: (returnval){ [ 2000.461408] env[63279]: value = "task-2087211" [ 2000.461408] env[63279]: _type = "Task" [ 2000.461408] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2000.474996] env[63279]: DEBUG oslo_vmware.api [None req-edb6b700-e848-4325-a4d2-78dc8c4a27f3 tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Task: {'id': task-2087211, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2000.672876] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087205, 'name': CreateVM_Task, 'duration_secs': 8.477213} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2000.673441] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 32789822-cb54-43e7-beae-b5ed3002f4ad] Created VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2000.675515] env[63279]: DEBUG oslo_concurrency.lockutils [None req-ccac180c-f8b1-4fd5-a765-713ff5939c3d tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2000.676386] env[63279]: DEBUG oslo_concurrency.lockutils [None req-ccac180c-f8b1-4fd5-a765-713ff5939c3d tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2000.676386] env[63279]: DEBUG oslo_concurrency.lockutils [None req-ccac180c-f8b1-4fd5-a765-713ff5939c3d tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2000.679676] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f0ce7439-631d-465e-84e8-07fef30c6682 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2000.683352] env[63279]: DEBUG oslo_vmware.api [None req-ccac180c-f8b1-4fd5-a765-713ff5939c3d tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Waiting for the task: (returnval){ [ 2000.683352] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52fe4cd2-0a17-6fc4-9a14-f0eb6d169fa1" [ 2000.683352] env[63279]: _type = "Task" [ 2000.683352] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2000.693759] env[63279]: DEBUG oslo_vmware.api [None req-ccac180c-f8b1-4fd5-a765-713ff5939c3d tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52fe4cd2-0a17-6fc4-9a14-f0eb6d169fa1, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2000.864131] env[63279]: DEBUG oslo_concurrency.lockutils [None req-0248352a-b141-4db1-ac2b-5125f47ee5eb tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.732s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2000.867222] env[63279]: DEBUG nova.compute.manager [None req-0248352a-b141-4db1-ac2b-5125f47ee5eb tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] [instance: 64e92bfc-c0d0-4918-9ba2-45ffedbf7e39] Start building networks asynchronously for instance. {{(pid=63279) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 2000.869666] env[63279]: DEBUG oslo_concurrency.lockutils [None req-efbb3008-c956-4d6e-b972-b10e6dc25335 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 23.632s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2000.871333] env[63279]: INFO nova.compute.claims [None req-efbb3008-c956-4d6e-b972-b10e6dc25335 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] [instance: f6a5d157-d58c-4c7e-b6e8-f2dc7aaebd9b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2000.909223] env[63279]: DEBUG oslo_vmware.api [None req-2774b0bb-ce28-4633-a70c-66ac2aee4c3a tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Task: {'id': task-2087210, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.151745} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2000.909567] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-2774b0bb-ce28-4633-a70c-66ac2aee4c3a tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Deleted the datastore file {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2000.909783] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-2774b0bb-ce28-4633-a70c-66ac2aee4c3a tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: ad435281-55a0-418a-8400-5c461a5c15ef] Deleted contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2000.909979] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-2774b0bb-ce28-4633-a70c-66ac2aee4c3a tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: ad435281-55a0-418a-8400-5c461a5c15ef] Instance destroyed {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2000.910149] env[63279]: INFO nova.compute.manager [None req-2774b0bb-ce28-4633-a70c-66ac2aee4c3a tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: ad435281-55a0-418a-8400-5c461a5c15ef] Took 4.74 seconds to destroy the instance on the hypervisor. 
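The repeated "Task: {'id': task-..., 'name': ...} progress is N%." entries above, and the final "... 'duration_secs': N.NNN} completed successfully." lines, come from the task-polling loop referenced in the log at oslo_vmware/api.py (wait_for_task at :397, _poll_task at :434): a vSphere method such as CreateVM_Task or DeleteDatastoreFile_Task returns a task reference, which is then polled until it reports success or failure. The following is only a minimal, self-contained sketch of that pattern, not the oslo.vmware implementation; the get_task_info callable, the TaskInfo tuple and the poll interval are assumptions introduced for illustration.

    # Minimal sketch of the task-poll pattern visible in the log above.
    # Assumptions (not taken from oslo.vmware): `get_task_info` is any callable
    # returning a TaskInfo for a task id; the fields mirror what the log shows.
    import time
    from collections import namedtuple

    TaskInfo = namedtuple("TaskInfo", "state progress error")  # hypothetical shape


    def wait_for_task(task_id, get_task_info, poll_interval=0.5):
        """Poll a vSphere-style task until it finishes, reporting progress."""
        started = time.monotonic()
        while True:
            info = get_task_info(task_id)
            if info.state == "running":
                # Corresponds to: "Task: {'id': ..., 'name': ...} progress is N%."
                print(f"Task {task_id} progress is {info.progress}%.")
                time.sleep(poll_interval)
                continue
            duration = time.monotonic() - started
            if info.state == "success":
                # Corresponds to: "... 'duration_secs': N.NNN} completed successfully."
                print(f"Task {task_id} completed successfully in {duration:.3f}s")
                return
            raise RuntimeError(f"Task {task_id} failed after {duration:.3f}s: {info.error}")

A loop of this shape is what produces the trace seen here for task-2087205, which stays at 25% across several polls before completing with duration_secs=8.477213.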
[ 2000.910573] env[63279]: DEBUG oslo.service.loopingcall [None req-2774b0bb-ce28-4633-a70c-66ac2aee4c3a tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2000.910649] env[63279]: DEBUG nova.compute.manager [-] [instance: ad435281-55a0-418a-8400-5c461a5c15ef] Deallocating network for instance {{(pid=63279) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2000.910713] env[63279]: DEBUG nova.network.neutron [-] [instance: ad435281-55a0-418a-8400-5c461a5c15ef] deallocate_for_instance() {{(pid=63279) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2000.976099] env[63279]: DEBUG oslo_vmware.api [None req-edb6b700-e848-4325-a4d2-78dc8c4a27f3 tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Task: {'id': task-2087211, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.155618} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2000.976643] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-edb6b700-e848-4325-a4d2-78dc8c4a27f3 tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Deleted the datastore file {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2000.977377] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-edb6b700-e848-4325-a4d2-78dc8c4a27f3 tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] [instance: 5d4be656-defe-4332-b97e-e88b107ca4a1] Deleted contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2000.977568] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-edb6b700-e848-4325-a4d2-78dc8c4a27f3 tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] [instance: 5d4be656-defe-4332-b97e-e88b107ca4a1] Instance destroyed {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2000.977775] env[63279]: INFO nova.compute.manager [None req-edb6b700-e848-4325-a4d2-78dc8c4a27f3 tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] [instance: 5d4be656-defe-4332-b97e-e88b107ca4a1] Took 7.40 seconds to destroy the instance on the hypervisor. [ 2000.978472] env[63279]: DEBUG oslo.service.loopingcall [None req-edb6b700-e848-4325-a4d2-78dc8c4a27f3 tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2000.978733] env[63279]: DEBUG nova.compute.manager [-] [instance: 5d4be656-defe-4332-b97e-e88b107ca4a1] Deallocating network for instance {{(pid=63279) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2000.978836] env[63279]: DEBUG nova.network.neutron [-] [instance: 5d4be656-defe-4332-b97e-e88b107ca4a1] deallocate_for_instance() {{(pid=63279) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2001.202094] env[63279]: DEBUG oslo_vmware.api [None req-ccac180c-f8b1-4fd5-a765-713ff5939c3d tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52fe4cd2-0a17-6fc4-9a14-f0eb6d169fa1, 'name': SearchDatastore_Task, 'duration_secs': 0.010107} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2001.202094] env[63279]: DEBUG oslo_concurrency.lockutils [None req-ccac180c-f8b1-4fd5-a765-713ff5939c3d tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2001.202094] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-ccac180c-f8b1-4fd5-a765-713ff5939c3d tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] [instance: 32789822-cb54-43e7-beae-b5ed3002f4ad] Processing image 30887889-e45b-4f67-8b3c-16216e594a90 {{(pid=63279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2001.202094] env[63279]: DEBUG oslo_concurrency.lockutils [None req-ccac180c-f8b1-4fd5-a765-713ff5939c3d tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2001.202323] env[63279]: DEBUG oslo_concurrency.lockutils [None req-ccac180c-f8b1-4fd5-a765-713ff5939c3d tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2001.202458] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-ccac180c-f8b1-4fd5-a765-713ff5939c3d tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2001.202735] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e00043ab-59cc-4796-9000-440fa03490a5 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2001.211952] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-ccac180c-f8b1-4fd5-a765-713ff5939c3d tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Created 
directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2001.212311] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-ccac180c-f8b1-4fd5-a765-713ff5939c3d tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2001.212855] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a186091e-b46d-4097-b3be-b713261b88e1 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2001.220879] env[63279]: DEBUG oslo_vmware.api [None req-ccac180c-f8b1-4fd5-a765-713ff5939c3d tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Waiting for the task: (returnval){ [ 2001.220879] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]5264b77c-b404-148d-c677-3b602aa9429d" [ 2001.220879] env[63279]: _type = "Task" [ 2001.220879] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2001.237103] env[63279]: DEBUG oslo_vmware.api [None req-ccac180c-f8b1-4fd5-a765-713ff5939c3d tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]5264b77c-b404-148d-c677-3b602aa9429d, 'name': SearchDatastore_Task, 'duration_secs': 0.008658} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2001.240077] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3f744e00-7735-4823-b0da-40951d5af110 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2001.247313] env[63279]: DEBUG oslo_vmware.api [None req-ccac180c-f8b1-4fd5-a765-713ff5939c3d tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Waiting for the task: (returnval){ [ 2001.247313] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52d7df8f-8483-6cc7-3e3a-592b8e57ffad" [ 2001.247313] env[63279]: _type = "Task" [ 2001.247313] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2001.256847] env[63279]: DEBUG oslo_vmware.api [None req-ccac180c-f8b1-4fd5-a765-713ff5939c3d tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52d7df8f-8483-6cc7-3e3a-592b8e57ffad, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2001.377994] env[63279]: DEBUG nova.compute.utils [None req-0248352a-b141-4db1-ac2b-5125f47ee5eb tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Using /dev/sd instead of None {{(pid=63279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2001.383999] env[63279]: DEBUG nova.compute.manager [None req-0248352a-b141-4db1-ac2b-5125f47ee5eb tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] [instance: 64e92bfc-c0d0-4918-9ba2-45ffedbf7e39] Allocating IP information in the background. {{(pid=63279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 2001.384851] env[63279]: DEBUG nova.network.neutron [None req-0248352a-b141-4db1-ac2b-5125f47ee5eb tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] [instance: 64e92bfc-c0d0-4918-9ba2-45ffedbf7e39] allocate_for_instance() {{(pid=63279) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2001.635754] env[63279]: DEBUG nova.policy [None req-0248352a-b141-4db1-ac2b-5125f47ee5eb tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '17fbc0127a5944ac933232873f282980', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '77ee2145dda94e2b85eeb7379ed98e26', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63279) authorize /opt/stack/nova/nova/policy.py:192}} [ 2001.764891] env[63279]: DEBUG oslo_vmware.api [None req-ccac180c-f8b1-4fd5-a765-713ff5939c3d tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52d7df8f-8483-6cc7-3e3a-592b8e57ffad, 'name': SearchDatastore_Task, 'duration_secs': 0.009636} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2001.768770] env[63279]: DEBUG oslo_concurrency.lockutils [None req-ccac180c-f8b1-4fd5-a765-713ff5939c3d tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2001.768770] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-ccac180c-f8b1-4fd5-a765-713ff5939c3d tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] 32789822-cb54-43e7-beae-b5ed3002f4ad/32789822-cb54-43e7-beae-b5ed3002f4ad.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2001.768770] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-79bee046-bce4-4a53-b2d9-ac6f6932b0e1 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2001.773190] env[63279]: DEBUG oslo_vmware.api [None req-ccac180c-f8b1-4fd5-a765-713ff5939c3d tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Waiting for the task: (returnval){ [ 2001.773190] env[63279]: value = "task-2087212" [ 2001.773190] env[63279]: _type = "Task" [ 2001.773190] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2001.783310] env[63279]: DEBUG oslo_vmware.api [None req-ccac180c-f8b1-4fd5-a765-713ff5939c3d tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Task: {'id': task-2087212, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2001.880467] env[63279]: DEBUG nova.compute.manager [None req-0248352a-b141-4db1-ac2b-5125f47ee5eb tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] [instance: 64e92bfc-c0d0-4918-9ba2-45ffedbf7e39] Start building block device mappings for instance. {{(pid=63279) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 2002.286231] env[63279]: DEBUG oslo_vmware.api [None req-ccac180c-f8b1-4fd5-a765-713ff5939c3d tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Task: {'id': task-2087212, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.46151} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2002.289891] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-ccac180c-f8b1-4fd5-a765-713ff5939c3d tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] 32789822-cb54-43e7-beae-b5ed3002f4ad/32789822-cb54-43e7-beae-b5ed3002f4ad.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2002.290421] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-ccac180c-f8b1-4fd5-a765-713ff5939c3d tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] [instance: 32789822-cb54-43e7-beae-b5ed3002f4ad] Extending root virtual disk to 1048576 {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2002.291690] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-600f0f7d-12bd-4e59-afa9-43953fdef05e {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2002.299967] env[63279]: DEBUG oslo_vmware.api [None req-ccac180c-f8b1-4fd5-a765-713ff5939c3d tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Waiting for the task: (returnval){ [ 2002.299967] env[63279]: value = "task-2087213" [ 2002.299967] env[63279]: _type = "Task" [ 2002.299967] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2002.313315] env[63279]: DEBUG oslo_vmware.api [None req-ccac180c-f8b1-4fd5-a765-713ff5939c3d tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Task: {'id': task-2087213, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2002.324718] env[63279]: DEBUG nova.network.neutron [-] [instance: ad435281-55a0-418a-8400-5c461a5c15ef] Updating instance_info_cache with network_info: [] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2002.462892] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a9ff6db-6f06-41c4-96a4-b23a53d9bd87 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2002.472216] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf851b2d-f7f9-46bb-9649-84e978aff4d1 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2002.505392] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c6cdf74-4af8-4b4b-9235-d3c23cf377b5 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2002.513376] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b496d40d-b3ca-4829-993f-f8dcc2a05051 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2002.527524] env[63279]: DEBUG nova.compute.provider_tree [None req-efbb3008-c956-4d6e-b972-b10e6dc25335 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Inventory has not changed in ProviderTree for provider: 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2002.563189] env[63279]: DEBUG nova.network.neutron [None req-2fef03c7-ebe2-4875-b420-6389c982d497 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] [instance: 1b2ca21b-feea-4fc1-9ddc-99f144e4241a] Successfully updated port: a55a63d0-c628-4d90-b3dc-d8f0bf564e95 {{(pid=63279) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2002.564458] env[63279]: DEBUG nova.network.neutron [-] [instance: 5d4be656-defe-4332-b97e-e88b107ca4a1] Updating instance_info_cache with network_info: [] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2002.636879] env[63279]: DEBUG nova.network.neutron [None req-2fef03c7-ebe2-4875-b420-6389c982d497 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] [instance: fb124cfa-24b4-4712-b8cc-c87df5d6231b] Successfully updated port: 5fc8a732-f9cf-4494-b984-31c593d7106b {{(pid=63279) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2002.704071] env[63279]: DEBUG nova.network.neutron [None req-71c39255-559f-4f75-b101-e68493303b43 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] [instance: 58392790-b297-4894-8d81-e5cbda69872b] Successfully updated port: 23e2695e-f865-4e3f-9f26-d5bd599cf889 {{(pid=63279) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2002.745053] env[63279]: DEBUG nova.network.neutron [None req-3b1863be-7a23-490d-9c3e-8eb295bf6202 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] [instance: 8ccb4293-927a-45ba-82e9-9f1b4d5985cc] Successfully updated port: c2c5d3a4-c909-4508-97e5-c87eee8b7d50 
{{(pid=63279) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2002.766344] env[63279]: DEBUG nova.network.neutron [None req-0248352a-b141-4db1-ac2b-5125f47ee5eb tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] [instance: 64e92bfc-c0d0-4918-9ba2-45ffedbf7e39] Successfully created port: a84d472c-4f07-4a61-8eca-135f97267755 {{(pid=63279) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2002.811605] env[63279]: DEBUG oslo_vmware.api [None req-ccac180c-f8b1-4fd5-a765-713ff5939c3d tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Task: {'id': task-2087213, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.107091} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2002.812383] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-ccac180c-f8b1-4fd5-a765-713ff5939c3d tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] [instance: 32789822-cb54-43e7-beae-b5ed3002f4ad] Extended root virtual disk {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2002.813249] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b356aeaf-cf52-4879-a9c8-ed5b9e9c577d {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2002.838720] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-ccac180c-f8b1-4fd5-a765-713ff5939c3d tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] [instance: 32789822-cb54-43e7-beae-b5ed3002f4ad] Reconfiguring VM instance instance-00000030 to attach disk [datastore1] 32789822-cb54-43e7-beae-b5ed3002f4ad/32789822-cb54-43e7-beae-b5ed3002f4ad.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2002.839558] env[63279]: INFO nova.compute.manager [-] [instance: ad435281-55a0-418a-8400-5c461a5c15ef] Took 1.93 seconds to deallocate network for instance. [ 2002.839787] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-050d9075-43be-423f-a678-48940258a3a1 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2002.861910] env[63279]: DEBUG nova.compute.manager [req-f4c3e184-0baf-4deb-b5dc-a977f23dfea8 req-c9b8f3ab-2eca-49e2-8e83-3ce9457e20b0 service nova] [instance: ad435281-55a0-418a-8400-5c461a5c15ef] Received event network-vif-deleted-699a008e-52f8-4319-9e86-01acc5c037f9 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2002.871409] env[63279]: DEBUG oslo_vmware.api [None req-ccac180c-f8b1-4fd5-a765-713ff5939c3d tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Waiting for the task: (returnval){ [ 2002.871409] env[63279]: value = "task-2087214" [ 2002.871409] env[63279]: _type = "Task" [ 2002.871409] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2002.879821] env[63279]: DEBUG oslo_vmware.api [None req-ccac180c-f8b1-4fd5-a765-713ff5939c3d tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Task: {'id': task-2087214, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2002.894487] env[63279]: DEBUG nova.compute.manager [None req-0248352a-b141-4db1-ac2b-5125f47ee5eb tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] [instance: 64e92bfc-c0d0-4918-9ba2-45ffedbf7e39] Start spawning the instance on the hypervisor. {{(pid=63279) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 2002.924125] env[63279]: DEBUG nova.virt.hardware [None req-0248352a-b141-4db1-ac2b-5125f47ee5eb tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-13T17:30:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-13T17:30:01Z,direct_url=,disk_format='vmdk',id=30887889-e45b-4f67-8b3c-16216e594a90,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a935e86eee0a4c38adfe0367d2097a61',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-13T17:30:02Z,virtual_size=,visibility=), allow threads: False {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2002.924125] env[63279]: DEBUG nova.virt.hardware [None req-0248352a-b141-4db1-ac2b-5125f47ee5eb tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Flavor limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2002.924125] env[63279]: DEBUG nova.virt.hardware [None req-0248352a-b141-4db1-ac2b-5125f47ee5eb tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Image limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2002.924125] env[63279]: DEBUG nova.virt.hardware [None req-0248352a-b141-4db1-ac2b-5125f47ee5eb tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Flavor pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2002.924325] env[63279]: DEBUG nova.virt.hardware [None req-0248352a-b141-4db1-ac2b-5125f47ee5eb tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Image pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2002.924325] env[63279]: DEBUG nova.virt.hardware [None req-0248352a-b141-4db1-ac2b-5125f47ee5eb tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2002.924487] env[63279]: DEBUG nova.virt.hardware [None req-0248352a-b141-4db1-ac2b-5125f47ee5eb tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 2002.924645] env[63279]: DEBUG nova.virt.hardware [None req-0248352a-b141-4db1-ac2b-5125f47ee5eb 
tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 2002.924807] env[63279]: DEBUG nova.virt.hardware [None req-0248352a-b141-4db1-ac2b-5125f47ee5eb tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Got 1 possible topologies {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2002.924965] env[63279]: DEBUG nova.virt.hardware [None req-0248352a-b141-4db1-ac2b-5125f47ee5eb tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2002.925155] env[63279]: DEBUG nova.virt.hardware [None req-0248352a-b141-4db1-ac2b-5125f47ee5eb tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2002.926372] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c198002a-5a40-445d-b275-f38c6ab5fb65 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2002.935143] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03239d3e-75fa-4af2-bfa8-acb5f5ea7095 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2002.942824] env[63279]: DEBUG nova.compute.manager [req-109cc5b4-8611-4bfa-a2f3-fd5fb616a07d req-4462c06d-46ca-49bc-b500-3bdd84c89dfa service nova] [instance: 1b2ca21b-feea-4fc1-9ddc-99f144e4241a] Received event network-vif-plugged-a55a63d0-c628-4d90-b3dc-d8f0bf564e95 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2002.943096] env[63279]: DEBUG oslo_concurrency.lockutils [req-109cc5b4-8611-4bfa-a2f3-fd5fb616a07d req-4462c06d-46ca-49bc-b500-3bdd84c89dfa service nova] Acquiring lock "1b2ca21b-feea-4fc1-9ddc-99f144e4241a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2002.943332] env[63279]: DEBUG oslo_concurrency.lockutils [req-109cc5b4-8611-4bfa-a2f3-fd5fb616a07d req-4462c06d-46ca-49bc-b500-3bdd84c89dfa service nova] Lock "1b2ca21b-feea-4fc1-9ddc-99f144e4241a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2002.943467] env[63279]: DEBUG oslo_concurrency.lockutils [req-109cc5b4-8611-4bfa-a2f3-fd5fb616a07d req-4462c06d-46ca-49bc-b500-3bdd84c89dfa service nova] Lock "1b2ca21b-feea-4fc1-9ddc-99f144e4241a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2002.943631] env[63279]: DEBUG nova.compute.manager [req-109cc5b4-8611-4bfa-a2f3-fd5fb616a07d req-4462c06d-46ca-49bc-b500-3bdd84c89dfa service nova] [instance: 1b2ca21b-feea-4fc1-9ddc-99f144e4241a] No waiting events found dispatching 
network-vif-plugged-a55a63d0-c628-4d90-b3dc-d8f0bf564e95 {{(pid=63279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 2002.943791] env[63279]: WARNING nova.compute.manager [req-109cc5b4-8611-4bfa-a2f3-fd5fb616a07d req-4462c06d-46ca-49bc-b500-3bdd84c89dfa service nova] [instance: 1b2ca21b-feea-4fc1-9ddc-99f144e4241a] Received unexpected event network-vif-plugged-a55a63d0-c628-4d90-b3dc-d8f0bf564e95 for instance with vm_state building and task_state spawning. [ 2003.032344] env[63279]: DEBUG nova.scheduler.client.report [None req-efbb3008-c956-4d6e-b972-b10e6dc25335 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Inventory has not changed for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2003.067416] env[63279]: DEBUG oslo_concurrency.lockutils [None req-2fef03c7-ebe2-4875-b420-6389c982d497 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Acquiring lock "refresh_cache-1b2ca21b-feea-4fc1-9ddc-99f144e4241a" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2003.067728] env[63279]: DEBUG oslo_concurrency.lockutils [None req-2fef03c7-ebe2-4875-b420-6389c982d497 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Acquired lock "refresh_cache-1b2ca21b-feea-4fc1-9ddc-99f144e4241a" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2003.067893] env[63279]: DEBUG nova.network.neutron [None req-2fef03c7-ebe2-4875-b420-6389c982d497 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] [instance: 1b2ca21b-feea-4fc1-9ddc-99f144e4241a] Building network info cache for instance {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2003.069164] env[63279]: INFO nova.compute.manager [-] [instance: 5d4be656-defe-4332-b97e-e88b107ca4a1] Took 2.09 seconds to deallocate network for instance. 
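The hardware and placement records above carry enough data to reproduce the two calculations they report: the CPU topology search for the m1.nano flavor (1 vCPU, no flavor or image limits, so the maxima default to 65536, which yields the single topology sockets=1, cores=1, threads=1) and the per-resource-class capacity implied by the inventory for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137. The sketch below is an illustration only, not Nova's or Placement's actual code; the plain factorization search and the (total - reserved) * allocation_ratio capacity formula are assumptions chosen to match the numbers logged here.

```python
# Illustrative sketch only -- reproduces the results reported in the log
# records above, not the nova.virt.hardware / placement implementations.

def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
    """Yield (sockets, cores, threads) triples whose product equals vcpus."""
    for sockets in range(1, min(vcpus, max_sockets) + 1):
        if vcpus % sockets:
            continue
        per_socket = vcpus // sockets
        for cores in range(1, min(per_socket, max_cores) + 1):
            if per_socket % cores:
                continue
            threads = per_socket // cores
            if threads <= max_threads:
                yield sockets, cores, threads


def usable_capacity(inventory):
    """Assumed formula: (total - reserved) * allocation_ratio per resource class."""
    return {
        rc: (inv["total"] - inv["reserved"]) * inv["allocation_ratio"]
        for rc, inv in inventory.items()
    }


if __name__ == "__main__":
    # Matches "Got 1 possible topologies ... VirtCPUTopology(cores=1,sockets=1,threads=1)".
    print(list(possible_topologies(1)))  # [(1, 1, 1)]

    # Figures taken from the inventory record logged for this provider.
    inventory = {
        "VCPU": {"total": 48, "reserved": 0, "allocation_ratio": 4.0},
        "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0},
        "DISK_GB": {"total": 400, "reserved": 0, "allocation_ratio": 1.0},
    }
    # Prints VCPU: 192.0, MEMORY_MB: 196078.0, DISK_GB: 400.0
    print(usable_capacity(inventory))
```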
[ 2003.140711] env[63279]: DEBUG oslo_concurrency.lockutils [None req-2fef03c7-ebe2-4875-b420-6389c982d497 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Acquiring lock "refresh_cache-fb124cfa-24b4-4712-b8cc-c87df5d6231b" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2003.141131] env[63279]: DEBUG oslo_concurrency.lockutils [None req-2fef03c7-ebe2-4875-b420-6389c982d497 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Acquired lock "refresh_cache-fb124cfa-24b4-4712-b8cc-c87df5d6231b" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2003.141131] env[63279]: DEBUG nova.network.neutron [None req-2fef03c7-ebe2-4875-b420-6389c982d497 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] [instance: fb124cfa-24b4-4712-b8cc-c87df5d6231b] Building network info cache for instance {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2003.210927] env[63279]: DEBUG oslo_concurrency.lockutils [None req-71c39255-559f-4f75-b101-e68493303b43 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Acquiring lock "refresh_cache-58392790-b297-4894-8d81-e5cbda69872b" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2003.210927] env[63279]: DEBUG oslo_concurrency.lockutils [None req-71c39255-559f-4f75-b101-e68493303b43 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Acquired lock "refresh_cache-58392790-b297-4894-8d81-e5cbda69872b" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2003.211444] env[63279]: DEBUG nova.network.neutron [None req-71c39255-559f-4f75-b101-e68493303b43 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] [instance: 58392790-b297-4894-8d81-e5cbda69872b] Building network info cache for instance {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2003.250432] env[63279]: DEBUG oslo_concurrency.lockutils [None req-3b1863be-7a23-490d-9c3e-8eb295bf6202 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Acquiring lock "refresh_cache-8ccb4293-927a-45ba-82e9-9f1b4d5985cc" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2003.250432] env[63279]: DEBUG oslo_concurrency.lockutils [None req-3b1863be-7a23-490d-9c3e-8eb295bf6202 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Acquired lock "refresh_cache-8ccb4293-927a-45ba-82e9-9f1b4d5985cc" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2003.250432] env[63279]: DEBUG nova.network.neutron [None req-3b1863be-7a23-490d-9c3e-8eb295bf6202 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] [instance: 8ccb4293-927a-45ba-82e9-9f1b4d5985cc] Building network info cache for instance {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2003.316719] env[63279]: DEBUG oslo_concurrency.lockutils [None req-203ba856-8750-4748-8a31-13e38e82c1be 
tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] Acquiring lock "6699de0a-b3f8-4d84-9c9b-d0f6899a606e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2003.316976] env[63279]: DEBUG oslo_concurrency.lockutils [None req-203ba856-8750-4748-8a31-13e38e82c1be tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] Lock "6699de0a-b3f8-4d84-9c9b-d0f6899a606e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2003.367492] env[63279]: DEBUG oslo_concurrency.lockutils [None req-2774b0bb-ce28-4633-a70c-66ac2aee4c3a tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2003.382742] env[63279]: DEBUG oslo_vmware.api [None req-ccac180c-f8b1-4fd5-a765-713ff5939c3d tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Task: {'id': task-2087214, 'name': ReconfigVM_Task, 'duration_secs': 0.287266} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2003.383188] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-ccac180c-f8b1-4fd5-a765-713ff5939c3d tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] [instance: 32789822-cb54-43e7-beae-b5ed3002f4ad] Reconfigured VM instance instance-00000030 to attach disk [datastore1] 32789822-cb54-43e7-beae-b5ed3002f4ad/32789822-cb54-43e7-beae-b5ed3002f4ad.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2003.384217] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a5bd5629-235f-44cc-8393-de965e4e2f3b {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2003.392170] env[63279]: DEBUG oslo_vmware.api [None req-ccac180c-f8b1-4fd5-a765-713ff5939c3d tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Waiting for the task: (returnval){ [ 2003.392170] env[63279]: value = "task-2087215" [ 2003.392170] env[63279]: _type = "Task" [ 2003.392170] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2003.406846] env[63279]: DEBUG oslo_vmware.api [None req-ccac180c-f8b1-4fd5-a765-713ff5939c3d tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Task: {'id': task-2087215, 'name': Rename_Task} progress is 6%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2003.539078] env[63279]: DEBUG oslo_concurrency.lockutils [None req-efbb3008-c956-4d6e-b972-b10e6dc25335 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.670s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2003.539633] env[63279]: DEBUG nova.compute.manager [None req-efbb3008-c956-4d6e-b972-b10e6dc25335 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] [instance: f6a5d157-d58c-4c7e-b6e8-f2dc7aaebd9b] Start building networks asynchronously for instance. {{(pid=63279) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 2003.545359] env[63279]: DEBUG oslo_concurrency.lockutils [None req-e555eb2f-a20c-4248-9729-967c1ca13219 tempest-ServerRescueTestJSONUnderV235-1989684258 tempest-ServerRescueTestJSONUnderV235-1989684258-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 26.271s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2003.545359] env[63279]: DEBUG nova.objects.instance [None req-e555eb2f-a20c-4248-9729-967c1ca13219 tempest-ServerRescueTestJSONUnderV235-1989684258 tempest-ServerRescueTestJSONUnderV235-1989684258-project-member] Lazy-loading 'resources' on Instance uuid 044335c7-ce3b-4b4a-b1dc-8b9acec538b4 {{(pid=63279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2003.578625] env[63279]: DEBUG oslo_concurrency.lockutils [None req-edb6b700-e848-4325-a4d2-78dc8c4a27f3 tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2003.610903] env[63279]: DEBUG nova.network.neutron [None req-2fef03c7-ebe2-4875-b420-6389c982d497 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] [instance: 1b2ca21b-feea-4fc1-9ddc-99f144e4241a] Instance cache missing network info. {{(pid=63279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2003.684392] env[63279]: DEBUG nova.network.neutron [None req-2fef03c7-ebe2-4875-b420-6389c982d497 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] [instance: fb124cfa-24b4-4712-b8cc-c87df5d6231b] Instance cache missing network info. {{(pid=63279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2003.750156] env[63279]: DEBUG nova.network.neutron [None req-71c39255-559f-4f75-b101-e68493303b43 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] [instance: 58392790-b297-4894-8d81-e5cbda69872b] Instance cache missing network info. {{(pid=63279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2003.844740] env[63279]: DEBUG nova.network.neutron [None req-3b1863be-7a23-490d-9c3e-8eb295bf6202 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] [instance: 8ccb4293-927a-45ba-82e9-9f1b4d5985cc] Instance cache missing network info. 
{{(pid=63279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2003.908337] env[63279]: DEBUG oslo_vmware.api [None req-ccac180c-f8b1-4fd5-a765-713ff5939c3d tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Task: {'id': task-2087215, 'name': Rename_Task, 'duration_secs': 0.139804} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2003.908788] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-ccac180c-f8b1-4fd5-a765-713ff5939c3d tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] [instance: 32789822-cb54-43e7-beae-b5ed3002f4ad] Powering on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2003.909434] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ebda5a92-6fcf-4a1c-982d-553084769342 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2003.916952] env[63279]: DEBUG oslo_vmware.api [None req-ccac180c-f8b1-4fd5-a765-713ff5939c3d tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Waiting for the task: (returnval){ [ 2003.916952] env[63279]: value = "task-2087216" [ 2003.916952] env[63279]: _type = "Task" [ 2003.916952] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2003.925053] env[63279]: DEBUG oslo_vmware.api [None req-ccac180c-f8b1-4fd5-a765-713ff5939c3d tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Task: {'id': task-2087216, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2003.959974] env[63279]: DEBUG nova.network.neutron [None req-2fef03c7-ebe2-4875-b420-6389c982d497 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] [instance: 1b2ca21b-feea-4fc1-9ddc-99f144e4241a] Updating instance_info_cache with network_info: [{"id": "a55a63d0-c628-4d90-b3dc-d8f0bf564e95", "address": "fa:16:3e:f7:b2:77", "network": {"id": "0a342148-49f5-4993-8a5d-a1f27ddd6002", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-668871377-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "70c8735ebd5740c4b8b4d0cf8635da71", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d4345ef6-a7c8-4c1c-badf-a0d4f578b61c", "external-id": "nsx-vlan-transportzone-677", "segmentation_id": 677, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa55a63d0-c6", "ovs_interfaceid": "a55a63d0-c628-4d90-b3dc-d8f0bf564e95", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2004.045643] env[63279]: DEBUG nova.compute.utils [None req-efbb3008-c956-4d6e-b972-b10e6dc25335 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Using /dev/sd instead of None {{(pid=63279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2004.047357] env[63279]: DEBUG nova.compute.manager [None req-efbb3008-c956-4d6e-b972-b10e6dc25335 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] [instance: f6a5d157-d58c-4c7e-b6e8-f2dc7aaebd9b] Allocating IP information in the background. 
{{(pid=63279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 2004.047540] env[63279]: DEBUG nova.network.neutron [None req-efbb3008-c956-4d6e-b972-b10e6dc25335 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] [instance: f6a5d157-d58c-4c7e-b6e8-f2dc7aaebd9b] allocate_for_instance() {{(pid=63279) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2004.061272] env[63279]: DEBUG nova.network.neutron [None req-71c39255-559f-4f75-b101-e68493303b43 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] [instance: 58392790-b297-4894-8d81-e5cbda69872b] Updating instance_info_cache with network_info: [{"id": "23e2695e-f865-4e3f-9f26-d5bd599cf889", "address": "fa:16:3e:f9:66:15", "network": {"id": "548d80cd-fb6c-47fc-8c1d-036889987399", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-219167599-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3f6c6f65521a440fb80278bbff2d0ed0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "678ebbe4-4c53-4eaf-a689-93981310f37d", "external-id": "nsx-vlan-transportzone-443", "segmentation_id": 443, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap23e2695e-f8", "ovs_interfaceid": "23e2695e-f865-4e3f-9f26-d5bd599cf889", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2004.130026] env[63279]: DEBUG nova.network.neutron [None req-2fef03c7-ebe2-4875-b420-6389c982d497 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] [instance: fb124cfa-24b4-4712-b8cc-c87df5d6231b] Updating instance_info_cache with network_info: [{"id": "5fc8a732-f9cf-4494-b984-31c593d7106b", "address": "fa:16:3e:60:77:0c", "network": {"id": "0a342148-49f5-4993-8a5d-a1f27ddd6002", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-668871377-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "70c8735ebd5740c4b8b4d0cf8635da71", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d4345ef6-a7c8-4c1c-badf-a0d4f578b61c", "external-id": "nsx-vlan-transportzone-677", "segmentation_id": 677, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5fc8a732-f9", "ovs_interfaceid": "5fc8a732-f9cf-4494-b984-31c593d7106b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info 
/opt/stack/nova/nova/network/neutron.py:116}} [ 2004.143321] env[63279]: DEBUG oslo_concurrency.lockutils [None req-dd23a8b6-623a-4385-a528-bdfe10dd648b tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] Acquiring lock "acf95fad-316c-4605-9008-24d4d7c05892" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2004.143577] env[63279]: DEBUG oslo_concurrency.lockutils [None req-dd23a8b6-623a-4385-a528-bdfe10dd648b tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] Lock "acf95fad-316c-4605-9008-24d4d7c05892" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2004.143785] env[63279]: DEBUG oslo_concurrency.lockutils [None req-dd23a8b6-623a-4385-a528-bdfe10dd648b tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] Acquiring lock "acf95fad-316c-4605-9008-24d4d7c05892-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2004.144166] env[63279]: DEBUG oslo_concurrency.lockutils [None req-dd23a8b6-623a-4385-a528-bdfe10dd648b tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] Lock "acf95fad-316c-4605-9008-24d4d7c05892-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2004.144301] env[63279]: DEBUG oslo_concurrency.lockutils [None req-dd23a8b6-623a-4385-a528-bdfe10dd648b tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] Lock "acf95fad-316c-4605-9008-24d4d7c05892-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2004.153027] env[63279]: INFO nova.compute.manager [None req-dd23a8b6-623a-4385-a528-bdfe10dd648b tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] [instance: acf95fad-316c-4605-9008-24d4d7c05892] Terminating instance [ 2004.165989] env[63279]: DEBUG nova.policy [None req-efbb3008-c956-4d6e-b972-b10e6dc25335 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a111f7e262b04ab9b1bc14397ce09b08', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '0b41258d6a444b558b3f3256f2f7d6eb', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63279) authorize /opt/stack/nova/nova/policy.py:192}} [ 2004.253501] env[63279]: DEBUG nova.network.neutron [None req-3b1863be-7a23-490d-9c3e-8eb295bf6202 tempest-ImagesOneServerNegativeTestJSON-631623048 
tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] [instance: 8ccb4293-927a-45ba-82e9-9f1b4d5985cc] Updating instance_info_cache with network_info: [{"id": "c2c5d3a4-c909-4508-97e5-c87eee8b7d50", "address": "fa:16:3e:e6:0a:1b", "network": {"id": "4f155366-ade7-4d4b-8fad-a2b55798980f", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-669492310-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "651b8183237a4e6dbef36aa2fb419f1b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "50cf0a70-948d-4611-af05-94c1483064ed", "external-id": "nsx-vlan-transportzone-536", "segmentation_id": 536, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc2c5d3a4-c9", "ovs_interfaceid": "c2c5d3a4-c909-4508-97e5-c87eee8b7d50", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2004.276739] env[63279]: DEBUG oslo_concurrency.lockutils [None req-0b9b77cd-0839-40e4-8a61-bde483c6a590 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Acquiring lock "a15141bc-424d-48ca-a6d5-c859a3639a0b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2004.276906] env[63279]: DEBUG oslo_concurrency.lockutils [None req-0b9b77cd-0839-40e4-8a61-bde483c6a590 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Lock "a15141bc-424d-48ca-a6d5-c859a3639a0b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2004.431046] env[63279]: DEBUG oslo_vmware.api [None req-ccac180c-f8b1-4fd5-a765-713ff5939c3d tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Task: {'id': task-2087216, 'name': PowerOnVM_Task, 'duration_secs': 0.500927} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2004.431563] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-ccac180c-f8b1-4fd5-a765-713ff5939c3d tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] [instance: 32789822-cb54-43e7-beae-b5ed3002f4ad] Powered on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2004.432055] env[63279]: INFO nova.compute.manager [None req-ccac180c-f8b1-4fd5-a765-713ff5939c3d tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] [instance: 32789822-cb54-43e7-beae-b5ed3002f4ad] Took 16.37 seconds to spawn the instance on the hypervisor. 
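The network_info blobs cached above and the "Instance VIF info" lines that follow show the reduction the VMware driver performs before invoking Folder.CreateVM_Task: each neutron port entry is flattened to a bridge name, MAC address, NSX logical-switch reference, interface id, and vif_model. Below is a minimal sketch of that mapping, fed with the entry already logged for instance 1b2ca21b-feea-4fc1-9ddc-99f144e4241a. It illustrates the transformation visible in these records, not the driver's implementation; the hard-coded vmxnet3 model is simply what the image name (cirros-d240228-sparse;paraVirtual;vmxnet3) in this run advertises.

```python
# Illustrative sketch only -- mirrors the network_info -> "Instance VIF info"
# reduction visible in the surrounding log records; not nova.virt.vmwareapi code.

def network_info_to_vif_info(vif):
    """Build a VMware-style VIF info dict from one neutron network_info entry."""
    details = vif.get("details", {})
    return {
        "network_name": vif["network"]["bridge"],   # e.g. "br-int"
        "mac_address": vif["address"],
        "network_ref": {
            "type": "OpaqueNetwork",
            "network-id": details.get("nsx-logical-switch-id"),
            "network-type": "nsx.LogicalSwitch",
            "use-external-id": True,
        },
        "iface_id": vif["id"],
        "vif_model": "vmxnet3",                     # assumed from the image used in this run
    }


# Entry logged above for instance 1b2ca21b-feea-4fc1-9ddc-99f144e4241a,
# trimmed to the fields the mapping needs.
vif = {
    "id": "a55a63d0-c628-4d90-b3dc-d8f0bf564e95",
    "address": "fa:16:3e:f7:b2:77",
    "network": {"bridge": "br-int"},
    "details": {"nsx-logical-switch-id": "d4345ef6-a7c8-4c1c-badf-a0d4f578b61c"},
}

# Prints a dict matching the "Instance VIF info" record that follows in the log.
print(network_info_to_vif_info(vif))
```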
[ 2004.432267] env[63279]: DEBUG nova.compute.manager [None req-ccac180c-f8b1-4fd5-a765-713ff5939c3d tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] [instance: 32789822-cb54-43e7-beae-b5ed3002f4ad] Checking state {{(pid=63279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2004.436351] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-abf6a3cb-8ec9-4bac-9050-2507903cc44f {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2004.462453] env[63279]: DEBUG oslo_concurrency.lockutils [None req-2fef03c7-ebe2-4875-b420-6389c982d497 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Releasing lock "refresh_cache-1b2ca21b-feea-4fc1-9ddc-99f144e4241a" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2004.463111] env[63279]: DEBUG nova.compute.manager [None req-2fef03c7-ebe2-4875-b420-6389c982d497 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] [instance: 1b2ca21b-feea-4fc1-9ddc-99f144e4241a] Instance network_info: |[{"id": "a55a63d0-c628-4d90-b3dc-d8f0bf564e95", "address": "fa:16:3e:f7:b2:77", "network": {"id": "0a342148-49f5-4993-8a5d-a1f27ddd6002", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-668871377-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "70c8735ebd5740c4b8b4d0cf8635da71", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d4345ef6-a7c8-4c1c-badf-a0d4f578b61c", "external-id": "nsx-vlan-transportzone-677", "segmentation_id": 677, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa55a63d0-c6", "ovs_interfaceid": "a55a63d0-c628-4d90-b3dc-d8f0bf564e95", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 2004.463657] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-2fef03c7-ebe2-4875-b420-6389c982d497 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] [instance: 1b2ca21b-feea-4fc1-9ddc-99f144e4241a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f7:b2:77', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'd4345ef6-a7c8-4c1c-badf-a0d4f578b61c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a55a63d0-c628-4d90-b3dc-d8f0bf564e95', 'vif_model': 'vmxnet3'}] {{(pid=63279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2004.471030] env[63279]: DEBUG oslo.service.loopingcall [None req-2fef03c7-ebe2-4875-b420-6389c982d497 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2004.471030] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1b2ca21b-feea-4fc1-9ddc-99f144e4241a] Creating VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2004.471363] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c1181ee3-41a1-411c-a420-2b6336512da0 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2004.495712] env[63279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2004.495712] env[63279]: value = "task-2087217" [ 2004.495712] env[63279]: _type = "Task" [ 2004.495712] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2004.503410] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087217, 'name': CreateVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2004.556221] env[63279]: DEBUG nova.compute.manager [None req-efbb3008-c956-4d6e-b972-b10e6dc25335 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] [instance: f6a5d157-d58c-4c7e-b6e8-f2dc7aaebd9b] Start building block device mappings for instance. {{(pid=63279) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 2004.560047] env[63279]: DEBUG oslo_concurrency.lockutils [None req-71c39255-559f-4f75-b101-e68493303b43 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Releasing lock "refresh_cache-58392790-b297-4894-8d81-e5cbda69872b" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2004.560361] env[63279]: DEBUG nova.compute.manager [None req-71c39255-559f-4f75-b101-e68493303b43 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] [instance: 58392790-b297-4894-8d81-e5cbda69872b] Instance network_info: |[{"id": "23e2695e-f865-4e3f-9f26-d5bd599cf889", "address": "fa:16:3e:f9:66:15", "network": {"id": "548d80cd-fb6c-47fc-8c1d-036889987399", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-219167599-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3f6c6f65521a440fb80278bbff2d0ed0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "678ebbe4-4c53-4eaf-a689-93981310f37d", "external-id": "nsx-vlan-transportzone-443", "segmentation_id": 443, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap23e2695e-f8", "ovs_interfaceid": "23e2695e-f865-4e3f-9f26-d5bd599cf889", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 2004.561202] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-71c39255-559f-4f75-b101-e68493303b43 tempest-ServersAdminTestJSON-1578757811 
tempest-ServersAdminTestJSON-1578757811-project-member] [instance: 58392790-b297-4894-8d81-e5cbda69872b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f9:66:15', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '678ebbe4-4c53-4eaf-a689-93981310f37d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '23e2695e-f865-4e3f-9f26-d5bd599cf889', 'vif_model': 'vmxnet3'}] {{(pid=63279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2004.571665] env[63279]: DEBUG oslo.service.loopingcall [None req-71c39255-559f-4f75-b101-e68493303b43 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2004.574918] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 58392790-b297-4894-8d81-e5cbda69872b] Creating VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2004.574918] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7c41158d-0cc7-4ad1-982a-43e7336fa93f {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2004.603160] env[63279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2004.603160] env[63279]: value = "task-2087218" [ 2004.603160] env[63279]: _type = "Task" [ 2004.603160] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2004.612108] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087218, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2004.630237] env[63279]: DEBUG oslo_concurrency.lockutils [None req-2fef03c7-ebe2-4875-b420-6389c982d497 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Releasing lock "refresh_cache-fb124cfa-24b4-4712-b8cc-c87df5d6231b" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2004.633045] env[63279]: DEBUG nova.compute.manager [None req-2fef03c7-ebe2-4875-b420-6389c982d497 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] [instance: fb124cfa-24b4-4712-b8cc-c87df5d6231b] Instance network_info: |[{"id": "5fc8a732-f9cf-4494-b984-31c593d7106b", "address": "fa:16:3e:60:77:0c", "network": {"id": "0a342148-49f5-4993-8a5d-a1f27ddd6002", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-668871377-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "70c8735ebd5740c4b8b4d0cf8635da71", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d4345ef6-a7c8-4c1c-badf-a0d4f578b61c", "external-id": "nsx-vlan-transportzone-677", "segmentation_id": 677, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5fc8a732-f9", "ovs_interfaceid": "5fc8a732-f9cf-4494-b984-31c593d7106b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 2004.633677] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-2fef03c7-ebe2-4875-b420-6389c982d497 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] [instance: fb124cfa-24b4-4712-b8cc-c87df5d6231b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:60:77:0c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'd4345ef6-a7c8-4c1c-badf-a0d4f578b61c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '5fc8a732-f9cf-4494-b984-31c593d7106b', 'vif_model': 'vmxnet3'}] {{(pid=63279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2004.643197] env[63279]: DEBUG oslo.service.loopingcall [None req-2fef03c7-ebe2-4875-b420-6389c982d497 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2004.649024] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fb124cfa-24b4-4712-b8cc-c87df5d6231b] Creating VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2004.649024] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-abb9af88-7bd1-496c-bd37-deff33337097 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2004.664259] env[63279]: DEBUG nova.compute.manager [None req-dd23a8b6-623a-4385-a528-bdfe10dd648b tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] [instance: acf95fad-316c-4605-9008-24d4d7c05892] Start destroying the instance on the hypervisor. {{(pid=63279) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2004.664511] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-dd23a8b6-623a-4385-a528-bdfe10dd648b tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] [instance: acf95fad-316c-4605-9008-24d4d7c05892] Destroying instance {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2004.666624] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ff928eb-44ed-497c-a8c4-cc0a369c0fd3 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2004.674047] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-dd23a8b6-623a-4385-a528-bdfe10dd648b tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] [instance: acf95fad-316c-4605-9008-24d4d7c05892] Powering off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2004.675351] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-25a28af2-d243-438e-b577-a5e1c62ace5f {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2004.676962] env[63279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2004.676962] env[63279]: value = "task-2087219" [ 2004.676962] env[63279]: _type = "Task" [ 2004.676962] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2004.678884] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c360572-e26e-4206-871b-5c6a730b57f2 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2004.688220] env[63279]: DEBUG oslo_vmware.api [None req-dd23a8b6-623a-4385-a528-bdfe10dd648b tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] Waiting for the task: (returnval){ [ 2004.688220] env[63279]: value = "task-2087220" [ 2004.688220] env[63279]: _type = "Task" [ 2004.688220] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2004.696850] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087219, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2004.698220] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-136eab79-b15e-47cc-91b2-047def6c0f69 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2004.708686] env[63279]: DEBUG oslo_vmware.api [None req-dd23a8b6-623a-4385-a528-bdfe10dd648b tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] Task: {'id': task-2087220, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2004.738278] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b0cf4de-5072-4154-ba04-54c6e9bbccd3 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2004.746643] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ae81581-aa7f-4d19-9c87-85bfda35bbb7 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2004.761775] env[63279]: DEBUG oslo_concurrency.lockutils [None req-3b1863be-7a23-490d-9c3e-8eb295bf6202 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Releasing lock "refresh_cache-8ccb4293-927a-45ba-82e9-9f1b4d5985cc" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2004.762057] env[63279]: DEBUG nova.compute.manager [None req-3b1863be-7a23-490d-9c3e-8eb295bf6202 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] [instance: 8ccb4293-927a-45ba-82e9-9f1b4d5985cc] Instance network_info: |[{"id": "c2c5d3a4-c909-4508-97e5-c87eee8b7d50", "address": "fa:16:3e:e6:0a:1b", "network": {"id": "4f155366-ade7-4d4b-8fad-a2b55798980f", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-669492310-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "651b8183237a4e6dbef36aa2fb419f1b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "50cf0a70-948d-4611-af05-94c1483064ed", "external-id": "nsx-vlan-transportzone-536", "segmentation_id": 536, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc2c5d3a4-c9", "ovs_interfaceid": "c2c5d3a4-c909-4508-97e5-c87eee8b7d50", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 2004.762576] env[63279]: DEBUG nova.compute.provider_tree [None req-e555eb2f-a20c-4248-9729-967c1ca13219 tempest-ServerRescueTestJSONUnderV235-1989684258 tempest-ServerRescueTestJSONUnderV235-1989684258-project-member] Inventory has not changed in ProviderTree for provider: 
0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2004.764162] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-3b1863be-7a23-490d-9c3e-8eb295bf6202 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] [instance: 8ccb4293-927a-45ba-82e9-9f1b4d5985cc] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e6:0a:1b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '50cf0a70-948d-4611-af05-94c1483064ed', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c2c5d3a4-c909-4508-97e5-c87eee8b7d50', 'vif_model': 'vmxnet3'}] {{(pid=63279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2004.772164] env[63279]: DEBUG oslo.service.loopingcall [None req-3b1863be-7a23-490d-9c3e-8eb295bf6202 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2004.773026] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8ccb4293-927a-45ba-82e9-9f1b4d5985cc] Creating VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2004.773297] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b2f292e4-9e94-4bd1-8cfe-33d91e7d75b9 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2004.797601] env[63279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2004.797601] env[63279]: value = "task-2087221" [ 2004.797601] env[63279]: _type = "Task" [ 2004.797601] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2004.799310] env[63279]: DEBUG nova.network.neutron [None req-0248352a-b141-4db1-ac2b-5125f47ee5eb tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] [instance: 64e92bfc-c0d0-4918-9ba2-45ffedbf7e39] Successfully updated port: a84d472c-4f07-4a61-8eca-135f97267755 {{(pid=63279) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2004.806897] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087221, 'name': CreateVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2004.833078] env[63279]: DEBUG nova.network.neutron [None req-efbb3008-c956-4d6e-b972-b10e6dc25335 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] [instance: f6a5d157-d58c-4c7e-b6e8-f2dc7aaebd9b] Successfully created port: cbbfde33-a0b6-4403-8a1e-d688a0a7147b {{(pid=63279) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2004.961187] env[63279]: INFO nova.compute.manager [None req-ccac180c-f8b1-4fd5-a765-713ff5939c3d tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] [instance: 32789822-cb54-43e7-beae-b5ed3002f4ad] Took 54.55 seconds to build instance. [ 2005.005732] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087217, 'name': CreateVM_Task} progress is 99%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2005.116725] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087218, 'name': CreateVM_Task} progress is 25%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2005.191655] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087219, 'name': CreateVM_Task} progress is 25%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2005.201632] env[63279]: DEBUG oslo_vmware.api [None req-dd23a8b6-623a-4385-a528-bdfe10dd648b tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] Task: {'id': task-2087220, 'name': PowerOffVM_Task, 'duration_secs': 0.238944} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2005.202641] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-dd23a8b6-623a-4385-a528-bdfe10dd648b tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] [instance: acf95fad-316c-4605-9008-24d4d7c05892] Powered off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2005.202641] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-dd23a8b6-623a-4385-a528-bdfe10dd648b tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] [instance: acf95fad-316c-4605-9008-24d4d7c05892] Unregistering the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2005.202738] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ef10f3ff-caed-4b6b-9fba-12ef8d7b99ab {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2005.274539] env[63279]: DEBUG nova.scheduler.client.report [None req-e555eb2f-a20c-4248-9729-967c1ca13219 tempest-ServerRescueTestJSONUnderV235-1989684258 tempest-ServerRescueTestJSONUnderV235-1989684258-project-member] Inventory has not changed for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2005.302423] env[63279]: DEBUG oslo_concurrency.lockutils [None req-0248352a-b141-4db1-ac2b-5125f47ee5eb tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Acquiring lock "refresh_cache-64e92bfc-c0d0-4918-9ba2-45ffedbf7e39" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2005.302423] env[63279]: DEBUG oslo_concurrency.lockutils [None req-0248352a-b141-4db1-ac2b-5125f47ee5eb tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Acquired lock "refresh_cache-64e92bfc-c0d0-4918-9ba2-45ffedbf7e39" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2005.302793] env[63279]: DEBUG nova.network.neutron [None req-0248352a-b141-4db1-ac2b-5125f47ee5eb 
tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] [instance: 64e92bfc-c0d0-4918-9ba2-45ffedbf7e39] Building network info cache for instance {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2005.307961] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087221, 'name': CreateVM_Task} progress is 99%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2005.463752] env[63279]: DEBUG oslo_concurrency.lockutils [None req-ccac180c-f8b1-4fd5-a765-713ff5939c3d tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Lock "32789822-cb54-43e7-beae-b5ed3002f4ad" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 113.057s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2005.507665] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087217, 'name': CreateVM_Task, 'duration_secs': 0.561611} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2005.507845] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1b2ca21b-feea-4fc1-9ddc-99f144e4241a] Created VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2005.508621] env[63279]: DEBUG oslo_concurrency.lockutils [None req-2fef03c7-ebe2-4875-b420-6389c982d497 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2005.508794] env[63279]: DEBUG oslo_concurrency.lockutils [None req-2fef03c7-ebe2-4875-b420-6389c982d497 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2005.509126] env[63279]: DEBUG oslo_concurrency.lockutils [None req-2fef03c7-ebe2-4875-b420-6389c982d497 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2005.509401] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b34cf0de-e549-4050-a60b-63398aa3824e {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2005.514763] env[63279]: DEBUG oslo_vmware.api [None req-2fef03c7-ebe2-4875-b420-6389c982d497 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Waiting for the task: (returnval){ [ 2005.514763] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]5257396d-6183-803d-9f0d-a94fcc60f691" [ 2005.514763] env[63279]: _type = "Task" [ 2005.514763] env[63279]: } to complete. 
{{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2005.522862] env[63279]: DEBUG oslo_vmware.api [None req-2fef03c7-ebe2-4875-b420-6389c982d497 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]5257396d-6183-803d-9f0d-a94fcc60f691, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2005.562351] env[63279]: DEBUG nova.compute.manager [req-2f1b32a5-e172-4cea-87c9-89ee3504319e req-55b6fe6d-335a-49b2-abec-82eb1f546ad9 service nova] [instance: fb124cfa-24b4-4712-b8cc-c87df5d6231b] Received event network-vif-plugged-5fc8a732-f9cf-4494-b984-31c593d7106b {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2005.562688] env[63279]: DEBUG oslo_concurrency.lockutils [req-2f1b32a5-e172-4cea-87c9-89ee3504319e req-55b6fe6d-335a-49b2-abec-82eb1f546ad9 service nova] Acquiring lock "fb124cfa-24b4-4712-b8cc-c87df5d6231b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2005.562896] env[63279]: DEBUG oslo_concurrency.lockutils [req-2f1b32a5-e172-4cea-87c9-89ee3504319e req-55b6fe6d-335a-49b2-abec-82eb1f546ad9 service nova] Lock "fb124cfa-24b4-4712-b8cc-c87df5d6231b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2005.563096] env[63279]: DEBUG oslo_concurrency.lockutils [req-2f1b32a5-e172-4cea-87c9-89ee3504319e req-55b6fe6d-335a-49b2-abec-82eb1f546ad9 service nova] Lock "fb124cfa-24b4-4712-b8cc-c87df5d6231b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2005.563276] env[63279]: DEBUG nova.compute.manager [req-2f1b32a5-e172-4cea-87c9-89ee3504319e req-55b6fe6d-335a-49b2-abec-82eb1f546ad9 service nova] [instance: fb124cfa-24b4-4712-b8cc-c87df5d6231b] No waiting events found dispatching network-vif-plugged-5fc8a732-f9cf-4494-b984-31c593d7106b {{(pid=63279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 2005.563488] env[63279]: WARNING nova.compute.manager [req-2f1b32a5-e172-4cea-87c9-89ee3504319e req-55b6fe6d-335a-49b2-abec-82eb1f546ad9 service nova] [instance: fb124cfa-24b4-4712-b8cc-c87df5d6231b] Received unexpected event network-vif-plugged-5fc8a732-f9cf-4494-b984-31c593d7106b for instance with vm_state building and task_state spawning. 
[ 2005.563698] env[63279]: DEBUG nova.compute.manager [req-2f1b32a5-e172-4cea-87c9-89ee3504319e req-55b6fe6d-335a-49b2-abec-82eb1f546ad9 service nova] [instance: 8ccb4293-927a-45ba-82e9-9f1b4d5985cc] Received event network-vif-plugged-c2c5d3a4-c909-4508-97e5-c87eee8b7d50 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2005.563886] env[63279]: DEBUG oslo_concurrency.lockutils [req-2f1b32a5-e172-4cea-87c9-89ee3504319e req-55b6fe6d-335a-49b2-abec-82eb1f546ad9 service nova] Acquiring lock "8ccb4293-927a-45ba-82e9-9f1b4d5985cc-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2005.564096] env[63279]: DEBUG oslo_concurrency.lockutils [req-2f1b32a5-e172-4cea-87c9-89ee3504319e req-55b6fe6d-335a-49b2-abec-82eb1f546ad9 service nova] Lock "8ccb4293-927a-45ba-82e9-9f1b4d5985cc-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2005.564265] env[63279]: DEBUG oslo_concurrency.lockutils [req-2f1b32a5-e172-4cea-87c9-89ee3504319e req-55b6fe6d-335a-49b2-abec-82eb1f546ad9 service nova] Lock "8ccb4293-927a-45ba-82e9-9f1b4d5985cc-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2005.564434] env[63279]: DEBUG nova.compute.manager [req-2f1b32a5-e172-4cea-87c9-89ee3504319e req-55b6fe6d-335a-49b2-abec-82eb1f546ad9 service nova] [instance: 8ccb4293-927a-45ba-82e9-9f1b4d5985cc] No waiting events found dispatching network-vif-plugged-c2c5d3a4-c909-4508-97e5-c87eee8b7d50 {{(pid=63279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 2005.564601] env[63279]: WARNING nova.compute.manager [req-2f1b32a5-e172-4cea-87c9-89ee3504319e req-55b6fe6d-335a-49b2-abec-82eb1f546ad9 service nova] [instance: 8ccb4293-927a-45ba-82e9-9f1b4d5985cc] Received unexpected event network-vif-plugged-c2c5d3a4-c909-4508-97e5-c87eee8b7d50 for instance with vm_state building and task_state spawning. [ 2005.564765] env[63279]: DEBUG nova.compute.manager [req-2f1b32a5-e172-4cea-87c9-89ee3504319e req-55b6fe6d-335a-49b2-abec-82eb1f546ad9 service nova] [instance: fb124cfa-24b4-4712-b8cc-c87df5d6231b] Received event network-changed-5fc8a732-f9cf-4494-b984-31c593d7106b {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2005.564917] env[63279]: DEBUG nova.compute.manager [req-2f1b32a5-e172-4cea-87c9-89ee3504319e req-55b6fe6d-335a-49b2-abec-82eb1f546ad9 service nova] [instance: fb124cfa-24b4-4712-b8cc-c87df5d6231b] Refreshing instance network info cache due to event network-changed-5fc8a732-f9cf-4494-b984-31c593d7106b. 
{{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11655}} [ 2005.565111] env[63279]: DEBUG oslo_concurrency.lockutils [req-2f1b32a5-e172-4cea-87c9-89ee3504319e req-55b6fe6d-335a-49b2-abec-82eb1f546ad9 service nova] Acquiring lock "refresh_cache-fb124cfa-24b4-4712-b8cc-c87df5d6231b" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2005.566329] env[63279]: DEBUG oslo_concurrency.lockutils [req-2f1b32a5-e172-4cea-87c9-89ee3504319e req-55b6fe6d-335a-49b2-abec-82eb1f546ad9 service nova] Acquired lock "refresh_cache-fb124cfa-24b4-4712-b8cc-c87df5d6231b" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2005.566508] env[63279]: DEBUG nova.network.neutron [req-2f1b32a5-e172-4cea-87c9-89ee3504319e req-55b6fe6d-335a-49b2-abec-82eb1f546ad9 service nova] [instance: fb124cfa-24b4-4712-b8cc-c87df5d6231b] Refreshing network info cache for port 5fc8a732-f9cf-4494-b984-31c593d7106b {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2005.568868] env[63279]: DEBUG nova.compute.manager [None req-efbb3008-c956-4d6e-b972-b10e6dc25335 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] [instance: f6a5d157-d58c-4c7e-b6e8-f2dc7aaebd9b] Start spawning the instance on the hypervisor. {{(pid=63279) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 2005.600704] env[63279]: DEBUG nova.virt.hardware [None req-efbb3008-c956-4d6e-b972-b10e6dc25335 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-13T17:53:56Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='ca62e09f-97fd-4b30-aed4-3874eb2dace8',id=29,is_public=True,memory_mb=192,name='tempest-test_resize_flavor_-212803514',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-13T17:30:01Z,direct_url=,disk_format='vmdk',id=30887889-e45b-4f67-8b3c-16216e594a90,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a935e86eee0a4c38adfe0367d2097a61',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-13T17:30:02Z,virtual_size=,visibility=), allow threads: False {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2005.600946] env[63279]: DEBUG nova.virt.hardware [None req-efbb3008-c956-4d6e-b972-b10e6dc25335 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Flavor limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2005.601120] env[63279]: DEBUG nova.virt.hardware [None req-efbb3008-c956-4d6e-b972-b10e6dc25335 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Image limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2005.601308] env[63279]: DEBUG nova.virt.hardware [None req-efbb3008-c956-4d6e-b972-b10e6dc25335 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Flavor pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2005.601458] env[63279]: DEBUG 
nova.virt.hardware [None req-efbb3008-c956-4d6e-b972-b10e6dc25335 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Image pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2005.601613] env[63279]: DEBUG nova.virt.hardware [None req-efbb3008-c956-4d6e-b972-b10e6dc25335 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2005.601830] env[63279]: DEBUG nova.virt.hardware [None req-efbb3008-c956-4d6e-b972-b10e6dc25335 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 2005.601998] env[63279]: DEBUG nova.virt.hardware [None req-efbb3008-c956-4d6e-b972-b10e6dc25335 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 2005.602352] env[63279]: DEBUG nova.virt.hardware [None req-efbb3008-c956-4d6e-b972-b10e6dc25335 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Got 1 possible topologies {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2005.602532] env[63279]: DEBUG nova.virt.hardware [None req-efbb3008-c956-4d6e-b972-b10e6dc25335 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2005.602709] env[63279]: DEBUG nova.virt.hardware [None req-efbb3008-c956-4d6e-b972-b10e6dc25335 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2005.603856] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8af927fa-3d34-45e0-8eac-70f92f5e9453 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2005.623373] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5259c2e-7aa2-43c2-9435-5d4f9f25730a {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2005.627541] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087218, 'name': CreateVM_Task, 'duration_secs': 0.773153} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2005.627714] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 58392790-b297-4894-8d81-e5cbda69872b] Created VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2005.628859] env[63279]: DEBUG oslo_concurrency.lockutils [None req-71c39255-559f-4f75-b101-e68493303b43 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2005.657702] env[63279]: DEBUG nova.compute.manager [req-1b37797b-c5e6-489d-b87a-ac39e649b32f req-6b15be0c-504b-49d9-a537-1700cf051b24 service nova] [instance: 5d4be656-defe-4332-b97e-e88b107ca4a1] Received event network-vif-deleted-ec7f4196-e7c7-425d-aa69-d8a766edef7f {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2005.657921] env[63279]: DEBUG nova.compute.manager [req-1b37797b-c5e6-489d-b87a-ac39e649b32f req-6b15be0c-504b-49d9-a537-1700cf051b24 service nova] [instance: 58392790-b297-4894-8d81-e5cbda69872b] Received event network-vif-plugged-23e2695e-f865-4e3f-9f26-d5bd599cf889 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2005.658122] env[63279]: DEBUG oslo_concurrency.lockutils [req-1b37797b-c5e6-489d-b87a-ac39e649b32f req-6b15be0c-504b-49d9-a537-1700cf051b24 service nova] Acquiring lock "58392790-b297-4894-8d81-e5cbda69872b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2005.658540] env[63279]: DEBUG oslo_concurrency.lockutils [req-1b37797b-c5e6-489d-b87a-ac39e649b32f req-6b15be0c-504b-49d9-a537-1700cf051b24 service nova] Lock "58392790-b297-4894-8d81-e5cbda69872b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2005.658858] env[63279]: DEBUG oslo_concurrency.lockutils [req-1b37797b-c5e6-489d-b87a-ac39e649b32f req-6b15be0c-504b-49d9-a537-1700cf051b24 service nova] Lock "58392790-b297-4894-8d81-e5cbda69872b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2005.659061] env[63279]: DEBUG nova.compute.manager [req-1b37797b-c5e6-489d-b87a-ac39e649b32f req-6b15be0c-504b-49d9-a537-1700cf051b24 service nova] [instance: 58392790-b297-4894-8d81-e5cbda69872b] No waiting events found dispatching network-vif-plugged-23e2695e-f865-4e3f-9f26-d5bd599cf889 {{(pid=63279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 2005.659319] env[63279]: WARNING nova.compute.manager [req-1b37797b-c5e6-489d-b87a-ac39e649b32f req-6b15be0c-504b-49d9-a537-1700cf051b24 service nova] [instance: 58392790-b297-4894-8d81-e5cbda69872b] Received unexpected event network-vif-plugged-23e2695e-f865-4e3f-9f26-d5bd599cf889 for instance with vm_state building and task_state spawning. 
[ 2005.659514] env[63279]: DEBUG nova.compute.manager [req-1b37797b-c5e6-489d-b87a-ac39e649b32f req-6b15be0c-504b-49d9-a537-1700cf051b24 service nova] [instance: 1b2ca21b-feea-4fc1-9ddc-99f144e4241a] Received event network-changed-a55a63d0-c628-4d90-b3dc-d8f0bf564e95 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2005.659695] env[63279]: DEBUG nova.compute.manager [req-1b37797b-c5e6-489d-b87a-ac39e649b32f req-6b15be0c-504b-49d9-a537-1700cf051b24 service nova] [instance: 1b2ca21b-feea-4fc1-9ddc-99f144e4241a] Refreshing instance network info cache due to event network-changed-a55a63d0-c628-4d90-b3dc-d8f0bf564e95. {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11655}} [ 2005.659920] env[63279]: DEBUG oslo_concurrency.lockutils [req-1b37797b-c5e6-489d-b87a-ac39e649b32f req-6b15be0c-504b-49d9-a537-1700cf051b24 service nova] Acquiring lock "refresh_cache-1b2ca21b-feea-4fc1-9ddc-99f144e4241a" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2005.660008] env[63279]: DEBUG oslo_concurrency.lockutils [req-1b37797b-c5e6-489d-b87a-ac39e649b32f req-6b15be0c-504b-49d9-a537-1700cf051b24 service nova] Acquired lock "refresh_cache-1b2ca21b-feea-4fc1-9ddc-99f144e4241a" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2005.660202] env[63279]: DEBUG nova.network.neutron [req-1b37797b-c5e6-489d-b87a-ac39e649b32f req-6b15be0c-504b-49d9-a537-1700cf051b24 service nova] [instance: 1b2ca21b-feea-4fc1-9ddc-99f144e4241a] Refreshing network info cache for port a55a63d0-c628-4d90-b3dc-d8f0bf564e95 {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2005.689324] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087219, 'name': CreateVM_Task, 'duration_secs': 0.709463} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2005.689610] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fb124cfa-24b4-4712-b8cc-c87df5d6231b] Created VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2005.690248] env[63279]: DEBUG oslo_concurrency.lockutils [None req-2fef03c7-ebe2-4875-b420-6389c982d497 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2005.703542] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b0743caa-f310-40a7-b364-e4022c9249e0 tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] Acquiring lock "df963c29-a1c4-4f28-be95-cafe3af4d2fa" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2005.703728] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b0743caa-f310-40a7-b364-e4022c9249e0 tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] Lock "df963c29-a1c4-4f28-be95-cafe3af4d2fa" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2005.779833] env[63279]: DEBUG oslo_concurrency.lockutils [None req-e555eb2f-a20c-4248-9729-967c1ca13219 tempest-ServerRescueTestJSONUnderV235-1989684258 tempest-ServerRescueTestJSONUnderV235-1989684258-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.237s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2005.782353] env[63279]: DEBUG oslo_concurrency.lockutils [None req-38cbe7fd-87b8-43d7-9843-9491e5a539a0 tempest-InstanceActionsV221TestJSON-1895476841 tempest-InstanceActionsV221TestJSON-1895476841-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 23.108s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2005.783882] env[63279]: INFO nova.compute.claims [None req-38cbe7fd-87b8-43d7-9843-9491e5a539a0 tempest-InstanceActionsV221TestJSON-1895476841 tempest-InstanceActionsV221TestJSON-1895476841-project-member] [instance: ff9701ed-d545-44b4-911a-c4d809d0a771] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2005.808472] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087221, 'name': CreateVM_Task, 'duration_secs': 0.587791} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2005.810699] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8ccb4293-927a-45ba-82e9-9f1b4d5985cc] Created VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2005.812594] env[63279]: INFO nova.scheduler.client.report [None req-e555eb2f-a20c-4248-9729-967c1ca13219 tempest-ServerRescueTestJSONUnderV235-1989684258 tempest-ServerRescueTestJSONUnderV235-1989684258-project-member] Deleted allocations for instance 044335c7-ce3b-4b4a-b1dc-8b9acec538b4 [ 2005.812991] env[63279]: DEBUG oslo_concurrency.lockutils [None req-3b1863be-7a23-490d-9c3e-8eb295bf6202 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2005.862186] env[63279]: DEBUG nova.network.neutron [None req-0248352a-b141-4db1-ac2b-5125f47ee5eb tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] [instance: 64e92bfc-c0d0-4918-9ba2-45ffedbf7e39] Instance cache missing network info. {{(pid=63279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2005.966788] env[63279]: DEBUG nova.compute.manager [None req-17f8ab0f-73cf-4901-a75b-de05d0a35bf6 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: 7db0c32d-36a4-4452-bb07-06de0c93ab50] Starting instance... {{(pid=63279) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 2006.030139] env[63279]: DEBUG oslo_vmware.api [None req-2fef03c7-ebe2-4875-b420-6389c982d497 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]5257396d-6183-803d-9f0d-a94fcc60f691, 'name': SearchDatastore_Task, 'duration_secs': 0.032984} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2006.030468] env[63279]: DEBUG oslo_concurrency.lockutils [None req-2fef03c7-ebe2-4875-b420-6389c982d497 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2006.030712] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-2fef03c7-ebe2-4875-b420-6389c982d497 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] [instance: 1b2ca21b-feea-4fc1-9ddc-99f144e4241a] Processing image 30887889-e45b-4f67-8b3c-16216e594a90 {{(pid=63279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2006.030946] env[63279]: DEBUG oslo_concurrency.lockutils [None req-2fef03c7-ebe2-4875-b420-6389c982d497 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2006.031110] env[63279]: DEBUG oslo_concurrency.lockutils [None req-2fef03c7-ebe2-4875-b420-6389c982d497 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2006.031290] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-2fef03c7-ebe2-4875-b420-6389c982d497 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2006.031571] env[63279]: DEBUG oslo_concurrency.lockutils [None req-71c39255-559f-4f75-b101-e68493303b43 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2006.031915] env[63279]: DEBUG oslo_concurrency.lockutils [None req-71c39255-559f-4f75-b101-e68493303b43 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2006.032172] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c0fd9e70-b384-440b-99bf-cc7417dce795 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2006.033958] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e04c35d3-8343-43a9-a66a-41bc2f1f26fe {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2006.039583] env[63279]: DEBUG oslo_vmware.api [None 
req-71c39255-559f-4f75-b101-e68493303b43 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Waiting for the task: (returnval){ [ 2006.039583] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52e00fd6-d7e7-92d3-8d1f-1a4d58f4a2ac" [ 2006.039583] env[63279]: _type = "Task" [ 2006.039583] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2006.044246] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-2fef03c7-ebe2-4875-b420-6389c982d497 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2006.044403] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-2fef03c7-ebe2-4875-b420-6389c982d497 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2006.045407] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-836ea319-5dc7-4db8-b4f3-374ab13c1d1a {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2006.050514] env[63279]: DEBUG oslo_vmware.api [None req-71c39255-559f-4f75-b101-e68493303b43 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52e00fd6-d7e7-92d3-8d1f-1a4d58f4a2ac, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2006.053666] env[63279]: DEBUG oslo_vmware.api [None req-2fef03c7-ebe2-4875-b420-6389c982d497 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Waiting for the task: (returnval){ [ 2006.053666] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]5249dc4f-e6f7-affd-a88d-a49009e92af5" [ 2006.053666] env[63279]: _type = "Task" [ 2006.053666] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2006.061234] env[63279]: DEBUG oslo_vmware.api [None req-2fef03c7-ebe2-4875-b420-6389c982d497 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]5249dc4f-e6f7-affd-a88d-a49009e92af5, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2006.152708] env[63279]: DEBUG nova.network.neutron [None req-0248352a-b141-4db1-ac2b-5125f47ee5eb tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] [instance: 64e92bfc-c0d0-4918-9ba2-45ffedbf7e39] Updating instance_info_cache with network_info: [{"id": "a84d472c-4f07-4a61-8eca-135f97267755", "address": "fa:16:3e:f6:de:1a", "network": {"id": "6d7c4c21-3389-4688-b373-329b2328f275", "bridge": "br-int", "label": "tempest-ImagesTestJSON-311425692-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "77ee2145dda94e2b85eeb7379ed98e26", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0df968ae-c1ef-4009-a0f4-6f2e799c2fda", "external-id": "nsx-vlan-transportzone-864", "segmentation_id": 864, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa84d472c-4f", "ovs_interfaceid": "a84d472c-4f07-4a61-8eca-135f97267755", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2006.319612] env[63279]: DEBUG oslo_concurrency.lockutils [None req-e555eb2f-a20c-4248-9729-967c1ca13219 tempest-ServerRescueTestJSONUnderV235-1989684258 tempest-ServerRescueTestJSONUnderV235-1989684258-project-member] Lock "044335c7-ce3b-4b4a-b1dc-8b9acec538b4" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 32.887s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2006.419806] env[63279]: DEBUG nova.network.neutron [req-2f1b32a5-e172-4cea-87c9-89ee3504319e req-55b6fe6d-335a-49b2-abec-82eb1f546ad9 service nova] [instance: fb124cfa-24b4-4712-b8cc-c87df5d6231b] Updated VIF entry in instance network info cache for port 5fc8a732-f9cf-4494-b984-31c593d7106b. 
{{(pid=63279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2006.420213] env[63279]: DEBUG nova.network.neutron [req-2f1b32a5-e172-4cea-87c9-89ee3504319e req-55b6fe6d-335a-49b2-abec-82eb1f546ad9 service nova] [instance: fb124cfa-24b4-4712-b8cc-c87df5d6231b] Updating instance_info_cache with network_info: [{"id": "5fc8a732-f9cf-4494-b984-31c593d7106b", "address": "fa:16:3e:60:77:0c", "network": {"id": "0a342148-49f5-4993-8a5d-a1f27ddd6002", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-668871377-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "70c8735ebd5740c4b8b4d0cf8635da71", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d4345ef6-a7c8-4c1c-badf-a0d4f578b61c", "external-id": "nsx-vlan-transportzone-677", "segmentation_id": 677, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5fc8a732-f9", "ovs_interfaceid": "5fc8a732-f9cf-4494-b984-31c593d7106b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2006.486023] env[63279]: DEBUG oslo_concurrency.lockutils [None req-17f8ab0f-73cf-4901-a75b-de05d0a35bf6 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2006.557260] env[63279]: DEBUG oslo_vmware.api [None req-71c39255-559f-4f75-b101-e68493303b43 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52e00fd6-d7e7-92d3-8d1f-1a4d58f4a2ac, 'name': SearchDatastore_Task, 'duration_secs': 0.017834} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2006.561249] env[63279]: DEBUG oslo_concurrency.lockutils [None req-71c39255-559f-4f75-b101-e68493303b43 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2006.561590] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-71c39255-559f-4f75-b101-e68493303b43 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] [instance: 58392790-b297-4894-8d81-e5cbda69872b] Processing image 30887889-e45b-4f67-8b3c-16216e594a90 {{(pid=63279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2006.561833] env[63279]: DEBUG oslo_concurrency.lockutils [None req-71c39255-559f-4f75-b101-e68493303b43 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2006.562458] env[63279]: DEBUG oslo_concurrency.lockutils [None req-2fef03c7-ebe2-4875-b420-6389c982d497 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2006.563167] env[63279]: DEBUG oslo_concurrency.lockutils [None req-2fef03c7-ebe2-4875-b420-6389c982d497 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2006.563796] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-930c4e45-a6ba-4405-beba-0a2acf8fb3c6 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2006.571125] env[63279]: DEBUG oslo_vmware.api [None req-2fef03c7-ebe2-4875-b420-6389c982d497 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]5249dc4f-e6f7-affd-a88d-a49009e92af5, 'name': SearchDatastore_Task, 'duration_secs': 0.023014} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2006.573381] env[63279]: DEBUG oslo_vmware.api [None req-2fef03c7-ebe2-4875-b420-6389c982d497 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Waiting for the task: (returnval){ [ 2006.573381] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]523314fe-d102-05b8-130f-6af33af8c043" [ 2006.573381] env[63279]: _type = "Task" [ 2006.573381] env[63279]: } to complete. 
{{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2006.573616] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-82732f4c-82cd-4f6e-a9e7-154ad09601a7 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2006.582126] env[63279]: DEBUG oslo_vmware.api [None req-2fef03c7-ebe2-4875-b420-6389c982d497 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Waiting for the task: (returnval){ [ 2006.582126] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]523a137e-82b5-bb15-d174-cb91041049b6" [ 2006.582126] env[63279]: _type = "Task" [ 2006.582126] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2006.585454] env[63279]: DEBUG oslo_vmware.api [None req-2fef03c7-ebe2-4875-b420-6389c982d497 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]523314fe-d102-05b8-130f-6af33af8c043, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2006.596045] env[63279]: DEBUG oslo_concurrency.lockutils [None req-5ccf4cc3-01a1-4613-8c3b-fa276e803cd1 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Acquiring lock "32789822-cb54-43e7-beae-b5ed3002f4ad" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2006.596319] env[63279]: DEBUG oslo_concurrency.lockutils [None req-5ccf4cc3-01a1-4613-8c3b-fa276e803cd1 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Lock "32789822-cb54-43e7-beae-b5ed3002f4ad" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2006.599920] env[63279]: DEBUG oslo_concurrency.lockutils [None req-5ccf4cc3-01a1-4613-8c3b-fa276e803cd1 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Acquiring lock "32789822-cb54-43e7-beae-b5ed3002f4ad-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2006.599920] env[63279]: DEBUG oslo_concurrency.lockutils [None req-5ccf4cc3-01a1-4613-8c3b-fa276e803cd1 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Lock "32789822-cb54-43e7-beae-b5ed3002f4ad-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2006.599920] env[63279]: DEBUG oslo_concurrency.lockutils [None req-5ccf4cc3-01a1-4613-8c3b-fa276e803cd1 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Lock "32789822-cb54-43e7-beae-b5ed3002f4ad-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: 
held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2006.599920] env[63279]: DEBUG oslo_vmware.api [None req-2fef03c7-ebe2-4875-b420-6389c982d497 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]523a137e-82b5-bb15-d174-cb91041049b6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2006.600516] env[63279]: INFO nova.compute.manager [None req-5ccf4cc3-01a1-4613-8c3b-fa276e803cd1 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] [instance: 32789822-cb54-43e7-beae-b5ed3002f4ad] Terminating instance [ 2006.657818] env[63279]: DEBUG oslo_concurrency.lockutils [None req-0248352a-b141-4db1-ac2b-5125f47ee5eb tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Releasing lock "refresh_cache-64e92bfc-c0d0-4918-9ba2-45ffedbf7e39" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2006.657818] env[63279]: DEBUG nova.compute.manager [None req-0248352a-b141-4db1-ac2b-5125f47ee5eb tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] [instance: 64e92bfc-c0d0-4918-9ba2-45ffedbf7e39] Instance network_info: |[{"id": "a84d472c-4f07-4a61-8eca-135f97267755", "address": "fa:16:3e:f6:de:1a", "network": {"id": "6d7c4c21-3389-4688-b373-329b2328f275", "bridge": "br-int", "label": "tempest-ImagesTestJSON-311425692-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "77ee2145dda94e2b85eeb7379ed98e26", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0df968ae-c1ef-4009-a0f4-6f2e799c2fda", "external-id": "nsx-vlan-transportzone-864", "segmentation_id": 864, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa84d472c-4f", "ovs_interfaceid": "a84d472c-4f07-4a61-8eca-135f97267755", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 2006.658383] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-0248352a-b141-4db1-ac2b-5125f47ee5eb tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] [instance: 64e92bfc-c0d0-4918-9ba2-45ffedbf7e39] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f6:de:1a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '0df968ae-c1ef-4009-a0f4-6f2e799c2fda', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a84d472c-4f07-4a61-8eca-135f97267755', 'vif_model': 'vmxnet3'}] {{(pid=63279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2006.666454] env[63279]: DEBUG oslo.service.loopingcall [None req-0248352a-b141-4db1-ac2b-5125f47ee5eb tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Waiting for function 
nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2006.667506] env[63279]: DEBUG nova.network.neutron [req-1b37797b-c5e6-489d-b87a-ac39e649b32f req-6b15be0c-504b-49d9-a537-1700cf051b24 service nova] [instance: 1b2ca21b-feea-4fc1-9ddc-99f144e4241a] Updated VIF entry in instance network info cache for port a55a63d0-c628-4d90-b3dc-d8f0bf564e95. {{(pid=63279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2006.667895] env[63279]: DEBUG nova.network.neutron [req-1b37797b-c5e6-489d-b87a-ac39e649b32f req-6b15be0c-504b-49d9-a537-1700cf051b24 service nova] [instance: 1b2ca21b-feea-4fc1-9ddc-99f144e4241a] Updating instance_info_cache with network_info: [{"id": "a55a63d0-c628-4d90-b3dc-d8f0bf564e95", "address": "fa:16:3e:f7:b2:77", "network": {"id": "0a342148-49f5-4993-8a5d-a1f27ddd6002", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-668871377-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "70c8735ebd5740c4b8b4d0cf8635da71", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d4345ef6-a7c8-4c1c-badf-a0d4f578b61c", "external-id": "nsx-vlan-transportzone-677", "segmentation_id": 677, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa55a63d0-c6", "ovs_interfaceid": "a55a63d0-c628-4d90-b3dc-d8f0bf564e95", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2006.669081] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 64e92bfc-c0d0-4918-9ba2-45ffedbf7e39] Creating VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2006.669335] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9f6933d7-0173-4453-90a7-df471097f4a7 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2006.690776] env[63279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2006.690776] env[63279]: value = "task-2087223" [ 2006.690776] env[63279]: _type = "Task" [ 2006.690776] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2006.700036] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087223, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2006.809781] env[63279]: DEBUG nova.network.neutron [None req-efbb3008-c956-4d6e-b972-b10e6dc25335 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] [instance: f6a5d157-d58c-4c7e-b6e8-f2dc7aaebd9b] Successfully updated port: cbbfde33-a0b6-4403-8a1e-d688a0a7147b {{(pid=63279) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2006.862241] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-dd23a8b6-623a-4385-a528-bdfe10dd648b tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] [instance: acf95fad-316c-4605-9008-24d4d7c05892] Unregistered the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2006.862700] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-dd23a8b6-623a-4385-a528-bdfe10dd648b tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] [instance: acf95fad-316c-4605-9008-24d4d7c05892] Deleting contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2006.862889] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-dd23a8b6-623a-4385-a528-bdfe10dd648b tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] Deleting the datastore file [datastore1] acf95fad-316c-4605-9008-24d4d7c05892 {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2006.863166] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ac336627-8939-45c7-8f04-4e559174b375 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2006.870652] env[63279]: DEBUG oslo_vmware.api [None req-dd23a8b6-623a-4385-a528-bdfe10dd648b tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] Waiting for the task: (returnval){ [ 2006.870652] env[63279]: value = "task-2087224" [ 2006.870652] env[63279]: _type = "Task" [ 2006.870652] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2006.880839] env[63279]: DEBUG oslo_vmware.api [None req-dd23a8b6-623a-4385-a528-bdfe10dd648b tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] Task: {'id': task-2087224, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2006.923017] env[63279]: DEBUG oslo_concurrency.lockutils [req-2f1b32a5-e172-4cea-87c9-89ee3504319e req-55b6fe6d-335a-49b2-abec-82eb1f546ad9 service nova] Releasing lock "refresh_cache-fb124cfa-24b4-4712-b8cc-c87df5d6231b" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2006.923298] env[63279]: DEBUG nova.compute.manager [req-2f1b32a5-e172-4cea-87c9-89ee3504319e req-55b6fe6d-335a-49b2-abec-82eb1f546ad9 service nova] [instance: 8ccb4293-927a-45ba-82e9-9f1b4d5985cc] Received event network-changed-c2c5d3a4-c909-4508-97e5-c87eee8b7d50 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2006.923463] env[63279]: DEBUG nova.compute.manager [req-2f1b32a5-e172-4cea-87c9-89ee3504319e req-55b6fe6d-335a-49b2-abec-82eb1f546ad9 service nova] [instance: 8ccb4293-927a-45ba-82e9-9f1b4d5985cc] Refreshing instance network info cache due to event network-changed-c2c5d3a4-c909-4508-97e5-c87eee8b7d50. {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11655}} [ 2006.923676] env[63279]: DEBUG oslo_concurrency.lockutils [req-2f1b32a5-e172-4cea-87c9-89ee3504319e req-55b6fe6d-335a-49b2-abec-82eb1f546ad9 service nova] Acquiring lock "refresh_cache-8ccb4293-927a-45ba-82e9-9f1b4d5985cc" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2006.923818] env[63279]: DEBUG oslo_concurrency.lockutils [req-2f1b32a5-e172-4cea-87c9-89ee3504319e req-55b6fe6d-335a-49b2-abec-82eb1f546ad9 service nova] Acquired lock "refresh_cache-8ccb4293-927a-45ba-82e9-9f1b4d5985cc" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2006.925133] env[63279]: DEBUG nova.network.neutron [req-2f1b32a5-e172-4cea-87c9-89ee3504319e req-55b6fe6d-335a-49b2-abec-82eb1f546ad9 service nova] [instance: 8ccb4293-927a-45ba-82e9-9f1b4d5985cc] Refreshing network info cache for port c2c5d3a4-c909-4508-97e5-c87eee8b7d50 {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2007.087536] env[63279]: DEBUG oslo_vmware.api [None req-2fef03c7-ebe2-4875-b420-6389c982d497 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]523314fe-d102-05b8-130f-6af33af8c043, 'name': SearchDatastore_Task, 'duration_secs': 0.018629} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2007.090972] env[63279]: DEBUG oslo_concurrency.lockutils [None req-2fef03c7-ebe2-4875-b420-6389c982d497 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2007.091263] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-2fef03c7-ebe2-4875-b420-6389c982d497 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] [instance: fb124cfa-24b4-4712-b8cc-c87df5d6231b] Processing image 30887889-e45b-4f67-8b3c-16216e594a90 {{(pid=63279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2007.091490] env[63279]: DEBUG oslo_concurrency.lockutils [None req-2fef03c7-ebe2-4875-b420-6389c982d497 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2007.091769] env[63279]: DEBUG oslo_concurrency.lockutils [None req-3b1863be-7a23-490d-9c3e-8eb295bf6202 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2007.092090] env[63279]: DEBUG oslo_concurrency.lockutils [None req-3b1863be-7a23-490d-9c3e-8eb295bf6202 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2007.092730] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4b27345e-d677-4bc3-8b75-238c525267d8 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2007.101970] env[63279]: DEBUG oslo_vmware.api [None req-2fef03c7-ebe2-4875-b420-6389c982d497 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]523a137e-82b5-bb15-d174-cb91041049b6, 'name': SearchDatastore_Task, 'duration_secs': 0.01552} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2007.102811] env[63279]: DEBUG oslo_concurrency.lockutils [None req-2fef03c7-ebe2-4875-b420-6389c982d497 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2007.104058] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-2fef03c7-ebe2-4875-b420-6389c982d497 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] 1b2ca21b-feea-4fc1-9ddc-99f144e4241a/1b2ca21b-feea-4fc1-9ddc-99f144e4241a.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2007.104058] env[63279]: DEBUG nova.compute.manager [None req-5ccf4cc3-01a1-4613-8c3b-fa276e803cd1 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] [instance: 32789822-cb54-43e7-beae-b5ed3002f4ad] Start destroying the instance on the hypervisor. {{(pid=63279) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2007.104058] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-5ccf4cc3-01a1-4613-8c3b-fa276e803cd1 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] [instance: 32789822-cb54-43e7-beae-b5ed3002f4ad] Destroying instance {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2007.105538] env[63279]: DEBUG oslo_concurrency.lockutils [None req-71c39255-559f-4f75-b101-e68493303b43 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2007.105740] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-71c39255-559f-4f75-b101-e68493303b43 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2007.105963] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7f379ad0-f3ba-4de3-8dad-909c806b3651 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2007.108515] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23aacf13-e359-4fcf-b570-7eae5b8cf1be {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2007.111967] env[63279]: DEBUG oslo_vmware.api [None req-3b1863be-7a23-490d-9c3e-8eb295bf6202 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Waiting for the task: (returnval){ [ 2007.111967] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52d9c4ca-220c-c8d3-6341-9799ffbc8087" [ 2007.111967] env[63279]: _type = "Task" [ 
2007.111967] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2007.111967] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-70698d2e-0232-4403-bfc7-bdee26037921 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2007.128030] env[63279]: DEBUG oslo_vmware.api [None req-2fef03c7-ebe2-4875-b420-6389c982d497 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Waiting for the task: (returnval){ [ 2007.128030] env[63279]: value = "task-2087225" [ 2007.128030] env[63279]: _type = "Task" [ 2007.128030] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2007.128030] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-5ccf4cc3-01a1-4613-8c3b-fa276e803cd1 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] [instance: 32789822-cb54-43e7-beae-b5ed3002f4ad] Powering off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2007.128030] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2acdd104-4a2b-42b7-ba66-aa2dc5e32e51 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2007.139075] env[63279]: DEBUG oslo_vmware.api [None req-3b1863be-7a23-490d-9c3e-8eb295bf6202 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52d9c4ca-220c-c8d3-6341-9799ffbc8087, 'name': SearchDatastore_Task, 'duration_secs': 0.013669} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2007.140698] env[63279]: DEBUG oslo_concurrency.lockutils [None req-3b1863be-7a23-490d-9c3e-8eb295bf6202 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2007.140935] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-3b1863be-7a23-490d-9c3e-8eb295bf6202 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] [instance: 8ccb4293-927a-45ba-82e9-9f1b4d5985cc] Processing image 30887889-e45b-4f67-8b3c-16216e594a90 {{(pid=63279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2007.141174] env[63279]: DEBUG oslo_concurrency.lockutils [None req-3b1863be-7a23-490d-9c3e-8eb295bf6202 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2007.141403] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-71c39255-559f-4f75-b101-e68493303b43 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2007.141557] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-71c39255-559f-4f75-b101-e68493303b43 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2007.145578] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5d7df182-c62a-4367-a754-9833aa33e7c1 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2007.153755] env[63279]: DEBUG oslo_vmware.api [None req-5ccf4cc3-01a1-4613-8c3b-fa276e803cd1 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Waiting for the task: (returnval){ [ 2007.153755] env[63279]: value = "task-2087226" [ 2007.153755] env[63279]: _type = "Task" [ 2007.153755] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2007.153971] env[63279]: DEBUG oslo_vmware.api [None req-2fef03c7-ebe2-4875-b420-6389c982d497 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Task: {'id': task-2087225, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2007.158909] env[63279]: DEBUG oslo_vmware.api [None req-71c39255-559f-4f75-b101-e68493303b43 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Waiting for the task: (returnval){ [ 2007.158909] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]5293da13-74f5-ba34-7d80-4b63cca57d37" [ 2007.158909] env[63279]: _type = "Task" [ 2007.158909] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2007.165906] env[63279]: DEBUG oslo_vmware.api [None req-5ccf4cc3-01a1-4613-8c3b-fa276e803cd1 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Task: {'id': task-2087226, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2007.176410] env[63279]: DEBUG oslo_vmware.api [None req-71c39255-559f-4f75-b101-e68493303b43 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]5293da13-74f5-ba34-7d80-4b63cca57d37, 'name': SearchDatastore_Task, 'duration_secs': 0.008725} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2007.176791] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-eed2f15f-e967-4b21-bf70-2d54a55e9bac {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2007.183955] env[63279]: DEBUG oslo_vmware.api [None req-71c39255-559f-4f75-b101-e68493303b43 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Waiting for the task: (returnval){ [ 2007.183955] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52826c92-2d33-06cd-3086-490fc49d1d61" [ 2007.183955] env[63279]: _type = "Task" [ 2007.183955] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2007.187723] env[63279]: DEBUG oslo_concurrency.lockutils [req-1b37797b-c5e6-489d-b87a-ac39e649b32f req-6b15be0c-504b-49d9-a537-1700cf051b24 service nova] Releasing lock "refresh_cache-1b2ca21b-feea-4fc1-9ddc-99f144e4241a" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2007.187975] env[63279]: DEBUG nova.compute.manager [req-1b37797b-c5e6-489d-b87a-ac39e649b32f req-6b15be0c-504b-49d9-a537-1700cf051b24 service nova] [instance: 58392790-b297-4894-8d81-e5cbda69872b] Received event network-changed-23e2695e-f865-4e3f-9f26-d5bd599cf889 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2007.188171] env[63279]: DEBUG nova.compute.manager [req-1b37797b-c5e6-489d-b87a-ac39e649b32f req-6b15be0c-504b-49d9-a537-1700cf051b24 service nova] [instance: 58392790-b297-4894-8d81-e5cbda69872b] Refreshing instance network info cache due to event network-changed-23e2695e-f865-4e3f-9f26-d5bd599cf889. 
{{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11655}} [ 2007.191358] env[63279]: DEBUG oslo_concurrency.lockutils [req-1b37797b-c5e6-489d-b87a-ac39e649b32f req-6b15be0c-504b-49d9-a537-1700cf051b24 service nova] Acquiring lock "refresh_cache-58392790-b297-4894-8d81-e5cbda69872b" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2007.191358] env[63279]: DEBUG oslo_concurrency.lockutils [req-1b37797b-c5e6-489d-b87a-ac39e649b32f req-6b15be0c-504b-49d9-a537-1700cf051b24 service nova] Acquired lock "refresh_cache-58392790-b297-4894-8d81-e5cbda69872b" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2007.191358] env[63279]: DEBUG nova.network.neutron [req-1b37797b-c5e6-489d-b87a-ac39e649b32f req-6b15be0c-504b-49d9-a537-1700cf051b24 service nova] [instance: 58392790-b297-4894-8d81-e5cbda69872b] Refreshing network info cache for port 23e2695e-f865-4e3f-9f26-d5bd599cf889 {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2007.198193] env[63279]: DEBUG oslo_vmware.api [None req-71c39255-559f-4f75-b101-e68493303b43 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52826c92-2d33-06cd-3086-490fc49d1d61, 'name': SearchDatastore_Task, 'duration_secs': 0.008441} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2007.202233] env[63279]: DEBUG oslo_concurrency.lockutils [None req-71c39255-559f-4f75-b101-e68493303b43 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2007.202618] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-71c39255-559f-4f75-b101-e68493303b43 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] 58392790-b297-4894-8d81-e5cbda69872b/58392790-b297-4894-8d81-e5cbda69872b.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2007.204376] env[63279]: DEBUG oslo_concurrency.lockutils [None req-2fef03c7-ebe2-4875-b420-6389c982d497 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2007.204376] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-2fef03c7-ebe2-4875-b420-6389c982d497 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2007.204761] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2bddce64-7f25-4df3-9517-57e2896b13d5 {{(pid=63279) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2007.211563] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2cde38e5-ec07-46bc-a262-61281507f610 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2007.213795] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087223, 'name': CreateVM_Task} progress is 99%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2007.219569] env[63279]: DEBUG oslo_vmware.api [None req-71c39255-559f-4f75-b101-e68493303b43 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Waiting for the task: (returnval){ [ 2007.219569] env[63279]: value = "task-2087227" [ 2007.219569] env[63279]: _type = "Task" [ 2007.219569] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2007.223587] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-2fef03c7-ebe2-4875-b420-6389c982d497 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2007.223752] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-2fef03c7-ebe2-4875-b420-6389c982d497 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2007.224812] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a2daa82b-7c91-4777-b15e-8270fd1c5404 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2007.230995] env[63279]: DEBUG oslo_vmware.api [None req-71c39255-559f-4f75-b101-e68493303b43 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Task: {'id': task-2087227, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2007.237169] env[63279]: DEBUG oslo_vmware.api [None req-2fef03c7-ebe2-4875-b420-6389c982d497 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Waiting for the task: (returnval){ [ 2007.237169] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52a7e316-429b-14f9-9a01-32cec7b0866e" [ 2007.237169] env[63279]: _type = "Task" [ 2007.237169] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2007.246921] env[63279]: DEBUG oslo_vmware.api [None req-2fef03c7-ebe2-4875-b420-6389c982d497 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52a7e316-429b-14f9-9a01-32cec7b0866e, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2007.313071] env[63279]: DEBUG oslo_concurrency.lockutils [None req-efbb3008-c956-4d6e-b972-b10e6dc25335 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Acquiring lock "refresh_cache-f6a5d157-d58c-4c7e-b6e8-f2dc7aaebd9b" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2007.313272] env[63279]: DEBUG oslo_concurrency.lockutils [None req-efbb3008-c956-4d6e-b972-b10e6dc25335 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Acquired lock "refresh_cache-f6a5d157-d58c-4c7e-b6e8-f2dc7aaebd9b" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2007.313497] env[63279]: DEBUG nova.network.neutron [None req-efbb3008-c956-4d6e-b972-b10e6dc25335 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] [instance: f6a5d157-d58c-4c7e-b6e8-f2dc7aaebd9b] Building network info cache for instance {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2007.379724] env[63279]: DEBUG oslo_vmware.api [None req-dd23a8b6-623a-4385-a528-bdfe10dd648b tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] Task: {'id': task-2087224, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.189843} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2007.380211] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-dd23a8b6-623a-4385-a528-bdfe10dd648b tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] Deleted the datastore file {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2007.380211] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-dd23a8b6-623a-4385-a528-bdfe10dd648b tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] [instance: acf95fad-316c-4605-9008-24d4d7c05892] Deleted contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2007.380370] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-dd23a8b6-623a-4385-a528-bdfe10dd648b tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] [instance: acf95fad-316c-4605-9008-24d4d7c05892] Instance destroyed {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2007.380551] env[63279]: INFO nova.compute.manager [None req-dd23a8b6-623a-4385-a528-bdfe10dd648b tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] [instance: acf95fad-316c-4605-9008-24d4d7c05892] Took 2.72 seconds to destroy the instance on the hypervisor. [ 2007.380809] env[63279]: DEBUG oslo.service.loopingcall [None req-dd23a8b6-623a-4385-a528-bdfe10dd648b tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. 
{{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2007.381019] env[63279]: DEBUG nova.compute.manager [-] [instance: acf95fad-316c-4605-9008-24d4d7c05892] Deallocating network for instance {{(pid=63279) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2007.381172] env[63279]: DEBUG nova.network.neutron [-] [instance: acf95fad-316c-4605-9008-24d4d7c05892] deallocate_for_instance() {{(pid=63279) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2007.411657] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d01afc5c-1f15-4eda-869e-138145c8c0aa {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2007.419769] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-585879d4-ab51-44c1-939a-02d45e0e5714 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2007.454059] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15d956c6-a1df-4540-ba08-3521db2cc7be {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2007.464085] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bdfecf93-585f-40c4-8e32-f945ede3dfcb {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2007.479751] env[63279]: DEBUG nova.compute.provider_tree [None req-38cbe7fd-87b8-43d7-9843-9491e5a539a0 tempest-InstanceActionsV221TestJSON-1895476841 tempest-InstanceActionsV221TestJSON-1895476841-project-member] Inventory has not changed in ProviderTree for provider: 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2007.639019] env[63279]: DEBUG oslo_vmware.api [None req-2fef03c7-ebe2-4875-b420-6389c982d497 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Task: {'id': task-2087225, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.505101} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2007.639019] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-2fef03c7-ebe2-4875-b420-6389c982d497 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] 1b2ca21b-feea-4fc1-9ddc-99f144e4241a/1b2ca21b-feea-4fc1-9ddc-99f144e4241a.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2007.639019] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-2fef03c7-ebe2-4875-b420-6389c982d497 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] [instance: 1b2ca21b-feea-4fc1-9ddc-99f144e4241a] Extending root virtual disk to 1048576 {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2007.639019] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ffdefa9c-5804-4831-8ba3-5f350b66bb59 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2007.644458] env[63279]: DEBUG oslo_vmware.api [None req-2fef03c7-ebe2-4875-b420-6389c982d497 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Waiting for the task: (returnval){ [ 2007.644458] env[63279]: value = "task-2087228" [ 2007.644458] env[63279]: _type = "Task" [ 2007.644458] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2007.652118] env[63279]: DEBUG oslo_vmware.api [None req-2fef03c7-ebe2-4875-b420-6389c982d497 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Task: {'id': task-2087228, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2007.669491] env[63279]: DEBUG oslo_vmware.api [None req-5ccf4cc3-01a1-4613-8c3b-fa276e803cd1 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Task: {'id': task-2087226, 'name': PowerOffVM_Task, 'duration_secs': 0.182926} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2007.670103] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-5ccf4cc3-01a1-4613-8c3b-fa276e803cd1 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] [instance: 32789822-cb54-43e7-beae-b5ed3002f4ad] Powered off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2007.670281] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-5ccf4cc3-01a1-4613-8c3b-fa276e803cd1 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] [instance: 32789822-cb54-43e7-beae-b5ed3002f4ad] Unregistering the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2007.670655] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-69c16c9b-6a28-46f3-8c76-e6b8f9e3dd5b {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2007.705199] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087223, 'name': CreateVM_Task, 'duration_secs': 0.589987} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2007.705462] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 64e92bfc-c0d0-4918-9ba2-45ffedbf7e39] Created VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2007.706282] env[63279]: DEBUG oslo_concurrency.lockutils [None req-0248352a-b141-4db1-ac2b-5125f47ee5eb tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2007.706461] env[63279]: DEBUG oslo_concurrency.lockutils [None req-0248352a-b141-4db1-ac2b-5125f47ee5eb tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2007.706789] env[63279]: DEBUG oslo_concurrency.lockutils [None req-0248352a-b141-4db1-ac2b-5125f47ee5eb tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2007.707080] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c24b772f-2eeb-4e06-b242-9497341ad7d7 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2007.715970] env[63279]: DEBUG oslo_vmware.api [None req-0248352a-b141-4db1-ac2b-5125f47ee5eb tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Waiting for the task: (returnval){ [ 2007.715970] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52b20f51-88e3-0175-1f4f-930021d370c6" [ 2007.715970] env[63279]: _type = "Task" [ 2007.715970] env[63279]: } to complete. 
{{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2007.736283] env[63279]: DEBUG oslo_vmware.api [None req-0248352a-b141-4db1-ac2b-5125f47ee5eb tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52b20f51-88e3-0175-1f4f-930021d370c6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2007.736548] env[63279]: DEBUG oslo_vmware.api [None req-71c39255-559f-4f75-b101-e68493303b43 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Task: {'id': task-2087227, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2007.753579] env[63279]: DEBUG oslo_vmware.api [None req-2fef03c7-ebe2-4875-b420-6389c982d497 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52a7e316-429b-14f9-9a01-32cec7b0866e, 'name': SearchDatastore_Task, 'duration_secs': 0.010352} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2007.754672] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fbd7a8a3-cea8-4a78-9682-45e96862de13 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2007.761590] env[63279]: DEBUG oslo_vmware.api [None req-2fef03c7-ebe2-4875-b420-6389c982d497 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Waiting for the task: (returnval){ [ 2007.761590] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52bbc838-de9c-901d-a560-a9bf336426bd" [ 2007.761590] env[63279]: _type = "Task" [ 2007.761590] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2007.772753] env[63279]: DEBUG oslo_vmware.api [None req-2fef03c7-ebe2-4875-b420-6389c982d497 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52bbc838-de9c-901d-a560-a9bf336426bd, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2007.893686] env[63279]: DEBUG nova.network.neutron [None req-efbb3008-c956-4d6e-b972-b10e6dc25335 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] [instance: f6a5d157-d58c-4c7e-b6e8-f2dc7aaebd9b] Instance cache missing network info. 
{{(pid=63279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2007.947122] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-5ccf4cc3-01a1-4613-8c3b-fa276e803cd1 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] [instance: 32789822-cb54-43e7-beae-b5ed3002f4ad] Unregistered the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2007.947495] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-5ccf4cc3-01a1-4613-8c3b-fa276e803cd1 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] [instance: 32789822-cb54-43e7-beae-b5ed3002f4ad] Deleting contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2007.947705] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-5ccf4cc3-01a1-4613-8c3b-fa276e803cd1 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Deleting the datastore file [datastore1] 32789822-cb54-43e7-beae-b5ed3002f4ad {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2007.952524] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4de4f108-e374-46dc-b825-a357163b9aba {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2007.959381] env[63279]: DEBUG oslo_vmware.api [None req-5ccf4cc3-01a1-4613-8c3b-fa276e803cd1 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Waiting for the task: (returnval){ [ 2007.959381] env[63279]: value = "task-2087230" [ 2007.959381] env[63279]: _type = "Task" [ 2007.959381] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2007.969524] env[63279]: DEBUG oslo_vmware.api [None req-5ccf4cc3-01a1-4613-8c3b-fa276e803cd1 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Task: {'id': task-2087230, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2007.982651] env[63279]: DEBUG nova.scheduler.client.report [None req-38cbe7fd-87b8-43d7-9843-9491e5a539a0 tempest-InstanceActionsV221TestJSON-1895476841 tempest-InstanceActionsV221TestJSON-1895476841-project-member] Inventory has not changed for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2008.153811] env[63279]: DEBUG oslo_vmware.api [None req-2fef03c7-ebe2-4875-b420-6389c982d497 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Task: {'id': task-2087228, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.088169} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2008.154092] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-2fef03c7-ebe2-4875-b420-6389c982d497 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] [instance: 1b2ca21b-feea-4fc1-9ddc-99f144e4241a] Extended root virtual disk {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2008.154830] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3502f44b-a8ae-426e-ac22-c273c6b570e6 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2008.176835] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-2fef03c7-ebe2-4875-b420-6389c982d497 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] [instance: 1b2ca21b-feea-4fc1-9ddc-99f144e4241a] Reconfiguring VM instance instance-00000031 to attach disk [datastore1] 1b2ca21b-feea-4fc1-9ddc-99f144e4241a/1b2ca21b-feea-4fc1-9ddc-99f144e4241a.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2008.177112] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f320adf1-3081-45c9-bb2b-380bbc35fc76 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2008.196422] env[63279]: DEBUG oslo_vmware.api [None req-2fef03c7-ebe2-4875-b420-6389c982d497 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Waiting for the task: (returnval){ [ 2008.196422] env[63279]: value = "task-2087231" [ 2008.196422] env[63279]: _type = "Task" [ 2008.196422] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2008.204426] env[63279]: DEBUG oslo_vmware.api [None req-2fef03c7-ebe2-4875-b420-6389c982d497 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Task: {'id': task-2087231, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2008.231989] env[63279]: DEBUG oslo_vmware.api [None req-0248352a-b141-4db1-ac2b-5125f47ee5eb tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52b20f51-88e3-0175-1f4f-930021d370c6, 'name': SearchDatastore_Task, 'duration_secs': 0.050032} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2008.237305] env[63279]: DEBUG oslo_concurrency.lockutils [None req-0248352a-b141-4db1-ac2b-5125f47ee5eb tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2008.237305] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-0248352a-b141-4db1-ac2b-5125f47ee5eb tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] [instance: 64e92bfc-c0d0-4918-9ba2-45ffedbf7e39] Processing image 30887889-e45b-4f67-8b3c-16216e594a90 {{(pid=63279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2008.237305] env[63279]: DEBUG oslo_concurrency.lockutils [None req-0248352a-b141-4db1-ac2b-5125f47ee5eb tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2008.237305] env[63279]: DEBUG oslo_vmware.api [None req-71c39255-559f-4f75-b101-e68493303b43 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Task: {'id': task-2087227, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.732079} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2008.237718] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-71c39255-559f-4f75-b101-e68493303b43 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] 58392790-b297-4894-8d81-e5cbda69872b/58392790-b297-4894-8d81-e5cbda69872b.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2008.237718] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-71c39255-559f-4f75-b101-e68493303b43 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] [instance: 58392790-b297-4894-8d81-e5cbda69872b] Extending root virtual disk to 1048576 {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2008.237718] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-15b52097-8f4e-4000-b7b1-2f2297fe1390 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2008.244390] env[63279]: DEBUG oslo_vmware.api [None req-71c39255-559f-4f75-b101-e68493303b43 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Waiting for the task: (returnval){ [ 2008.244390] env[63279]: value = "task-2087232" [ 2008.244390] env[63279]: _type = "Task" [ 2008.244390] env[63279]: } to complete. 
{{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2008.254013] env[63279]: DEBUG oslo_vmware.api [None req-71c39255-559f-4f75-b101-e68493303b43 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Task: {'id': task-2087232, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2008.270854] env[63279]: DEBUG oslo_vmware.api [None req-2fef03c7-ebe2-4875-b420-6389c982d497 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52bbc838-de9c-901d-a560-a9bf336426bd, 'name': SearchDatastore_Task, 'duration_secs': 0.050639} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2008.271136] env[63279]: DEBUG oslo_concurrency.lockutils [None req-2fef03c7-ebe2-4875-b420-6389c982d497 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2008.271395] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-2fef03c7-ebe2-4875-b420-6389c982d497 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] fb124cfa-24b4-4712-b8cc-c87df5d6231b/fb124cfa-24b4-4712-b8cc-c87df5d6231b.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2008.271678] env[63279]: DEBUG oslo_concurrency.lockutils [None req-3b1863be-7a23-490d-9c3e-8eb295bf6202 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2008.271864] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-3b1863be-7a23-490d-9c3e-8eb295bf6202 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2008.272093] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4d99a249-72ef-4bf3-9749-66c13538a725 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2008.273976] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-98a5d397-fc47-4042-9e15-a49dbc43ba0e {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2008.280717] env[63279]: DEBUG oslo_vmware.api [None req-2fef03c7-ebe2-4875-b420-6389c982d497 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Waiting for the task: (returnval){ [ 2008.280717] env[63279]: value = "task-2087233" [ 
2008.280717] env[63279]: _type = "Task" [ 2008.280717] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2008.284591] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-3b1863be-7a23-490d-9c3e-8eb295bf6202 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2008.284859] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-3b1863be-7a23-490d-9c3e-8eb295bf6202 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2008.285674] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3bfdab68-f0a9-40c2-9985-4c78f2045081 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2008.290680] env[63279]: DEBUG oslo_vmware.api [None req-2fef03c7-ebe2-4875-b420-6389c982d497 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Task: {'id': task-2087233, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2008.293463] env[63279]: DEBUG oslo_vmware.api [None req-3b1863be-7a23-490d-9c3e-8eb295bf6202 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Waiting for the task: (returnval){ [ 2008.293463] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52316930-4dfb-9b51-ef01-05c686780ec5" [ 2008.293463] env[63279]: _type = "Task" [ 2008.293463] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2008.300757] env[63279]: DEBUG oslo_vmware.api [None req-3b1863be-7a23-490d-9c3e-8eb295bf6202 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52316930-4dfb-9b51-ef01-05c686780ec5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2008.320629] env[63279]: DEBUG nova.network.neutron [req-2f1b32a5-e172-4cea-87c9-89ee3504319e req-55b6fe6d-335a-49b2-abec-82eb1f546ad9 service nova] [instance: 8ccb4293-927a-45ba-82e9-9f1b4d5985cc] Updated VIF entry in instance network info cache for port c2c5d3a4-c909-4508-97e5-c87eee8b7d50. 
{{(pid=63279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2008.320629] env[63279]: DEBUG nova.network.neutron [req-2f1b32a5-e172-4cea-87c9-89ee3504319e req-55b6fe6d-335a-49b2-abec-82eb1f546ad9 service nova] [instance: 8ccb4293-927a-45ba-82e9-9f1b4d5985cc] Updating instance_info_cache with network_info: [{"id": "c2c5d3a4-c909-4508-97e5-c87eee8b7d50", "address": "fa:16:3e:e6:0a:1b", "network": {"id": "4f155366-ade7-4d4b-8fad-a2b55798980f", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-669492310-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "651b8183237a4e6dbef36aa2fb419f1b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "50cf0a70-948d-4611-af05-94c1483064ed", "external-id": "nsx-vlan-transportzone-536", "segmentation_id": 536, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc2c5d3a4-c9", "ovs_interfaceid": "c2c5d3a4-c909-4508-97e5-c87eee8b7d50", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2008.329349] env[63279]: DEBUG nova.network.neutron [None req-efbb3008-c956-4d6e-b972-b10e6dc25335 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] [instance: f6a5d157-d58c-4c7e-b6e8-f2dc7aaebd9b] Updating instance_info_cache with network_info: [{"id": "cbbfde33-a0b6-4403-8a1e-d688a0a7147b", "address": "fa:16:3e:47:7e:05", "network": {"id": "1e875730-8e0e-4907-a6d2-776025ed7ab9", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.114", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "a935e86eee0a4c38adfe0367d2097a61", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "32028d02-abaa-4071-bc65-1460f5c772a8", "external-id": "nsx-vlan-transportzone-558", "segmentation_id": 558, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcbbfde33-a0", "ovs_interfaceid": "cbbfde33-a0b6-4403-8a1e-d688a0a7147b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2008.332448] env[63279]: DEBUG nova.compute.manager [req-419615be-577d-48a4-a4ef-93e7d47fa0c3 req-a857c259-fab3-4116-8447-6cc44a1bf5da service nova] [instance: f6a5d157-d58c-4c7e-b6e8-f2dc7aaebd9b] Received event network-vif-plugged-cbbfde33-a0b6-4403-8a1e-d688a0a7147b {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2008.332448] env[63279]: DEBUG 
oslo_concurrency.lockutils [req-419615be-577d-48a4-a4ef-93e7d47fa0c3 req-a857c259-fab3-4116-8447-6cc44a1bf5da service nova] Acquiring lock "f6a5d157-d58c-4c7e-b6e8-f2dc7aaebd9b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2008.332448] env[63279]: DEBUG oslo_concurrency.lockutils [req-419615be-577d-48a4-a4ef-93e7d47fa0c3 req-a857c259-fab3-4116-8447-6cc44a1bf5da service nova] Lock "f6a5d157-d58c-4c7e-b6e8-f2dc7aaebd9b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2008.332735] env[63279]: DEBUG oslo_concurrency.lockutils [req-419615be-577d-48a4-a4ef-93e7d47fa0c3 req-a857c259-fab3-4116-8447-6cc44a1bf5da service nova] Lock "f6a5d157-d58c-4c7e-b6e8-f2dc7aaebd9b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2008.332779] env[63279]: DEBUG nova.compute.manager [req-419615be-577d-48a4-a4ef-93e7d47fa0c3 req-a857c259-fab3-4116-8447-6cc44a1bf5da service nova] [instance: f6a5d157-d58c-4c7e-b6e8-f2dc7aaebd9b] No waiting events found dispatching network-vif-plugged-cbbfde33-a0b6-4403-8a1e-d688a0a7147b {{(pid=63279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 2008.332924] env[63279]: WARNING nova.compute.manager [req-419615be-577d-48a4-a4ef-93e7d47fa0c3 req-a857c259-fab3-4116-8447-6cc44a1bf5da service nova] [instance: f6a5d157-d58c-4c7e-b6e8-f2dc7aaebd9b] Received unexpected event network-vif-plugged-cbbfde33-a0b6-4403-8a1e-d688a0a7147b for instance with vm_state building and task_state spawning. [ 2008.333100] env[63279]: DEBUG nova.compute.manager [req-419615be-577d-48a4-a4ef-93e7d47fa0c3 req-a857c259-fab3-4116-8447-6cc44a1bf5da service nova] [instance: f6a5d157-d58c-4c7e-b6e8-f2dc7aaebd9b] Received event network-changed-cbbfde33-a0b6-4403-8a1e-d688a0a7147b {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2008.333257] env[63279]: DEBUG nova.compute.manager [req-419615be-577d-48a4-a4ef-93e7d47fa0c3 req-a857c259-fab3-4116-8447-6cc44a1bf5da service nova] [instance: f6a5d157-d58c-4c7e-b6e8-f2dc7aaebd9b] Refreshing instance network info cache due to event network-changed-cbbfde33-a0b6-4403-8a1e-d688a0a7147b. {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11655}} [ 2008.333426] env[63279]: DEBUG oslo_concurrency.lockutils [req-419615be-577d-48a4-a4ef-93e7d47fa0c3 req-a857c259-fab3-4116-8447-6cc44a1bf5da service nova] Acquiring lock "refresh_cache-f6a5d157-d58c-4c7e-b6e8-f2dc7aaebd9b" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2008.343048] env[63279]: DEBUG nova.network.neutron [req-1b37797b-c5e6-489d-b87a-ac39e649b32f req-6b15be0c-504b-49d9-a537-1700cf051b24 service nova] [instance: 58392790-b297-4894-8d81-e5cbda69872b] Updated VIF entry in instance network info cache for port 23e2695e-f865-4e3f-9f26-d5bd599cf889. 
{{(pid=63279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2008.343479] env[63279]: DEBUG nova.network.neutron [req-1b37797b-c5e6-489d-b87a-ac39e649b32f req-6b15be0c-504b-49d9-a537-1700cf051b24 service nova] [instance: 58392790-b297-4894-8d81-e5cbda69872b] Updating instance_info_cache with network_info: [{"id": "23e2695e-f865-4e3f-9f26-d5bd599cf889", "address": "fa:16:3e:f9:66:15", "network": {"id": "548d80cd-fb6c-47fc-8c1d-036889987399", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-219167599-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3f6c6f65521a440fb80278bbff2d0ed0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "678ebbe4-4c53-4eaf-a689-93981310f37d", "external-id": "nsx-vlan-transportzone-443", "segmentation_id": 443, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap23e2695e-f8", "ovs_interfaceid": "23e2695e-f865-4e3f-9f26-d5bd599cf889", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2008.476084] env[63279]: DEBUG oslo_vmware.api [None req-5ccf4cc3-01a1-4613-8c3b-fa276e803cd1 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Task: {'id': task-2087230, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.132412} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2008.477066] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-5ccf4cc3-01a1-4613-8c3b-fa276e803cd1 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Deleted the datastore file {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2008.477066] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-5ccf4cc3-01a1-4613-8c3b-fa276e803cd1 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] [instance: 32789822-cb54-43e7-beae-b5ed3002f4ad] Deleted contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2008.477066] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-5ccf4cc3-01a1-4613-8c3b-fa276e803cd1 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] [instance: 32789822-cb54-43e7-beae-b5ed3002f4ad] Instance destroyed {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2008.477341] env[63279]: INFO nova.compute.manager [None req-5ccf4cc3-01a1-4613-8c3b-fa276e803cd1 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] [instance: 32789822-cb54-43e7-beae-b5ed3002f4ad] Took 1.37 seconds to destroy the instance on the hypervisor. 
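The "Waiting for the task: (returnval){ ... _type = "Task" }" blocks, the "progress is N%." polls and the "completed successfully" records that repeat through this stretch all come from the same oslo_vmware pattern: the driver invokes a vCenter method that returns a Task managed object, and wait_for_task/_poll_task (the oslo_vmware/api.py source locations shown in the log) poll its TaskInfo until it reaches success or error. A minimal conceptual sketch of that loop, assuming a hypothetical get_task_info() stand-in for the real vSphere TaskInfo lookup -- this is only the shape of the mechanism, not the oslo_vmware implementation:

import time

def get_task_info(task_ref):
    # Hypothetical stand-in: the real code reads the task's TaskInfo through
    # the vSphere property collector. Returns (state, progress, result, error).
    raise NotImplementedError("stand-in for a vSphere TaskInfo lookup")

def wait_for_task(task_ref, poll_interval=0.5):
    # Poll a vCenter task until it finishes, mirroring the log output above:
    # each poll corresponds to a "Task: {...} progress is N%." line and the
    # final poll to a "... completed successfully." line with duration_secs.
    start = time.monotonic()
    while True:
        state, progress, result, error = get_task_info(task_ref)
        if state in ("queued", "running"):
            print(f"Task {task_ref} progress is {progress or 0}%.")
            time.sleep(poll_interval)
            continue
        duration = time.monotonic() - start
        if state == "success":
            print(f"Task {task_ref} completed successfully. duration_secs={duration:.6f}")
            return result
        raise RuntimeError(f"Task {task_ref} failed after {duration:.6f}s: {error}")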
[ 2008.477437] env[63279]: DEBUG oslo.service.loopingcall [None req-5ccf4cc3-01a1-4613-8c3b-fa276e803cd1 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2008.477589] env[63279]: DEBUG nova.compute.manager [-] [instance: 32789822-cb54-43e7-beae-b5ed3002f4ad] Deallocating network for instance {{(pid=63279) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2008.477686] env[63279]: DEBUG nova.network.neutron [-] [instance: 32789822-cb54-43e7-beae-b5ed3002f4ad] deallocate_for_instance() {{(pid=63279) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2008.488145] env[63279]: DEBUG oslo_concurrency.lockutils [None req-38cbe7fd-87b8-43d7-9843-9491e5a539a0 tempest-InstanceActionsV221TestJSON-1895476841 tempest-InstanceActionsV221TestJSON-1895476841-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.706s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2008.488796] env[63279]: DEBUG nova.compute.manager [None req-38cbe7fd-87b8-43d7-9843-9491e5a539a0 tempest-InstanceActionsV221TestJSON-1895476841 tempest-InstanceActionsV221TestJSON-1895476841-project-member] [instance: ff9701ed-d545-44b4-911a-c4d809d0a771] Start building networks asynchronously for instance. {{(pid=63279) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 2008.492501] env[63279]: DEBUG oslo_concurrency.lockutils [None req-27d050e2-e902-4611-87fd-a6b52583e82d tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 25.614s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2008.494186] env[63279]: INFO nova.compute.claims [None req-27d050e2-e902-4611-87fd-a6b52583e82d tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] [instance: 0e12ab9b-a701-4e0f-9d96-939090f50494] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2008.707603] env[63279]: DEBUG oslo_vmware.api [None req-2fef03c7-ebe2-4875-b420-6389c982d497 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Task: {'id': task-2087231, 'name': ReconfigVM_Task, 'duration_secs': 0.314492} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2008.707913] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-2fef03c7-ebe2-4875-b420-6389c982d497 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] [instance: 1b2ca21b-feea-4fc1-9ddc-99f144e4241a] Reconfigured VM instance instance-00000031 to attach disk [datastore1] 1b2ca21b-feea-4fc1-9ddc-99f144e4241a/1b2ca21b-feea-4fc1-9ddc-99f144e4241a.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2008.708678] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-14b8f552-ac4e-4789-bc23-bddecfa655bf {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2008.715496] env[63279]: DEBUG oslo_vmware.api [None req-2fef03c7-ebe2-4875-b420-6389c982d497 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Waiting for the task: (returnval){ [ 2008.715496] env[63279]: value = "task-2087234" [ 2008.715496] env[63279]: _type = "Task" [ 2008.715496] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2008.726776] env[63279]: DEBUG oslo_vmware.api [None req-2fef03c7-ebe2-4875-b420-6389c982d497 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Task: {'id': task-2087234, 'name': Rename_Task} progress is 5%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2008.753659] env[63279]: DEBUG oslo_vmware.api [None req-71c39255-559f-4f75-b101-e68493303b43 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Task: {'id': task-2087232, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.060366} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2008.753930] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-71c39255-559f-4f75-b101-e68493303b43 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] [instance: 58392790-b297-4894-8d81-e5cbda69872b] Extended root virtual disk {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2008.754710] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5d2f6c0-5c80-4c19-a635-98357e5b2b7f {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2008.778251] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-71c39255-559f-4f75-b101-e68493303b43 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] [instance: 58392790-b297-4894-8d81-e5cbda69872b] Reconfiguring VM instance instance-00000033 to attach disk [datastore1] 58392790-b297-4894-8d81-e5cbda69872b/58392790-b297-4894-8d81-e5cbda69872b.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2008.778635] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-86f5ae6b-27c5-4c22-bbb5-ac1b2ecb27c1 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2008.801374] env[63279]: DEBUG oslo_vmware.api [None req-71c39255-559f-4f75-b101-e68493303b43 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Waiting for the task: (returnval){ [ 2008.801374] env[63279]: value = "task-2087235" [ 2008.801374] env[63279]: _type = "Task" [ 2008.801374] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2008.808428] env[63279]: DEBUG nova.network.neutron [-] [instance: acf95fad-316c-4605-9008-24d4d7c05892] Updating instance_info_cache with network_info: [] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2008.809669] env[63279]: DEBUG oslo_vmware.api [None req-3b1863be-7a23-490d-9c3e-8eb295bf6202 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52316930-4dfb-9b51-ef01-05c686780ec5, 'name': SearchDatastore_Task, 'duration_secs': 0.008895} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2008.809866] env[63279]: DEBUG oslo_vmware.api [None req-2fef03c7-ebe2-4875-b420-6389c982d497 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Task: {'id': task-2087233, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.44933} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2008.813859] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-2fef03c7-ebe2-4875-b420-6389c982d497 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] fb124cfa-24b4-4712-b8cc-c87df5d6231b/fb124cfa-24b4-4712-b8cc-c87df5d6231b.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2008.814190] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-2fef03c7-ebe2-4875-b420-6389c982d497 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] [instance: fb124cfa-24b4-4712-b8cc-c87df5d6231b] Extending root virtual disk to 1048576 {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2008.814591] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d18014ff-64a7-4d31-9d4c-1f628cff08c6 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2008.816810] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-9453886b-db02-4fa9-95c5-53466bf9ff67 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2008.823896] env[63279]: DEBUG oslo_concurrency.lockutils [req-2f1b32a5-e172-4cea-87c9-89ee3504319e req-55b6fe6d-335a-49b2-abec-82eb1f546ad9 service nova] Releasing lock "refresh_cache-8ccb4293-927a-45ba-82e9-9f1b4d5985cc" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2008.824205] env[63279]: DEBUG nova.compute.manager [req-2f1b32a5-e172-4cea-87c9-89ee3504319e req-55b6fe6d-335a-49b2-abec-82eb1f546ad9 service nova] [instance: 64e92bfc-c0d0-4918-9ba2-45ffedbf7e39] Received event network-vif-plugged-a84d472c-4f07-4a61-8eca-135f97267755 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2008.824412] env[63279]: DEBUG oslo_concurrency.lockutils [req-2f1b32a5-e172-4cea-87c9-89ee3504319e req-55b6fe6d-335a-49b2-abec-82eb1f546ad9 service nova] Acquiring lock "64e92bfc-c0d0-4918-9ba2-45ffedbf7e39-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2008.824628] env[63279]: DEBUG oslo_concurrency.lockutils [req-2f1b32a5-e172-4cea-87c9-89ee3504319e req-55b6fe6d-335a-49b2-abec-82eb1f546ad9 service nova] Lock "64e92bfc-c0d0-4918-9ba2-45ffedbf7e39-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2008.824795] env[63279]: DEBUG oslo_concurrency.lockutils [req-2f1b32a5-e172-4cea-87c9-89ee3504319e req-55b6fe6d-335a-49b2-abec-82eb1f546ad9 service nova] Lock "64e92bfc-c0d0-4918-9ba2-45ffedbf7e39-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2008.824964] env[63279]: DEBUG nova.compute.manager [req-2f1b32a5-e172-4cea-87c9-89ee3504319e 
req-55b6fe6d-335a-49b2-abec-82eb1f546ad9 service nova] [instance: 64e92bfc-c0d0-4918-9ba2-45ffedbf7e39] No waiting events found dispatching network-vif-plugged-a84d472c-4f07-4a61-8eca-135f97267755 {{(pid=63279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 2008.825172] env[63279]: WARNING nova.compute.manager [req-2f1b32a5-e172-4cea-87c9-89ee3504319e req-55b6fe6d-335a-49b2-abec-82eb1f546ad9 service nova] [instance: 64e92bfc-c0d0-4918-9ba2-45ffedbf7e39] Received unexpected event network-vif-plugged-a84d472c-4f07-4a61-8eca-135f97267755 for instance with vm_state building and task_state spawning. [ 2008.825324] env[63279]: DEBUG nova.compute.manager [req-2f1b32a5-e172-4cea-87c9-89ee3504319e req-55b6fe6d-335a-49b2-abec-82eb1f546ad9 service nova] [instance: 64e92bfc-c0d0-4918-9ba2-45ffedbf7e39] Received event network-changed-a84d472c-4f07-4a61-8eca-135f97267755 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2008.825482] env[63279]: DEBUG nova.compute.manager [req-2f1b32a5-e172-4cea-87c9-89ee3504319e req-55b6fe6d-335a-49b2-abec-82eb1f546ad9 service nova] [instance: 64e92bfc-c0d0-4918-9ba2-45ffedbf7e39] Refreshing instance network info cache due to event network-changed-a84d472c-4f07-4a61-8eca-135f97267755. {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11655}} [ 2008.825764] env[63279]: DEBUG oslo_concurrency.lockutils [req-2f1b32a5-e172-4cea-87c9-89ee3504319e req-55b6fe6d-335a-49b2-abec-82eb1f546ad9 service nova] Acquiring lock "refresh_cache-64e92bfc-c0d0-4918-9ba2-45ffedbf7e39" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2008.825858] env[63279]: DEBUG oslo_concurrency.lockutils [req-2f1b32a5-e172-4cea-87c9-89ee3504319e req-55b6fe6d-335a-49b2-abec-82eb1f546ad9 service nova] Acquired lock "refresh_cache-64e92bfc-c0d0-4918-9ba2-45ffedbf7e39" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2008.826019] env[63279]: DEBUG nova.network.neutron [req-2f1b32a5-e172-4cea-87c9-89ee3504319e req-55b6fe6d-335a-49b2-abec-82eb1f546ad9 service nova] [instance: 64e92bfc-c0d0-4918-9ba2-45ffedbf7e39] Refreshing network info cache for port a84d472c-4f07-4a61-8eca-135f97267755 {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2008.827329] env[63279]: DEBUG oslo_vmware.api [None req-71c39255-559f-4f75-b101-e68493303b43 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Task: {'id': task-2087235, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2008.830456] env[63279]: DEBUG oslo_vmware.api [None req-3b1863be-7a23-490d-9c3e-8eb295bf6202 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Waiting for the task: (returnval){ [ 2008.830456] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]522d8612-b958-e975-99dd-2d61e8fabeb3" [ 2008.830456] env[63279]: _type = "Task" [ 2008.830456] env[63279]: } to complete. 
{{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2008.830660] env[63279]: DEBUG oslo_vmware.api [None req-2fef03c7-ebe2-4875-b420-6389c982d497 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Waiting for the task: (returnval){ [ 2008.830660] env[63279]: value = "task-2087236" [ 2008.830660] env[63279]: _type = "Task" [ 2008.830660] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2008.837293] env[63279]: DEBUG oslo_concurrency.lockutils [None req-efbb3008-c956-4d6e-b972-b10e6dc25335 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Releasing lock "refresh_cache-f6a5d157-d58c-4c7e-b6e8-f2dc7aaebd9b" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2008.837422] env[63279]: DEBUG nova.compute.manager [None req-efbb3008-c956-4d6e-b972-b10e6dc25335 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] [instance: f6a5d157-d58c-4c7e-b6e8-f2dc7aaebd9b] Instance network_info: |[{"id": "cbbfde33-a0b6-4403-8a1e-d688a0a7147b", "address": "fa:16:3e:47:7e:05", "network": {"id": "1e875730-8e0e-4907-a6d2-776025ed7ab9", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.114", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "a935e86eee0a4c38adfe0367d2097a61", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "32028d02-abaa-4071-bc65-1460f5c772a8", "external-id": "nsx-vlan-transportzone-558", "segmentation_id": 558, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcbbfde33-a0", "ovs_interfaceid": "cbbfde33-a0b6-4403-8a1e-d688a0a7147b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 2008.838196] env[63279]: DEBUG oslo_concurrency.lockutils [req-419615be-577d-48a4-a4ef-93e7d47fa0c3 req-a857c259-fab3-4116-8447-6cc44a1bf5da service nova] Acquired lock "refresh_cache-f6a5d157-d58c-4c7e-b6e8-f2dc7aaebd9b" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2008.838407] env[63279]: DEBUG nova.network.neutron [req-419615be-577d-48a4-a4ef-93e7d47fa0c3 req-a857c259-fab3-4116-8447-6cc44a1bf5da service nova] [instance: f6a5d157-d58c-4c7e-b6e8-f2dc7aaebd9b] Refreshing network info cache for port cbbfde33-a0b6-4403-8a1e-d688a0a7147b {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2008.839812] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-efbb3008-c956-4d6e-b972-b10e6dc25335 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] [instance: f6a5d157-d58c-4c7e-b6e8-f2dc7aaebd9b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:47:7e:05', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '32028d02-abaa-4071-bc65-1460f5c772a8', 
'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'cbbfde33-a0b6-4403-8a1e-d688a0a7147b', 'vif_model': 'vmxnet3'}] {{(pid=63279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2008.847449] env[63279]: DEBUG oslo.service.loopingcall [None req-efbb3008-c956-4d6e-b972-b10e6dc25335 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2008.853837] env[63279]: DEBUG oslo_concurrency.lockutils [req-1b37797b-c5e6-489d-b87a-ac39e649b32f req-6b15be0c-504b-49d9-a537-1700cf051b24 service nova] Releasing lock "refresh_cache-58392790-b297-4894-8d81-e5cbda69872b" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2008.853837] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f6a5d157-d58c-4c7e-b6e8-f2dc7aaebd9b] Creating VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2008.854061] env[63279]: DEBUG oslo_vmware.api [None req-3b1863be-7a23-490d-9c3e-8eb295bf6202 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]522d8612-b958-e975-99dd-2d61e8fabeb3, 'name': SearchDatastore_Task, 'duration_secs': 0.01112} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2008.857556] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d35d4c7a-a9eb-4469-b4b5-fc8d16b75c41 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2008.873510] env[63279]: DEBUG oslo_concurrency.lockutils [None req-3b1863be-7a23-490d-9c3e-8eb295bf6202 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2008.873795] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-3b1863be-7a23-490d-9c3e-8eb295bf6202 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] 8ccb4293-927a-45ba-82e9-9f1b4d5985cc/8ccb4293-927a-45ba-82e9-9f1b4d5985cc.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2008.874130] env[63279]: DEBUG oslo_vmware.api [None req-2fef03c7-ebe2-4875-b420-6389c982d497 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Task: {'id': task-2087236, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2008.874799] env[63279]: DEBUG oslo_concurrency.lockutils [None req-0248352a-b141-4db1-ac2b-5125f47ee5eb tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2008.875019] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-0248352a-b141-4db1-ac2b-5125f47ee5eb tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2008.875263] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-847cbe0e-518e-4e12-969e-904b75d30e6a {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2008.877658] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2c373534-b70e-47ba-a2ac-e05f5943ea6d {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2008.883282] env[63279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2008.883282] env[63279]: value = "task-2087237" [ 2008.883282] env[63279]: _type = "Task" [ 2008.883282] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2008.884721] env[63279]: DEBUG oslo_vmware.api [None req-3b1863be-7a23-490d-9c3e-8eb295bf6202 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Waiting for the task: (returnval){ [ 2008.884721] env[63279]: value = "task-2087238" [ 2008.884721] env[63279]: _type = "Task" [ 2008.884721] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2008.893088] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-0248352a-b141-4db1-ac2b-5125f47ee5eb tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2008.893302] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-0248352a-b141-4db1-ac2b-5125f47ee5eb tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2008.896668] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b8878d1c-5165-4460-94ac-53c01cf80c20 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2008.903144] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087237, 'name': CreateVM_Task} progress is 6%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2008.908573] env[63279]: DEBUG oslo_vmware.api [None req-3b1863be-7a23-490d-9c3e-8eb295bf6202 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Task: {'id': task-2087238, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2008.910079] env[63279]: DEBUG oslo_vmware.api [None req-0248352a-b141-4db1-ac2b-5125f47ee5eb tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Waiting for the task: (returnval){ [ 2008.910079] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]5240f7f0-d9cc-6a48-ef90-9daea5241372" [ 2008.910079] env[63279]: _type = "Task" [ 2008.910079] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2008.919767] env[63279]: DEBUG oslo_vmware.api [None req-0248352a-b141-4db1-ac2b-5125f47ee5eb tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]5240f7f0-d9cc-6a48-ef90-9daea5241372, 'name': SearchDatastore_Task, 'duration_secs': 0.008997} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2008.922435] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-89d836d3-7a28-4b54-9e17-eebacc928cfc {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2008.927348] env[63279]: DEBUG oslo_vmware.api [None req-0248352a-b141-4db1-ac2b-5125f47ee5eb tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Waiting for the task: (returnval){ [ 2008.927348] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52e0e44c-587d-bafd-7f7c-419afe36244a" [ 2008.927348] env[63279]: _type = "Task" [ 2008.927348] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2008.935324] env[63279]: DEBUG oslo_vmware.api [None req-0248352a-b141-4db1-ac2b-5125f47ee5eb tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52e0e44c-587d-bafd-7f7c-419afe36244a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2009.000011] env[63279]: DEBUG nova.compute.utils [None req-38cbe7fd-87b8-43d7-9843-9491e5a539a0 tempest-InstanceActionsV221TestJSON-1895476841 tempest-InstanceActionsV221TestJSON-1895476841-project-member] Using /dev/sd instead of None {{(pid=63279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2009.003313] env[63279]: DEBUG nova.compute.manager [None req-38cbe7fd-87b8-43d7-9843-9491e5a539a0 tempest-InstanceActionsV221TestJSON-1895476841 tempest-InstanceActionsV221TestJSON-1895476841-project-member] [instance: ff9701ed-d545-44b4-911a-c4d809d0a771] Allocating IP information in the background. 
{{(pid=63279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 2009.003313] env[63279]: DEBUG nova.network.neutron [None req-38cbe7fd-87b8-43d7-9843-9491e5a539a0 tempest-InstanceActionsV221TestJSON-1895476841 tempest-InstanceActionsV221TestJSON-1895476841-project-member] [instance: ff9701ed-d545-44b4-911a-c4d809d0a771] allocate_for_instance() {{(pid=63279) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2009.059310] env[63279]: DEBUG nova.policy [None req-38cbe7fd-87b8-43d7-9843-9491e5a539a0 tempest-InstanceActionsV221TestJSON-1895476841 tempest-InstanceActionsV221TestJSON-1895476841-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f4c0a587c8aa4d208b804b1c5f9cabaa', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '36142278730f4f6b87fc9a51283b5cdf', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63279) authorize /opt/stack/nova/nova/policy.py:192}} [ 2009.231508] env[63279]: DEBUG oslo_vmware.api [None req-2fef03c7-ebe2-4875-b420-6389c982d497 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Task: {'id': task-2087234, 'name': Rename_Task, 'duration_secs': 0.136526} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2009.231766] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-2fef03c7-ebe2-4875-b420-6389c982d497 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] [instance: 1b2ca21b-feea-4fc1-9ddc-99f144e4241a] Powering on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2009.232325] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1755aebf-8441-43a1-abb9-e669e24f7626 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2009.243600] env[63279]: DEBUG oslo_vmware.api [None req-2fef03c7-ebe2-4875-b420-6389c982d497 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Waiting for the task: (returnval){ [ 2009.243600] env[63279]: value = "task-2087239" [ 2009.243600] env[63279]: _type = "Task" [ 2009.243600] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2009.255424] env[63279]: DEBUG oslo_vmware.api [None req-2fef03c7-ebe2-4875-b420-6389c982d497 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Task: {'id': task-2087239, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2009.273856] env[63279]: DEBUG nova.compute.manager [req-a4be5275-5d42-41d9-8881-e8e40ec1e252 req-e7a83533-4447-4fc0-9ca5-7b6e7a5d7083 service nova] [instance: 32789822-cb54-43e7-beae-b5ed3002f4ad] Received event network-vif-deleted-271bdcf3-0171-47b9-8e8f-e190604053fd {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2009.274335] env[63279]: INFO nova.compute.manager [req-a4be5275-5d42-41d9-8881-e8e40ec1e252 req-e7a83533-4447-4fc0-9ca5-7b6e7a5d7083 service nova] [instance: 32789822-cb54-43e7-beae-b5ed3002f4ad] Neutron deleted interface 271bdcf3-0171-47b9-8e8f-e190604053fd; detaching it from the instance and deleting it from the info cache [ 2009.274565] env[63279]: DEBUG nova.network.neutron [req-a4be5275-5d42-41d9-8881-e8e40ec1e252 req-e7a83533-4447-4fc0-9ca5-7b6e7a5d7083 service nova] [instance: 32789822-cb54-43e7-beae-b5ed3002f4ad] Updating instance_info_cache with network_info: [] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2009.315575] env[63279]: INFO nova.compute.manager [-] [instance: acf95fad-316c-4605-9008-24d4d7c05892] Took 1.93 seconds to deallocate network for instance. [ 2009.326229] env[63279]: DEBUG oslo_vmware.api [None req-71c39255-559f-4f75-b101-e68493303b43 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Task: {'id': task-2087235, 'name': ReconfigVM_Task, 'duration_secs': 0.278354} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2009.327147] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-71c39255-559f-4f75-b101-e68493303b43 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] [instance: 58392790-b297-4894-8d81-e5cbda69872b] Reconfigured VM instance instance-00000033 to attach disk [datastore1] 58392790-b297-4894-8d81-e5cbda69872b/58392790-b297-4894-8d81-e5cbda69872b.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2009.330720] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-159445eb-b757-4d1c-b54d-62674fadda71 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2009.347633] env[63279]: DEBUG oslo_vmware.api [None req-2fef03c7-ebe2-4875-b420-6389c982d497 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Task: {'id': task-2087236, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.06494} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2009.348846] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-2fef03c7-ebe2-4875-b420-6389c982d497 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] [instance: fb124cfa-24b4-4712-b8cc-c87df5d6231b] Extended root virtual disk {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2009.349611] env[63279]: DEBUG oslo_vmware.api [None req-71c39255-559f-4f75-b101-e68493303b43 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Waiting for the task: (returnval){ [ 2009.349611] env[63279]: value = "task-2087240" [ 2009.349611] env[63279]: _type = "Task" [ 2009.349611] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2009.350708] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37e80da6-6867-47e2-8279-195fe481a144 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2009.390016] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-2fef03c7-ebe2-4875-b420-6389c982d497 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] [instance: fb124cfa-24b4-4712-b8cc-c87df5d6231b] Reconfiguring VM instance instance-00000032 to attach disk [datastore1] fb124cfa-24b4-4712-b8cc-c87df5d6231b/fb124cfa-24b4-4712-b8cc-c87df5d6231b.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2009.392700] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-319ecd55-90fa-42ff-b877-5bc7f4d227ab {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2009.413784] env[63279]: DEBUG oslo_vmware.api [None req-71c39255-559f-4f75-b101-e68493303b43 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Task: {'id': task-2087240, 'name': Rename_Task} progress is 14%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2009.426589] env[63279]: DEBUG nova.network.neutron [-] [instance: 32789822-cb54-43e7-beae-b5ed3002f4ad] Updating instance_info_cache with network_info: [] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2009.443043] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087237, 'name': CreateVM_Task} progress is 99%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2009.443458] env[63279]: DEBUG oslo_vmware.api [None req-2fef03c7-ebe2-4875-b420-6389c982d497 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Waiting for the task: (returnval){ [ 2009.443458] env[63279]: value = "task-2087241" [ 2009.443458] env[63279]: _type = "Task" [ 2009.443458] env[63279]: } to complete. 
{{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2009.443712] env[63279]: DEBUG oslo_vmware.api [None req-3b1863be-7a23-490d-9c3e-8eb295bf6202 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Task: {'id': task-2087238, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.48279} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2009.445331] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-3b1863be-7a23-490d-9c3e-8eb295bf6202 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] 8ccb4293-927a-45ba-82e9-9f1b4d5985cc/8ccb4293-927a-45ba-82e9-9f1b4d5985cc.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2009.445590] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-3b1863be-7a23-490d-9c3e-8eb295bf6202 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] [instance: 8ccb4293-927a-45ba-82e9-9f1b4d5985cc] Extending root virtual disk to 1048576 {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2009.449376] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-4e8d330f-4e60-4efe-9a76-a5409b45f69d {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2009.455605] env[63279]: DEBUG oslo_vmware.api [None req-0248352a-b141-4db1-ac2b-5125f47ee5eb tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52e0e44c-587d-bafd-7f7c-419afe36244a, 'name': SearchDatastore_Task, 'duration_secs': 0.009323} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2009.456751] env[63279]: DEBUG oslo_concurrency.lockutils [None req-0248352a-b141-4db1-ac2b-5125f47ee5eb tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2009.457198] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-0248352a-b141-4db1-ac2b-5125f47ee5eb tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] 64e92bfc-c0d0-4918-9ba2-45ffedbf7e39/64e92bfc-c0d0-4918-9ba2-45ffedbf7e39.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2009.457344] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-42c3746c-de46-45b9-8f86-629294eca496 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2009.465094] env[63279]: DEBUG oslo_vmware.api [None req-3b1863be-7a23-490d-9c3e-8eb295bf6202 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Waiting for the task: (returnval){ [ 2009.465094] env[63279]: value = "task-2087242" [ 2009.465094] env[63279]: _type = "Task" [ 2009.465094] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2009.465322] env[63279]: DEBUG oslo_vmware.api [None req-2fef03c7-ebe2-4875-b420-6389c982d497 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Task: {'id': task-2087241, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2009.471036] env[63279]: DEBUG oslo_vmware.api [None req-0248352a-b141-4db1-ac2b-5125f47ee5eb tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Waiting for the task: (returnval){ [ 2009.471036] env[63279]: value = "task-2087243" [ 2009.471036] env[63279]: _type = "Task" [ 2009.471036] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2009.480124] env[63279]: DEBUG oslo_vmware.api [None req-3b1863be-7a23-490d-9c3e-8eb295bf6202 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Task: {'id': task-2087242, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2009.486636] env[63279]: DEBUG oslo_vmware.api [None req-0248352a-b141-4db1-ac2b-5125f47ee5eb tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Task: {'id': task-2087243, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2009.510382] env[63279]: DEBUG nova.compute.manager [None req-38cbe7fd-87b8-43d7-9843-9491e5a539a0 tempest-InstanceActionsV221TestJSON-1895476841 tempest-InstanceActionsV221TestJSON-1895476841-project-member] [instance: ff9701ed-d545-44b4-911a-c4d809d0a771] Start building block device mappings for instance. {{(pid=63279) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 2009.760281] env[63279]: DEBUG oslo_vmware.api [None req-2fef03c7-ebe2-4875-b420-6389c982d497 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Task: {'id': task-2087239, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2009.778483] env[63279]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-ccdf1478-af84-4699-b2a7-98a75e96dd47 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2009.790255] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb0cc84e-2a15-415a-851a-74cf350f1763 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2009.821486] env[63279]: DEBUG nova.network.neutron [req-419615be-577d-48a4-a4ef-93e7d47fa0c3 req-a857c259-fab3-4116-8447-6cc44a1bf5da service nova] [instance: f6a5d157-d58c-4c7e-b6e8-f2dc7aaebd9b] Updated VIF entry in instance network info cache for port cbbfde33-a0b6-4403-8a1e-d688a0a7147b. {{(pid=63279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2009.821933] env[63279]: DEBUG nova.network.neutron [req-419615be-577d-48a4-a4ef-93e7d47fa0c3 req-a857c259-fab3-4116-8447-6cc44a1bf5da service nova] [instance: f6a5d157-d58c-4c7e-b6e8-f2dc7aaebd9b] Updating instance_info_cache with network_info: [{"id": "cbbfde33-a0b6-4403-8a1e-d688a0a7147b", "address": "fa:16:3e:47:7e:05", "network": {"id": "1e875730-8e0e-4907-a6d2-776025ed7ab9", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.114", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "a935e86eee0a4c38adfe0367d2097a61", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "32028d02-abaa-4071-bc65-1460f5c772a8", "external-id": "nsx-vlan-transportzone-558", "segmentation_id": 558, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcbbfde33-a0", "ovs_interfaceid": "cbbfde33-a0b6-4403-8a1e-d688a0a7147b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2009.839758] env[63279]: DEBUG oslo_concurrency.lockutils [None req-dd23a8b6-623a-4385-a528-bdfe10dd648b tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] Acquiring lock "compute_resources" by 
"nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2009.840691] env[63279]: DEBUG nova.compute.manager [req-a4be5275-5d42-41d9-8881-e8e40ec1e252 req-e7a83533-4447-4fc0-9ca5-7b6e7a5d7083 service nova] [instance: 32789822-cb54-43e7-beae-b5ed3002f4ad] Detach interface failed, port_id=271bdcf3-0171-47b9-8e8f-e190604053fd, reason: Instance 32789822-cb54-43e7-beae-b5ed3002f4ad could not be found. {{(pid=63279) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11484}} [ 2009.841729] env[63279]: DEBUG oslo_concurrency.lockutils [req-419615be-577d-48a4-a4ef-93e7d47fa0c3 req-a857c259-fab3-4116-8447-6cc44a1bf5da service nova] Releasing lock "refresh_cache-f6a5d157-d58c-4c7e-b6e8-f2dc7aaebd9b" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2009.870223] env[63279]: DEBUG oslo_vmware.api [None req-71c39255-559f-4f75-b101-e68493303b43 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Task: {'id': task-2087240, 'name': Rename_Task, 'duration_secs': 0.161623} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2009.873321] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-71c39255-559f-4f75-b101-e68493303b43 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] [instance: 58392790-b297-4894-8d81-e5cbda69872b] Powering on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2009.873859] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-80d484b1-36a8-4253-8641-3bff0caf45b0 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2009.883189] env[63279]: DEBUG oslo_vmware.api [None req-71c39255-559f-4f75-b101-e68493303b43 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Waiting for the task: (returnval){ [ 2009.883189] env[63279]: value = "task-2087244" [ 2009.883189] env[63279]: _type = "Task" [ 2009.883189] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2009.897810] env[63279]: DEBUG oslo_vmware.api [None req-71c39255-559f-4f75-b101-e68493303b43 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Task: {'id': task-2087244, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2009.908866] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087237, 'name': CreateVM_Task, 'duration_secs': 0.564538} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2009.909652] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f6a5d157-d58c-4c7e-b6e8-f2dc7aaebd9b] Created VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2009.910977] env[63279]: DEBUG oslo_concurrency.lockutils [None req-efbb3008-c956-4d6e-b972-b10e6dc25335 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2009.910977] env[63279]: DEBUG oslo_concurrency.lockutils [None req-efbb3008-c956-4d6e-b972-b10e6dc25335 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2009.910977] env[63279]: DEBUG oslo_concurrency.lockutils [None req-efbb3008-c956-4d6e-b972-b10e6dc25335 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2009.911663] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-76f6c202-89c7-4b91-b144-b44a9838a249 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2009.917538] env[63279]: DEBUG oslo_vmware.api [None req-efbb3008-c956-4d6e-b972-b10e6dc25335 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Waiting for the task: (returnval){ [ 2009.917538] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]5258106d-e86a-f097-6241-94b75a7c7360" [ 2009.917538] env[63279]: _type = "Task" [ 2009.917538] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2009.931862] env[63279]: INFO nova.compute.manager [-] [instance: 32789822-cb54-43e7-beae-b5ed3002f4ad] Took 1.45 seconds to deallocate network for instance. [ 2009.932472] env[63279]: DEBUG oslo_vmware.api [None req-efbb3008-c956-4d6e-b972-b10e6dc25335 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]5258106d-e86a-f097-6241-94b75a7c7360, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2009.956485] env[63279]: DEBUG oslo_vmware.api [None req-2fef03c7-ebe2-4875-b420-6389c982d497 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Task: {'id': task-2087241, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2009.980878] env[63279]: DEBUG oslo_vmware.api [None req-3b1863be-7a23-490d-9c3e-8eb295bf6202 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Task: {'id': task-2087242, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.129285} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2009.982057] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-3b1863be-7a23-490d-9c3e-8eb295bf6202 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] [instance: 8ccb4293-927a-45ba-82e9-9f1b4d5985cc] Extended root virtual disk {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2009.982212] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a5f1498-31ad-44aa-afa6-834779bb1766 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2009.993526] env[63279]: DEBUG oslo_vmware.api [None req-0248352a-b141-4db1-ac2b-5125f47ee5eb tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Task: {'id': task-2087243, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2010.021807] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-3b1863be-7a23-490d-9c3e-8eb295bf6202 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] [instance: 8ccb4293-927a-45ba-82e9-9f1b4d5985cc] Reconfiguring VM instance instance-00000034 to attach disk [datastore1] 8ccb4293-927a-45ba-82e9-9f1b4d5985cc/8ccb4293-927a-45ba-82e9-9f1b4d5985cc.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2010.029755] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6ceac95f-ce9d-4c01-8072-6c4bc0997c66 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2010.054990] env[63279]: DEBUG oslo_vmware.api [None req-3b1863be-7a23-490d-9c3e-8eb295bf6202 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Waiting for the task: (returnval){ [ 2010.054990] env[63279]: value = "task-2087245" [ 2010.054990] env[63279]: _type = "Task" [ 2010.054990] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2010.066296] env[63279]: DEBUG oslo_vmware.api [None req-3b1863be-7a23-490d-9c3e-8eb295bf6202 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Task: {'id': task-2087245, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2010.106033] env[63279]: DEBUG nova.network.neutron [None req-38cbe7fd-87b8-43d7-9843-9491e5a539a0 tempest-InstanceActionsV221TestJSON-1895476841 tempest-InstanceActionsV221TestJSON-1895476841-project-member] [instance: ff9701ed-d545-44b4-911a-c4d809d0a771] Successfully created port: 4805f07f-4add-493e-80cb-e2c75cc21104 {{(pid=63279) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2010.204856] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da95d529-8db7-4fef-bd9f-7339f484c530 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2010.213241] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f658f1c-657d-463f-8c93-3cf510eae62b {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2010.218533] env[63279]: DEBUG nova.network.neutron [req-2f1b32a5-e172-4cea-87c9-89ee3504319e req-55b6fe6d-335a-49b2-abec-82eb1f546ad9 service nova] [instance: 64e92bfc-c0d0-4918-9ba2-45ffedbf7e39] Updated VIF entry in instance network info cache for port a84d472c-4f07-4a61-8eca-135f97267755. {{(pid=63279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2010.218943] env[63279]: DEBUG nova.network.neutron [req-2f1b32a5-e172-4cea-87c9-89ee3504319e req-55b6fe6d-335a-49b2-abec-82eb1f546ad9 service nova] [instance: 64e92bfc-c0d0-4918-9ba2-45ffedbf7e39] Updating instance_info_cache with network_info: [{"id": "a84d472c-4f07-4a61-8eca-135f97267755", "address": "fa:16:3e:f6:de:1a", "network": {"id": "6d7c4c21-3389-4688-b373-329b2328f275", "bridge": "br-int", "label": "tempest-ImagesTestJSON-311425692-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "77ee2145dda94e2b85eeb7379ed98e26", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0df968ae-c1ef-4009-a0f4-6f2e799c2fda", "external-id": "nsx-vlan-transportzone-864", "segmentation_id": 864, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa84d472c-4f", "ovs_interfaceid": "a84d472c-4f07-4a61-8eca-135f97267755", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2010.251251] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7612aa2b-5d65-4050-920a-8a7840aaa30e {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2010.261076] env[63279]: DEBUG oslo_vmware.api [None req-2fef03c7-ebe2-4875-b420-6389c982d497 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Task: {'id': task-2087239, 'name': PowerOnVM_Task, 'duration_secs': 0.683416} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2010.262255] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91a0271f-093b-424b-8e6f-081e207ba52b {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2010.266633] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-2fef03c7-ebe2-4875-b420-6389c982d497 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] [instance: 1b2ca21b-feea-4fc1-9ddc-99f144e4241a] Powered on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2010.266783] env[63279]: INFO nova.compute.manager [None req-2fef03c7-ebe2-4875-b420-6389c982d497 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] [instance: 1b2ca21b-feea-4fc1-9ddc-99f144e4241a] Took 18.06 seconds to spawn the instance on the hypervisor. [ 2010.266951] env[63279]: DEBUG nova.compute.manager [None req-2fef03c7-ebe2-4875-b420-6389c982d497 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] [instance: 1b2ca21b-feea-4fc1-9ddc-99f144e4241a] Checking state {{(pid=63279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2010.269309] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e093846-42b6-4eb6-9b64-adc43c1a6496 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2010.286272] env[63279]: DEBUG nova.compute.provider_tree [None req-27d050e2-e902-4611-87fd-a6b52583e82d tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] Updating inventory in ProviderTree for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2010.395783] env[63279]: DEBUG oslo_vmware.api [None req-71c39255-559f-4f75-b101-e68493303b43 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Task: {'id': task-2087244, 'name': PowerOnVM_Task} progress is 71%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2010.428976] env[63279]: DEBUG oslo_vmware.api [None req-efbb3008-c956-4d6e-b972-b10e6dc25335 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]5258106d-e86a-f097-6241-94b75a7c7360, 'name': SearchDatastore_Task, 'duration_secs': 0.06238} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2010.429848] env[63279]: DEBUG oslo_concurrency.lockutils [None req-efbb3008-c956-4d6e-b972-b10e6dc25335 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2010.429848] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-efbb3008-c956-4d6e-b972-b10e6dc25335 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] [instance: f6a5d157-d58c-4c7e-b6e8-f2dc7aaebd9b] Processing image 30887889-e45b-4f67-8b3c-16216e594a90 {{(pid=63279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2010.430025] env[63279]: DEBUG oslo_concurrency.lockutils [None req-efbb3008-c956-4d6e-b972-b10e6dc25335 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2010.430210] env[63279]: DEBUG oslo_concurrency.lockutils [None req-efbb3008-c956-4d6e-b972-b10e6dc25335 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2010.430421] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-efbb3008-c956-4d6e-b972-b10e6dc25335 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2010.430714] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-98cf250e-b4d8-4489-8837-64e58d9ac74c {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2010.440174] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-efbb3008-c956-4d6e-b972-b10e6dc25335 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2010.440381] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-efbb3008-c956-4d6e-b972-b10e6dc25335 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2010.441170] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3fc6f420-7926-457f-ab5e-6017d7cb9990 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2010.444161] env[63279]: DEBUG oslo_concurrency.lockutils [None req-5ccf4cc3-01a1-4613-8c3b-fa276e803cd1 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2010.451183] env[63279]: DEBUG oslo_vmware.api [None req-efbb3008-c956-4d6e-b972-b10e6dc25335 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Waiting for the task: (returnval){ [ 2010.451183] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]521017c5-5cbc-c153-7607-f1bb04c4a6bb" [ 2010.451183] env[63279]: _type = "Task" [ 2010.451183] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2010.458129] env[63279]: DEBUG oslo_vmware.api [None req-2fef03c7-ebe2-4875-b420-6389c982d497 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Task: {'id': task-2087241, 'name': ReconfigVM_Task, 'duration_secs': 0.543571} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2010.458837] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-2fef03c7-ebe2-4875-b420-6389c982d497 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] [instance: fb124cfa-24b4-4712-b8cc-c87df5d6231b] Reconfigured VM instance instance-00000032 to attach disk [datastore1] fb124cfa-24b4-4712-b8cc-c87df5d6231b/fb124cfa-24b4-4712-b8cc-c87df5d6231b.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2010.459473] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-0077ea8b-ad5b-4143-9205-6f1e634b89fa {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2010.464582] env[63279]: DEBUG oslo_vmware.api [None req-efbb3008-c956-4d6e-b972-b10e6dc25335 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]521017c5-5cbc-c153-7607-f1bb04c4a6bb, 'name': SearchDatastore_Task, 'duration_secs': 0.009221} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2010.465666] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-06221386-6903-46dc-b445-434d573d5c0b {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2010.470611] env[63279]: DEBUG oslo_vmware.api [None req-efbb3008-c956-4d6e-b972-b10e6dc25335 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Waiting for the task: (returnval){ [ 2010.470611] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52bb65fc-ce95-d8af-5767-c1b71125adb9" [ 2010.470611] env[63279]: _type = "Task" [ 2010.470611] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2010.471894] env[63279]: DEBUG oslo_vmware.api [None req-2fef03c7-ebe2-4875-b420-6389c982d497 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Waiting for the task: (returnval){ [ 2010.471894] env[63279]: value = "task-2087246" [ 2010.471894] env[63279]: _type = "Task" [ 2010.471894] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2010.486510] env[63279]: DEBUG oslo_vmware.api [None req-efbb3008-c956-4d6e-b972-b10e6dc25335 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52bb65fc-ce95-d8af-5767-c1b71125adb9, 'name': SearchDatastore_Task, 'duration_secs': 0.008554} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2010.492675] env[63279]: DEBUG oslo_concurrency.lockutils [None req-efbb3008-c956-4d6e-b972-b10e6dc25335 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2010.492953] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-efbb3008-c956-4d6e-b972-b10e6dc25335 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] f6a5d157-d58c-4c7e-b6e8-f2dc7aaebd9b/f6a5d157-d58c-4c7e-b6e8-f2dc7aaebd9b.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2010.493250] env[63279]: DEBUG oslo_vmware.api [None req-2fef03c7-ebe2-4875-b420-6389c982d497 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Task: {'id': task-2087246, 'name': Rename_Task} progress is 10%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2010.493478] env[63279]: DEBUG oslo_vmware.api [None req-0248352a-b141-4db1-ac2b-5125f47ee5eb tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Task: {'id': task-2087243, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.629561} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2010.494137] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8b4b3903-ed07-4376-9512-362ac2eeda3a {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2010.496147] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-0248352a-b141-4db1-ac2b-5125f47ee5eb tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] 64e92bfc-c0d0-4918-9ba2-45ffedbf7e39/64e92bfc-c0d0-4918-9ba2-45ffedbf7e39.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2010.496920] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-0248352a-b141-4db1-ac2b-5125f47ee5eb tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] [instance: 64e92bfc-c0d0-4918-9ba2-45ffedbf7e39] Extending root virtual disk to 1048576 {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2010.496920] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-af047093-c42d-44cb-868f-24ef1ef7184d {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2010.502805] env[63279]: DEBUG oslo_vmware.api [None req-efbb3008-c956-4d6e-b972-b10e6dc25335 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Waiting for the task: (returnval){ [ 2010.502805] env[63279]: value = "task-2087247" [ 2010.502805] env[63279]: _type = "Task" [ 2010.502805] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2010.506970] env[63279]: DEBUG oslo_vmware.api [None req-0248352a-b141-4db1-ac2b-5125f47ee5eb tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Waiting for the task: (returnval){ [ 2010.506970] env[63279]: value = "task-2087248" [ 2010.506970] env[63279]: _type = "Task" [ 2010.506970] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2010.514921] env[63279]: DEBUG oslo_vmware.api [None req-efbb3008-c956-4d6e-b972-b10e6dc25335 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Task: {'id': task-2087247, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2010.519783] env[63279]: DEBUG oslo_vmware.api [None req-0248352a-b141-4db1-ac2b-5125f47ee5eb tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Task: {'id': task-2087248, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2010.548654] env[63279]: DEBUG nova.compute.manager [None req-38cbe7fd-87b8-43d7-9843-9491e5a539a0 tempest-InstanceActionsV221TestJSON-1895476841 tempest-InstanceActionsV221TestJSON-1895476841-project-member] [instance: ff9701ed-d545-44b4-911a-c4d809d0a771] Start spawning the instance on the hypervisor. 
{{(pid=63279) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 2010.568746] env[63279]: DEBUG oslo_vmware.api [None req-3b1863be-7a23-490d-9c3e-8eb295bf6202 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Task: {'id': task-2087245, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2010.584184] env[63279]: DEBUG nova.virt.hardware [None req-38cbe7fd-87b8-43d7-9843-9491e5a539a0 tempest-InstanceActionsV221TestJSON-1895476841 tempest-InstanceActionsV221TestJSON-1895476841-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-13T17:30:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-13T17:30:01Z,direct_url=,disk_format='vmdk',id=30887889-e45b-4f67-8b3c-16216e594a90,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a935e86eee0a4c38adfe0367d2097a61',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-13T17:30:02Z,virtual_size=,visibility=), allow threads: False {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2010.584456] env[63279]: DEBUG nova.virt.hardware [None req-38cbe7fd-87b8-43d7-9843-9491e5a539a0 tempest-InstanceActionsV221TestJSON-1895476841 tempest-InstanceActionsV221TestJSON-1895476841-project-member] Flavor limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2010.584620] env[63279]: DEBUG nova.virt.hardware [None req-38cbe7fd-87b8-43d7-9843-9491e5a539a0 tempest-InstanceActionsV221TestJSON-1895476841 tempest-InstanceActionsV221TestJSON-1895476841-project-member] Image limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2010.584804] env[63279]: DEBUG nova.virt.hardware [None req-38cbe7fd-87b8-43d7-9843-9491e5a539a0 tempest-InstanceActionsV221TestJSON-1895476841 tempest-InstanceActionsV221TestJSON-1895476841-project-member] Flavor pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2010.584975] env[63279]: DEBUG nova.virt.hardware [None req-38cbe7fd-87b8-43d7-9843-9491e5a539a0 tempest-InstanceActionsV221TestJSON-1895476841 tempest-InstanceActionsV221TestJSON-1895476841-project-member] Image pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2010.585112] env[63279]: DEBUG nova.virt.hardware [None req-38cbe7fd-87b8-43d7-9843-9491e5a539a0 tempest-InstanceActionsV221TestJSON-1895476841 tempest-InstanceActionsV221TestJSON-1895476841-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2010.585325] env[63279]: DEBUG nova.virt.hardware [None req-38cbe7fd-87b8-43d7-9843-9491e5a539a0 tempest-InstanceActionsV221TestJSON-1895476841 tempest-InstanceActionsV221TestJSON-1895476841-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) 
{{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 2010.585488] env[63279]: DEBUG nova.virt.hardware [None req-38cbe7fd-87b8-43d7-9843-9491e5a539a0 tempest-InstanceActionsV221TestJSON-1895476841 tempest-InstanceActionsV221TestJSON-1895476841-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 2010.585658] env[63279]: DEBUG nova.virt.hardware [None req-38cbe7fd-87b8-43d7-9843-9491e5a539a0 tempest-InstanceActionsV221TestJSON-1895476841 tempest-InstanceActionsV221TestJSON-1895476841-project-member] Got 1 possible topologies {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2010.585823] env[63279]: DEBUG nova.virt.hardware [None req-38cbe7fd-87b8-43d7-9843-9491e5a539a0 tempest-InstanceActionsV221TestJSON-1895476841 tempest-InstanceActionsV221TestJSON-1895476841-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2010.585998] env[63279]: DEBUG nova.virt.hardware [None req-38cbe7fd-87b8-43d7-9843-9491e5a539a0 tempest-InstanceActionsV221TestJSON-1895476841 tempest-InstanceActionsV221TestJSON-1895476841-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2010.587181] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da25fd3d-c252-4eec-bb49-7d136dab2fd8 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2010.594857] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91bd3476-8b84-4178-8bb3-ca321daf6630 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2010.612252] env[63279]: DEBUG nova.compute.manager [req-1b25d7ba-7580-46fb-8547-9175b76e0637 req-5458d3c5-1ba4-41b9-94e9-e906b0c176cb service nova] [instance: acf95fad-316c-4605-9008-24d4d7c05892] Received event network-vif-deleted-efb38616-854e-4529-ac78-98f777f045f6 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2010.722248] env[63279]: DEBUG oslo_concurrency.lockutils [req-2f1b32a5-e172-4cea-87c9-89ee3504319e req-55b6fe6d-335a-49b2-abec-82eb1f546ad9 service nova] Releasing lock "refresh_cache-64e92bfc-c0d0-4918-9ba2-45ffedbf7e39" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2010.796699] env[63279]: INFO nova.compute.manager [None req-2fef03c7-ebe2-4875-b420-6389c982d497 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] [instance: 1b2ca21b-feea-4fc1-9ddc-99f144e4241a] Took 55.26 seconds to build instance. 
[ 2010.826666] env[63279]: ERROR nova.scheduler.client.report [None req-27d050e2-e902-4611-87fd-a6b52583e82d tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] [req-b78be379-2cb2-49ea-91f3-ef0d5f0800e9] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 0ba7c625-a0fc-4d3c-b804-196d00f00137. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-b78be379-2cb2-49ea-91f3-ef0d5f0800e9"}]} [ 2010.854677] env[63279]: DEBUG nova.scheduler.client.report [None req-27d050e2-e902-4611-87fd-a6b52583e82d tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] Refreshing inventories for resource provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 2010.873498] env[63279]: DEBUG nova.scheduler.client.report [None req-27d050e2-e902-4611-87fd-a6b52583e82d tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] Updating ProviderTree inventory for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 2010.873750] env[63279]: DEBUG nova.compute.provider_tree [None req-27d050e2-e902-4611-87fd-a6b52583e82d tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] Updating inventory in ProviderTree for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2010.899832] env[63279]: DEBUG oslo_vmware.api [None req-71c39255-559f-4f75-b101-e68493303b43 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Task: {'id': task-2087244, 'name': PowerOnVM_Task, 'duration_secs': 0.92669} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2010.901168] env[63279]: DEBUG nova.scheduler.client.report [None req-27d050e2-e902-4611-87fd-a6b52583e82d tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] Refreshing aggregate associations for resource provider 0ba7c625-a0fc-4d3c-b804-196d00f00137, aggregates: None {{(pid=63279) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 2010.903946] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-71c39255-559f-4f75-b101-e68493303b43 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] [instance: 58392790-b297-4894-8d81-e5cbda69872b] Powered on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2010.904249] env[63279]: INFO nova.compute.manager [None req-71c39255-559f-4f75-b101-e68493303b43 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] [instance: 58392790-b297-4894-8d81-e5cbda69872b] Took 13.31 seconds to spawn the instance on the hypervisor. [ 2010.904604] env[63279]: DEBUG nova.compute.manager [None req-71c39255-559f-4f75-b101-e68493303b43 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] [instance: 58392790-b297-4894-8d81-e5cbda69872b] Checking state {{(pid=63279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2010.907035] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e58e9321-b427-41ff-9b06-7a12b86057a5 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2010.925174] env[63279]: DEBUG nova.scheduler.client.report [None req-27d050e2-e902-4611-87fd-a6b52583e82d tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] Refreshing trait associations for resource provider 0ba7c625-a0fc-4d3c-b804-196d00f00137, traits: COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK {{(pid=63279) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 2010.991214] env[63279]: DEBUG oslo_vmware.api [None req-2fef03c7-ebe2-4875-b420-6389c982d497 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Task: {'id': task-2087246, 'name': Rename_Task, 'duration_secs': 0.335047} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2010.991839] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-2fef03c7-ebe2-4875-b420-6389c982d497 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] [instance: fb124cfa-24b4-4712-b8cc-c87df5d6231b] Powering on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2010.991839] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-cbb92eae-3704-442d-9e83-e730c314f379 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2011.000432] env[63279]: DEBUG oslo_vmware.api [None req-2fef03c7-ebe2-4875-b420-6389c982d497 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Waiting for the task: (returnval){ [ 2011.000432] env[63279]: value = "task-2087249" [ 2011.000432] env[63279]: _type = "Task" [ 2011.000432] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2011.016099] env[63279]: DEBUG oslo_vmware.api [None req-2fef03c7-ebe2-4875-b420-6389c982d497 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Task: {'id': task-2087249, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2011.019372] env[63279]: DEBUG oslo_vmware.api [None req-efbb3008-c956-4d6e-b972-b10e6dc25335 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Task: {'id': task-2087247, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2011.024050] env[63279]: DEBUG oslo_vmware.api [None req-0248352a-b141-4db1-ac2b-5125f47ee5eb tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Task: {'id': task-2087248, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.073763} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2011.024388] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-0248352a-b141-4db1-ac2b-5125f47ee5eb tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] [instance: 64e92bfc-c0d0-4918-9ba2-45ffedbf7e39] Extended root virtual disk {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2011.025186] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac8b0787-9089-4da9-867e-05ea6bea01b5 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2011.051023] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-0248352a-b141-4db1-ac2b-5125f47ee5eb tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] [instance: 64e92bfc-c0d0-4918-9ba2-45ffedbf7e39] Reconfiguring VM instance instance-00000035 to attach disk [datastore1] 64e92bfc-c0d0-4918-9ba2-45ffedbf7e39/64e92bfc-c0d0-4918-9ba2-45ffedbf7e39.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2011.052659] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b935e032-04b3-46b0-9bc9-7325a18bea7d {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2011.077352] env[63279]: DEBUG oslo_vmware.api [None req-3b1863be-7a23-490d-9c3e-8eb295bf6202 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Task: {'id': task-2087245, 'name': ReconfigVM_Task, 'duration_secs': 0.77316} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2011.080979] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-3b1863be-7a23-490d-9c3e-8eb295bf6202 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] [instance: 8ccb4293-927a-45ba-82e9-9f1b4d5985cc] Reconfigured VM instance instance-00000034 to attach disk [datastore1] 8ccb4293-927a-45ba-82e9-9f1b4d5985cc/8ccb4293-927a-45ba-82e9-9f1b4d5985cc.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2011.081819] env[63279]: DEBUG oslo_vmware.api [None req-0248352a-b141-4db1-ac2b-5125f47ee5eb tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Waiting for the task: (returnval){ [ 2011.081819] env[63279]: value = "task-2087250" [ 2011.081819] env[63279]: _type = "Task" [ 2011.081819] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2011.082070] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-08da33d3-c951-46a8-ac7d-13ee15e5cb43 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2011.092028] env[63279]: DEBUG oslo_vmware.api [None req-0248352a-b141-4db1-ac2b-5125f47ee5eb tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Task: {'id': task-2087250, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2011.095919] env[63279]: DEBUG oslo_vmware.api [None req-3b1863be-7a23-490d-9c3e-8eb295bf6202 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Waiting for the task: (returnval){ [ 2011.095919] env[63279]: value = "task-2087251" [ 2011.095919] env[63279]: _type = "Task" [ 2011.095919] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2011.106122] env[63279]: DEBUG oslo_vmware.api [None req-3b1863be-7a23-490d-9c3e-8eb295bf6202 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Task: {'id': task-2087251, 'name': Rename_Task} progress is 6%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2011.299253] env[63279]: DEBUG oslo_concurrency.lockutils [None req-2fef03c7-ebe2-4875-b420-6389c982d497 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Lock "1b2ca21b-feea-4fc1-9ddc-99f144e4241a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 118.851s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2011.431591] env[63279]: INFO nova.compute.manager [None req-71c39255-559f-4f75-b101-e68493303b43 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] [instance: 58392790-b297-4894-8d81-e5cbda69872b] Took 51.33 seconds to build instance. [ 2011.512274] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4a5f1ba-f2a9-4b39-a760-c57a15bc6c4c {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2011.521675] env[63279]: DEBUG oslo_vmware.api [None req-efbb3008-c956-4d6e-b972-b10e6dc25335 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Task: {'id': task-2087247, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.517227} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2011.522373] env[63279]: DEBUG oslo_vmware.api [None req-2fef03c7-ebe2-4875-b420-6389c982d497 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Task: {'id': task-2087249, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2011.523302] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-efbb3008-c956-4d6e-b972-b10e6dc25335 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] f6a5d157-d58c-4c7e-b6e8-f2dc7aaebd9b/f6a5d157-d58c-4c7e-b6e8-f2dc7aaebd9b.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2011.523302] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-efbb3008-c956-4d6e-b972-b10e6dc25335 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] [instance: f6a5d157-d58c-4c7e-b6e8-f2dc7aaebd9b] Extending root virtual disk to 1048576 {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2011.523969] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f2563afe-9021-459f-9232-0d6c48331cab {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2011.528574] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38a4b398-854d-4387-89b6-bc5f36a2df67 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2011.532849] env[63279]: DEBUG oslo_vmware.api [None req-efbb3008-c956-4d6e-b972-b10e6dc25335 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Waiting for the task: (returnval){ [ 2011.532849] env[63279]: value = "task-2087252" [ 2011.532849] env[63279]: _type = "Task" [ 2011.532849] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2011.566410] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd1f94c5-ef21-49e7-a160-59475a48a49f {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2011.571988] env[63279]: DEBUG oslo_vmware.api [None req-efbb3008-c956-4d6e-b972-b10e6dc25335 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Task: {'id': task-2087252, 'name': ExtendVirtualDisk_Task} progress is 50%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2011.578166] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bfc9e4af-6ca9-413c-aea3-99fc23e834f3 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2011.597965] env[63279]: DEBUG nova.compute.provider_tree [None req-27d050e2-e902-4611-87fd-a6b52583e82d tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] Updating inventory in ProviderTree for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2011.604837] env[63279]: DEBUG oslo_vmware.api [None req-0248352a-b141-4db1-ac2b-5125f47ee5eb tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Task: {'id': task-2087250, 'name': ReconfigVM_Task, 'duration_secs': 0.280612} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2011.605430] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-0248352a-b141-4db1-ac2b-5125f47ee5eb tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] [instance: 64e92bfc-c0d0-4918-9ba2-45ffedbf7e39] Reconfigured VM instance instance-00000035 to attach disk [datastore1] 64e92bfc-c0d0-4918-9ba2-45ffedbf7e39/64e92bfc-c0d0-4918-9ba2-45ffedbf7e39.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2011.606585] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-832b7fc9-7ed8-46f0-8c1f-307b55c7e6c3 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2011.611038] env[63279]: DEBUG oslo_vmware.api [None req-3b1863be-7a23-490d-9c3e-8eb295bf6202 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Task: {'id': task-2087251, 'name': Rename_Task, 'duration_secs': 0.229199} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2011.611649] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-3b1863be-7a23-490d-9c3e-8eb295bf6202 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] [instance: 8ccb4293-927a-45ba-82e9-9f1b4d5985cc] Powering on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2011.611927] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-74ebc9dd-aa24-4935-bd8a-74fcf7d54d30 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2011.615635] env[63279]: DEBUG oslo_vmware.api [None req-0248352a-b141-4db1-ac2b-5125f47ee5eb tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Waiting for the task: (returnval){ [ 2011.615635] env[63279]: value = "task-2087253" [ 2011.615635] env[63279]: _type = "Task" [ 2011.615635] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2011.620774] env[63279]: DEBUG oslo_vmware.api [None req-3b1863be-7a23-490d-9c3e-8eb295bf6202 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Waiting for the task: (returnval){ [ 2011.620774] env[63279]: value = "task-2087254" [ 2011.620774] env[63279]: _type = "Task" [ 2011.620774] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2011.628208] env[63279]: DEBUG oslo_vmware.api [None req-0248352a-b141-4db1-ac2b-5125f47ee5eb tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Task: {'id': task-2087253, 'name': Rename_Task} progress is 5%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2011.633209] env[63279]: DEBUG oslo_vmware.api [None req-3b1863be-7a23-490d-9c3e-8eb295bf6202 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Task: {'id': task-2087254, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2011.806865] env[63279]: DEBUG nova.compute.manager [None req-80ef8ca7-1c6d-429c-800c-69ee59053ed2 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: b981ac83-6c23-4d44-bd28-12da30d746bd] Starting instance... {{(pid=63279) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 2011.936426] env[63279]: DEBUG oslo_concurrency.lockutils [None req-71c39255-559f-4f75-b101-e68493303b43 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Lock "58392790-b297-4894-8d81-e5cbda69872b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 100.130s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2012.015904] env[63279]: DEBUG oslo_vmware.api [None req-2fef03c7-ebe2-4875-b420-6389c982d497 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Task: {'id': task-2087249, 'name': PowerOnVM_Task, 'duration_secs': 0.964367} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2012.016846] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-2fef03c7-ebe2-4875-b420-6389c982d497 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] [instance: fb124cfa-24b4-4712-b8cc-c87df5d6231b] Powered on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2012.017142] env[63279]: INFO nova.compute.manager [None req-2fef03c7-ebe2-4875-b420-6389c982d497 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] [instance: fb124cfa-24b4-4712-b8cc-c87df5d6231b] Took 17.13 seconds to spawn the instance on the hypervisor. [ 2012.017536] env[63279]: DEBUG nova.compute.manager [None req-2fef03c7-ebe2-4875-b420-6389c982d497 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] [instance: fb124cfa-24b4-4712-b8cc-c87df5d6231b] Checking state {{(pid=63279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2012.018849] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa28b83f-bf07-486a-8c2f-4abfc93cbd97 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2012.043026] env[63279]: DEBUG oslo_vmware.api [None req-efbb3008-c956-4d6e-b972-b10e6dc25335 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Task: {'id': task-2087252, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.064723} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2012.043265] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-efbb3008-c956-4d6e-b972-b10e6dc25335 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] [instance: f6a5d157-d58c-4c7e-b6e8-f2dc7aaebd9b] Extended root virtual disk {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2012.044123] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-232dc1be-71ef-4993-85fc-e1fd13fecc57 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2012.067295] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-efbb3008-c956-4d6e-b972-b10e6dc25335 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] [instance: f6a5d157-d58c-4c7e-b6e8-f2dc7aaebd9b] Reconfiguring VM instance instance-00000036 to attach disk [datastore1] f6a5d157-d58c-4c7e-b6e8-f2dc7aaebd9b/f6a5d157-d58c-4c7e-b6e8-f2dc7aaebd9b.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2012.068176] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a9f1ccaf-8889-43ef-a491-8d2d8d92cb5e {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2012.089860] env[63279]: DEBUG oslo_concurrency.lockutils [None req-d29f70eb-0f9b-4a4a-986d-5deb55879e64 tempest-ServersAdminTestJSON-1741344368 tempest-ServersAdminTestJSON-1741344368-project-admin] Acquiring lock "refresh_cache-58392790-b297-4894-8d81-e5cbda69872b" {{(pid=63279) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2012.090058] env[63279]: DEBUG oslo_concurrency.lockutils [None req-d29f70eb-0f9b-4a4a-986d-5deb55879e64 tempest-ServersAdminTestJSON-1741344368 tempest-ServersAdminTestJSON-1741344368-project-admin] Acquired lock "refresh_cache-58392790-b297-4894-8d81-e5cbda69872b" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2012.090230] env[63279]: DEBUG nova.network.neutron [None req-d29f70eb-0f9b-4a4a-986d-5deb55879e64 tempest-ServersAdminTestJSON-1741344368 tempest-ServersAdminTestJSON-1741344368-project-admin] [instance: 58392790-b297-4894-8d81-e5cbda69872b] Building network info cache for instance {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2012.094680] env[63279]: DEBUG oslo_vmware.api [None req-efbb3008-c956-4d6e-b972-b10e6dc25335 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Waiting for the task: (returnval){ [ 2012.094680] env[63279]: value = "task-2087255" [ 2012.094680] env[63279]: _type = "Task" [ 2012.094680] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2012.104653] env[63279]: DEBUG oslo_vmware.api [None req-efbb3008-c956-4d6e-b972-b10e6dc25335 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Task: {'id': task-2087255, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2012.133403] env[63279]: DEBUG oslo_vmware.api [None req-0248352a-b141-4db1-ac2b-5125f47ee5eb tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Task: {'id': task-2087253, 'name': Rename_Task, 'duration_secs': 0.138308} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2012.136985] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-0248352a-b141-4db1-ac2b-5125f47ee5eb tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] [instance: 64e92bfc-c0d0-4918-9ba2-45ffedbf7e39] Powering on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2012.137290] env[63279]: DEBUG oslo_vmware.api [None req-3b1863be-7a23-490d-9c3e-8eb295bf6202 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Task: {'id': task-2087254, 'name': PowerOnVM_Task} progress is 88%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2012.137852] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b292ccc7-4899-4f57-a741-55a17745686f {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2012.147225] env[63279]: DEBUG oslo_vmware.api [None req-0248352a-b141-4db1-ac2b-5125f47ee5eb tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Waiting for the task: (returnval){ [ 2012.147225] env[63279]: value = "task-2087256" [ 2012.147225] env[63279]: _type = "Task" [ 2012.147225] env[63279]: } to complete. 
{{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2012.158023] env[63279]: DEBUG nova.scheduler.client.report [None req-27d050e2-e902-4611-87fd-a6b52583e82d tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] Updated inventory for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 with generation 81 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 2012.158023] env[63279]: DEBUG nova.compute.provider_tree [None req-27d050e2-e902-4611-87fd-a6b52583e82d tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] Updating resource provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 generation from 81 to 82 during operation: update_inventory {{(pid=63279) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 2012.159378] env[63279]: DEBUG nova.compute.provider_tree [None req-27d050e2-e902-4611-87fd-a6b52583e82d tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] Updating inventory in ProviderTree for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2012.167847] env[63279]: DEBUG oslo_vmware.api [None req-0248352a-b141-4db1-ac2b-5125f47ee5eb tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Task: {'id': task-2087256, 'name': PowerOnVM_Task} progress is 33%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2012.177454] env[63279]: DEBUG nova.network.neutron [None req-38cbe7fd-87b8-43d7-9843-9491e5a539a0 tempest-InstanceActionsV221TestJSON-1895476841 tempest-InstanceActionsV221TestJSON-1895476841-project-member] [instance: ff9701ed-d545-44b4-911a-c4d809d0a771] Successfully updated port: 4805f07f-4add-493e-80cb-e2c75cc21104 {{(pid=63279) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2012.187836] env[63279]: DEBUG nova.compute.manager [req-be36fdd7-21a2-4560-8b58-9f4088c681ab req-381d6c6b-2080-47d2-ad75-22d7e93a341a service nova] [instance: ff9701ed-d545-44b4-911a-c4d809d0a771] Received event network-vif-plugged-4805f07f-4add-493e-80cb-e2c75cc21104 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2012.188223] env[63279]: DEBUG oslo_concurrency.lockutils [req-be36fdd7-21a2-4560-8b58-9f4088c681ab req-381d6c6b-2080-47d2-ad75-22d7e93a341a service nova] Acquiring lock "ff9701ed-d545-44b4-911a-c4d809d0a771-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2012.188322] env[63279]: DEBUG oslo_concurrency.lockutils [req-be36fdd7-21a2-4560-8b58-9f4088c681ab req-381d6c6b-2080-47d2-ad75-22d7e93a341a service nova] Lock "ff9701ed-d545-44b4-911a-c4d809d0a771-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2012.188473] env[63279]: DEBUG oslo_concurrency.lockutils [req-be36fdd7-21a2-4560-8b58-9f4088c681ab req-381d6c6b-2080-47d2-ad75-22d7e93a341a service nova] Lock "ff9701ed-d545-44b4-911a-c4d809d0a771-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2012.188700] env[63279]: DEBUG nova.compute.manager [req-be36fdd7-21a2-4560-8b58-9f4088c681ab req-381d6c6b-2080-47d2-ad75-22d7e93a341a service nova] [instance: ff9701ed-d545-44b4-911a-c4d809d0a771] No waiting events found dispatching network-vif-plugged-4805f07f-4add-493e-80cb-e2c75cc21104 {{(pid=63279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 2012.188882] env[63279]: WARNING nova.compute.manager [req-be36fdd7-21a2-4560-8b58-9f4088c681ab req-381d6c6b-2080-47d2-ad75-22d7e93a341a service nova] [instance: ff9701ed-d545-44b4-911a-c4d809d0a771] Received unexpected event network-vif-plugged-4805f07f-4add-493e-80cb-e2c75cc21104 for instance with vm_state building and task_state spawning. [ 2012.332414] env[63279]: DEBUG oslo_concurrency.lockutils [None req-80ef8ca7-1c6d-429c-800c-69ee59053ed2 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2012.440053] env[63279]: DEBUG nova.compute.manager [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] [instance: 795560b4-ccdc-4012-8130-042dcb94085f] Starting instance... 
{{(pid=63279) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 2012.545263] env[63279]: INFO nova.compute.manager [None req-2fef03c7-ebe2-4875-b420-6389c982d497 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] [instance: fb124cfa-24b4-4712-b8cc-c87df5d6231b] Took 54.37 seconds to build instance. [ 2012.606875] env[63279]: DEBUG oslo_vmware.api [None req-efbb3008-c956-4d6e-b972-b10e6dc25335 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Task: {'id': task-2087255, 'name': ReconfigVM_Task, 'duration_secs': 0.296486} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2012.607667] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-efbb3008-c956-4d6e-b972-b10e6dc25335 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] [instance: f6a5d157-d58c-4c7e-b6e8-f2dc7aaebd9b] Reconfigured VM instance instance-00000036 to attach disk [datastore1] f6a5d157-d58c-4c7e-b6e8-f2dc7aaebd9b/f6a5d157-d58c-4c7e-b6e8-f2dc7aaebd9b.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2012.608137] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-3afdbd76-b79c-4569-8bac-6dbbb75f05bb {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2012.615821] env[63279]: DEBUG oslo_vmware.api [None req-efbb3008-c956-4d6e-b972-b10e6dc25335 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Waiting for the task: (returnval){ [ 2012.615821] env[63279]: value = "task-2087257" [ 2012.615821] env[63279]: _type = "Task" [ 2012.615821] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2012.625150] env[63279]: DEBUG oslo_vmware.api [None req-efbb3008-c956-4d6e-b972-b10e6dc25335 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Task: {'id': task-2087257, 'name': Rename_Task} progress is 5%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2012.634029] env[63279]: DEBUG oslo_vmware.api [None req-3b1863be-7a23-490d-9c3e-8eb295bf6202 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Task: {'id': task-2087254, 'name': PowerOnVM_Task, 'duration_secs': 0.809089} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2012.634029] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-3b1863be-7a23-490d-9c3e-8eb295bf6202 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] [instance: 8ccb4293-927a-45ba-82e9-9f1b4d5985cc] Powered on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2012.634029] env[63279]: INFO nova.compute.manager [None req-3b1863be-7a23-490d-9c3e-8eb295bf6202 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] [instance: 8ccb4293-927a-45ba-82e9-9f1b4d5985cc] Took 12.47 seconds to spawn the instance on the hypervisor. 
[ 2012.634230] env[63279]: DEBUG nova.compute.manager [None req-3b1863be-7a23-490d-9c3e-8eb295bf6202 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] [instance: 8ccb4293-927a-45ba-82e9-9f1b4d5985cc] Checking state {{(pid=63279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2012.634987] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f9eaf80-08e2-4e1b-b700-7c8f7a301c05 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2012.656838] env[63279]: DEBUG oslo_vmware.api [None req-0248352a-b141-4db1-ac2b-5125f47ee5eb tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Task: {'id': task-2087256, 'name': PowerOnVM_Task} progress is 89%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2012.663613] env[63279]: DEBUG oslo_concurrency.lockutils [None req-27d050e2-e902-4611-87fd-a6b52583e82d tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 4.171s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2012.664168] env[63279]: DEBUG nova.compute.manager [None req-27d050e2-e902-4611-87fd-a6b52583e82d tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] [instance: 0e12ab9b-a701-4e0f-9d96-939090f50494] Start building networks asynchronously for instance. {{(pid=63279) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 2012.666932] env[63279]: DEBUG oslo_concurrency.lockutils [None req-0d5f8539-9b0d-46c6-a3f8-4965417b3f66 tempest-AttachInterfacesV270Test-1602135614 tempest-AttachInterfacesV270Test-1602135614-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 28.031s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2012.667456] env[63279]: DEBUG nova.objects.instance [None req-0d5f8539-9b0d-46c6-a3f8-4965417b3f66 tempest-AttachInterfacesV270Test-1602135614 tempest-AttachInterfacesV270Test-1602135614-project-member] Lazy-loading 'resources' on Instance uuid ba2d6111-d93d-4216-b641-864b542ea253 {{(pid=63279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2012.681222] env[63279]: DEBUG oslo_concurrency.lockutils [None req-38cbe7fd-87b8-43d7-9843-9491e5a539a0 tempest-InstanceActionsV221TestJSON-1895476841 tempest-InstanceActionsV221TestJSON-1895476841-project-member] Acquiring lock "refresh_cache-ff9701ed-d545-44b4-911a-c4d809d0a771" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2012.681222] env[63279]: DEBUG oslo_concurrency.lockutils [None req-38cbe7fd-87b8-43d7-9843-9491e5a539a0 tempest-InstanceActionsV221TestJSON-1895476841 tempest-InstanceActionsV221TestJSON-1895476841-project-member] Acquired lock "refresh_cache-ff9701ed-d545-44b4-911a-c4d809d0a771" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2012.681222] env[63279]: DEBUG nova.network.neutron [None req-38cbe7fd-87b8-43d7-9843-9491e5a539a0 tempest-InstanceActionsV221TestJSON-1895476841 
tempest-InstanceActionsV221TestJSON-1895476841-project-member] [instance: ff9701ed-d545-44b4-911a-c4d809d0a771] Building network info cache for instance {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2012.966849] env[63279]: DEBUG oslo_concurrency.lockutils [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2013.004698] env[63279]: DEBUG nova.network.neutron [None req-d29f70eb-0f9b-4a4a-986d-5deb55879e64 tempest-ServersAdminTestJSON-1741344368 tempest-ServersAdminTestJSON-1741344368-project-admin] [instance: 58392790-b297-4894-8d81-e5cbda69872b] Updating instance_info_cache with network_info: [{"id": "23e2695e-f865-4e3f-9f26-d5bd599cf889", "address": "fa:16:3e:f9:66:15", "network": {"id": "548d80cd-fb6c-47fc-8c1d-036889987399", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-219167599-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3f6c6f65521a440fb80278bbff2d0ed0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "678ebbe4-4c53-4eaf-a689-93981310f37d", "external-id": "nsx-vlan-transportzone-443", "segmentation_id": 443, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap23e2695e-f8", "ovs_interfaceid": "23e2695e-f865-4e3f-9f26-d5bd599cf889", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2013.048917] env[63279]: DEBUG oslo_concurrency.lockutils [None req-2fef03c7-ebe2-4875-b420-6389c982d497 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Lock "fb124cfa-24b4-4712-b8cc-c87df5d6231b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 120.578s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2013.126812] env[63279]: DEBUG oslo_vmware.api [None req-efbb3008-c956-4d6e-b972-b10e6dc25335 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Task: {'id': task-2087257, 'name': Rename_Task} progress is 99%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2013.158439] env[63279]: INFO nova.compute.manager [None req-3b1863be-7a23-490d-9c3e-8eb295bf6202 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] [instance: 8ccb4293-927a-45ba-82e9-9f1b4d5985cc] Took 42.23 seconds to build instance. 
[ 2013.164813] env[63279]: DEBUG oslo_vmware.api [None req-0248352a-b141-4db1-ac2b-5125f47ee5eb tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Task: {'id': task-2087256, 'name': PowerOnVM_Task, 'duration_secs': 0.596478} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2013.165481] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-0248352a-b141-4db1-ac2b-5125f47ee5eb tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] [instance: 64e92bfc-c0d0-4918-9ba2-45ffedbf7e39] Powered on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2013.165481] env[63279]: INFO nova.compute.manager [None req-0248352a-b141-4db1-ac2b-5125f47ee5eb tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] [instance: 64e92bfc-c0d0-4918-9ba2-45ffedbf7e39] Took 10.27 seconds to spawn the instance on the hypervisor. [ 2013.165481] env[63279]: DEBUG nova.compute.manager [None req-0248352a-b141-4db1-ac2b-5125f47ee5eb tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] [instance: 64e92bfc-c0d0-4918-9ba2-45ffedbf7e39] Checking state {{(pid=63279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2013.166724] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-797f151d-74e2-4598-9185-ba11ca93c18b {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2013.171279] env[63279]: DEBUG nova.compute.utils [None req-27d050e2-e902-4611-87fd-a6b52583e82d tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] Using /dev/sd instead of None {{(pid=63279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2013.178035] env[63279]: DEBUG nova.compute.manager [None req-27d050e2-e902-4611-87fd-a6b52583e82d tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] [instance: 0e12ab9b-a701-4e0f-9d96-939090f50494] Allocating IP information in the background. {{(pid=63279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 2013.178035] env[63279]: DEBUG nova.network.neutron [None req-27d050e2-e902-4611-87fd-a6b52583e82d tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] [instance: 0e12ab9b-a701-4e0f-9d96-939090f50494] allocate_for_instance() {{(pid=63279) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2013.245763] env[63279]: DEBUG nova.network.neutron [None req-38cbe7fd-87b8-43d7-9843-9491e5a539a0 tempest-InstanceActionsV221TestJSON-1895476841 tempest-InstanceActionsV221TestJSON-1895476841-project-member] [instance: ff9701ed-d545-44b4-911a-c4d809d0a771] Instance cache missing network info. 
{{(pid=63279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2013.269984] env[63279]: DEBUG nova.policy [None req-27d050e2-e902-4611-87fd-a6b52583e82d tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a0ac7413a93e48ca979af98a90b38cc3', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e931e0ddf1b840fdb8743fbdba05b28d', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63279) authorize /opt/stack/nova/nova/policy.py:192}} [ 2013.509029] env[63279]: DEBUG oslo_concurrency.lockutils [None req-d29f70eb-0f9b-4a4a-986d-5deb55879e64 tempest-ServersAdminTestJSON-1741344368 tempest-ServersAdminTestJSON-1741344368-project-admin] Releasing lock "refresh_cache-58392790-b297-4894-8d81-e5cbda69872b" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2013.509423] env[63279]: DEBUG nova.compute.manager [None req-d29f70eb-0f9b-4a4a-986d-5deb55879e64 tempest-ServersAdminTestJSON-1741344368 tempest-ServersAdminTestJSON-1741344368-project-admin] [instance: 58392790-b297-4894-8d81-e5cbda69872b] Inject network info {{(pid=63279) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7737}} [ 2013.509735] env[63279]: DEBUG nova.compute.manager [None req-d29f70eb-0f9b-4a4a-986d-5deb55879e64 tempest-ServersAdminTestJSON-1741344368 tempest-ServersAdminTestJSON-1741344368-project-admin] [instance: 58392790-b297-4894-8d81-e5cbda69872b] network_info to inject: |[{"id": "23e2695e-f865-4e3f-9f26-d5bd599cf889", "address": "fa:16:3e:f9:66:15", "network": {"id": "548d80cd-fb6c-47fc-8c1d-036889987399", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-219167599-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3f6c6f65521a440fb80278bbff2d0ed0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "678ebbe4-4c53-4eaf-a689-93981310f37d", "external-id": "nsx-vlan-transportzone-443", "segmentation_id": 443, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap23e2695e-f8", "ovs_interfaceid": "23e2695e-f865-4e3f-9f26-d5bd599cf889", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63279) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7738}} [ 2013.514665] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-d29f70eb-0f9b-4a4a-986d-5deb55879e64 tempest-ServersAdminTestJSON-1741344368 tempest-ServersAdminTestJSON-1741344368-project-admin] [instance: 58392790-b297-4894-8d81-e5cbda69872b] Reconfiguring VM instance to set the machine id {{(pid=63279) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1796}} [ 2013.517478] env[63279]: DEBUG oslo_vmware.service [-] Invoking 
VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-52caaef0-ae26-4f9b-a4f5-e60f1ebd2e87 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2013.536023] env[63279]: DEBUG oslo_vmware.api [None req-d29f70eb-0f9b-4a4a-986d-5deb55879e64 tempest-ServersAdminTestJSON-1741344368 tempest-ServersAdminTestJSON-1741344368-project-admin] Waiting for the task: (returnval){ [ 2013.536023] env[63279]: value = "task-2087258" [ 2013.536023] env[63279]: _type = "Task" [ 2013.536023] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2013.545454] env[63279]: DEBUG oslo_vmware.api [None req-d29f70eb-0f9b-4a4a-986d-5deb55879e64 tempest-ServersAdminTestJSON-1741344368 tempest-ServersAdminTestJSON-1741344368-project-admin] Task: {'id': task-2087258, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2013.551800] env[63279]: DEBUG nova.compute.manager [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] [instance: cd00cb0e-30e5-4a0c-8612-ea92e5e32edd] Starting instance... {{(pid=63279) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 2013.584390] env[63279]: DEBUG nova.network.neutron [None req-38cbe7fd-87b8-43d7-9843-9491e5a539a0 tempest-InstanceActionsV221TestJSON-1895476841 tempest-InstanceActionsV221TestJSON-1895476841-project-member] [instance: ff9701ed-d545-44b4-911a-c4d809d0a771] Updating instance_info_cache with network_info: [{"id": "4805f07f-4add-493e-80cb-e2c75cc21104", "address": "fa:16:3e:5e:52:6b", "network": {"id": "d63e8deb-e00b-4d35-a960-daa5c579e8e1", "bridge": "br-int", "label": "tempest-InstanceActionsV221TestJSON-343913389-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "36142278730f4f6b87fc9a51283b5cdf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d986680e-ad16-45b1-bf6d-cd2fe661679f", "external-id": "nsx-vlan-transportzone-397", "segmentation_id": 397, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4805f07f-4a", "ovs_interfaceid": "4805f07f-4add-493e-80cb-e2c75cc21104", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2013.628224] env[63279]: DEBUG oslo_vmware.api [None req-efbb3008-c956-4d6e-b972-b10e6dc25335 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Task: {'id': task-2087257, 'name': Rename_Task} progress is 99%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2013.662854] env[63279]: DEBUG oslo_concurrency.lockutils [None req-3b1863be-7a23-490d-9c3e-8eb295bf6202 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Lock "8ccb4293-927a-45ba-82e9-9f1b4d5985cc" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 91.191s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2013.676570] env[63279]: DEBUG nova.compute.utils [None req-27d050e2-e902-4611-87fd-a6b52583e82d tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] Using /dev/sd instead of None {{(pid=63279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2013.701870] env[63279]: INFO nova.compute.manager [None req-0248352a-b141-4db1-ac2b-5125f47ee5eb tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] [instance: 64e92bfc-c0d0-4918-9ba2-45ffedbf7e39] Took 41.11 seconds to build instance. [ 2013.796100] env[63279]: DEBUG nova.network.neutron [None req-27d050e2-e902-4611-87fd-a6b52583e82d tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] [instance: 0e12ab9b-a701-4e0f-9d96-939090f50494] Successfully created port: d31aaccc-4f75-4bc4-898d-0f2680b13372 {{(pid=63279) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2013.818609] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf794d69-4641-41ad-8783-df9006be2d5f {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2013.827750] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1eab5eec-e1d3-4a4e-96f8-0054d4c7f401 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2013.861823] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb8c7752-7d58-42a0-a0ff-48dda5910545 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2013.869767] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7654293c-126e-43d9-ab25-49511b9ec403 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2013.885268] env[63279]: DEBUG nova.compute.provider_tree [None req-0d5f8539-9b0d-46c6-a3f8-4965417b3f66 tempest-AttachInterfacesV270Test-1602135614 tempest-AttachInterfacesV270Test-1602135614-project-member] Inventory has not changed in ProviderTree for provider: 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2014.046219] env[63279]: DEBUG oslo_vmware.api [None req-d29f70eb-0f9b-4a4a-986d-5deb55879e64 tempest-ServersAdminTestJSON-1741344368 tempest-ServersAdminTestJSON-1741344368-project-admin] Task: {'id': task-2087258, 'name': ReconfigVM_Task, 'duration_secs': 0.178532} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2014.046526] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-d29f70eb-0f9b-4a4a-986d-5deb55879e64 tempest-ServersAdminTestJSON-1741344368 tempest-ServersAdminTestJSON-1741344368-project-admin] [instance: 58392790-b297-4894-8d81-e5cbda69872b] Reconfigured VM instance to set the machine id {{(pid=63279) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1799}} [ 2014.080167] env[63279]: DEBUG oslo_concurrency.lockutils [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2014.089046] env[63279]: DEBUG oslo_concurrency.lockutils [None req-38cbe7fd-87b8-43d7-9843-9491e5a539a0 tempest-InstanceActionsV221TestJSON-1895476841 tempest-InstanceActionsV221TestJSON-1895476841-project-member] Releasing lock "refresh_cache-ff9701ed-d545-44b4-911a-c4d809d0a771" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2014.089174] env[63279]: DEBUG nova.compute.manager [None req-38cbe7fd-87b8-43d7-9843-9491e5a539a0 tempest-InstanceActionsV221TestJSON-1895476841 tempest-InstanceActionsV221TestJSON-1895476841-project-member] [instance: ff9701ed-d545-44b4-911a-c4d809d0a771] Instance network_info: |[{"id": "4805f07f-4add-493e-80cb-e2c75cc21104", "address": "fa:16:3e:5e:52:6b", "network": {"id": "d63e8deb-e00b-4d35-a960-daa5c579e8e1", "bridge": "br-int", "label": "tempest-InstanceActionsV221TestJSON-343913389-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "36142278730f4f6b87fc9a51283b5cdf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d986680e-ad16-45b1-bf6d-cd2fe661679f", "external-id": "nsx-vlan-transportzone-397", "segmentation_id": 397, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4805f07f-4a", "ovs_interfaceid": "4805f07f-4add-493e-80cb-e2c75cc21104", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 2014.090745] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-38cbe7fd-87b8-43d7-9843-9491e5a539a0 tempest-InstanceActionsV221TestJSON-1895476841 tempest-InstanceActionsV221TestJSON-1895476841-project-member] [instance: ff9701ed-d545-44b4-911a-c4d809d0a771] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:5e:52:6b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'd986680e-ad16-45b1-bf6d-cd2fe661679f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '4805f07f-4add-493e-80cb-e2c75cc21104', 'vif_model': 'vmxnet3'}] {{(pid=63279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2014.097625] env[63279]: DEBUG 
nova.virt.vmwareapi.vm_util [None req-38cbe7fd-87b8-43d7-9843-9491e5a539a0 tempest-InstanceActionsV221TestJSON-1895476841 tempest-InstanceActionsV221TestJSON-1895476841-project-member] Creating folder: Project (36142278730f4f6b87fc9a51283b5cdf). Parent ref: group-v427491. {{(pid=63279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 2014.098328] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f45861c3-2281-47ee-bb4d-f74e8043f0a0 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2014.108228] env[63279]: INFO nova.virt.vmwareapi.vm_util [None req-38cbe7fd-87b8-43d7-9843-9491e5a539a0 tempest-InstanceActionsV221TestJSON-1895476841 tempest-InstanceActionsV221TestJSON-1895476841-project-member] Created folder: Project (36142278730f4f6b87fc9a51283b5cdf) in parent group-v427491. [ 2014.108402] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-38cbe7fd-87b8-43d7-9843-9491e5a539a0 tempest-InstanceActionsV221TestJSON-1895476841 tempest-InstanceActionsV221TestJSON-1895476841-project-member] Creating folder: Instances. Parent ref: group-v427639. {{(pid=63279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 2014.108664] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-611e8dab-859f-4620-aa00-49251956cec4 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2014.117767] env[63279]: INFO nova.virt.vmwareapi.vm_util [None req-38cbe7fd-87b8-43d7-9843-9491e5a539a0 tempest-InstanceActionsV221TestJSON-1895476841 tempest-InstanceActionsV221TestJSON-1895476841-project-member] Created folder: Instances in parent group-v427639. [ 2014.118015] env[63279]: DEBUG oslo.service.loopingcall [None req-38cbe7fd-87b8-43d7-9843-9491e5a539a0 tempest-InstanceActionsV221TestJSON-1895476841 tempest-InstanceActionsV221TestJSON-1895476841-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2014.121335] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ff9701ed-d545-44b4-911a-c4d809d0a771] Creating VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2014.121527] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-50fcb605-9abb-48eb-aa80-2d7bedfc27d9 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2014.140756] env[63279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2014.140756] env[63279]: value = "task-2087261" [ 2014.140756] env[63279]: _type = "Task" [ 2014.140756] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2014.143806] env[63279]: DEBUG oslo_vmware.api [None req-efbb3008-c956-4d6e-b972-b10e6dc25335 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Task: {'id': task-2087257, 'name': Rename_Task, 'duration_secs': 1.151702} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2014.147161] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-efbb3008-c956-4d6e-b972-b10e6dc25335 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] [instance: f6a5d157-d58c-4c7e-b6e8-f2dc7aaebd9b] Powering on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2014.147426] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-300807ce-334c-4f4e-b470-b4a5da58a973 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2014.155083] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087261, 'name': CreateVM_Task} progress is 5%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2014.156882] env[63279]: DEBUG oslo_vmware.api [None req-efbb3008-c956-4d6e-b972-b10e6dc25335 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Waiting for the task: (returnval){ [ 2014.156882] env[63279]: value = "task-2087262" [ 2014.156882] env[63279]: _type = "Task" [ 2014.156882] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2014.165029] env[63279]: DEBUG oslo_vmware.api [None req-efbb3008-c956-4d6e-b972-b10e6dc25335 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Task: {'id': task-2087262, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2014.165336] env[63279]: DEBUG nova.compute.manager [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] [instance: fd9b1666-8e06-4ed0-9187-05a40e136a1d] Starting instance... {{(pid=63279) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 2014.183753] env[63279]: DEBUG nova.compute.manager [None req-27d050e2-e902-4611-87fd-a6b52583e82d tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] [instance: 0e12ab9b-a701-4e0f-9d96-939090f50494] Start building block device mappings for instance. 
{{(pid=63279) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 2014.206440] env[63279]: DEBUG oslo_concurrency.lockutils [None req-0248352a-b141-4db1-ac2b-5125f47ee5eb tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Lock "64e92bfc-c0d0-4918-9ba2-45ffedbf7e39" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 90.490s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2014.210113] env[63279]: DEBUG oslo_concurrency.lockutils [None req-0c3a37b9-f6dd-4e50-acdd-abd77fa39720 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Acquiring lock "1b2ca21b-feea-4fc1-9ddc-99f144e4241a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2014.210436] env[63279]: DEBUG oslo_concurrency.lockutils [None req-0c3a37b9-f6dd-4e50-acdd-abd77fa39720 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Lock "1b2ca21b-feea-4fc1-9ddc-99f144e4241a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2014.210685] env[63279]: DEBUG oslo_concurrency.lockutils [None req-0c3a37b9-f6dd-4e50-acdd-abd77fa39720 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Acquiring lock "1b2ca21b-feea-4fc1-9ddc-99f144e4241a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2014.210900] env[63279]: DEBUG oslo_concurrency.lockutils [None req-0c3a37b9-f6dd-4e50-acdd-abd77fa39720 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Lock "1b2ca21b-feea-4fc1-9ddc-99f144e4241a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2014.211142] env[63279]: DEBUG oslo_concurrency.lockutils [None req-0c3a37b9-f6dd-4e50-acdd-abd77fa39720 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Lock "1b2ca21b-feea-4fc1-9ddc-99f144e4241a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2014.213168] env[63279]: INFO nova.compute.manager [None req-0c3a37b9-f6dd-4e50-acdd-abd77fa39720 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] [instance: 1b2ca21b-feea-4fc1-9ddc-99f144e4241a] Terminating instance [ 2014.389031] env[63279]: DEBUG nova.scheduler.client.report [None req-0d5f8539-9b0d-46c6-a3f8-4965417b3f66 tempest-AttachInterfacesV270Test-1602135614 tempest-AttachInterfacesV270Test-1602135614-project-member] Inventory has not changed for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 
196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2014.529356] env[63279]: DEBUG nova.compute.manager [req-5bd2ea89-cafe-4acd-a962-71bee5b73335 req-ed95c300-f5eb-4a82-8a88-bcc522384d51 service nova] [instance: ff9701ed-d545-44b4-911a-c4d809d0a771] Received event network-changed-4805f07f-4add-493e-80cb-e2c75cc21104 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2014.529656] env[63279]: DEBUG nova.compute.manager [req-5bd2ea89-cafe-4acd-a962-71bee5b73335 req-ed95c300-f5eb-4a82-8a88-bcc522384d51 service nova] [instance: ff9701ed-d545-44b4-911a-c4d809d0a771] Refreshing instance network info cache due to event network-changed-4805f07f-4add-493e-80cb-e2c75cc21104. {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11655}} [ 2014.529868] env[63279]: DEBUG oslo_concurrency.lockutils [req-5bd2ea89-cafe-4acd-a962-71bee5b73335 req-ed95c300-f5eb-4a82-8a88-bcc522384d51 service nova] Acquiring lock "refresh_cache-ff9701ed-d545-44b4-911a-c4d809d0a771" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2014.529907] env[63279]: DEBUG oslo_concurrency.lockutils [req-5bd2ea89-cafe-4acd-a962-71bee5b73335 req-ed95c300-f5eb-4a82-8a88-bcc522384d51 service nova] Acquired lock "refresh_cache-ff9701ed-d545-44b4-911a-c4d809d0a771" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2014.530084] env[63279]: DEBUG nova.network.neutron [req-5bd2ea89-cafe-4acd-a962-71bee5b73335 req-ed95c300-f5eb-4a82-8a88-bcc522384d51 service nova] [instance: ff9701ed-d545-44b4-911a-c4d809d0a771] Refreshing network info cache for port 4805f07f-4add-493e-80cb-e2c75cc21104 {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2014.654026] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087261, 'name': CreateVM_Task} progress is 25%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2014.667958] env[63279]: DEBUG oslo_vmware.api [None req-efbb3008-c956-4d6e-b972-b10e6dc25335 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Task: {'id': task-2087262, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2014.683387] env[63279]: DEBUG oslo_concurrency.lockutils [None req-4f724e1c-d0b9-4778-8593-5bcec5c9cb39 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Acquiring lock "fb124cfa-24b4-4712-b8cc-c87df5d6231b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2014.683829] env[63279]: DEBUG oslo_concurrency.lockutils [None req-4f724e1c-d0b9-4778-8593-5bcec5c9cb39 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Lock "fb124cfa-24b4-4712-b8cc-c87df5d6231b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2014.684195] env[63279]: DEBUG oslo_concurrency.lockutils [None req-4f724e1c-d0b9-4778-8593-5bcec5c9cb39 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Acquiring lock "fb124cfa-24b4-4712-b8cc-c87df5d6231b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2014.684195] env[63279]: DEBUG oslo_concurrency.lockutils [None req-4f724e1c-d0b9-4778-8593-5bcec5c9cb39 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Lock "fb124cfa-24b4-4712-b8cc-c87df5d6231b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2014.684380] env[63279]: DEBUG oslo_concurrency.lockutils [None req-4f724e1c-d0b9-4778-8593-5bcec5c9cb39 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Lock "fb124cfa-24b4-4712-b8cc-c87df5d6231b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2014.686638] env[63279]: INFO nova.compute.manager [None req-4f724e1c-d0b9-4778-8593-5bcec5c9cb39 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] [instance: fb124cfa-24b4-4712-b8cc-c87df5d6231b] Terminating instance [ 2014.688993] env[63279]: DEBUG oslo_concurrency.lockutils [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2014.709738] env[63279]: DEBUG nova.compute.manager [None req-203ba856-8750-4748-8a31-13e38e82c1be tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] [instance: 6699de0a-b3f8-4d84-9c9b-d0f6899a606e] Starting instance... 
{{(pid=63279) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 2014.717111] env[63279]: DEBUG nova.compute.manager [None req-0c3a37b9-f6dd-4e50-acdd-abd77fa39720 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] [instance: 1b2ca21b-feea-4fc1-9ddc-99f144e4241a] Start destroying the instance on the hypervisor. {{(pid=63279) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2014.717220] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-0c3a37b9-f6dd-4e50-acdd-abd77fa39720 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] [instance: 1b2ca21b-feea-4fc1-9ddc-99f144e4241a] Destroying instance {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2014.719410] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4605807f-b6c6-44c6-aa2f-c59b81dc9ca5 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2014.729743] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-0c3a37b9-f6dd-4e50-acdd-abd77fa39720 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] [instance: 1b2ca21b-feea-4fc1-9ddc-99f144e4241a] Powering off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2014.729743] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6b54a7ae-2d58-4d03-9bed-5cd28ef1c0d0 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2014.735305] env[63279]: DEBUG oslo_vmware.api [None req-0c3a37b9-f6dd-4e50-acdd-abd77fa39720 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Waiting for the task: (returnval){ [ 2014.735305] env[63279]: value = "task-2087263" [ 2014.735305] env[63279]: _type = "Task" [ 2014.735305] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2014.744449] env[63279]: DEBUG oslo_vmware.api [None req-0c3a37b9-f6dd-4e50-acdd-abd77fa39720 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Task: {'id': task-2087263, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2014.896532] env[63279]: DEBUG oslo_concurrency.lockutils [None req-0d5f8539-9b0d-46c6-a3f8-4965417b3f66 tempest-AttachInterfacesV270Test-1602135614 tempest-AttachInterfacesV270Test-1602135614-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.229s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2014.899868] env[63279]: DEBUG oslo_concurrency.lockutils [None req-5a6f349c-4703-44a2-807a-eea47f310c9f tempest-ServerTagsTestJSON-1345092865 tempest-ServerTagsTestJSON-1345092865-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 29.754s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2014.901268] env[63279]: DEBUG nova.objects.instance [None req-5a6f349c-4703-44a2-807a-eea47f310c9f tempest-ServerTagsTestJSON-1345092865 tempest-ServerTagsTestJSON-1345092865-project-member] Lazy-loading 'resources' on Instance uuid df410051-d551-4a90-81f7-5630f5521a10 {{(pid=63279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2014.953783] env[63279]: INFO nova.scheduler.client.report [None req-0d5f8539-9b0d-46c6-a3f8-4965417b3f66 tempest-AttachInterfacesV270Test-1602135614 tempest-AttachInterfacesV270Test-1602135614-project-member] Deleted allocations for instance ba2d6111-d93d-4216-b641-864b542ea253 [ 2015.040159] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37b80a8b-7ea7-4fad-b59e-9b459a3e02df {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2015.040159] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-e3d901de-dbcb-4531-969f-fc9ee5b909f4 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] [instance: 64e92bfc-c0d0-4918-9ba2-45ffedbf7e39] Suspending the VM {{(pid=63279) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1162}} [ 2015.040159] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.SuspendVM_Task with opID=oslo.vmware-6f408adf-eb4b-4cf2-b93d-1f0a2f9f6e04 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2015.040159] env[63279]: DEBUG oslo_vmware.api [None req-e3d901de-dbcb-4531-969f-fc9ee5b909f4 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Waiting for the task: (returnval){ [ 2015.040159] env[63279]: value = "task-2087264" [ 2015.040159] env[63279]: _type = "Task" [ 2015.040159] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2015.040159] env[63279]: DEBUG oslo_vmware.api [None req-e3d901de-dbcb-4531-969f-fc9ee5b909f4 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Task: {'id': task-2087264, 'name': SuspendVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2015.153940] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087261, 'name': CreateVM_Task, 'duration_secs': 0.632402} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2015.157061] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ff9701ed-d545-44b4-911a-c4d809d0a771] Created VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2015.157400] env[63279]: DEBUG oslo_concurrency.lockutils [None req-38cbe7fd-87b8-43d7-9843-9491e5a539a0 tempest-InstanceActionsV221TestJSON-1895476841 tempest-InstanceActionsV221TestJSON-1895476841-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2015.157563] env[63279]: DEBUG oslo_concurrency.lockutils [None req-38cbe7fd-87b8-43d7-9843-9491e5a539a0 tempest-InstanceActionsV221TestJSON-1895476841 tempest-InstanceActionsV221TestJSON-1895476841-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2015.157886] env[63279]: DEBUG oslo_concurrency.lockutils [None req-38cbe7fd-87b8-43d7-9843-9491e5a539a0 tempest-InstanceActionsV221TestJSON-1895476841 tempest-InstanceActionsV221TestJSON-1895476841-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2015.158813] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-44db5f2c-3a5f-4f95-a2d6-4a152706051d {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2015.167863] env[63279]: DEBUG oslo_vmware.api [None req-38cbe7fd-87b8-43d7-9843-9491e5a539a0 tempest-InstanceActionsV221TestJSON-1895476841 tempest-InstanceActionsV221TestJSON-1895476841-project-member] Waiting for the task: (returnval){ [ 2015.167863] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]529a2f1f-1677-5baa-1f16-ea4a58eea807" [ 2015.167863] env[63279]: _type = "Task" [ 2015.167863] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2015.171261] env[63279]: DEBUG oslo_vmware.api [None req-efbb3008-c956-4d6e-b972-b10e6dc25335 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Task: {'id': task-2087262, 'name': PowerOnVM_Task, 'duration_secs': 0.514801} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2015.174770] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-efbb3008-c956-4d6e-b972-b10e6dc25335 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] [instance: f6a5d157-d58c-4c7e-b6e8-f2dc7aaebd9b] Powered on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2015.174998] env[63279]: INFO nova.compute.manager [None req-efbb3008-c956-4d6e-b972-b10e6dc25335 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] [instance: f6a5d157-d58c-4c7e-b6e8-f2dc7aaebd9b] Took 9.61 seconds to spawn the instance on the hypervisor. 
[ 2015.175201] env[63279]: DEBUG nova.compute.manager [None req-efbb3008-c956-4d6e-b972-b10e6dc25335 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] [instance: f6a5d157-d58c-4c7e-b6e8-f2dc7aaebd9b] Checking state {{(pid=63279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2015.175985] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21f1ae5a-070f-43fa-a3db-828002d6590b {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2015.185732] env[63279]: DEBUG oslo_vmware.api [None req-38cbe7fd-87b8-43d7-9843-9491e5a539a0 tempest-InstanceActionsV221TestJSON-1895476841 tempest-InstanceActionsV221TestJSON-1895476841-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]529a2f1f-1677-5baa-1f16-ea4a58eea807, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2015.191768] env[63279]: DEBUG nova.compute.manager [None req-4f724e1c-d0b9-4778-8593-5bcec5c9cb39 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] [instance: fb124cfa-24b4-4712-b8cc-c87df5d6231b] Start destroying the instance on the hypervisor. {{(pid=63279) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2015.191988] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-4f724e1c-d0b9-4778-8593-5bcec5c9cb39 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] [instance: fb124cfa-24b4-4712-b8cc-c87df5d6231b] Destroying instance {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2015.193020] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3cec405-a9bc-4ff1-9b3c-99337e246447 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2015.196879] env[63279]: DEBUG nova.compute.manager [None req-27d050e2-e902-4611-87fd-a6b52583e82d tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] [instance: 0e12ab9b-a701-4e0f-9d96-939090f50494] Start spawning the instance on the hypervisor. {{(pid=63279) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 2015.204457] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-4f724e1c-d0b9-4778-8593-5bcec5c9cb39 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] [instance: fb124cfa-24b4-4712-b8cc-c87df5d6231b] Powering off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2015.204731] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d8bb6a74-9018-453c-bbdc-01423e40ed6d {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2015.211714] env[63279]: DEBUG oslo_vmware.api [None req-4f724e1c-d0b9-4778-8593-5bcec5c9cb39 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Waiting for the task: (returnval){ [ 2015.211714] env[63279]: value = "task-2087265" [ 2015.211714] env[63279]: _type = "Task" [ 2015.211714] env[63279]: } to complete. 
{{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2015.227020] env[63279]: DEBUG oslo_vmware.api [None req-4f724e1c-d0b9-4778-8593-5bcec5c9cb39 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Task: {'id': task-2087265, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2015.238428] env[63279]: DEBUG nova.virt.hardware [None req-27d050e2-e902-4611-87fd-a6b52583e82d tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-13T17:51:47Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=1,extra_specs={hw_rng:allowed='True'},flavorid='1569863363',id=19,is_public=True,memory_mb=192,name='tempest-flavor_with_ephemeral_1-1197163513',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-13T17:30:01Z,direct_url=,disk_format='vmdk',id=30887889-e45b-4f67-8b3c-16216e594a90,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a935e86eee0a4c38adfe0367d2097a61',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-13T17:30:02Z,virtual_size=,visibility=), allow threads: False {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2015.238428] env[63279]: DEBUG nova.virt.hardware [None req-27d050e2-e902-4611-87fd-a6b52583e82d tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] Flavor limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2015.238428] env[63279]: DEBUG nova.virt.hardware [None req-27d050e2-e902-4611-87fd-a6b52583e82d tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] Image limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2015.238668] env[63279]: DEBUG nova.virt.hardware [None req-27d050e2-e902-4611-87fd-a6b52583e82d tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] Flavor pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2015.238668] env[63279]: DEBUG nova.virt.hardware [None req-27d050e2-e902-4611-87fd-a6b52583e82d tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] Image pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2015.239126] env[63279]: DEBUG nova.virt.hardware [None req-27d050e2-e902-4611-87fd-a6b52583e82d tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2015.239126] env[63279]: DEBUG nova.virt.hardware [None req-27d050e2-e902-4611-87fd-a6b52583e82d tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] Topology preferred 
VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 2015.239370] env[63279]: DEBUG nova.virt.hardware [None req-27d050e2-e902-4611-87fd-a6b52583e82d tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 2015.240411] env[63279]: DEBUG nova.virt.hardware [None req-27d050e2-e902-4611-87fd-a6b52583e82d tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] Got 1 possible topologies {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2015.240411] env[63279]: DEBUG nova.virt.hardware [None req-27d050e2-e902-4611-87fd-a6b52583e82d tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2015.240411] env[63279]: DEBUG nova.virt.hardware [None req-27d050e2-e902-4611-87fd-a6b52583e82d tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2015.241287] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6888f8ed-7634-40cc-8d88-1614731877da {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2015.248609] env[63279]: DEBUG oslo_concurrency.lockutils [None req-203ba856-8750-4748-8a31-13e38e82c1be tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2015.258416] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea6e4637-6215-4931-bcad-898cbb3f90a0 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2015.262676] env[63279]: DEBUG oslo_vmware.api [None req-0c3a37b9-f6dd-4e50-acdd-abd77fa39720 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Task: {'id': task-2087263, 'name': PowerOffVM_Task, 'duration_secs': 0.215252} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2015.266039] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-0c3a37b9-f6dd-4e50-acdd-abd77fa39720 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] [instance: 1b2ca21b-feea-4fc1-9ddc-99f144e4241a] Powered off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2015.266242] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-0c3a37b9-f6dd-4e50-acdd-abd77fa39720 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] [instance: 1b2ca21b-feea-4fc1-9ddc-99f144e4241a] Unregistering the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2015.266960] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-061111ef-9b82-4dd2-b9ad-6f344ba0f5ef {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2015.350556] env[63279]: DEBUG nova.compute.manager [None req-49457093-f0b7-4e7d-b6de-3bdb068f8189 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] [instance: 8ccb4293-927a-45ba-82e9-9f1b4d5985cc] Checking state {{(pid=63279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2015.350556] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-323dc650-f557-4a29-9ba4-1b41d49a0197 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2015.469390] env[63279]: DEBUG oslo_concurrency.lockutils [None req-0d5f8539-9b0d-46c6-a3f8-4965417b3f66 tempest-AttachInterfacesV270Test-1602135614 tempest-AttachInterfacesV270Test-1602135614-project-member] Lock "ba2d6111-d93d-4216-b641-864b542ea253" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 34.484s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2015.508189] env[63279]: DEBUG oslo_vmware.api [None req-e3d901de-dbcb-4531-969f-fc9ee5b909f4 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Task: {'id': task-2087264, 'name': SuspendVM_Task} progress is 62%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2015.514417] env[63279]: DEBUG nova.network.neutron [req-5bd2ea89-cafe-4acd-a962-71bee5b73335 req-ed95c300-f5eb-4a82-8a88-bcc522384d51 service nova] [instance: ff9701ed-d545-44b4-911a-c4d809d0a771] Updated VIF entry in instance network info cache for port 4805f07f-4add-493e-80cb-e2c75cc21104. 
{{(pid=63279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2015.514769] env[63279]: DEBUG nova.network.neutron [req-5bd2ea89-cafe-4acd-a962-71bee5b73335 req-ed95c300-f5eb-4a82-8a88-bcc522384d51 service nova] [instance: ff9701ed-d545-44b4-911a-c4d809d0a771] Updating instance_info_cache with network_info: [{"id": "4805f07f-4add-493e-80cb-e2c75cc21104", "address": "fa:16:3e:5e:52:6b", "network": {"id": "d63e8deb-e00b-4d35-a960-daa5c579e8e1", "bridge": "br-int", "label": "tempest-InstanceActionsV221TestJSON-343913389-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "36142278730f4f6b87fc9a51283b5cdf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d986680e-ad16-45b1-bf6d-cd2fe661679f", "external-id": "nsx-vlan-transportzone-397", "segmentation_id": 397, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4805f07f-4a", "ovs_interfaceid": "4805f07f-4add-493e-80cb-e2c75cc21104", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2015.685945] env[63279]: DEBUG oslo_vmware.api [None req-38cbe7fd-87b8-43d7-9843-9491e5a539a0 tempest-InstanceActionsV221TestJSON-1895476841 tempest-InstanceActionsV221TestJSON-1895476841-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]529a2f1f-1677-5baa-1f16-ea4a58eea807, 'name': SearchDatastore_Task, 'duration_secs': 0.060548} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2015.686444] env[63279]: DEBUG oslo_concurrency.lockutils [None req-38cbe7fd-87b8-43d7-9843-9491e5a539a0 tempest-InstanceActionsV221TestJSON-1895476841 tempest-InstanceActionsV221TestJSON-1895476841-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2015.686844] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-38cbe7fd-87b8-43d7-9843-9491e5a539a0 tempest-InstanceActionsV221TestJSON-1895476841 tempest-InstanceActionsV221TestJSON-1895476841-project-member] [instance: ff9701ed-d545-44b4-911a-c4d809d0a771] Processing image 30887889-e45b-4f67-8b3c-16216e594a90 {{(pid=63279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2015.688269] env[63279]: DEBUG oslo_concurrency.lockutils [None req-38cbe7fd-87b8-43d7-9843-9491e5a539a0 tempest-InstanceActionsV221TestJSON-1895476841 tempest-InstanceActionsV221TestJSON-1895476841-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2015.688269] env[63279]: DEBUG oslo_concurrency.lockutils [None req-38cbe7fd-87b8-43d7-9843-9491e5a539a0 tempest-InstanceActionsV221TestJSON-1895476841 tempest-InstanceActionsV221TestJSON-1895476841-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2015.688269] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-38cbe7fd-87b8-43d7-9843-9491e5a539a0 tempest-InstanceActionsV221TestJSON-1895476841 tempest-InstanceActionsV221TestJSON-1895476841-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2015.691631] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-406e6c9c-c80b-4907-bd06-9ca2108388c2 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2015.700520] env[63279]: INFO nova.compute.manager [None req-efbb3008-c956-4d6e-b972-b10e6dc25335 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] [instance: f6a5d157-d58c-4c7e-b6e8-f2dc7aaebd9b] Took 38.48 seconds to build instance. [ 2015.706967] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-38cbe7fd-87b8-43d7-9843-9491e5a539a0 tempest-InstanceActionsV221TestJSON-1895476841 tempest-InstanceActionsV221TestJSON-1895476841-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2015.708129] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-38cbe7fd-87b8-43d7-9843-9491e5a539a0 tempest-InstanceActionsV221TestJSON-1895476841 tempest-InstanceActionsV221TestJSON-1895476841-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2015.708129] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-052b6e9f-cb62-4029-9c7a-e05c0937f61c {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2015.724274] env[63279]: DEBUG oslo_vmware.api [None req-38cbe7fd-87b8-43d7-9843-9491e5a539a0 tempest-InstanceActionsV221TestJSON-1895476841 tempest-InstanceActionsV221TestJSON-1895476841-project-member] Waiting for the task: (returnval){ [ 2015.724274] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]522aa547-fb10-a9b5-d789-a897f7e89497" [ 2015.724274] env[63279]: _type = "Task" [ 2015.724274] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2015.734113] env[63279]: DEBUG oslo_vmware.api [None req-4f724e1c-d0b9-4778-8593-5bcec5c9cb39 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Task: {'id': task-2087265, 'name': PowerOffVM_Task, 'duration_secs': 0.307688} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2015.734959] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-4f724e1c-d0b9-4778-8593-5bcec5c9cb39 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] [instance: fb124cfa-24b4-4712-b8cc-c87df5d6231b] Powered off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2015.735186] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-4f724e1c-d0b9-4778-8593-5bcec5c9cb39 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] [instance: fb124cfa-24b4-4712-b8cc-c87df5d6231b] Unregistering the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2015.735400] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-05444b2e-a5b9-468d-9f24-0982ca52c5c5 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2015.740585] env[63279]: DEBUG oslo_vmware.api [None req-38cbe7fd-87b8-43d7-9843-9491e5a539a0 tempest-InstanceActionsV221TestJSON-1895476841 tempest-InstanceActionsV221TestJSON-1895476841-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]522aa547-fb10-a9b5-d789-a897f7e89497, 'name': SearchDatastore_Task, 'duration_secs': 0.010417} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2015.741678] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5c0ea73e-ffca-447e-969c-370dc4d6cca9 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2015.749637] env[63279]: DEBUG oslo_vmware.api [None req-38cbe7fd-87b8-43d7-9843-9491e5a539a0 tempest-InstanceActionsV221TestJSON-1895476841 tempest-InstanceActionsV221TestJSON-1895476841-project-member] Waiting for the task: (returnval){ [ 2015.749637] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]526b3a4c-6f9c-dc7c-55d8-d98c16f2d73d" [ 2015.749637] env[63279]: _type = "Task" [ 2015.749637] env[63279]: } to complete. 
{{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2015.757048] env[63279]: DEBUG oslo_vmware.api [None req-38cbe7fd-87b8-43d7-9843-9491e5a539a0 tempest-InstanceActionsV221TestJSON-1895476841 tempest-InstanceActionsV221TestJSON-1895476841-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]526b3a4c-6f9c-dc7c-55d8-d98c16f2d73d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2015.861906] env[63279]: INFO nova.compute.manager [None req-49457093-f0b7-4e7d-b6de-3bdb068f8189 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] [instance: 8ccb4293-927a-45ba-82e9-9f1b4d5985cc] instance snapshotting [ 2015.864815] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-baf0c43a-c8a7-4270-8192-af565031ca39 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2015.870252] env[63279]: DEBUG nova.network.neutron [None req-27d050e2-e902-4611-87fd-a6b52583e82d tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] [instance: 0e12ab9b-a701-4e0f-9d96-939090f50494] Successfully updated port: d31aaccc-4f75-4bc4-898d-0f2680b13372 {{(pid=63279) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2015.898021] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b61d2588-6e49-42ea-92d2-160aa280a74c {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2015.946630] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8613c09c-5158-4d8c-b684-eafd79de161b {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2015.955437] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e75d9e78-95a7-4001-baa1-898fb1faf268 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2015.989361] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62cd575b-45a9-486c-b468-0da2f36bb18c {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2015.997346] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-deefd42b-2df5-445d-a46a-0d7b572cec3a {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2016.015563] env[63279]: DEBUG nova.compute.provider_tree [None req-5a6f349c-4703-44a2-807a-eea47f310c9f tempest-ServerTagsTestJSON-1345092865 tempest-ServerTagsTestJSON-1345092865-project-member] Inventory has not changed in ProviderTree for provider: 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2016.020925] env[63279]: DEBUG oslo_concurrency.lockutils [req-5bd2ea89-cafe-4acd-a962-71bee5b73335 req-ed95c300-f5eb-4a82-8a88-bcc522384d51 service nova] Releasing lock "refresh_cache-ff9701ed-d545-44b4-911a-c4d809d0a771" {{(pid=63279) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2016.021315] env[63279]: DEBUG oslo_vmware.api [None req-e3d901de-dbcb-4531-969f-fc9ee5b909f4 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Task: {'id': task-2087264, 'name': SuspendVM_Task, 'duration_secs': 0.697622} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2016.021554] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-e3d901de-dbcb-4531-969f-fc9ee5b909f4 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] [instance: 64e92bfc-c0d0-4918-9ba2-45ffedbf7e39] Suspended the VM {{(pid=63279) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1166}} [ 2016.021727] env[63279]: DEBUG nova.compute.manager [None req-e3d901de-dbcb-4531-969f-fc9ee5b909f4 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] [instance: 64e92bfc-c0d0-4918-9ba2-45ffedbf7e39] Checking state {{(pid=63279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2016.022805] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23f6e277-a25a-4ea1-b90d-51f5e5aca5db {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2016.202258] env[63279]: DEBUG oslo_concurrency.lockutils [None req-efbb3008-c956-4d6e-b972-b10e6dc25335 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Lock "f6a5d157-d58c-4c7e-b6e8-f2dc7aaebd9b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 88.352s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2016.260595] env[63279]: DEBUG oslo_vmware.api [None req-38cbe7fd-87b8-43d7-9843-9491e5a539a0 tempest-InstanceActionsV221TestJSON-1895476841 tempest-InstanceActionsV221TestJSON-1895476841-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]526b3a4c-6f9c-dc7c-55d8-d98c16f2d73d, 'name': SearchDatastore_Task, 'duration_secs': 0.010464} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2016.260873] env[63279]: DEBUG oslo_concurrency.lockutils [None req-38cbe7fd-87b8-43d7-9843-9491e5a539a0 tempest-InstanceActionsV221TestJSON-1895476841 tempest-InstanceActionsV221TestJSON-1895476841-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2016.261148] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-38cbe7fd-87b8-43d7-9843-9491e5a539a0 tempest-InstanceActionsV221TestJSON-1895476841 tempest-InstanceActionsV221TestJSON-1895476841-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] ff9701ed-d545-44b4-911a-c4d809d0a771/ff9701ed-d545-44b4-911a-c4d809d0a771.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2016.261407] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5cd70d45-4b66-489b-bdda-215d918ab7d9 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2016.268046] env[63279]: DEBUG oslo_vmware.api [None req-38cbe7fd-87b8-43d7-9843-9491e5a539a0 tempest-InstanceActionsV221TestJSON-1895476841 tempest-InstanceActionsV221TestJSON-1895476841-project-member] Waiting for the task: (returnval){ [ 2016.268046] env[63279]: value = "task-2087268" [ 2016.268046] env[63279]: _type = "Task" [ 2016.268046] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2016.277020] env[63279]: DEBUG oslo_vmware.api [None req-38cbe7fd-87b8-43d7-9843-9491e5a539a0 tempest-InstanceActionsV221TestJSON-1895476841 tempest-InstanceActionsV221TestJSON-1895476841-project-member] Task: {'id': task-2087268, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2016.372967] env[63279]: DEBUG oslo_concurrency.lockutils [None req-27d050e2-e902-4611-87fd-a6b52583e82d tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] Acquiring lock "refresh_cache-0e12ab9b-a701-4e0f-9d96-939090f50494" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2016.373136] env[63279]: DEBUG oslo_concurrency.lockutils [None req-27d050e2-e902-4611-87fd-a6b52583e82d tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] Acquired lock "refresh_cache-0e12ab9b-a701-4e0f-9d96-939090f50494" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2016.373290] env[63279]: DEBUG nova.network.neutron [None req-27d050e2-e902-4611-87fd-a6b52583e82d tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] [instance: 0e12ab9b-a701-4e0f-9d96-939090f50494] Building network info cache for instance {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2016.409180] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-49457093-f0b7-4e7d-b6de-3bdb068f8189 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] [instance: 8ccb4293-927a-45ba-82e9-9f1b4d5985cc] Creating Snapshot of the VM instance {{(pid=63279) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 2016.409548] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-39d05f8b-1e2b-4282-965b-39be95ce42c3 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2016.418017] env[63279]: DEBUG oslo_vmware.api [None req-49457093-f0b7-4e7d-b6de-3bdb068f8189 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Waiting for the task: (returnval){ [ 2016.418017] env[63279]: value = "task-2087269" [ 2016.418017] env[63279]: _type = "Task" [ 2016.418017] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2016.431707] env[63279]: DEBUG oslo_vmware.api [None req-49457093-f0b7-4e7d-b6de-3bdb068f8189 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Task: {'id': task-2087269, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2016.519639] env[63279]: DEBUG nova.scheduler.client.report [None req-5a6f349c-4703-44a2-807a-eea47f310c9f tempest-ServerTagsTestJSON-1345092865 tempest-ServerTagsTestJSON-1345092865-project-member] Inventory has not changed for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2016.705730] env[63279]: DEBUG nova.compute.manager [None req-0b9b77cd-0839-40e4-8a61-bde483c6a590 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] [instance: a15141bc-424d-48ca-a6d5-c859a3639a0b] Starting instance... {{(pid=63279) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 2016.748046] env[63279]: DEBUG nova.compute.manager [req-75a006f4-d833-4e31-857e-2b0c6eae625f req-ce51cc1f-5ef7-412f-8185-81d6ae5fd674 service nova] [instance: 0e12ab9b-a701-4e0f-9d96-939090f50494] Received event network-vif-plugged-d31aaccc-4f75-4bc4-898d-0f2680b13372 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2016.748280] env[63279]: DEBUG oslo_concurrency.lockutils [req-75a006f4-d833-4e31-857e-2b0c6eae625f req-ce51cc1f-5ef7-412f-8185-81d6ae5fd674 service nova] Acquiring lock "0e12ab9b-a701-4e0f-9d96-939090f50494-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2016.748489] env[63279]: DEBUG oslo_concurrency.lockutils [req-75a006f4-d833-4e31-857e-2b0c6eae625f req-ce51cc1f-5ef7-412f-8185-81d6ae5fd674 service nova] Lock "0e12ab9b-a701-4e0f-9d96-939090f50494-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2016.748775] env[63279]: DEBUG oslo_concurrency.lockutils [req-75a006f4-d833-4e31-857e-2b0c6eae625f req-ce51cc1f-5ef7-412f-8185-81d6ae5fd674 service nova] Lock "0e12ab9b-a701-4e0f-9d96-939090f50494-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2016.748873] env[63279]: DEBUG nova.compute.manager [req-75a006f4-d833-4e31-857e-2b0c6eae625f req-ce51cc1f-5ef7-412f-8185-81d6ae5fd674 service nova] [instance: 0e12ab9b-a701-4e0f-9d96-939090f50494] No waiting events found dispatching network-vif-plugged-d31aaccc-4f75-4bc4-898d-0f2680b13372 {{(pid=63279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 2016.754090] env[63279]: WARNING nova.compute.manager [req-75a006f4-d833-4e31-857e-2b0c6eae625f req-ce51cc1f-5ef7-412f-8185-81d6ae5fd674 service nova] [instance: 0e12ab9b-a701-4e0f-9d96-939090f50494] Received unexpected event network-vif-plugged-d31aaccc-4f75-4bc4-898d-0f2680b13372 for instance with vm_state building and task_state spawning. 
[ 2016.754368] env[63279]: DEBUG nova.compute.manager [req-75a006f4-d833-4e31-857e-2b0c6eae625f req-ce51cc1f-5ef7-412f-8185-81d6ae5fd674 service nova] [instance: 0e12ab9b-a701-4e0f-9d96-939090f50494] Received event network-changed-d31aaccc-4f75-4bc4-898d-0f2680b13372 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2016.754548] env[63279]: DEBUG nova.compute.manager [req-75a006f4-d833-4e31-857e-2b0c6eae625f req-ce51cc1f-5ef7-412f-8185-81d6ae5fd674 service nova] [instance: 0e12ab9b-a701-4e0f-9d96-939090f50494] Refreshing instance network info cache due to event network-changed-d31aaccc-4f75-4bc4-898d-0f2680b13372. {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11655}} [ 2016.754755] env[63279]: DEBUG oslo_concurrency.lockutils [req-75a006f4-d833-4e31-857e-2b0c6eae625f req-ce51cc1f-5ef7-412f-8185-81d6ae5fd674 service nova] Acquiring lock "refresh_cache-0e12ab9b-a701-4e0f-9d96-939090f50494" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2016.778988] env[63279]: DEBUG oslo_vmware.api [None req-38cbe7fd-87b8-43d7-9843-9491e5a539a0 tempest-InstanceActionsV221TestJSON-1895476841 tempest-InstanceActionsV221TestJSON-1895476841-project-member] Task: {'id': task-2087268, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2016.931048] env[63279]: DEBUG oslo_vmware.api [None req-49457093-f0b7-4e7d-b6de-3bdb068f8189 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Task: {'id': task-2087269, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2016.939765] env[63279]: DEBUG nova.network.neutron [None req-27d050e2-e902-4611-87fd-a6b52583e82d tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] [instance: 0e12ab9b-a701-4e0f-9d96-939090f50494] Instance cache missing network info. 
{{(pid=63279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2017.024956] env[63279]: DEBUG oslo_concurrency.lockutils [None req-5a6f349c-4703-44a2-807a-eea47f310c9f tempest-ServerTagsTestJSON-1345092865 tempest-ServerTagsTestJSON-1345092865-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.125s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2017.029339] env[63279]: DEBUG oslo_concurrency.lockutils [None req-2f246f26-19c3-4ded-af34-51018ed72035 tempest-FloatingIPsAssociationNegativeTestJSON-1305161251 tempest-FloatingIPsAssociationNegativeTestJSON-1305161251-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 31.762s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2017.029900] env[63279]: DEBUG nova.objects.instance [None req-2f246f26-19c3-4ded-af34-51018ed72035 tempest-FloatingIPsAssociationNegativeTestJSON-1305161251 tempest-FloatingIPsAssociationNegativeTestJSON-1305161251-project-member] Lazy-loading 'resources' on Instance uuid 3f888d81-e73f-4486-bb64-849c873449bf {{(pid=63279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2017.050421] env[63279]: INFO nova.scheduler.client.report [None req-5a6f349c-4703-44a2-807a-eea47f310c9f tempest-ServerTagsTestJSON-1345092865 tempest-ServerTagsTestJSON-1345092865-project-member] Deleted allocations for instance df410051-d551-4a90-81f7-5630f5521a10 [ 2017.236334] env[63279]: DEBUG oslo_concurrency.lockutils [None req-0b9b77cd-0839-40e4-8a61-bde483c6a590 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2017.250823] env[63279]: DEBUG nova.network.neutron [None req-27d050e2-e902-4611-87fd-a6b52583e82d tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] [instance: 0e12ab9b-a701-4e0f-9d96-939090f50494] Updating instance_info_cache with network_info: [{"id": "d31aaccc-4f75-4bc4-898d-0f2680b13372", "address": "fa:16:3e:ca:57:f6", "network": {"id": "26ed2848-6f14-4264-af0d-f08d62ab4413", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-659194361-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e931e0ddf1b840fdb8743fbdba05b28d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ccf76700-491b-4462-ab19-e6d3a9ff87ac", "external-id": "nsx-vlan-transportzone-956", "segmentation_id": 956, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd31aaccc-4f", "ovs_interfaceid": "d31aaccc-4f75-4bc4-898d-0f2680b13372", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": 
{}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2017.281525] env[63279]: DEBUG oslo_vmware.api [None req-38cbe7fd-87b8-43d7-9843-9491e5a539a0 tempest-InstanceActionsV221TestJSON-1895476841 tempest-InstanceActionsV221TestJSON-1895476841-project-member] Task: {'id': task-2087268, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.520169} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2017.281845] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-38cbe7fd-87b8-43d7-9843-9491e5a539a0 tempest-InstanceActionsV221TestJSON-1895476841 tempest-InstanceActionsV221TestJSON-1895476841-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] ff9701ed-d545-44b4-911a-c4d809d0a771/ff9701ed-d545-44b4-911a-c4d809d0a771.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2017.282096] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-38cbe7fd-87b8-43d7-9843-9491e5a539a0 tempest-InstanceActionsV221TestJSON-1895476841 tempest-InstanceActionsV221TestJSON-1895476841-project-member] [instance: ff9701ed-d545-44b4-911a-c4d809d0a771] Extending root virtual disk to 1048576 {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2017.282364] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-0a7286c8-c03a-45a6-abf3-f574d3f9ab3f {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2017.291778] env[63279]: DEBUG oslo_vmware.api [None req-38cbe7fd-87b8-43d7-9843-9491e5a539a0 tempest-InstanceActionsV221TestJSON-1895476841 tempest-InstanceActionsV221TestJSON-1895476841-project-member] Waiting for the task: (returnval){ [ 2017.291778] env[63279]: value = "task-2087270" [ 2017.291778] env[63279]: _type = "Task" [ 2017.291778] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2017.303598] env[63279]: DEBUG oslo_vmware.api [None req-38cbe7fd-87b8-43d7-9843-9491e5a539a0 tempest-InstanceActionsV221TestJSON-1895476841 tempest-InstanceActionsV221TestJSON-1895476841-project-member] Task: {'id': task-2087270, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2017.432359] env[63279]: DEBUG oslo_vmware.api [None req-49457093-f0b7-4e7d-b6de-3bdb068f8189 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Task: {'id': task-2087269, 'name': CreateSnapshot_Task, 'duration_secs': 0.982879} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2017.432686] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-49457093-f0b7-4e7d-b6de-3bdb068f8189 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] [instance: 8ccb4293-927a-45ba-82e9-9f1b4d5985cc] Created Snapshot of the VM instance {{(pid=63279) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 2017.433528] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c800ab0-176e-4b11-88a0-7d04b2e068b3 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2017.562800] env[63279]: DEBUG oslo_concurrency.lockutils [None req-5a6f349c-4703-44a2-807a-eea47f310c9f tempest-ServerTagsTestJSON-1345092865 tempest-ServerTagsTestJSON-1345092865-project-member] Lock "df410051-d551-4a90-81f7-5630f5521a10" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 36.641s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2017.703955] env[63279]: INFO nova.compute.manager [None req-5ddb667c-4266-446f-abce-7527abe6be67 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] [instance: 6388f912-ae70-4e8f-b8e4-ceb02e0f8a51] Rebuilding instance [ 2017.755928] env[63279]: DEBUG oslo_concurrency.lockutils [None req-27d050e2-e902-4611-87fd-a6b52583e82d tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] Releasing lock "refresh_cache-0e12ab9b-a701-4e0f-9d96-939090f50494" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2017.756249] env[63279]: DEBUG nova.compute.manager [None req-27d050e2-e902-4611-87fd-a6b52583e82d tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] [instance: 0e12ab9b-a701-4e0f-9d96-939090f50494] Instance network_info: |[{"id": "d31aaccc-4f75-4bc4-898d-0f2680b13372", "address": "fa:16:3e:ca:57:f6", "network": {"id": "26ed2848-6f14-4264-af0d-f08d62ab4413", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-659194361-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e931e0ddf1b840fdb8743fbdba05b28d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ccf76700-491b-4462-ab19-e6d3a9ff87ac", "external-id": "nsx-vlan-transportzone-956", "segmentation_id": 956, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd31aaccc-4f", "ovs_interfaceid": "d31aaccc-4f75-4bc4-898d-0f2680b13372", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 2017.761036] env[63279]: DEBUG oslo_concurrency.lockutils 
[req-75a006f4-d833-4e31-857e-2b0c6eae625f req-ce51cc1f-5ef7-412f-8185-81d6ae5fd674 service nova] Acquired lock "refresh_cache-0e12ab9b-a701-4e0f-9d96-939090f50494" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2017.761278] env[63279]: DEBUG nova.network.neutron [req-75a006f4-d833-4e31-857e-2b0c6eae625f req-ce51cc1f-5ef7-412f-8185-81d6ae5fd674 service nova] [instance: 0e12ab9b-a701-4e0f-9d96-939090f50494] Refreshing network info cache for port d31aaccc-4f75-4bc4-898d-0f2680b13372 {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2017.762683] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-27d050e2-e902-4611-87fd-a6b52583e82d tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] [instance: 0e12ab9b-a701-4e0f-9d96-939090f50494] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ca:57:f6', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ccf76700-491b-4462-ab19-e6d3a9ff87ac', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd31aaccc-4f75-4bc4-898d-0f2680b13372', 'vif_model': 'vmxnet3'}] {{(pid=63279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2017.773028] env[63279]: DEBUG oslo.service.loopingcall [None req-27d050e2-e902-4611-87fd-a6b52583e82d tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2017.777661] env[63279]: DEBUG nova.compute.manager [None req-5ddb667c-4266-446f-abce-7527abe6be67 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] [instance: 6388f912-ae70-4e8f-b8e4-ceb02e0f8a51] Checking state {{(pid=63279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2017.778044] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0e12ab9b-a701-4e0f-9d96-939090f50494] Creating VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2017.782130] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c433cba0-618d-4ceb-8352-7e38cd954338 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2017.791382] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-4c75127d-c71c-45fd-b71d-c85fcd458c24 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2017.830953] env[63279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2017.830953] env[63279]: value = "task-2087271" [ 2017.830953] env[63279]: _type = "Task" [ 2017.830953] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2017.845488] env[63279]: DEBUG oslo_vmware.api [None req-38cbe7fd-87b8-43d7-9843-9491e5a539a0 tempest-InstanceActionsV221TestJSON-1895476841 tempest-InstanceActionsV221TestJSON-1895476841-project-member] Task: {'id': task-2087270, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.086647} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2017.848900] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-38cbe7fd-87b8-43d7-9843-9491e5a539a0 tempest-InstanceActionsV221TestJSON-1895476841 tempest-InstanceActionsV221TestJSON-1895476841-project-member] [instance: ff9701ed-d545-44b4-911a-c4d809d0a771] Extended root virtual disk {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2017.850355] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087271, 'name': CreateVM_Task} progress is 6%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2017.852455] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc9c3d42-7ace-40d1-b45c-a429077bbfab {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2017.878394] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-38cbe7fd-87b8-43d7-9843-9491e5a539a0 tempest-InstanceActionsV221TestJSON-1895476841 tempest-InstanceActionsV221TestJSON-1895476841-project-member] [instance: ff9701ed-d545-44b4-911a-c4d809d0a771] Reconfiguring VM instance instance-00000037 to attach disk [datastore1] ff9701ed-d545-44b4-911a-c4d809d0a771/ff9701ed-d545-44b4-911a-c4d809d0a771.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2017.882043] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-013d031b-2d63-4c08-aef5-c0fab772727a {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2017.903123] env[63279]: DEBUG oslo_vmware.api [None req-38cbe7fd-87b8-43d7-9843-9491e5a539a0 tempest-InstanceActionsV221TestJSON-1895476841 tempest-InstanceActionsV221TestJSON-1895476841-project-member] Waiting for the task: (returnval){ [ 2017.903123] env[63279]: value = "task-2087272" [ 2017.903123] env[63279]: _type = "Task" [ 2017.903123] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2017.921889] env[63279]: DEBUG oslo_vmware.api [None req-38cbe7fd-87b8-43d7-9843-9491e5a539a0 tempest-InstanceActionsV221TestJSON-1895476841 tempest-InstanceActionsV221TestJSON-1895476841-project-member] Task: {'id': task-2087272, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2017.953715] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-49457093-f0b7-4e7d-b6de-3bdb068f8189 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] [instance: 8ccb4293-927a-45ba-82e9-9f1b4d5985cc] Creating linked-clone VM from snapshot {{(pid=63279) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 2017.954089] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-5027bbff-8bab-4911-978e-34d900a27347 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2017.967380] env[63279]: DEBUG oslo_vmware.api [None req-49457093-f0b7-4e7d-b6de-3bdb068f8189 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Waiting for the task: (returnval){ [ 2017.967380] env[63279]: value = "task-2087273" [ 2017.967380] env[63279]: _type = "Task" [ 2017.967380] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2017.975880] env[63279]: DEBUG oslo_vmware.api [None req-49457093-f0b7-4e7d-b6de-3bdb068f8189 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Task: {'id': task-2087273, 'name': CloneVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2018.106130] env[63279]: DEBUG nova.network.neutron [req-75a006f4-d833-4e31-857e-2b0c6eae625f req-ce51cc1f-5ef7-412f-8185-81d6ae5fd674 service nova] [instance: 0e12ab9b-a701-4e0f-9d96-939090f50494] Updated VIF entry in instance network info cache for port d31aaccc-4f75-4bc4-898d-0f2680b13372. 
{{(pid=63279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2018.106536] env[63279]: DEBUG nova.network.neutron [req-75a006f4-d833-4e31-857e-2b0c6eae625f req-ce51cc1f-5ef7-412f-8185-81d6ae5fd674 service nova] [instance: 0e12ab9b-a701-4e0f-9d96-939090f50494] Updating instance_info_cache with network_info: [{"id": "d31aaccc-4f75-4bc4-898d-0f2680b13372", "address": "fa:16:3e:ca:57:f6", "network": {"id": "26ed2848-6f14-4264-af0d-f08d62ab4413", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-659194361-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e931e0ddf1b840fdb8743fbdba05b28d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ccf76700-491b-4462-ab19-e6d3a9ff87ac", "external-id": "nsx-vlan-transportzone-956", "segmentation_id": 956, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd31aaccc-4f", "ovs_interfaceid": "d31aaccc-4f75-4bc4-898d-0f2680b13372", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2018.122664] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e955eee1-ee7b-4f45-a1c7-410a9081f2b9 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2018.131366] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb56f365-e696-4085-928d-3536162e9695 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2018.172762] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dfff53a5-0838-46dc-9888-71d63cb8f372 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2018.182999] env[63279]: DEBUG nova.compute.manager [None req-c8d6a01e-b538-4983-b585-0874aad097ba tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] [instance: f6a5d157-d58c-4c7e-b6e8-f2dc7aaebd9b] Stashing vm_state: active {{(pid=63279) _prep_resize /opt/stack/nova/nova/compute/manager.py:6136}} [ 2018.189547] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46a071d6-5ea5-439e-a142-5e36b72aa068 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2018.206623] env[63279]: DEBUG nova.compute.provider_tree [None req-2f246f26-19c3-4ded-af34-51018ed72035 tempest-FloatingIPsAssociationNegativeTestJSON-1305161251 tempest-FloatingIPsAssociationNegativeTestJSON-1305161251-project-member] Updating inventory in ProviderTree for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 
512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 168, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2018.348753] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087271, 'name': CreateVM_Task} progress is 25%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2018.415147] env[63279]: DEBUG oslo_vmware.api [None req-38cbe7fd-87b8-43d7-9843-9491e5a539a0 tempest-InstanceActionsV221TestJSON-1895476841 tempest-InstanceActionsV221TestJSON-1895476841-project-member] Task: {'id': task-2087272, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2018.458144] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-0c3a37b9-f6dd-4e50-acdd-abd77fa39720 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] [instance: 1b2ca21b-feea-4fc1-9ddc-99f144e4241a] Unregistered the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2018.458144] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-0c3a37b9-f6dd-4e50-acdd-abd77fa39720 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] [instance: 1b2ca21b-feea-4fc1-9ddc-99f144e4241a] Deleting contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2018.458263] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-0c3a37b9-f6dd-4e50-acdd-abd77fa39720 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Deleting the datastore file [datastore1] 1b2ca21b-feea-4fc1-9ddc-99f144e4241a {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2018.458453] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0724769b-2cc9-47c7-9e59-73c38fe0446a {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2018.461867] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-4f724e1c-d0b9-4778-8593-5bcec5c9cb39 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] [instance: fb124cfa-24b4-4712-b8cc-c87df5d6231b] Unregistered the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2018.462090] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-4f724e1c-d0b9-4778-8593-5bcec5c9cb39 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] [instance: fb124cfa-24b4-4712-b8cc-c87df5d6231b] Deleting contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2018.462262] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-4f724e1c-d0b9-4778-8593-5bcec5c9cb39 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Deleting the datastore file [datastore1] fb124cfa-24b4-4712-b8cc-c87df5d6231b {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2018.462876] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-bcca9f38-a742-47d8-b031-543a8223d10e {{(pid=63279) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2018.466640] env[63279]: DEBUG oslo_vmware.api [None req-0c3a37b9-f6dd-4e50-acdd-abd77fa39720 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Waiting for the task: (returnval){ [ 2018.466640] env[63279]: value = "task-2087274" [ 2018.466640] env[63279]: _type = "Task" [ 2018.466640] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2018.472015] env[63279]: DEBUG oslo_vmware.api [None req-4f724e1c-d0b9-4778-8593-5bcec5c9cb39 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Waiting for the task: (returnval){ [ 2018.472015] env[63279]: value = "task-2087275" [ 2018.472015] env[63279]: _type = "Task" [ 2018.472015] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2018.481872] env[63279]: DEBUG oslo_vmware.api [None req-0c3a37b9-f6dd-4e50-acdd-abd77fa39720 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Task: {'id': task-2087274, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2018.486461] env[63279]: DEBUG oslo_vmware.api [None req-49457093-f0b7-4e7d-b6de-3bdb068f8189 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Task: {'id': task-2087273, 'name': CloneVM_Task} progress is 94%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2018.489770] env[63279]: DEBUG oslo_vmware.api [None req-4f724e1c-d0b9-4778-8593-5bcec5c9cb39 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Task: {'id': task-2087275, 'name': DeleteDatastoreFile_Task} progress is 0%. 
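
The records above show the oslo.vmware wait_for_task pattern that recurs throughout this log: a vCenter task handle is returned (for example task-2087274 or task-2087275), then _poll_task repeatedly reports progress until the task completes or errors. The following is a minimal, self-contained sketch of that polling loop in plain Python with a stubbed poll function; the names wait_for_task, TaskError and fake_poll are illustrative only and are not the oslo.vmware implementation.

import time

class TaskError(RuntimeError):
    """Raised when the polled task reports an error state."""

def wait_for_task(poll, interval=0.5, timeout=300.0):
    # poll() is expected to return (state, progress), where state is one of
    # "running", "success" or "error"; this mirrors the repeated
    # "progress is N%" lines in the log above.
    deadline = time.monotonic() + timeout
    while True:
        state, progress = poll()
        print("progress is %d%%" % progress)
        if state == "success":
            return
        if state == "error":
            raise TaskError("task failed at %d%%" % progress)
        if time.monotonic() >= deadline:
            raise TimeoutError("task did not complete in %.0fs" % timeout)
        time.sleep(interval)

# Stubbed poll function standing in for a real vCenter task query.
_progress = {"value": 0}

def fake_poll():
    _progress["value"] = min(100, _progress["value"] + 25)
    return ("success" if _progress["value"] == 100 else "running",
            _progress["value"])

wait_for_task(fake_poll, interval=0.01)
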
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2018.609449] env[63279]: DEBUG oslo_concurrency.lockutils [req-75a006f4-d833-4e31-857e-2b0c6eae625f req-ce51cc1f-5ef7-412f-8185-81d6ae5fd674 service nova] Releasing lock "refresh_cache-0e12ab9b-a701-4e0f-9d96-939090f50494" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2018.716652] env[63279]: DEBUG oslo_concurrency.lockutils [None req-c8d6a01e-b538-4983-b585-0874aad097ba tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2018.737218] env[63279]: ERROR nova.scheduler.client.report [None req-2f246f26-19c3-4ded-af34-51018ed72035 tempest-FloatingIPsAssociationNegativeTestJSON-1305161251 tempest-FloatingIPsAssociationNegativeTestJSON-1305161251-project-member] [req-56a77553-7fcc-4e14-9a6d-404e3d10af16] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 168, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 0ba7c625-a0fc-4d3c-b804-196d00f00137. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-56a77553-7fcc-4e14-9a6d-404e3d10af16"}]} [ 2018.760251] env[63279]: DEBUG nova.scheduler.client.report [None req-2f246f26-19c3-4ded-af34-51018ed72035 tempest-FloatingIPsAssociationNegativeTestJSON-1305161251 tempest-FloatingIPsAssociationNegativeTestJSON-1305161251-project-member] Refreshing inventories for resource provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 2018.842252] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-5ddb667c-4266-446f-abce-7527abe6be67 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] [instance: 6388f912-ae70-4e8f-b8e4-ceb02e0f8a51] Powering off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2018.843031] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d8bcbd94-def3-49ab-97ac-f3b000a98659 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2018.848316] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087271, 'name': CreateVM_Task, 'duration_secs': 0.79793} completed successfully. 
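
The 409 placement.concurrent_update error in the lines above is the expected signal that another writer bumped the resource provider's generation between the scheduler report client's read and its inventory update; the client responds by re-reading provider state, as the subsequent "Refreshing inventories", aggregate and trait refresh lines record, before deciding whether the update still needs to be applied. Below is a generic sketch of that read/modify/write retry using abstract callables; GenerationConflict, get_provider and put_inventory are hypothetical stand-ins, not the real placement client API.

class GenerationConflict(Exception):
    """Stands in for an HTTP 409 placement.concurrent_update response."""

def update_inventory_with_retry(get_provider, put_inventory, inventory,
                                max_retries=4):
    # get_provider() returns the provider's current generation;
    # put_inventory(generation, inventory) raises GenerationConflict when the
    # generation it sends is stale, mirroring the conflict seen in the log.
    for _ in range(max_retries):
        generation = get_provider()
        try:
            put_inventory(generation, inventory)
            return True
        except GenerationConflict:
            # Someone else updated the provider; refresh and try again.
            continue
    return False
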
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2018.848483] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0e12ab9b-a701-4e0f-9d96-939090f50494] Created VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2018.850132] env[63279]: DEBUG oslo_concurrency.lockutils [None req-27d050e2-e902-4611-87fd-a6b52583e82d tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2018.850302] env[63279]: DEBUG oslo_concurrency.lockutils [None req-27d050e2-e902-4611-87fd-a6b52583e82d tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2018.850618] env[63279]: DEBUG oslo_concurrency.lockutils [None req-27d050e2-e902-4611-87fd-a6b52583e82d tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2018.850934] env[63279]: DEBUG oslo_vmware.api [None req-5ddb667c-4266-446f-abce-7527abe6be67 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Waiting for the task: (returnval){ [ 2018.850934] env[63279]: value = "task-2087276" [ 2018.850934] env[63279]: _type = "Task" [ 2018.850934] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2018.851138] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1ed07e83-9504-441b-80da-7e9a7da2fd1c {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2018.858119] env[63279]: DEBUG oslo_vmware.api [None req-27d050e2-e902-4611-87fd-a6b52583e82d tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] Waiting for the task: (returnval){ [ 2018.858119] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52157c25-6ad4-16f8-7837-eb17f285267f" [ 2018.858119] env[63279]: _type = "Task" [ 2018.858119] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2018.862289] env[63279]: DEBUG oslo_vmware.api [None req-5ddb667c-4266-446f-abce-7527abe6be67 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Task: {'id': task-2087276, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2018.870050] env[63279]: DEBUG nova.scheduler.client.report [None req-2f246f26-19c3-4ded-af34-51018ed72035 tempest-FloatingIPsAssociationNegativeTestJSON-1305161251 tempest-FloatingIPsAssociationNegativeTestJSON-1305161251-project-member] Updating ProviderTree inventory for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 2018.870277] env[63279]: DEBUG nova.compute.provider_tree [None req-2f246f26-19c3-4ded-af34-51018ed72035 tempest-FloatingIPsAssociationNegativeTestJSON-1305161251 tempest-FloatingIPsAssociationNegativeTestJSON-1305161251-project-member] Updating inventory in ProviderTree for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2018.880934] env[63279]: DEBUG oslo_vmware.api [None req-27d050e2-e902-4611-87fd-a6b52583e82d tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52157c25-6ad4-16f8-7837-eb17f285267f, 'name': SearchDatastore_Task, 'duration_secs': 0.009185} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2018.881330] env[63279]: DEBUG oslo_concurrency.lockutils [None req-27d050e2-e902-4611-87fd-a6b52583e82d tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2018.881568] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-27d050e2-e902-4611-87fd-a6b52583e82d tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] [instance: 0e12ab9b-a701-4e0f-9d96-939090f50494] Processing image 30887889-e45b-4f67-8b3c-16216e594a90 {{(pid=63279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2018.881838] env[63279]: DEBUG oslo_concurrency.lockutils [None req-27d050e2-e902-4611-87fd-a6b52583e82d tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2018.882010] env[63279]: DEBUG oslo_concurrency.lockutils [None req-27d050e2-e902-4611-87fd-a6b52583e82d tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2018.882282] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-27d050e2-e902-4611-87fd-a6b52583e82d tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2018.882552] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ce1d72dc-470f-4241-8dd9-294e9cfd3371 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2018.885013] env[63279]: DEBUG nova.scheduler.client.report [None req-2f246f26-19c3-4ded-af34-51018ed72035 tempest-FloatingIPsAssociationNegativeTestJSON-1305161251 tempest-FloatingIPsAssociationNegativeTestJSON-1305161251-project-member] Refreshing aggregate associations for resource provider 0ba7c625-a0fc-4d3c-b804-196d00f00137, aggregates: None {{(pid=63279) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 2018.896054] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-27d050e2-e902-4611-87fd-a6b52583e82d tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2018.896054] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-27d050e2-e902-4611-87fd-a6b52583e82d tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] Folder 
[datastore1] devstack-image-cache_base created. {{(pid=63279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2018.896054] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3a6673fc-bdd0-4a79-aa6a-dcb9cd93264b {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2018.901372] env[63279]: DEBUG oslo_vmware.api [None req-27d050e2-e902-4611-87fd-a6b52583e82d tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] Waiting for the task: (returnval){ [ 2018.901372] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]527ee911-d654-3918-0d8b-95094650035a" [ 2018.901372] env[63279]: _type = "Task" [ 2018.901372] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2018.908449] env[63279]: DEBUG nova.scheduler.client.report [None req-2f246f26-19c3-4ded-af34-51018ed72035 tempest-FloatingIPsAssociationNegativeTestJSON-1305161251 tempest-FloatingIPsAssociationNegativeTestJSON-1305161251-project-member] Refreshing trait associations for resource provider 0ba7c625-a0fc-4d3c-b804-196d00f00137, traits: COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK {{(pid=63279) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 2018.916716] env[63279]: DEBUG oslo_vmware.api [None req-27d050e2-e902-4611-87fd-a6b52583e82d tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]527ee911-d654-3918-0d8b-95094650035a, 'name': SearchDatastore_Task, 'duration_secs': 0.007928} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2018.916716] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-677feca0-f4a5-4079-a34b-2d57fb54bef5 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2018.922273] env[63279]: DEBUG oslo_vmware.api [None req-38cbe7fd-87b8-43d7-9843-9491e5a539a0 tempest-InstanceActionsV221TestJSON-1895476841 tempest-InstanceActionsV221TestJSON-1895476841-project-member] Task: {'id': task-2087272, 'name': ReconfigVM_Task} progress is 99%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2018.925770] env[63279]: DEBUG oslo_vmware.api [None req-27d050e2-e902-4611-87fd-a6b52583e82d tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] Waiting for the task: (returnval){ [ 2018.925770] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]5275cf0f-f629-b64b-4f96-92f7bb9d012f" [ 2018.925770] env[63279]: _type = "Task" [ 2018.925770] env[63279]: } to complete. 
{{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2018.933455] env[63279]: DEBUG oslo_vmware.api [None req-27d050e2-e902-4611-87fd-a6b52583e82d tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]5275cf0f-f629-b64b-4f96-92f7bb9d012f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2018.982753] env[63279]: DEBUG oslo_vmware.api [None req-0c3a37b9-f6dd-4e50-acdd-abd77fa39720 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Task: {'id': task-2087274, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.280169} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2018.988928] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-0c3a37b9-f6dd-4e50-acdd-abd77fa39720 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Deleted the datastore file {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2018.989178] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-0c3a37b9-f6dd-4e50-acdd-abd77fa39720 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] [instance: 1b2ca21b-feea-4fc1-9ddc-99f144e4241a] Deleted contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2018.989377] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-0c3a37b9-f6dd-4e50-acdd-abd77fa39720 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] [instance: 1b2ca21b-feea-4fc1-9ddc-99f144e4241a] Instance destroyed {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2018.990043] env[63279]: INFO nova.compute.manager [None req-0c3a37b9-f6dd-4e50-acdd-abd77fa39720 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] [instance: 1b2ca21b-feea-4fc1-9ddc-99f144e4241a] Took 4.27 seconds to destroy the instance on the hypervisor. [ 2018.990043] env[63279]: DEBUG oslo.service.loopingcall [None req-0c3a37b9-f6dd-4e50-acdd-abd77fa39720 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2018.990242] env[63279]: DEBUG oslo_vmware.api [None req-49457093-f0b7-4e7d-b6de-3bdb068f8189 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Task: {'id': task-2087273, 'name': CloneVM_Task} progress is 94%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2018.993233] env[63279]: DEBUG nova.compute.manager [-] [instance: 1b2ca21b-feea-4fc1-9ddc-99f144e4241a] Deallocating network for instance {{(pid=63279) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2018.993337] env[63279]: DEBUG nova.network.neutron [-] [instance: 1b2ca21b-feea-4fc1-9ddc-99f144e4241a] deallocate_for_instance() {{(pid=63279) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2018.995230] env[63279]: DEBUG oslo_vmware.api [None req-4f724e1c-d0b9-4778-8593-5bcec5c9cb39 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Task: {'id': task-2087275, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.198181} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2018.995442] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-4f724e1c-d0b9-4778-8593-5bcec5c9cb39 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Deleted the datastore file {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2018.995620] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-4f724e1c-d0b9-4778-8593-5bcec5c9cb39 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] [instance: fb124cfa-24b4-4712-b8cc-c87df5d6231b] Deleted contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2018.995829] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-4f724e1c-d0b9-4778-8593-5bcec5c9cb39 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] [instance: fb124cfa-24b4-4712-b8cc-c87df5d6231b] Instance destroyed {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2018.995973] env[63279]: INFO nova.compute.manager [None req-4f724e1c-d0b9-4778-8593-5bcec5c9cb39 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] [instance: fb124cfa-24b4-4712-b8cc-c87df5d6231b] Took 3.80 seconds to destroy the instance on the hypervisor. [ 2018.996340] env[63279]: DEBUG oslo.service.loopingcall [None req-4f724e1c-d0b9-4778-8593-5bcec5c9cb39 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2018.996715] env[63279]: DEBUG nova.compute.manager [-] [instance: fb124cfa-24b4-4712-b8cc-c87df5d6231b] Deallocating network for instance {{(pid=63279) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2018.996870] env[63279]: DEBUG nova.network.neutron [-] [instance: fb124cfa-24b4-4712-b8cc-c87df5d6231b] deallocate_for_instance() {{(pid=63279) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2019.227618] env[63279]: DEBUG nova.compute.manager [None req-9d9741e8-1ae9-4073-930a-89163f440e1a tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] [instance: 64e92bfc-c0d0-4918-9ba2-45ffedbf7e39] Checking state {{(pid=63279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2019.228740] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3325869-6710-44ae-9bfd-29e21c36e361 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2019.361934] env[63279]: DEBUG oslo_vmware.api [None req-5ddb667c-4266-446f-abce-7527abe6be67 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Task: {'id': task-2087276, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2019.423686] env[63279]: DEBUG oslo_vmware.api [None req-38cbe7fd-87b8-43d7-9843-9491e5a539a0 tempest-InstanceActionsV221TestJSON-1895476841 tempest-InstanceActionsV221TestJSON-1895476841-project-member] Task: {'id': task-2087272, 'name': ReconfigVM_Task} progress is 99%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2019.436923] env[63279]: DEBUG oslo_vmware.api [None req-27d050e2-e902-4611-87fd-a6b52583e82d tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]5275cf0f-f629-b64b-4f96-92f7bb9d012f, 'name': SearchDatastore_Task, 'duration_secs': 0.009191} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2019.440420] env[63279]: DEBUG oslo_concurrency.lockutils [None req-27d050e2-e902-4611-87fd-a6b52583e82d tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2019.440420] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-27d050e2-e902-4611-87fd-a6b52583e82d tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] 0e12ab9b-a701-4e0f-9d96-939090f50494/0e12ab9b-a701-4e0f-9d96-939090f50494.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2019.440989] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f26d2bd1-a942-42aa-8463-19b7947bd5b8 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2019.448154] env[63279]: DEBUG oslo_vmware.api [None req-27d050e2-e902-4611-87fd-a6b52583e82d tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] Waiting for the task: (returnval){ [ 2019.448154] env[63279]: value = "task-2087277" [ 2019.448154] env[63279]: _type = "Task" [ 2019.448154] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2019.457363] env[63279]: DEBUG oslo_vmware.api [None req-27d050e2-e902-4611-87fd-a6b52583e82d tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] Task: {'id': task-2087277, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2019.479361] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6844fb7c-253a-4612-b119-3c9df1f01c04 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2019.485609] env[63279]: DEBUG oslo_vmware.api [None req-49457093-f0b7-4e7d-b6de-3bdb068f8189 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Task: {'id': task-2087273, 'name': CloneVM_Task} progress is 95%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2019.492308] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b97b0e30-abc7-408f-b647-035f229ddf6a {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2019.525841] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8033f1ea-0c2f-43af-8379-282dd379218e {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2019.533460] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7d3f448-4977-4255-ac2d-57eac7981cbd {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2019.552459] env[63279]: DEBUG nova.compute.provider_tree [None req-2f246f26-19c3-4ded-af34-51018ed72035 tempest-FloatingIPsAssociationNegativeTestJSON-1305161251 tempest-FloatingIPsAssociationNegativeTestJSON-1305161251-project-member] Inventory has not changed in ProviderTree for provider: 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2019.708114] env[63279]: DEBUG nova.compute.manager [req-804fe66f-619a-4e7b-b2af-fe90f78d21de req-b30d9238-9a1e-4fbe-b593-abdc7c4456a0 service nova] [instance: 1b2ca21b-feea-4fc1-9ddc-99f144e4241a] Received event network-vif-deleted-a55a63d0-c628-4d90-b3dc-d8f0bf564e95 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2019.708345] env[63279]: INFO nova.compute.manager [req-804fe66f-619a-4e7b-b2af-fe90f78d21de req-b30d9238-9a1e-4fbe-b593-abdc7c4456a0 service nova] [instance: 1b2ca21b-feea-4fc1-9ddc-99f144e4241a] Neutron deleted interface a55a63d0-c628-4d90-b3dc-d8f0bf564e95; detaching it from the instance and deleting it from the info cache [ 2019.708544] env[63279]: DEBUG nova.network.neutron [req-804fe66f-619a-4e7b-b2af-fe90f78d21de req-b30d9238-9a1e-4fbe-b593-abdc7c4456a0 service nova] [instance: 1b2ca21b-feea-4fc1-9ddc-99f144e4241a] Updating instance_info_cache with network_info: [] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2019.741456] env[63279]: INFO nova.compute.manager [None req-9d9741e8-1ae9-4073-930a-89163f440e1a tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] [instance: 64e92bfc-c0d0-4918-9ba2-45ffedbf7e39] instance snapshotting [ 2019.741456] env[63279]: WARNING nova.compute.manager [None req-9d9741e8-1ae9-4073-930a-89163f440e1a tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] [instance: 64e92bfc-c0d0-4918-9ba2-45ffedbf7e39] trying to snapshot a non-running instance: (state: 7 expected: 1) [ 2019.744857] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ceca9e8-1c1b-4531-b3fa-534d70cb3cba {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2019.768771] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61e9c7d0-576a-4504-883a-57730e4d7863 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2019.866414] env[63279]: DEBUG oslo_vmware.api [None 
req-5ddb667c-4266-446f-abce-7527abe6be67 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Task: {'id': task-2087276, 'name': PowerOffVM_Task} progress is 100%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2019.921164] env[63279]: DEBUG oslo_vmware.api [None req-38cbe7fd-87b8-43d7-9843-9491e5a539a0 tempest-InstanceActionsV221TestJSON-1895476841 tempest-InstanceActionsV221TestJSON-1895476841-project-member] Task: {'id': task-2087272, 'name': ReconfigVM_Task, 'duration_secs': 1.75005} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2019.921801] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-38cbe7fd-87b8-43d7-9843-9491e5a539a0 tempest-InstanceActionsV221TestJSON-1895476841 tempest-InstanceActionsV221TestJSON-1895476841-project-member] [instance: ff9701ed-d545-44b4-911a-c4d809d0a771] Reconfigured VM instance instance-00000037 to attach disk [datastore1] ff9701ed-d545-44b4-911a-c4d809d0a771/ff9701ed-d545-44b4-911a-c4d809d0a771.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2019.922645] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-5d5192e6-2da8-4361-b9dd-bded5aa3d3ac {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2019.930363] env[63279]: DEBUG oslo_vmware.api [None req-38cbe7fd-87b8-43d7-9843-9491e5a539a0 tempest-InstanceActionsV221TestJSON-1895476841 tempest-InstanceActionsV221TestJSON-1895476841-project-member] Waiting for the task: (returnval){ [ 2019.930363] env[63279]: value = "task-2087278" [ 2019.930363] env[63279]: _type = "Task" [ 2019.930363] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2019.941693] env[63279]: DEBUG oslo_vmware.api [None req-38cbe7fd-87b8-43d7-9843-9491e5a539a0 tempest-InstanceActionsV221TestJSON-1895476841 tempest-InstanceActionsV221TestJSON-1895476841-project-member] Task: {'id': task-2087278, 'name': Rename_Task} progress is 5%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2019.962959] env[63279]: DEBUG oslo_vmware.api [None req-27d050e2-e902-4611-87fd-a6b52583e82d tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] Task: {'id': task-2087277, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2019.986388] env[63279]: DEBUG oslo_vmware.api [None req-49457093-f0b7-4e7d-b6de-3bdb068f8189 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Task: {'id': task-2087273, 'name': CloneVM_Task} progress is 95%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2020.059021] env[63279]: DEBUG nova.scheduler.client.report [None req-2f246f26-19c3-4ded-af34-51018ed72035 tempest-FloatingIPsAssociationNegativeTestJSON-1305161251 tempest-FloatingIPsAssociationNegativeTestJSON-1305161251-project-member] Inventory has not changed for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2020.061535] env[63279]: DEBUG nova.network.neutron [-] [instance: 1b2ca21b-feea-4fc1-9ddc-99f144e4241a] Updating instance_info_cache with network_info: [] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2020.084092] env[63279]: DEBUG nova.network.neutron [-] [instance: fb124cfa-24b4-4712-b8cc-c87df5d6231b] Updating instance_info_cache with network_info: [] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2020.212123] env[63279]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-3105ac65-6b54-45f1-9d2a-79436d7a863b {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2020.223454] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-743e7ee8-c478-4a4b-9c70-0d73e9a45ba7 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2020.259274] env[63279]: DEBUG nova.compute.manager [req-804fe66f-619a-4e7b-b2af-fe90f78d21de req-b30d9238-9a1e-4fbe-b593-abdc7c4456a0 service nova] [instance: 1b2ca21b-feea-4fc1-9ddc-99f144e4241a] Detach interface failed, port_id=a55a63d0-c628-4d90-b3dc-d8f0bf564e95, reason: Instance 1b2ca21b-feea-4fc1-9ddc-99f144e4241a could not be found. {{(pid=63279) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11484}} [ 2020.282017] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-9d9741e8-1ae9-4073-930a-89163f440e1a tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] [instance: 64e92bfc-c0d0-4918-9ba2-45ffedbf7e39] Creating Snapshot of the VM instance {{(pid=63279) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 2020.283071] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-63ed222a-2f13-4025-8aeb-ac008612acc5 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2020.292023] env[63279]: DEBUG oslo_vmware.api [None req-9d9741e8-1ae9-4073-930a-89163f440e1a tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Waiting for the task: (returnval){ [ 2020.292023] env[63279]: value = "task-2087279" [ 2020.292023] env[63279]: _type = "Task" [ 2020.292023] env[63279]: } to complete. 
{{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2020.297937] env[63279]: DEBUG oslo_vmware.api [None req-9d9741e8-1ae9-4073-930a-89163f440e1a tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Task: {'id': task-2087279, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2020.365329] env[63279]: DEBUG oslo_vmware.api [None req-5ddb667c-4266-446f-abce-7527abe6be67 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Task: {'id': task-2087276, 'name': PowerOffVM_Task, 'duration_secs': 1.025013} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2020.365665] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-5ddb667c-4266-446f-abce-7527abe6be67 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] [instance: 6388f912-ae70-4e8f-b8e4-ceb02e0f8a51] Powered off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2020.366044] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-5ddb667c-4266-446f-abce-7527abe6be67 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] [instance: 6388f912-ae70-4e8f-b8e4-ceb02e0f8a51] Destroying instance {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2020.367236] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e43c52c-f492-4db8-891a-0785aa35666b {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2020.376223] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-5ddb667c-4266-446f-abce-7527abe6be67 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] [instance: 6388f912-ae70-4e8f-b8e4-ceb02e0f8a51] Unregistering the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2020.376515] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-405bd1ff-703f-4e86-ba49-fda401ec1291 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2020.442333] env[63279]: DEBUG oslo_vmware.api [None req-38cbe7fd-87b8-43d7-9843-9491e5a539a0 tempest-InstanceActionsV221TestJSON-1895476841 tempest-InstanceActionsV221TestJSON-1895476841-project-member] Task: {'id': task-2087278, 'name': Rename_Task, 'duration_secs': 0.351009} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2020.443117] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-38cbe7fd-87b8-43d7-9843-9491e5a539a0 tempest-InstanceActionsV221TestJSON-1895476841 tempest-InstanceActionsV221TestJSON-1895476841-project-member] [instance: ff9701ed-d545-44b4-911a-c4d809d0a771] Powering on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2020.443871] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2efb4a98-45db-4a20-b6e4-bcfad3d9dc6f {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2020.451810] env[63279]: DEBUG oslo_vmware.api [None req-38cbe7fd-87b8-43d7-9843-9491e5a539a0 tempest-InstanceActionsV221TestJSON-1895476841 tempest-InstanceActionsV221TestJSON-1895476841-project-member] Waiting for the task: (returnval){ [ 2020.451810] env[63279]: value = "task-2087281" [ 2020.451810] env[63279]: _type = "Task" [ 2020.451810] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2020.469662] env[63279]: DEBUG oslo_vmware.api [None req-27d050e2-e902-4611-87fd-a6b52583e82d tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] Task: {'id': task-2087277, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.637268} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2020.474030] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-27d050e2-e902-4611-87fd-a6b52583e82d tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] 0e12ab9b-a701-4e0f-9d96-939090f50494/0e12ab9b-a701-4e0f-9d96-939090f50494.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2020.474504] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-27d050e2-e902-4611-87fd-a6b52583e82d tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] [instance: 0e12ab9b-a701-4e0f-9d96-939090f50494] Extending root virtual disk to 1048576 {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2020.474936] env[63279]: DEBUG oslo_vmware.api [None req-38cbe7fd-87b8-43d7-9843-9491e5a539a0 tempest-InstanceActionsV221TestJSON-1895476841 tempest-InstanceActionsV221TestJSON-1895476841-project-member] Task: {'id': task-2087281, 'name': PowerOnVM_Task} progress is 33%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2020.475469] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-737d7c35-b434-442f-9dd9-2508263e18ed {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2020.489624] env[63279]: DEBUG oslo_vmware.api [None req-49457093-f0b7-4e7d-b6de-3bdb068f8189 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Task: {'id': task-2087273, 'name': CloneVM_Task, 'duration_secs': 2.153611} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2020.491712] env[63279]: INFO nova.virt.vmwareapi.vmops [None req-49457093-f0b7-4e7d-b6de-3bdb068f8189 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] [instance: 8ccb4293-927a-45ba-82e9-9f1b4d5985cc] Created linked-clone VM from snapshot [ 2020.492127] env[63279]: DEBUG oslo_vmware.api [None req-27d050e2-e902-4611-87fd-a6b52583e82d tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] Waiting for the task: (returnval){ [ 2020.492127] env[63279]: value = "task-2087282" [ 2020.492127] env[63279]: _type = "Task" [ 2020.492127] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2020.492914] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08b3d5f7-f965-458c-9861-591b9689bd92 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2020.506209] env[63279]: DEBUG nova.virt.vmwareapi.images [None req-49457093-f0b7-4e7d-b6de-3bdb068f8189 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] [instance: 8ccb4293-927a-45ba-82e9-9f1b4d5985cc] Uploading image e269ed35-b269-44cd-b486-4e07b591652b {{(pid=63279) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 2020.511722] env[63279]: DEBUG oslo_vmware.api [None req-27d050e2-e902-4611-87fd-a6b52583e82d tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] Task: {'id': task-2087282, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2020.524466] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-49457093-f0b7-4e7d-b6de-3bdb068f8189 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] [instance: 8ccb4293-927a-45ba-82e9-9f1b4d5985cc] Destroying the VM {{(pid=63279) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 2020.524797] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-737792b9-d299-4464-aa18-3e0e4168e19d {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2020.532744] env[63279]: DEBUG oslo_vmware.api [None req-49457093-f0b7-4e7d-b6de-3bdb068f8189 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Waiting for the task: (returnval){ [ 2020.532744] env[63279]: value = "task-2087283" [ 2020.532744] env[63279]: _type = "Task" [ 2020.532744] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2020.545025] env[63279]: DEBUG oslo_vmware.api [None req-49457093-f0b7-4e7d-b6de-3bdb068f8189 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Task: {'id': task-2087283, 'name': Destroy_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2020.564602] env[63279]: DEBUG oslo_concurrency.lockutils [None req-2f246f26-19c3-4ded-af34-51018ed72035 tempest-FloatingIPsAssociationNegativeTestJSON-1305161251 tempest-FloatingIPsAssociationNegativeTestJSON-1305161251-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 3.535s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2020.571709] env[63279]: DEBUG oslo_concurrency.lockutils [None req-968ae4c0-2363-46b4-8c8d-1bd213d554cd tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 33.472s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2020.571709] env[63279]: INFO nova.compute.claims [None req-968ae4c0-2363-46b4-8c8d-1bd213d554cd tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] [instance: 5b5f87cb-cf35-418f-b5bd-b953524a285c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2020.576893] env[63279]: INFO nova.compute.manager [-] [instance: 1b2ca21b-feea-4fc1-9ddc-99f144e4241a] Took 1.58 seconds to deallocate network for instance. [ 2020.587249] env[63279]: INFO nova.compute.manager [-] [instance: fb124cfa-24b4-4712-b8cc-c87df5d6231b] Took 1.59 seconds to deallocate network for instance. [ 2020.605548] env[63279]: INFO nova.scheduler.client.report [None req-2f246f26-19c3-4ded-af34-51018ed72035 tempest-FloatingIPsAssociationNegativeTestJSON-1305161251 tempest-FloatingIPsAssociationNegativeTestJSON-1305161251-project-member] Deleted allocations for instance 3f888d81-e73f-4486-bb64-849c873449bf [ 2020.611917] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-5ddb667c-4266-446f-abce-7527abe6be67 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] [instance: 6388f912-ae70-4e8f-b8e4-ceb02e0f8a51] Unregistered the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2020.612552] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-5ddb667c-4266-446f-abce-7527abe6be67 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] [instance: 6388f912-ae70-4e8f-b8e4-ceb02e0f8a51] Deleting contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2020.612552] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-5ddb667c-4266-446f-abce-7527abe6be67 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Deleting the datastore file [datastore1] 6388f912-ae70-4e8f-b8e4-ceb02e0f8a51 {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2020.612893] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-630397b5-b2c9-45cb-93bd-ec808af06a8e {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2020.621365] env[63279]: DEBUG oslo_vmware.api [None req-5ddb667c-4266-446f-abce-7527abe6be67 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Waiting for the task: (returnval){ [ 2020.621365] 
env[63279]: value = "task-2087284" [ 2020.621365] env[63279]: _type = "Task" [ 2020.621365] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2020.634415] env[63279]: DEBUG oslo_vmware.api [None req-5ddb667c-4266-446f-abce-7527abe6be67 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Task: {'id': task-2087284, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2020.802324] env[63279]: DEBUG oslo_vmware.api [None req-9d9741e8-1ae9-4073-930a-89163f440e1a tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Task: {'id': task-2087279, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2020.970434] env[63279]: DEBUG oslo_vmware.api [None req-38cbe7fd-87b8-43d7-9843-9491e5a539a0 tempest-InstanceActionsV221TestJSON-1895476841 tempest-InstanceActionsV221TestJSON-1895476841-project-member] Task: {'id': task-2087281, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2021.006592] env[63279]: DEBUG oslo_vmware.api [None req-27d050e2-e902-4611-87fd-a6b52583e82d tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] Task: {'id': task-2087282, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.096246} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2021.006885] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-27d050e2-e902-4611-87fd-a6b52583e82d tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] [instance: 0e12ab9b-a701-4e0f-9d96-939090f50494] Extended root virtual disk {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2021.007806] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-159d9a65-4801-4822-96bf-d73895f501c3 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2021.032910] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-27d050e2-e902-4611-87fd-a6b52583e82d tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] [instance: 0e12ab9b-a701-4e0f-9d96-939090f50494] Reconfiguring VM instance instance-00000038 to attach disk [datastore1] 0e12ab9b-a701-4e0f-9d96-939090f50494/0e12ab9b-a701-4e0f-9d96-939090f50494.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2021.033328] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b0c0d7c1-ee98-4c4b-8f2d-5386da2ab080 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2021.057751] env[63279]: DEBUG oslo_vmware.api [None req-49457093-f0b7-4e7d-b6de-3bdb068f8189 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Task: {'id': task-2087283, 'name': Destroy_Task} progress is 33%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2021.059479] env[63279]: DEBUG oslo_vmware.api [None req-27d050e2-e902-4611-87fd-a6b52583e82d tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] Waiting for the task: (returnval){ [ 2021.059479] env[63279]: value = "task-2087285" [ 2021.059479] env[63279]: _type = "Task" [ 2021.059479] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2021.068686] env[63279]: DEBUG oslo_vmware.api [None req-27d050e2-e902-4611-87fd-a6b52583e82d tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] Task: {'id': task-2087285, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2021.078862] env[63279]: DEBUG oslo_concurrency.lockutils [None req-0c3a37b9-f6dd-4e50-acdd-abd77fa39720 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2021.095810] env[63279]: DEBUG oslo_concurrency.lockutils [None req-4f724e1c-d0b9-4778-8593-5bcec5c9cb39 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2021.118891] env[63279]: DEBUG oslo_concurrency.lockutils [None req-2f246f26-19c3-4ded-af34-51018ed72035 tempest-FloatingIPsAssociationNegativeTestJSON-1305161251 tempest-FloatingIPsAssociationNegativeTestJSON-1305161251-project-member] Lock "3f888d81-e73f-4486-bb64-849c873449bf" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 39.616s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2021.133492] env[63279]: DEBUG oslo_vmware.api [None req-5ddb667c-4266-446f-abce-7527abe6be67 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Task: {'id': task-2087284, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.237264} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2021.133782] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-5ddb667c-4266-446f-abce-7527abe6be67 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Deleted the datastore file {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2021.133990] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-5ddb667c-4266-446f-abce-7527abe6be67 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] [instance: 6388f912-ae70-4e8f-b8e4-ceb02e0f8a51] Deleted contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2021.134190] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-5ddb667c-4266-446f-abce-7527abe6be67 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] [instance: 6388f912-ae70-4e8f-b8e4-ceb02e0f8a51] Instance destroyed {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2021.303363] env[63279]: DEBUG oslo_vmware.api [None req-9d9741e8-1ae9-4073-930a-89163f440e1a tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Task: {'id': task-2087279, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2021.477256] env[63279]: DEBUG oslo_vmware.api [None req-38cbe7fd-87b8-43d7-9843-9491e5a539a0 tempest-InstanceActionsV221TestJSON-1895476841 tempest-InstanceActionsV221TestJSON-1895476841-project-member] Task: {'id': task-2087281, 'name': PowerOnVM_Task} progress is 100%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2021.545040] env[63279]: DEBUG oslo_vmware.api [None req-49457093-f0b7-4e7d-b6de-3bdb068f8189 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Task: {'id': task-2087283, 'name': Destroy_Task, 'duration_secs': 0.801099} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2021.545040] env[63279]: INFO nova.virt.vmwareapi.vm_util [None req-49457093-f0b7-4e7d-b6de-3bdb068f8189 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] [instance: 8ccb4293-927a-45ba-82e9-9f1b4d5985cc] Destroyed the VM [ 2021.545040] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-49457093-f0b7-4e7d-b6de-3bdb068f8189 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] [instance: 8ccb4293-927a-45ba-82e9-9f1b4d5985cc] Deleting Snapshot of the VM instance {{(pid=63279) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 2021.545040] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-42df37ac-d549-435c-a3cf-4dfad80df7df {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2021.552092] env[63279]: DEBUG oslo_vmware.api [None req-49457093-f0b7-4e7d-b6de-3bdb068f8189 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Waiting for the task: (returnval){ [ 2021.552092] env[63279]: value = "task-2087286" [ 2021.552092] env[63279]: _type = "Task" [ 2021.552092] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2021.562357] env[63279]: DEBUG oslo_vmware.api [None req-49457093-f0b7-4e7d-b6de-3bdb068f8189 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Task: {'id': task-2087286, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2021.572149] env[63279]: DEBUG oslo_vmware.api [None req-27d050e2-e902-4611-87fd-a6b52583e82d tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] Task: {'id': task-2087285, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2021.803281] env[63279]: DEBUG oslo_vmware.api [None req-9d9741e8-1ae9-4073-930a-89163f440e1a tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Task: {'id': task-2087279, 'name': CreateSnapshot_Task, 'duration_secs': 1.165541} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2021.806562] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-9d9741e8-1ae9-4073-930a-89163f440e1a tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] [instance: 64e92bfc-c0d0-4918-9ba2-45ffedbf7e39] Created Snapshot of the VM instance {{(pid=63279) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 2021.807828] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8df492c0-b79b-4055-8940-049ccd5b6942 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2021.926970] env[63279]: DEBUG nova.compute.manager [req-a1d9ea79-4c19-4c0a-a3d9-21764bb12d0b req-ed0b6e10-4ebf-4330-8dce-05e0ba91caec service nova] [instance: fb124cfa-24b4-4712-b8cc-c87df5d6231b] Received event network-vif-deleted-5fc8a732-f9cf-4494-b984-31c593d7106b {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2021.974609] env[63279]: DEBUG oslo_vmware.api [None req-38cbe7fd-87b8-43d7-9843-9491e5a539a0 tempest-InstanceActionsV221TestJSON-1895476841 tempest-InstanceActionsV221TestJSON-1895476841-project-member] Task: {'id': task-2087281, 'name': PowerOnVM_Task, 'duration_secs': 1.080017} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2021.974944] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-38cbe7fd-87b8-43d7-9843-9491e5a539a0 tempest-InstanceActionsV221TestJSON-1895476841 tempest-InstanceActionsV221TestJSON-1895476841-project-member] [instance: ff9701ed-d545-44b4-911a-c4d809d0a771] Powered on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2021.975213] env[63279]: INFO nova.compute.manager [None req-38cbe7fd-87b8-43d7-9843-9491e5a539a0 tempest-InstanceActionsV221TestJSON-1895476841 tempest-InstanceActionsV221TestJSON-1895476841-project-member] [instance: ff9701ed-d545-44b4-911a-c4d809d0a771] Took 11.43 seconds to spawn the instance on the hypervisor. [ 2021.975396] env[63279]: DEBUG nova.compute.manager [None req-38cbe7fd-87b8-43d7-9843-9491e5a539a0 tempest-InstanceActionsV221TestJSON-1895476841 tempest-InstanceActionsV221TestJSON-1895476841-project-member] [instance: ff9701ed-d545-44b4-911a-c4d809d0a771] Checking state {{(pid=63279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2021.976373] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19dd60b2-0f6b-4a3f-b96f-b0049f61caec {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2022.063073] env[63279]: DEBUG oslo_vmware.api [None req-49457093-f0b7-4e7d-b6de-3bdb068f8189 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Task: {'id': task-2087286, 'name': RemoveSnapshot_Task} progress is 80%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2022.074626] env[63279]: DEBUG oslo_vmware.api [None req-27d050e2-e902-4611-87fd-a6b52583e82d tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] Task: {'id': task-2087285, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2022.142908] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b4d0971-3aee-4694-80cb-06fbd59ec5b5 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2022.156910] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02e7f6f2-4502-48d1-8cd3-cb2f38fc4702 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2022.206324] env[63279]: DEBUG nova.virt.hardware [None req-5ddb667c-4266-446f-abce-7527abe6be67 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-13T17:30:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-13T17:30:01Z,direct_url=,disk_format='vmdk',id=30887889-e45b-4f67-8b3c-16216e594a90,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a935e86eee0a4c38adfe0367d2097a61',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-13T17:30:02Z,virtual_size=,visibility=), allow threads: False {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2022.206655] env[63279]: DEBUG nova.virt.hardware [None req-5ddb667c-4266-446f-abce-7527abe6be67 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Flavor limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2022.206779] env[63279]: DEBUG nova.virt.hardware [None req-5ddb667c-4266-446f-abce-7527abe6be67 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Image limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2022.206969] env[63279]: DEBUG nova.virt.hardware [None req-5ddb667c-4266-446f-abce-7527abe6be67 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Flavor pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2022.207135] env[63279]: DEBUG nova.virt.hardware [None req-5ddb667c-4266-446f-abce-7527abe6be67 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Image pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2022.207301] env[63279]: DEBUG nova.virt.hardware [None req-5ddb667c-4266-446f-abce-7527abe6be67 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2022.207504] env[63279]: DEBUG nova.virt.hardware [None req-5ddb667c-4266-446f-abce-7527abe6be67 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Topology preferred 
VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 2022.207668] env[63279]: DEBUG nova.virt.hardware [None req-5ddb667c-4266-446f-abce-7527abe6be67 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 2022.207838] env[63279]: DEBUG nova.virt.hardware [None req-5ddb667c-4266-446f-abce-7527abe6be67 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Got 1 possible topologies {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2022.208030] env[63279]: DEBUG nova.virt.hardware [None req-5ddb667c-4266-446f-abce-7527abe6be67 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2022.208194] env[63279]: DEBUG nova.virt.hardware [None req-5ddb667c-4266-446f-abce-7527abe6be67 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2022.209071] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f5038a7-3d1c-40d0-a65d-610409848c11 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2022.212267] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93294d19-be9d-4f39-9d6c-e5b7c0892253 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2022.222976] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c09057a-15cf-4ab7-89dd-6cab8a744034 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2022.227348] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a73be07-2eef-4757-8ecf-442f0423202a {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2022.242174] env[63279]: DEBUG nova.compute.provider_tree [None req-968ae4c0-2363-46b4-8c8d-1bd213d554cd tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] Inventory has not changed in ProviderTree for provider: 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2022.254714] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-5ddb667c-4266-446f-abce-7527abe6be67 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] [instance: 6388f912-ae70-4e8f-b8e4-ceb02e0f8a51] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:9a:65:be', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '678ebbe4-4c53-4eaf-a689-93981310f37d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'bc9f3899-95c1-4e79-b121-03c9a2c0bc44', 
'vif_model': 'vmxnet3'}] {{(pid=63279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2022.263147] env[63279]: DEBUG oslo.service.loopingcall [None req-5ddb667c-4266-446f-abce-7527abe6be67 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2022.264662] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6388f912-ae70-4e8f-b8e4-ceb02e0f8a51] Creating VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2022.264662] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-4c23bba3-2c49-4383-bd5b-3507d933992b {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2022.287721] env[63279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2022.287721] env[63279]: value = "task-2087287" [ 2022.287721] env[63279]: _type = "Task" [ 2022.287721] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2022.296047] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087287, 'name': CreateVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2022.328691] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-9d9741e8-1ae9-4073-930a-89163f440e1a tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] [instance: 64e92bfc-c0d0-4918-9ba2-45ffedbf7e39] Creating linked-clone VM from snapshot {{(pid=63279) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 2022.329208] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-d081627a-39ed-4d19-9136-afcb20601712 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2022.338778] env[63279]: DEBUG oslo_vmware.api [None req-9d9741e8-1ae9-4073-930a-89163f440e1a tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Waiting for the task: (returnval){ [ 2022.338778] env[63279]: value = "task-2087288" [ 2022.338778] env[63279]: _type = "Task" [ 2022.338778] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2022.352192] env[63279]: DEBUG oslo_vmware.api [None req-9d9741e8-1ae9-4073-930a-89163f440e1a tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Task: {'id': task-2087288, 'name': CloneVM_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2022.488416] env[63279]: DEBUG oslo_concurrency.lockutils [None req-1005df31-c010-45b4-a217-c9e2af6f76aa tempest-ServerShowV254Test-1528218800 tempest-ServerShowV254Test-1528218800-project-member] Acquiring lock "4871421f-0015-4973-bb5f-c9042d411c82" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2022.488932] env[63279]: DEBUG oslo_concurrency.lockutils [None req-1005df31-c010-45b4-a217-c9e2af6f76aa tempest-ServerShowV254Test-1528218800 tempest-ServerShowV254Test-1528218800-project-member] Lock "4871421f-0015-4973-bb5f-c9042d411c82" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2022.502424] env[63279]: INFO nova.compute.manager [None req-38cbe7fd-87b8-43d7-9843-9491e5a539a0 tempest-InstanceActionsV221TestJSON-1895476841 tempest-InstanceActionsV221TestJSON-1895476841-project-member] [instance: ff9701ed-d545-44b4-911a-c4d809d0a771] Took 39.85 seconds to build instance. [ 2022.567661] env[63279]: DEBUG oslo_vmware.api [None req-49457093-f0b7-4e7d-b6de-3bdb068f8189 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Task: {'id': task-2087286, 'name': RemoveSnapshot_Task} progress is 80%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2022.579801] env[63279]: DEBUG oslo_vmware.api [None req-27d050e2-e902-4611-87fd-a6b52583e82d tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] Task: {'id': task-2087285, 'name': ReconfigVM_Task} progress is 99%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2022.767035] env[63279]: DEBUG nova.scheduler.client.report [None req-968ae4c0-2363-46b4-8c8d-1bd213d554cd tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] Inventory has not changed for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2022.800982] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087287, 'name': CreateVM_Task} progress is 25%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2022.850435] env[63279]: DEBUG oslo_vmware.api [None req-9d9741e8-1ae9-4073-930a-89163f440e1a tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Task: {'id': task-2087288, 'name': CloneVM_Task} progress is 94%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2023.005663] env[63279]: DEBUG oslo_concurrency.lockutils [None req-38cbe7fd-87b8-43d7-9843-9491e5a539a0 tempest-InstanceActionsV221TestJSON-1895476841 tempest-InstanceActionsV221TestJSON-1895476841-project-member] Lock "ff9701ed-d545-44b4-911a-c4d809d0a771" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 82.962s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2023.068233] env[63279]: DEBUG oslo_vmware.api [None req-49457093-f0b7-4e7d-b6de-3bdb068f8189 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Task: {'id': task-2087286, 'name': RemoveSnapshot_Task, 'duration_secs': 1.076892} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2023.069795] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-49457093-f0b7-4e7d-b6de-3bdb068f8189 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] [instance: 8ccb4293-927a-45ba-82e9-9f1b4d5985cc] Deleted Snapshot of the VM instance {{(pid=63279) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 2023.088814] env[63279]: DEBUG oslo_vmware.api [None req-27d050e2-e902-4611-87fd-a6b52583e82d tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] Task: {'id': task-2087285, 'name': ReconfigVM_Task, 'duration_secs': 1.595173} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2023.089524] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-27d050e2-e902-4611-87fd-a6b52583e82d tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] [instance: 0e12ab9b-a701-4e0f-9d96-939090f50494] Reconfigured VM instance instance-00000038 to attach disk [datastore1] 0e12ab9b-a701-4e0f-9d96-939090f50494/0e12ab9b-a701-4e0f-9d96-939090f50494.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2023.090032] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-27d050e2-e902-4611-87fd-a6b52583e82d tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] Creating Virtual Disk of size 1048576 KB and adapter type paraVirtual on the data store {{(pid=63279) create_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1383}} [ 2023.092964] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CreateVirtualDisk_Task with opID=oslo.vmware-95addb85-cddb-460c-a878-c730f941bbf8 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2023.102377] env[63279]: DEBUG oslo_vmware.api [None req-27d050e2-e902-4611-87fd-a6b52583e82d tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] Waiting for the task: (returnval){ [ 2023.102377] env[63279]: value = "task-2087289" [ 2023.102377] env[63279]: _type = "Task" [ 2023.102377] env[63279]: } to complete. 
{{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2023.112807] env[63279]: DEBUG oslo_vmware.api [None req-27d050e2-e902-4611-87fd-a6b52583e82d tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] Task: {'id': task-2087289, 'name': CreateVirtualDisk_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2023.272122] env[63279]: DEBUG oslo_concurrency.lockutils [None req-968ae4c0-2363-46b4-8c8d-1bd213d554cd tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.704s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2023.272194] env[63279]: DEBUG nova.compute.manager [None req-968ae4c0-2363-46b4-8c8d-1bd213d554cd tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] [instance: 5b5f87cb-cf35-418f-b5bd-b953524a285c] Start building networks asynchronously for instance. {{(pid=63279) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 2023.276020] env[63279]: DEBUG oslo_concurrency.lockutils [None req-0d7ac8a8-a0e2-4644-bc68-36c40b80e766 tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 31.834s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2023.276020] env[63279]: DEBUG nova.objects.instance [None req-0d7ac8a8-a0e2-4644-bc68-36c40b80e766 tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] Lazy-loading 'resources' on Instance uuid 79032b2a-74f7-4c6d-8f71-f848fe372ba2 {{(pid=63279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2023.304949] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087287, 'name': CreateVM_Task, 'duration_secs': 0.75355} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2023.305144] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6388f912-ae70-4e8f-b8e4-ceb02e0f8a51] Created VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2023.306638] env[63279]: DEBUG oslo_concurrency.lockutils [None req-5ddb667c-4266-446f-abce-7527abe6be67 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2023.306638] env[63279]: DEBUG oslo_concurrency.lockutils [None req-5ddb667c-4266-446f-abce-7527abe6be67 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2023.306638] env[63279]: DEBUG oslo_concurrency.lockutils [None req-5ddb667c-4266-446f-abce-7527abe6be67 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2023.306938] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-68a0bf45-0346-4973-b933-50b135ca7a41 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2023.313012] env[63279]: DEBUG oslo_vmware.api [None req-5ddb667c-4266-446f-abce-7527abe6be67 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Waiting for the task: (returnval){ [ 2023.313012] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52603f5d-a6bd-52ae-045d-995a8aef9234" [ 2023.313012] env[63279]: _type = "Task" [ 2023.313012] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2023.323542] env[63279]: DEBUG oslo_vmware.api [None req-5ddb667c-4266-446f-abce-7527abe6be67 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52603f5d-a6bd-52ae-045d-995a8aef9234, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2023.353848] env[63279]: DEBUG oslo_vmware.api [None req-9d9741e8-1ae9-4073-930a-89163f440e1a tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Task: {'id': task-2087288, 'name': CloneVM_Task} progress is 94%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2023.508710] env[63279]: DEBUG nova.compute.manager [None req-b0743caa-f310-40a7-b364-e4022c9249e0 tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] [instance: df963c29-a1c4-4f28-be95-cafe3af4d2fa] Starting instance... 
{{(pid=63279) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 2023.579824] env[63279]: WARNING nova.compute.manager [None req-49457093-f0b7-4e7d-b6de-3bdb068f8189 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] [instance: 8ccb4293-927a-45ba-82e9-9f1b4d5985cc] Image not found during snapshot: nova.exception.ImageNotFound: Image e269ed35-b269-44cd-b486-4e07b591652b could not be found. [ 2023.581741] env[63279]: DEBUG oslo_concurrency.lockutils [None req-fc1fded0-bb06-4bd3-9e42-05bd80dfbf53 tempest-InstanceActionsV221TestJSON-1895476841 tempest-InstanceActionsV221TestJSON-1895476841-project-member] Acquiring lock "ff9701ed-d545-44b4-911a-c4d809d0a771" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2023.581960] env[63279]: DEBUG oslo_concurrency.lockutils [None req-fc1fded0-bb06-4bd3-9e42-05bd80dfbf53 tempest-InstanceActionsV221TestJSON-1895476841 tempest-InstanceActionsV221TestJSON-1895476841-project-member] Lock "ff9701ed-d545-44b4-911a-c4d809d0a771" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2023.582478] env[63279]: DEBUG oslo_concurrency.lockutils [None req-fc1fded0-bb06-4bd3-9e42-05bd80dfbf53 tempest-InstanceActionsV221TestJSON-1895476841 tempest-InstanceActionsV221TestJSON-1895476841-project-member] Acquiring lock "ff9701ed-d545-44b4-911a-c4d809d0a771-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2023.582759] env[63279]: DEBUG oslo_concurrency.lockutils [None req-fc1fded0-bb06-4bd3-9e42-05bd80dfbf53 tempest-InstanceActionsV221TestJSON-1895476841 tempest-InstanceActionsV221TestJSON-1895476841-project-member] Lock "ff9701ed-d545-44b4-911a-c4d809d0a771-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.001s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2023.582947] env[63279]: DEBUG oslo_concurrency.lockutils [None req-fc1fded0-bb06-4bd3-9e42-05bd80dfbf53 tempest-InstanceActionsV221TestJSON-1895476841 tempest-InstanceActionsV221TestJSON-1895476841-project-member] Lock "ff9701ed-d545-44b4-911a-c4d809d0a771-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2023.585929] env[63279]: INFO nova.compute.manager [None req-fc1fded0-bb06-4bd3-9e42-05bd80dfbf53 tempest-InstanceActionsV221TestJSON-1895476841 tempest-InstanceActionsV221TestJSON-1895476841-project-member] [instance: ff9701ed-d545-44b4-911a-c4d809d0a771] Terminating instance [ 2023.617077] env[63279]: DEBUG oslo_vmware.api [None req-27d050e2-e902-4611-87fd-a6b52583e82d tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] Task: {'id': task-2087289, 'name': CreateVirtualDisk_Task, 'duration_secs': 0.051517} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2023.617358] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-27d050e2-e902-4611-87fd-a6b52583e82d tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] Created Virtual Disk of size 1048576 KB and type thin {{(pid=63279) create_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1404}} [ 2023.618123] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6cdc1fe5-708c-4f86-b55a-4ec062877b1a {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2023.646323] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-27d050e2-e902-4611-87fd-a6b52583e82d tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] [instance: 0e12ab9b-a701-4e0f-9d96-939090f50494] Reconfiguring VM instance instance-00000038 to attach disk [datastore1] 0e12ab9b-a701-4e0f-9d96-939090f50494/ephemeral_0.vmdk or device None with type thin {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2023.646769] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4908d7ee-50e0-4da5-ac16-06e59e5ba3a1 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2023.668028] env[63279]: DEBUG oslo_concurrency.lockutils [None req-09490af0-3c5b-4db1-a4ea-0615d5cbf91e tempest-ServerAddressesTestJSON-1242830333 tempest-ServerAddressesTestJSON-1242830333-project-member] Acquiring lock "246f0945-7290-4cb7-a982-b17cb1573002" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2023.668028] env[63279]: DEBUG oslo_concurrency.lockutils [None req-09490af0-3c5b-4db1-a4ea-0615d5cbf91e tempest-ServerAddressesTestJSON-1242830333 tempest-ServerAddressesTestJSON-1242830333-project-member] Lock "246f0945-7290-4cb7-a982-b17cb1573002" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2023.671694] env[63279]: DEBUG oslo_vmware.api [None req-27d050e2-e902-4611-87fd-a6b52583e82d tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] Waiting for the task: (returnval){ [ 2023.671694] env[63279]: value = "task-2087290" [ 2023.671694] env[63279]: _type = "Task" [ 2023.671694] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2023.681485] env[63279]: DEBUG oslo_vmware.api [None req-27d050e2-e902-4611-87fd-a6b52583e82d tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] Task: {'id': task-2087290, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2023.780106] env[63279]: DEBUG nova.compute.utils [None req-968ae4c0-2363-46b4-8c8d-1bd213d554cd tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] Using /dev/sd instead of None {{(pid=63279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2023.782750] env[63279]: DEBUG nova.compute.manager [None req-968ae4c0-2363-46b4-8c8d-1bd213d554cd tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] [instance: 5b5f87cb-cf35-418f-b5bd-b953524a285c] Allocating IP information in the background. {{(pid=63279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 2023.783098] env[63279]: DEBUG nova.network.neutron [None req-968ae4c0-2363-46b4-8c8d-1bd213d554cd tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] [instance: 5b5f87cb-cf35-418f-b5bd-b953524a285c] allocate_for_instance() {{(pid=63279) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2023.823911] env[63279]: DEBUG oslo_vmware.api [None req-5ddb667c-4266-446f-abce-7527abe6be67 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52603f5d-a6bd-52ae-045d-995a8aef9234, 'name': SearchDatastore_Task, 'duration_secs': 0.009209} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2023.826512] env[63279]: DEBUG oslo_concurrency.lockutils [None req-5ddb667c-4266-446f-abce-7527abe6be67 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2023.826803] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-5ddb667c-4266-446f-abce-7527abe6be67 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] [instance: 6388f912-ae70-4e8f-b8e4-ceb02e0f8a51] Processing image 30887889-e45b-4f67-8b3c-16216e594a90 {{(pid=63279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2023.827066] env[63279]: DEBUG oslo_concurrency.lockutils [None req-5ddb667c-4266-446f-abce-7527abe6be67 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2023.827226] env[63279]: DEBUG oslo_concurrency.lockutils [None req-5ddb667c-4266-446f-abce-7527abe6be67 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2023.827411] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-5ddb667c-4266-446f-abce-7527abe6be67 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Creating directory with path [datastore1] 
devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2023.828539] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-02fc12f7-e080-4fb2-bbad-7e419b901dde {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2023.831235] env[63279]: DEBUG nova.policy [None req-968ae4c0-2363-46b4-8c8d-1bd213d554cd tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '90b0038da16e48ab8f7951f25a8eaa72', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4f07bad15ea5419cbecc5840b4e96d01', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63279) authorize /opt/stack/nova/nova/policy.py:192}} [ 2023.839240] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-5ddb667c-4266-446f-abce-7527abe6be67 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2023.839550] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-5ddb667c-4266-446f-abce-7527abe6be67 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2023.842362] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-39ed68b2-d10c-4def-ae2b-2dab3504be9b {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2023.851016] env[63279]: DEBUG oslo_vmware.api [None req-5ddb667c-4266-446f-abce-7527abe6be67 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Waiting for the task: (returnval){ [ 2023.851016] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]5250db5e-86a8-b2e6-7b77-7900f0719407" [ 2023.851016] env[63279]: _type = "Task" [ 2023.851016] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2023.854942] env[63279]: DEBUG oslo_vmware.api [None req-9d9741e8-1ae9-4073-930a-89163f440e1a tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Task: {'id': task-2087288, 'name': CloneVM_Task, 'duration_secs': 1.411022} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2023.862611] env[63279]: INFO nova.virt.vmwareapi.vmops [None req-9d9741e8-1ae9-4073-930a-89163f440e1a tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] [instance: 64e92bfc-c0d0-4918-9ba2-45ffedbf7e39] Created linked-clone VM from snapshot [ 2023.863575] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-196d6ac2-de43-4267-bb7b-64c23c81e12f {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2023.872026] env[63279]: DEBUG oslo_vmware.api [None req-5ddb667c-4266-446f-abce-7527abe6be67 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]5250db5e-86a8-b2e6-7b77-7900f0719407, 'name': SearchDatastore_Task, 'duration_secs': 0.017248} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2023.876057] env[63279]: DEBUG nova.virt.vmwareapi.images [None req-9d9741e8-1ae9-4073-930a-89163f440e1a tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] [instance: 64e92bfc-c0d0-4918-9ba2-45ffedbf7e39] Uploading image cf3576cc-916a-43f4-926d-5dee7d5ab54a {{(pid=63279) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 2023.880382] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-69f034da-fd94-4456-bff5-b61829f4a39a {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2023.886991] env[63279]: DEBUG oslo_vmware.api [None req-5ddb667c-4266-446f-abce-7527abe6be67 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Waiting for the task: (returnval){ [ 2023.886991] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]523ba91e-74a3-4c43-153d-4ba010a1f099" [ 2023.886991] env[63279]: _type = "Task" [ 2023.886991] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2023.897015] env[63279]: DEBUG oslo_vmware.api [None req-5ddb667c-4266-446f-abce-7527abe6be67 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]523ba91e-74a3-4c43-153d-4ba010a1f099, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2023.906881] env[63279]: DEBUG oslo_vmware.rw_handles [None req-9d9741e8-1ae9-4073-930a-89163f440e1a tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 2023.906881] env[63279]: value = "vm-427647" [ 2023.906881] env[63279]: _type = "VirtualMachine" [ 2023.906881] env[63279]: }. 
{{(pid=63279) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 2023.907173] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-88145d57-17b3-4780-a74d-4ca7069585e9 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2023.914753] env[63279]: DEBUG oslo_vmware.rw_handles [None req-9d9741e8-1ae9-4073-930a-89163f440e1a tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Lease: (returnval){ [ 2023.914753] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52ee7fdf-8f96-3eb0-b2a3-412004a0fe63" [ 2023.914753] env[63279]: _type = "HttpNfcLease" [ 2023.914753] env[63279]: } obtained for exporting VM: (result){ [ 2023.914753] env[63279]: value = "vm-427647" [ 2023.914753] env[63279]: _type = "VirtualMachine" [ 2023.914753] env[63279]: }. {{(pid=63279) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 2023.914992] env[63279]: DEBUG oslo_vmware.api [None req-9d9741e8-1ae9-4073-930a-89163f440e1a tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Waiting for the lease: (returnval){ [ 2023.914992] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52ee7fdf-8f96-3eb0-b2a3-412004a0fe63" [ 2023.914992] env[63279]: _type = "HttpNfcLease" [ 2023.914992] env[63279]: } to be ready. {{(pid=63279) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 2023.927565] env[63279]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 2023.927565] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52ee7fdf-8f96-3eb0-b2a3-412004a0fe63" [ 2023.927565] env[63279]: _type = "HttpNfcLease" [ 2023.927565] env[63279]: } is initializing. {{(pid=63279) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 2024.040315] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b0743caa-f310-40a7-b364-e4022c9249e0 tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2024.089719] env[63279]: DEBUG nova.compute.manager [None req-fc1fded0-bb06-4bd3-9e42-05bd80dfbf53 tempest-InstanceActionsV221TestJSON-1895476841 tempest-InstanceActionsV221TestJSON-1895476841-project-member] [instance: ff9701ed-d545-44b4-911a-c4d809d0a771] Start destroying the instance on the hypervisor. 
{{(pid=63279) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2024.089937] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-fc1fded0-bb06-4bd3-9e42-05bd80dfbf53 tempest-InstanceActionsV221TestJSON-1895476841 tempest-InstanceActionsV221TestJSON-1895476841-project-member] [instance: ff9701ed-d545-44b4-911a-c4d809d0a771] Destroying instance {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2024.090794] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1814c43f-7388-4162-b16e-7b8176b8dbd5 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2024.100028] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-fc1fded0-bb06-4bd3-9e42-05bd80dfbf53 tempest-InstanceActionsV221TestJSON-1895476841 tempest-InstanceActionsV221TestJSON-1895476841-project-member] [instance: ff9701ed-d545-44b4-911a-c4d809d0a771] Powering off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2024.100298] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-fe9fd882-3a5d-4969-8839-990e63dce5b9 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2024.112562] env[63279]: DEBUG oslo_vmware.api [None req-fc1fded0-bb06-4bd3-9e42-05bd80dfbf53 tempest-InstanceActionsV221TestJSON-1895476841 tempest-InstanceActionsV221TestJSON-1895476841-project-member] Waiting for the task: (returnval){ [ 2024.112562] env[63279]: value = "task-2087292" [ 2024.112562] env[63279]: _type = "Task" [ 2024.112562] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2024.121416] env[63279]: DEBUG oslo_vmware.api [None req-fc1fded0-bb06-4bd3-9e42-05bd80dfbf53 tempest-InstanceActionsV221TestJSON-1895476841 tempest-InstanceActionsV221TestJSON-1895476841-project-member] Task: {'id': task-2087292, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2024.128204] env[63279]: DEBUG nova.network.neutron [None req-968ae4c0-2363-46b4-8c8d-1bd213d554cd tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] [instance: 5b5f87cb-cf35-418f-b5bd-b953524a285c] Successfully created port: 5576a5f4-6c18-428c-82f9-6cedc1a2b828 {{(pid=63279) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2024.190574] env[63279]: DEBUG oslo_vmware.api [None req-27d050e2-e902-4611-87fd-a6b52583e82d tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] Task: {'id': task-2087290, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2024.288816] env[63279]: DEBUG nova.compute.manager [None req-968ae4c0-2363-46b4-8c8d-1bd213d554cd tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] [instance: 5b5f87cb-cf35-418f-b5bd-b953524a285c] Start building block device mappings for instance. 
{{(pid=63279) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 2024.333346] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd94c784-bfac-4e32-b4b1-f0de3bea15cc {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2024.343141] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65e25e72-744f-4096-8fef-128c4717b470 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2024.376712] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-045abcdc-1860-4a85-af07-ac3805b2cc03 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2024.387280] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14eb471b-acce-4644-a981-b0fd63ace53a {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2024.410265] env[63279]: DEBUG nova.compute.provider_tree [None req-0d7ac8a8-a0e2-4644-bc68-36c40b80e766 tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] Inventory has not changed in ProviderTree for provider: 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2024.416361] env[63279]: DEBUG oslo_vmware.api [None req-5ddb667c-4266-446f-abce-7527abe6be67 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]523ba91e-74a3-4c43-153d-4ba010a1f099, 'name': SearchDatastore_Task, 'duration_secs': 0.013033} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2024.417921] env[63279]: DEBUG oslo_concurrency.lockutils [None req-5ddb667c-4266-446f-abce-7527abe6be67 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2024.417921] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-5ddb667c-4266-446f-abce-7527abe6be67 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] 6388f912-ae70-4e8f-b8e4-ceb02e0f8a51/6388f912-ae70-4e8f-b8e4-ceb02e0f8a51.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2024.423155] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-af8c8056-bc52-41c2-85d6-e7286a74061b {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2024.433310] env[63279]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 2024.433310] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52ee7fdf-8f96-3eb0-b2a3-412004a0fe63" [ 2024.433310] env[63279]: _type = "HttpNfcLease" [ 2024.433310] env[63279]: } is ready. {{(pid=63279) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 2024.433637] env[63279]: DEBUG oslo_vmware.api [None req-5ddb667c-4266-446f-abce-7527abe6be67 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Waiting for the task: (returnval){ [ 2024.433637] env[63279]: value = "task-2087293" [ 2024.433637] env[63279]: _type = "Task" [ 2024.433637] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2024.433886] env[63279]: DEBUG oslo_vmware.rw_handles [None req-9d9741e8-1ae9-4073-930a-89163f440e1a tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 2024.433886] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52ee7fdf-8f96-3eb0-b2a3-412004a0fe63" [ 2024.433886] env[63279]: _type = "HttpNfcLease" [ 2024.433886] env[63279]: }. {{(pid=63279) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 2024.434663] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6e51114-c1a6-457a-b508-05b4d18f2e08 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2024.450374] env[63279]: DEBUG oslo_vmware.rw_handles [None req-9d9741e8-1ae9-4073-930a-89163f440e1a tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52fb6da5-b4fe-c297-cca3-7a95159f9b54/disk-0.vmdk from lease info. 
{{(pid=63279) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 2024.450560] env[63279]: DEBUG oslo_vmware.rw_handles [None req-9d9741e8-1ae9-4073-930a-89163f440e1a tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52fb6da5-b4fe-c297-cca3-7a95159f9b54/disk-0.vmdk for reading. {{(pid=63279) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 2024.452055] env[63279]: DEBUG oslo_vmware.api [None req-5ddb667c-4266-446f-abce-7527abe6be67 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Task: {'id': task-2087293, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2024.545563] env[63279]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-02a2d611-2f15-40ee-aabd-08a27e78e014 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2024.630246] env[63279]: DEBUG oslo_vmware.api [None req-fc1fded0-bb06-4bd3-9e42-05bd80dfbf53 tempest-InstanceActionsV221TestJSON-1895476841 tempest-InstanceActionsV221TestJSON-1895476841-project-member] Task: {'id': task-2087292, 'name': PowerOffVM_Task, 'duration_secs': 0.366147} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2024.630530] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-fc1fded0-bb06-4bd3-9e42-05bd80dfbf53 tempest-InstanceActionsV221TestJSON-1895476841 tempest-InstanceActionsV221TestJSON-1895476841-project-member] [instance: ff9701ed-d545-44b4-911a-c4d809d0a771] Powered off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2024.630742] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-fc1fded0-bb06-4bd3-9e42-05bd80dfbf53 tempest-InstanceActionsV221TestJSON-1895476841 tempest-InstanceActionsV221TestJSON-1895476841-project-member] [instance: ff9701ed-d545-44b4-911a-c4d809d0a771] Unregistering the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2024.631247] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-32e4205f-470f-40f8-ab4c-7ead8dfbe6dc {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2024.681691] env[63279]: DEBUG oslo_concurrency.lockutils [None req-d6ed46b7-baf3-4c58-92b0-8ca9f050dbbc tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Acquiring lock "8ccb4293-927a-45ba-82e9-9f1b4d5985cc" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2024.681961] env[63279]: DEBUG oslo_concurrency.lockutils [None req-d6ed46b7-baf3-4c58-92b0-8ca9f050dbbc tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Lock "8ccb4293-927a-45ba-82e9-9f1b4d5985cc" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2024.682188] 
env[63279]: DEBUG oslo_concurrency.lockutils [None req-d6ed46b7-baf3-4c58-92b0-8ca9f050dbbc tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Acquiring lock "8ccb4293-927a-45ba-82e9-9f1b4d5985cc-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2024.682375] env[63279]: DEBUG oslo_concurrency.lockutils [None req-d6ed46b7-baf3-4c58-92b0-8ca9f050dbbc tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Lock "8ccb4293-927a-45ba-82e9-9f1b4d5985cc-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2024.682548] env[63279]: DEBUG oslo_concurrency.lockutils [None req-d6ed46b7-baf3-4c58-92b0-8ca9f050dbbc tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Lock "8ccb4293-927a-45ba-82e9-9f1b4d5985cc-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2024.684220] env[63279]: DEBUG oslo_vmware.api [None req-27d050e2-e902-4611-87fd-a6b52583e82d tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] Task: {'id': task-2087290, 'name': ReconfigVM_Task, 'duration_secs': 0.820503} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2024.684774] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-27d050e2-e902-4611-87fd-a6b52583e82d tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] [instance: 0e12ab9b-a701-4e0f-9d96-939090f50494] Reconfigured VM instance instance-00000038 to attach disk [datastore1] 0e12ab9b-a701-4e0f-9d96-939090f50494/ephemeral_0.vmdk or device None with type thin {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2024.689019] env[63279]: INFO nova.compute.manager [None req-d6ed46b7-baf3-4c58-92b0-8ca9f050dbbc tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] [instance: 8ccb4293-927a-45ba-82e9-9f1b4d5985cc] Terminating instance [ 2024.689019] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-9779aef2-f62a-4f38-91e1-3dc9d1252bba {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2024.693859] env[63279]: DEBUG oslo_vmware.api [None req-27d050e2-e902-4611-87fd-a6b52583e82d tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] Waiting for the task: (returnval){ [ 2024.693859] env[63279]: value = "task-2087295" [ 2024.693859] env[63279]: _type = "Task" [ 2024.693859] env[63279]: } to complete. 
{{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2024.705247] env[63279]: DEBUG oslo_vmware.api [None req-27d050e2-e902-4611-87fd-a6b52583e82d tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] Task: {'id': task-2087295, 'name': Rename_Task} progress is 5%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2024.919189] env[63279]: DEBUG nova.scheduler.client.report [None req-0d7ac8a8-a0e2-4644-bc68-36c40b80e766 tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] Inventory has not changed for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2024.944412] env[63279]: DEBUG oslo_vmware.api [None req-5ddb667c-4266-446f-abce-7527abe6be67 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Task: {'id': task-2087293, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.50141} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2024.945520] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-5ddb667c-4266-446f-abce-7527abe6be67 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] 6388f912-ae70-4e8f-b8e4-ceb02e0f8a51/6388f912-ae70-4e8f-b8e4-ceb02e0f8a51.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2024.946278] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-5ddb667c-4266-446f-abce-7527abe6be67 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] [instance: 6388f912-ae70-4e8f-b8e4-ceb02e0f8a51] Extending root virtual disk to 1048576 {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2024.946278] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a65bee3c-b98a-4d24-ba70-09438f8d5ac2 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2024.956045] env[63279]: DEBUG oslo_vmware.api [None req-5ddb667c-4266-446f-abce-7527abe6be67 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Waiting for the task: (returnval){ [ 2024.956045] env[63279]: value = "task-2087296" [ 2024.956045] env[63279]: _type = "Task" [ 2024.956045] env[63279]: } to complete. 
{{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2024.966533] env[63279]: DEBUG oslo_vmware.api [None req-5ddb667c-4266-446f-abce-7527abe6be67 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Task: {'id': task-2087296, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2024.971285] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-fc1fded0-bb06-4bd3-9e42-05bd80dfbf53 tempest-InstanceActionsV221TestJSON-1895476841 tempest-InstanceActionsV221TestJSON-1895476841-project-member] [instance: ff9701ed-d545-44b4-911a-c4d809d0a771] Unregistered the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2024.971519] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-fc1fded0-bb06-4bd3-9e42-05bd80dfbf53 tempest-InstanceActionsV221TestJSON-1895476841 tempest-InstanceActionsV221TestJSON-1895476841-project-member] [instance: ff9701ed-d545-44b4-911a-c4d809d0a771] Deleting contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2024.971706] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-fc1fded0-bb06-4bd3-9e42-05bd80dfbf53 tempest-InstanceActionsV221TestJSON-1895476841 tempest-InstanceActionsV221TestJSON-1895476841-project-member] Deleting the datastore file [datastore1] ff9701ed-d545-44b4-911a-c4d809d0a771 {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2024.972018] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-63eaede2-037d-4ca9-bb83-5d75ea6aea73 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2024.978577] env[63279]: DEBUG oslo_vmware.api [None req-fc1fded0-bb06-4bd3-9e42-05bd80dfbf53 tempest-InstanceActionsV221TestJSON-1895476841 tempest-InstanceActionsV221TestJSON-1895476841-project-member] Waiting for the task: (returnval){ [ 2024.978577] env[63279]: value = "task-2087297" [ 2024.978577] env[63279]: _type = "Task" [ 2024.978577] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2024.988550] env[63279]: DEBUG oslo_vmware.api [None req-fc1fded0-bb06-4bd3-9e42-05bd80dfbf53 tempest-InstanceActionsV221TestJSON-1895476841 tempest-InstanceActionsV221TestJSON-1895476841-project-member] Task: {'id': task-2087297, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2025.192886] env[63279]: DEBUG nova.compute.manager [None req-d6ed46b7-baf3-4c58-92b0-8ca9f050dbbc tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] [instance: 8ccb4293-927a-45ba-82e9-9f1b4d5985cc] Start destroying the instance on the hypervisor. 
{{(pid=63279) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2025.193066] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-d6ed46b7-baf3-4c58-92b0-8ca9f050dbbc tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] [instance: 8ccb4293-927a-45ba-82e9-9f1b4d5985cc] Destroying instance {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2025.194033] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68a8b604-c185-4322-81b9-bb69e7e3b6ae {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2025.208580] env[63279]: DEBUG oslo_vmware.api [None req-27d050e2-e902-4611-87fd-a6b52583e82d tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] Task: {'id': task-2087295, 'name': Rename_Task, 'duration_secs': 0.299947} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2025.211087] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-27d050e2-e902-4611-87fd-a6b52583e82d tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] [instance: 0e12ab9b-a701-4e0f-9d96-939090f50494] Powering on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2025.211469] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-d6ed46b7-baf3-4c58-92b0-8ca9f050dbbc tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] [instance: 8ccb4293-927a-45ba-82e9-9f1b4d5985cc] Powering off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2025.211796] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e2f32347-3c19-4e48-80eb-d23f99cb7cc7 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2025.213413] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9159b598-ffb3-4c27-b195-ed82a7e29122 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2025.220318] env[63279]: DEBUG oslo_vmware.api [None req-27d050e2-e902-4611-87fd-a6b52583e82d tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] Waiting for the task: (returnval){ [ 2025.220318] env[63279]: value = "task-2087298" [ 2025.220318] env[63279]: _type = "Task" [ 2025.220318] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2025.222299] env[63279]: DEBUG oslo_vmware.api [None req-d6ed46b7-baf3-4c58-92b0-8ca9f050dbbc tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Waiting for the task: (returnval){ [ 2025.222299] env[63279]: value = "task-2087299" [ 2025.222299] env[63279]: _type = "Task" [ 2025.222299] env[63279]: } to complete. 
{{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2025.246355] env[63279]: DEBUG oslo_vmware.api [None req-d6ed46b7-baf3-4c58-92b0-8ca9f050dbbc tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Task: {'id': task-2087299, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2025.246743] env[63279]: DEBUG oslo_vmware.api [None req-27d050e2-e902-4611-87fd-a6b52583e82d tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] Task: {'id': task-2087298, 'name': PowerOnVM_Task} progress is 33%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2025.306841] env[63279]: DEBUG nova.compute.manager [None req-968ae4c0-2363-46b4-8c8d-1bd213d554cd tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] [instance: 5b5f87cb-cf35-418f-b5bd-b953524a285c] Start spawning the instance on the hypervisor. {{(pid=63279) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 2025.334437] env[63279]: DEBUG nova.virt.hardware [None req-968ae4c0-2363-46b4-8c8d-1bd213d554cd tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-13T17:30:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-13T17:30:01Z,direct_url=,disk_format='vmdk',id=30887889-e45b-4f67-8b3c-16216e594a90,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a935e86eee0a4c38adfe0367d2097a61',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-13T17:30:02Z,virtual_size=,visibility=), allow threads: False {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2025.334872] env[63279]: DEBUG nova.virt.hardware [None req-968ae4c0-2363-46b4-8c8d-1bd213d554cd tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] Flavor limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2025.334990] env[63279]: DEBUG nova.virt.hardware [None req-968ae4c0-2363-46b4-8c8d-1bd213d554cd tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] Image limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2025.335189] env[63279]: DEBUG nova.virt.hardware [None req-968ae4c0-2363-46b4-8c8d-1bd213d554cd tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] Flavor pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2025.335358] env[63279]: DEBUG nova.virt.hardware [None req-968ae4c0-2363-46b4-8c8d-1bd213d554cd tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] Image pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 
2025.335599] env[63279]: DEBUG nova.virt.hardware [None req-968ae4c0-2363-46b4-8c8d-1bd213d554cd tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2025.335970] env[63279]: DEBUG nova.virt.hardware [None req-968ae4c0-2363-46b4-8c8d-1bd213d554cd tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 2025.336172] env[63279]: DEBUG nova.virt.hardware [None req-968ae4c0-2363-46b4-8c8d-1bd213d554cd tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 2025.336381] env[63279]: DEBUG nova.virt.hardware [None req-968ae4c0-2363-46b4-8c8d-1bd213d554cd tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] Got 1 possible topologies {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2025.336740] env[63279]: DEBUG nova.virt.hardware [None req-968ae4c0-2363-46b4-8c8d-1bd213d554cd tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2025.336979] env[63279]: DEBUG nova.virt.hardware [None req-968ae4c0-2363-46b4-8c8d-1bd213d554cd tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2025.337918] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8a55572-78a5-4a27-b65a-bc49cbd5f02c {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2025.346752] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09046d54-d37e-418e-b0b9-8ee930868d03 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2025.427053] env[63279]: DEBUG oslo_concurrency.lockutils [None req-0d7ac8a8-a0e2-4644-bc68-36c40b80e766 tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.152s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2025.432568] env[63279]: DEBUG oslo_concurrency.lockutils [None req-73b648a6-1547-48f2-b4d9-3d262b420193 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 32.048s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} 
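The nova.virt.hardware entries above show the driver enumerating guest CPU topologies for the m1.nano flavor: with no flavor or image constraints the limits are logged as 0:0:0 (unset), the effective ceiling is 65536 per dimension, and a single vCPU admits exactly one topology, 1 socket x 1 core x 1 thread. A minimal, self-contained sketch of that enumeration follows; it is illustrative only, not Nova's actual implementation, and all names in it are made up for the example.

```python
# Illustrative sketch (not nova.virt.hardware itself): enumerate every
# sockets x cores x threads triple whose product equals the vCPU count,
# subject to per-dimension maxima (65536 when no flavor/image limit is set).
from dataclasses import dataclass
from itertools import product


@dataclass(frozen=True)
class Topology:
    sockets: int
    cores: int
    threads: int


def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
    """Yield every topology whose sockets*cores*threads equals vcpus."""
    for s, c, t in product(range(1, min(vcpus, max_sockets) + 1),
                           range(1, min(vcpus, max_cores) + 1),
                           range(1, min(vcpus, max_threads) + 1)):
        if s * c * t == vcpus:
            yield Topology(s, c, t)


# For the 1-vCPU m1.nano flavor this yields the single 1:1:1 topology
# reported in the log above.
print(list(possible_topologies(1)))  # [Topology(sockets=1, cores=1, threads=1)]
```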
[ 2025.432890] env[63279]: DEBUG nova.objects.instance [None req-73b648a6-1547-48f2-b4d9-3d262b420193 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: ad435281-55a0-418a-8400-5c461a5c15ef] Trying to apply a migration context that does not seem to be set for this instance {{(pid=63279) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 2025.469336] env[63279]: DEBUG oslo_vmware.api [None req-5ddb667c-4266-446f-abce-7527abe6be67 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Task: {'id': task-2087296, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.104794} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2025.470314] env[63279]: INFO nova.scheduler.client.report [None req-0d7ac8a8-a0e2-4644-bc68-36c40b80e766 tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] Deleted allocations for instance 79032b2a-74f7-4c6d-8f71-f848fe372ba2 [ 2025.472190] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-5ddb667c-4266-446f-abce-7527abe6be67 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] [instance: 6388f912-ae70-4e8f-b8e4-ceb02e0f8a51] Extended root virtual disk {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2025.475870] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-056196e9-6b73-494e-bd32-db00a91b21f6 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2025.502374] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-5ddb667c-4266-446f-abce-7527abe6be67 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] [instance: 6388f912-ae70-4e8f-b8e4-ceb02e0f8a51] Reconfiguring VM instance instance-00000008 to attach disk [datastore1] 6388f912-ae70-4e8f-b8e4-ceb02e0f8a51/6388f912-ae70-4e8f-b8e4-ceb02e0f8a51.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2025.503579] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-676ffdad-28fd-4996-9307-9eb3d2ce121a {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2025.523060] env[63279]: DEBUG oslo_vmware.api [None req-fc1fded0-bb06-4bd3-9e42-05bd80dfbf53 tempest-InstanceActionsV221TestJSON-1895476841 tempest-InstanceActionsV221TestJSON-1895476841-project-member] Task: {'id': task-2087297, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.34074} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2025.523788] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-fc1fded0-bb06-4bd3-9e42-05bd80dfbf53 tempest-InstanceActionsV221TestJSON-1895476841 tempest-InstanceActionsV221TestJSON-1895476841-project-member] Deleted the datastore file {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2025.523995] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-fc1fded0-bb06-4bd3-9e42-05bd80dfbf53 tempest-InstanceActionsV221TestJSON-1895476841 tempest-InstanceActionsV221TestJSON-1895476841-project-member] [instance: ff9701ed-d545-44b4-911a-c4d809d0a771] Deleted contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2025.524284] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-fc1fded0-bb06-4bd3-9e42-05bd80dfbf53 tempest-InstanceActionsV221TestJSON-1895476841 tempest-InstanceActionsV221TestJSON-1895476841-project-member] [instance: ff9701ed-d545-44b4-911a-c4d809d0a771] Instance destroyed {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2025.524454] env[63279]: INFO nova.compute.manager [None req-fc1fded0-bb06-4bd3-9e42-05bd80dfbf53 tempest-InstanceActionsV221TestJSON-1895476841 tempest-InstanceActionsV221TestJSON-1895476841-project-member] [instance: ff9701ed-d545-44b4-911a-c4d809d0a771] Took 1.43 seconds to destroy the instance on the hypervisor. [ 2025.524826] env[63279]: DEBUG oslo.service.loopingcall [None req-fc1fded0-bb06-4bd3-9e42-05bd80dfbf53 tempest-InstanceActionsV221TestJSON-1895476841 tempest-InstanceActionsV221TestJSON-1895476841-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2025.525060] env[63279]: DEBUG nova.compute.manager [-] [instance: ff9701ed-d545-44b4-911a-c4d809d0a771] Deallocating network for instance {{(pid=63279) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2025.525356] env[63279]: DEBUG nova.network.neutron [-] [instance: ff9701ed-d545-44b4-911a-c4d809d0a771] deallocate_for_instance() {{(pid=63279) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2025.530062] env[63279]: DEBUG oslo_vmware.api [None req-5ddb667c-4266-446f-abce-7527abe6be67 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Waiting for the task: (returnval){ [ 2025.530062] env[63279]: value = "task-2087300" [ 2025.530062] env[63279]: _type = "Task" [ 2025.530062] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2025.540596] env[63279]: DEBUG oslo_vmware.api [None req-5ddb667c-4266-446f-abce-7527abe6be67 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Task: {'id': task-2087300, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2025.743934] env[63279]: DEBUG oslo_vmware.api [None req-27d050e2-e902-4611-87fd-a6b52583e82d tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] Task: {'id': task-2087298, 'name': PowerOnVM_Task, 'duration_secs': 0.490564} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2025.745580] env[63279]: DEBUG oslo_vmware.api [None req-d6ed46b7-baf3-4c58-92b0-8ca9f050dbbc tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Task: {'id': task-2087299, 'name': PowerOffVM_Task, 'duration_secs': 0.166635} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2025.745848] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-27d050e2-e902-4611-87fd-a6b52583e82d tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] [instance: 0e12ab9b-a701-4e0f-9d96-939090f50494] Powered on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2025.746118] env[63279]: INFO nova.compute.manager [None req-27d050e2-e902-4611-87fd-a6b52583e82d tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] [instance: 0e12ab9b-a701-4e0f-9d96-939090f50494] Took 10.55 seconds to spawn the instance on the hypervisor. [ 2025.746332] env[63279]: DEBUG nova.compute.manager [None req-27d050e2-e902-4611-87fd-a6b52583e82d tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] [instance: 0e12ab9b-a701-4e0f-9d96-939090f50494] Checking state {{(pid=63279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2025.746640] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-d6ed46b7-baf3-4c58-92b0-8ca9f050dbbc tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] [instance: 8ccb4293-927a-45ba-82e9-9f1b4d5985cc] Powered off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2025.746919] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-d6ed46b7-baf3-4c58-92b0-8ca9f050dbbc tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] [instance: 8ccb4293-927a-45ba-82e9-9f1b4d5985cc] Unregistering the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2025.747790] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80f5fd42-bf41-4d4a-b3ab-4c6581e48741 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2025.750904] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-475a63ce-1dea-4de9-a324-24cbd4cfafa6 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2025.894718] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-d6ed46b7-baf3-4c58-92b0-8ca9f050dbbc tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] [instance: 8ccb4293-927a-45ba-82e9-9f1b4d5985cc] Unregistered the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2025.894980] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-d6ed46b7-baf3-4c58-92b0-8ca9f050dbbc tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] [instance: 8ccb4293-927a-45ba-82e9-9f1b4d5985cc] Deleting contents of the VM 
from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2025.895206] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-d6ed46b7-baf3-4c58-92b0-8ca9f050dbbc tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Deleting the datastore file [datastore1] 8ccb4293-927a-45ba-82e9-9f1b4d5985cc {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2025.895473] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5a5ea428-e20c-41b3-9a6f-b05e7ea9e59b {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2025.905460] env[63279]: DEBUG oslo_vmware.api [None req-d6ed46b7-baf3-4c58-92b0-8ca9f050dbbc tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Waiting for the task: (returnval){ [ 2025.905460] env[63279]: value = "task-2087302" [ 2025.905460] env[63279]: _type = "Task" [ 2025.905460] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2025.915957] env[63279]: DEBUG oslo_vmware.api [None req-d6ed46b7-baf3-4c58-92b0-8ca9f050dbbc tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Task: {'id': task-2087302, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2025.979563] env[63279]: DEBUG oslo_concurrency.lockutils [None req-0d7ac8a8-a0e2-4644-bc68-36c40b80e766 tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] Lock "79032b2a-74f7-4c6d-8f71-f848fe372ba2" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 38.675s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2025.980802] env[63279]: DEBUG oslo_concurrency.lockutils [None req-8106d6b8-905a-4c2b-bdd4-1d1f32b348cc tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] Lock "79032b2a-74f7-4c6d-8f71-f848fe372ba2" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 32.983s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2025.981059] env[63279]: DEBUG oslo_concurrency.lockutils [None req-8106d6b8-905a-4c2b-bdd4-1d1f32b348cc tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] Acquiring lock "79032b2a-74f7-4c6d-8f71-f848fe372ba2-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2025.981378] env[63279]: DEBUG oslo_concurrency.lockutils [None req-8106d6b8-905a-4c2b-bdd4-1d1f32b348cc tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] Lock "79032b2a-74f7-4c6d-8f71-f848fe372ba2-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63279) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2025.981642] env[63279]: DEBUG oslo_concurrency.lockutils [None req-8106d6b8-905a-4c2b-bdd4-1d1f32b348cc tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] Lock "79032b2a-74f7-4c6d-8f71-f848fe372ba2-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2025.985288] env[63279]: INFO nova.compute.manager [None req-8106d6b8-905a-4c2b-bdd4-1d1f32b348cc tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] [instance: 79032b2a-74f7-4c6d-8f71-f848fe372ba2] Terminating instance [ 2026.046936] env[63279]: DEBUG oslo_vmware.api [None req-5ddb667c-4266-446f-abce-7527abe6be67 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Task: {'id': task-2087300, 'name': ReconfigVM_Task, 'duration_secs': 0.26637} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2026.047834] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-5ddb667c-4266-446f-abce-7527abe6be67 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] [instance: 6388f912-ae70-4e8f-b8e4-ceb02e0f8a51] Reconfigured VM instance instance-00000008 to attach disk [datastore1] 6388f912-ae70-4e8f-b8e4-ceb02e0f8a51/6388f912-ae70-4e8f-b8e4-ceb02e0f8a51.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2026.048122] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e554d3ca-fbf1-4eec-9bf0-448857bada7c {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2026.055134] env[63279]: DEBUG oslo_vmware.api [None req-5ddb667c-4266-446f-abce-7527abe6be67 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Waiting for the task: (returnval){ [ 2026.055134] env[63279]: value = "task-2087303" [ 2026.055134] env[63279]: _type = "Task" [ 2026.055134] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2026.067505] env[63279]: DEBUG oslo_vmware.api [None req-5ddb667c-4266-446f-abce-7527abe6be67 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Task: {'id': task-2087303, 'name': Rename_Task} progress is 5%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2026.267788] env[63279]: INFO nova.compute.manager [None req-27d050e2-e902-4611-87fd-a6b52583e82d tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] [instance: 0e12ab9b-a701-4e0f-9d96-939090f50494] Took 43.41 seconds to build instance. 
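Every PowerOffVM_Task / CopyVirtualDisk_Task / Rename_Task / PowerOnVM_Task pair above follows the same invoke-then-poll pattern from oslo.vmware: invoke_api() starts an asynchronous vSphere task and wait_for_task() polls it (the "progress is N%" lines) until it reaches a terminal state. A rough sketch of that pattern is below; the connection details and the vm_ref lookup are placeholders and not values from this log, and the real code paths are nova.virt.vmwareapi.vm_util and oslo_vmware.api.

```python
# A minimal sketch of the invoke-then-poll pattern behind the
# "Invoking VirtualMachine.PowerOffVM_Task ..." / "Waiting for the task ..."
# pairs in this log. Host, credentials and vm_ref are placeholders.
from oslo_vmware import api

session = api.VMwareAPISession(
    'vc.example.test',                 # vCenter host (placeholder)
    'administrator@vsphere.local',     # username (placeholder)
    'secret',                          # password (placeholder)
    api_retry_count=10,
    task_poll_interval=0.5)            # roughly matches the polling cadence above

vm_ref = ...  # ManagedObjectReference of the VM, obtained elsewhere (placeholder)

# Start the asynchronous vSphere task, then block until it finishes;
# wait_for_task() polls the task object and raises if it ends in error.
task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
task_info = session.wait_for_task(task)
print(task_info.state)  # e.g. 'success'
```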
[ 2026.315188] env[63279]: DEBUG nova.compute.manager [req-7a21cc72-ce42-4795-ae08-cb08de393355 req-a7e70d42-0b65-44a4-a5e0-91074d29b2bf service nova] [instance: ff9701ed-d545-44b4-911a-c4d809d0a771] Received event network-vif-deleted-4805f07f-4add-493e-80cb-e2c75cc21104 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2026.315387] env[63279]: INFO nova.compute.manager [req-7a21cc72-ce42-4795-ae08-cb08de393355 req-a7e70d42-0b65-44a4-a5e0-91074d29b2bf service nova] [instance: ff9701ed-d545-44b4-911a-c4d809d0a771] Neutron deleted interface 4805f07f-4add-493e-80cb-e2c75cc21104; detaching it from the instance and deleting it from the info cache [ 2026.315558] env[63279]: DEBUG nova.network.neutron [req-7a21cc72-ce42-4795-ae08-cb08de393355 req-a7e70d42-0b65-44a4-a5e0-91074d29b2bf service nova] [instance: ff9701ed-d545-44b4-911a-c4d809d0a771] Updating instance_info_cache with network_info: [] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2026.419431] env[63279]: DEBUG oslo_vmware.api [None req-d6ed46b7-baf3-4c58-92b0-8ca9f050dbbc tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Task: {'id': task-2087302, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.232163} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2026.419861] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-d6ed46b7-baf3-4c58-92b0-8ca9f050dbbc tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Deleted the datastore file {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2026.420166] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-d6ed46b7-baf3-4c58-92b0-8ca9f050dbbc tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] [instance: 8ccb4293-927a-45ba-82e9-9f1b4d5985cc] Deleted contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2026.420725] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-d6ed46b7-baf3-4c58-92b0-8ca9f050dbbc tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] [instance: 8ccb4293-927a-45ba-82e9-9f1b4d5985cc] Instance destroyed {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2026.420954] env[63279]: INFO nova.compute.manager [None req-d6ed46b7-baf3-4c58-92b0-8ca9f050dbbc tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] [instance: 8ccb4293-927a-45ba-82e9-9f1b4d5985cc] Took 1.23 seconds to destroy the instance on the hypervisor. [ 2026.421304] env[63279]: DEBUG oslo.service.loopingcall [None req-d6ed46b7-baf3-4c58-92b0-8ca9f050dbbc tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2026.422042] env[63279]: DEBUG nova.compute.manager [-] [instance: 8ccb4293-927a-45ba-82e9-9f1b4d5985cc] Deallocating network for instance {{(pid=63279) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2026.422042] env[63279]: DEBUG nova.network.neutron [-] [instance: 8ccb4293-927a-45ba-82e9-9f1b4d5985cc] deallocate_for_instance() {{(pid=63279) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2026.444434] env[63279]: DEBUG oslo_concurrency.lockutils [None req-73b648a6-1547-48f2-b4d9-3d262b420193 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.012s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2026.445681] env[63279]: DEBUG oslo_concurrency.lockutils [None req-2774b0bb-ce28-4633-a70c-66ac2aee4c3a tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 23.078s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2026.445912] env[63279]: DEBUG nova.objects.instance [None req-2774b0bb-ce28-4633-a70c-66ac2aee4c3a tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Lazy-loading 'resources' on Instance uuid ad435281-55a0-418a-8400-5c461a5c15ef {{(pid=63279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2026.461223] env[63279]: DEBUG nova.network.neutron [None req-968ae4c0-2363-46b4-8c8d-1bd213d554cd tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] [instance: 5b5f87cb-cf35-418f-b5bd-b953524a285c] Successfully updated port: 5576a5f4-6c18-428c-82f9-6cedc1a2b828 {{(pid=63279) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2026.492534] env[63279]: DEBUG oslo_concurrency.lockutils [None req-8106d6b8-905a-4c2b-bdd4-1d1f32b348cc tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] Acquiring lock "refresh_cache-79032b2a-74f7-4c6d-8f71-f848fe372ba2" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2026.492650] env[63279]: DEBUG oslo_concurrency.lockutils [None req-8106d6b8-905a-4c2b-bdd4-1d1f32b348cc tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] Acquired lock "refresh_cache-79032b2a-74f7-4c6d-8f71-f848fe372ba2" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2026.493093] env[63279]: DEBUG nova.network.neutron [None req-8106d6b8-905a-4c2b-bdd4-1d1f32b348cc tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] [instance: 79032b2a-74f7-4c6d-8f71-f848fe372ba2] Building network info cache for instance {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2026.569117] env[63279]: DEBUG oslo_vmware.api [None req-5ddb667c-4266-446f-abce-7527abe6be67 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Task: {'id': task-2087303, 
'name': Rename_Task, 'duration_secs': 0.142536} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2026.569180] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-5ddb667c-4266-446f-abce-7527abe6be67 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] [instance: 6388f912-ae70-4e8f-b8e4-ceb02e0f8a51] Powering on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2026.569409] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-62ce4076-e352-4c43-b8fa-6f109e9d5846 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2026.576830] env[63279]: DEBUG oslo_vmware.api [None req-5ddb667c-4266-446f-abce-7527abe6be67 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Waiting for the task: (returnval){ [ 2026.576830] env[63279]: value = "task-2087304" [ 2026.576830] env[63279]: _type = "Task" [ 2026.576830] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2026.586838] env[63279]: DEBUG oslo_vmware.api [None req-5ddb667c-4266-446f-abce-7527abe6be67 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Task: {'id': task-2087304, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2026.672473] env[63279]: DEBUG nova.network.neutron [-] [instance: ff9701ed-d545-44b4-911a-c4d809d0a771] Updating instance_info_cache with network_info: [] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2026.773653] env[63279]: DEBUG oslo_concurrency.lockutils [None req-27d050e2-e902-4611-87fd-a6b52583e82d tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] Lock "0e12ab9b-a701-4e0f-9d96-939090f50494" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 76.858s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2026.819538] env[63279]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b2d1f4b5-83ea-42bc-af60-716baf906bb2 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2026.831494] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40cb5b88-bd4e-4ec7-b42d-77185457c154 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2026.874835] env[63279]: DEBUG nova.compute.manager [req-7a21cc72-ce42-4795-ae08-cb08de393355 req-a7e70d42-0b65-44a4-a5e0-91074d29b2bf service nova] [instance: ff9701ed-d545-44b4-911a-c4d809d0a771] Detach interface failed, port_id=4805f07f-4add-493e-80cb-e2c75cc21104, reason: Instance ff9701ed-d545-44b4-911a-c4d809d0a771 could not be found. 
{{(pid=63279) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11484}} [ 2026.964903] env[63279]: DEBUG oslo_concurrency.lockutils [None req-968ae4c0-2363-46b4-8c8d-1bd213d554cd tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] Acquiring lock "refresh_cache-5b5f87cb-cf35-418f-b5bd-b953524a285c" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2026.966865] env[63279]: DEBUG oslo_concurrency.lockutils [None req-968ae4c0-2363-46b4-8c8d-1bd213d554cd tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] Acquired lock "refresh_cache-5b5f87cb-cf35-418f-b5bd-b953524a285c" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2026.967091] env[63279]: DEBUG nova.network.neutron [None req-968ae4c0-2363-46b4-8c8d-1bd213d554cd tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] [instance: 5b5f87cb-cf35-418f-b5bd-b953524a285c] Building network info cache for instance {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2027.006846] env[63279]: DEBUG nova.compute.utils [None req-8106d6b8-905a-4c2b-bdd4-1d1f32b348cc tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] [instance: 79032b2a-74f7-4c6d-8f71-f848fe372ba2] Can not refresh info_cache because instance was not found {{(pid=63279) refresh_info_cache_for_instance /opt/stack/nova/nova/compute/utils.py:1055}} [ 2027.037259] env[63279]: DEBUG nova.network.neutron [None req-8106d6b8-905a-4c2b-bdd4-1d1f32b348cc tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] [instance: 79032b2a-74f7-4c6d-8f71-f848fe372ba2] Instance cache missing network info. {{(pid=63279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2027.090480] env[63279]: DEBUG oslo_vmware.api [None req-5ddb667c-4266-446f-abce-7527abe6be67 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Task: {'id': task-2087304, 'name': PowerOnVM_Task} progress is 89%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2027.160091] env[63279]: DEBUG nova.network.neutron [None req-8106d6b8-905a-4c2b-bdd4-1d1f32b348cc tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] [instance: 79032b2a-74f7-4c6d-8f71-f848fe372ba2] Updating instance_info_cache with network_info: [] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2027.174058] env[63279]: INFO nova.compute.manager [-] [instance: ff9701ed-d545-44b4-911a-c4d809d0a771] Took 1.65 seconds to deallocate network for instance. [ 2027.276024] env[63279]: DEBUG nova.compute.manager [None req-1005df31-c010-45b4-a217-c9e2af6f76aa tempest-ServerShowV254Test-1528218800 tempest-ServerShowV254Test-1528218800-project-member] [instance: 4871421f-0015-4973-bb5f-c9042d411c82] Starting instance... 
{{(pid=63279) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 2027.526444] env[63279]: DEBUG nova.network.neutron [None req-968ae4c0-2363-46b4-8c8d-1bd213d554cd tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] [instance: 5b5f87cb-cf35-418f-b5bd-b953524a285c] Instance cache missing network info. {{(pid=63279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2027.558851] env[63279]: DEBUG nova.network.neutron [-] [instance: 8ccb4293-927a-45ba-82e9-9f1b4d5985cc] Updating instance_info_cache with network_info: [] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2027.590149] env[63279]: DEBUG oslo_vmware.api [None req-5ddb667c-4266-446f-abce-7527abe6be67 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Task: {'id': task-2087304, 'name': PowerOnVM_Task, 'duration_secs': 0.682342} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2027.590423] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-5ddb667c-4266-446f-abce-7527abe6be67 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] [instance: 6388f912-ae70-4e8f-b8e4-ceb02e0f8a51] Powered on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2027.590632] env[63279]: DEBUG nova.compute.manager [None req-5ddb667c-4266-446f-abce-7527abe6be67 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] [instance: 6388f912-ae70-4e8f-b8e4-ceb02e0f8a51] Checking state {{(pid=63279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2027.592621] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-406f8704-8847-426d-a051-dcd610d48d1e {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2027.617147] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7f5b33c-07cb-4b1e-b1c2-3db6a9e5c0f4 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2027.625812] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbf03f67-b816-4c1d-9f9e-1fe4002e13d9 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2027.662217] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a35aef49-cf99-4215-8684-d8a12a65880c {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2027.671449] env[63279]: DEBUG oslo_concurrency.lockutils [None req-8106d6b8-905a-4c2b-bdd4-1d1f32b348cc tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] Releasing lock "refresh_cache-79032b2a-74f7-4c6d-8f71-f848fe372ba2" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2027.671880] env[63279]: DEBUG nova.compute.manager [None req-8106d6b8-905a-4c2b-bdd4-1d1f32b348cc tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] [instance: 
79032b2a-74f7-4c6d-8f71-f848fe372ba2] Start destroying the instance on the hypervisor. {{(pid=63279) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2027.672095] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-8106d6b8-905a-4c2b-bdd4-1d1f32b348cc tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] [instance: 79032b2a-74f7-4c6d-8f71-f848fe372ba2] Destroying instance {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2027.672421] env[63279]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-2273c2ab-ef45-4623-941d-483793c615a3 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2027.676034] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3566b29-de76-48e5-a628-df31b7b28da8 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2027.681192] env[63279]: DEBUG oslo_concurrency.lockutils [None req-fc1fded0-bb06-4bd3-9e42-05bd80dfbf53 tempest-InstanceActionsV221TestJSON-1895476841 tempest-InstanceActionsV221TestJSON-1895476841-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2027.690788] env[63279]: DEBUG nova.compute.provider_tree [None req-2774b0bb-ce28-4633-a70c-66ac2aee4c3a tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Inventory has not changed in ProviderTree for provider: 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2027.695803] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f3f4558-3c6c-4dcb-a1a2-b168f268a9d4 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2027.707705] env[63279]: DEBUG nova.scheduler.client.report [None req-2774b0bb-ce28-4633-a70c-66ac2aee4c3a tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Inventory has not changed for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2027.734765] env[63279]: WARNING nova.virt.vmwareapi.vmops [None req-8106d6b8-905a-4c2b-bdd4-1d1f32b348cc tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] [instance: 79032b2a-74f7-4c6d-8f71-f848fe372ba2] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 79032b2a-74f7-4c6d-8f71-f848fe372ba2 could not be found. 
[ 2027.734765] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-8106d6b8-905a-4c2b-bdd4-1d1f32b348cc tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] [instance: 79032b2a-74f7-4c6d-8f71-f848fe372ba2] Instance destroyed {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2027.734950] env[63279]: INFO nova.compute.manager [None req-8106d6b8-905a-4c2b-bdd4-1d1f32b348cc tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] [instance: 79032b2a-74f7-4c6d-8f71-f848fe372ba2] Took 0.06 seconds to destroy the instance on the hypervisor. [ 2027.735081] env[63279]: DEBUG oslo.service.loopingcall [None req-8106d6b8-905a-4c2b-bdd4-1d1f32b348cc tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2027.735333] env[63279]: DEBUG nova.compute.manager [-] [instance: 79032b2a-74f7-4c6d-8f71-f848fe372ba2] Deallocating network for instance {{(pid=63279) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2027.735428] env[63279]: DEBUG nova.network.neutron [-] [instance: 79032b2a-74f7-4c6d-8f71-f848fe372ba2] deallocate_for_instance() {{(pid=63279) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2027.766631] env[63279]: DEBUG nova.network.neutron [-] [instance: 79032b2a-74f7-4c6d-8f71-f848fe372ba2] Instance cache missing network info. {{(pid=63279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2027.800694] env[63279]: DEBUG oslo_concurrency.lockutils [None req-1005df31-c010-45b4-a217-c9e2af6f76aa tempest-ServerShowV254Test-1528218800 tempest-ServerShowV254Test-1528218800-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2027.920440] env[63279]: DEBUG nova.network.neutron [None req-968ae4c0-2363-46b4-8c8d-1bd213d554cd tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] [instance: 5b5f87cb-cf35-418f-b5bd-b953524a285c] Updating instance_info_cache with network_info: [{"id": "5576a5f4-6c18-428c-82f9-6cedc1a2b828", "address": "fa:16:3e:1a:a2:31", "network": {"id": "9cfd684f-63e8-44aa-8569-0ab02c790458", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-318205966-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4f07bad15ea5419cbecc5840b4e96d01", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dcf5c3f7-4e33-4f21-b323-3673930b789c", "external-id": "nsx-vlan-transportzone-983", "segmentation_id": 983, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5576a5f4-6c", "ovs_interfaceid": "5576a5f4-6c18-428c-82f9-6cedc1a2b828", "qbh_params": null, 
"qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2027.959170] env[63279]: DEBUG oslo_concurrency.lockutils [None req-22a06ea0-118b-4a5a-86ce-f8246a2ea7e2 tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] Acquiring lock "357f08c9-4de9-4b84-8384-6bf130872f40" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2027.959454] env[63279]: DEBUG oslo_concurrency.lockutils [None req-22a06ea0-118b-4a5a-86ce-f8246a2ea7e2 tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] Lock "357f08c9-4de9-4b84-8384-6bf130872f40" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2027.959667] env[63279]: DEBUG oslo_concurrency.lockutils [None req-22a06ea0-118b-4a5a-86ce-f8246a2ea7e2 tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] Acquiring lock "357f08c9-4de9-4b84-8384-6bf130872f40-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2027.959882] env[63279]: DEBUG oslo_concurrency.lockutils [None req-22a06ea0-118b-4a5a-86ce-f8246a2ea7e2 tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] Lock "357f08c9-4de9-4b84-8384-6bf130872f40-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2027.960093] env[63279]: DEBUG oslo_concurrency.lockutils [None req-22a06ea0-118b-4a5a-86ce-f8246a2ea7e2 tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] Lock "357f08c9-4de9-4b84-8384-6bf130872f40-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2027.962143] env[63279]: INFO nova.compute.manager [None req-22a06ea0-118b-4a5a-86ce-f8246a2ea7e2 tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] [instance: 357f08c9-4de9-4b84-8384-6bf130872f40] Terminating instance [ 2028.061419] env[63279]: INFO nova.compute.manager [-] [instance: 8ccb4293-927a-45ba-82e9-9f1b4d5985cc] Took 1.64 seconds to deallocate network for instance. 
[ 2028.125513] env[63279]: DEBUG oslo_concurrency.lockutils [None req-5ddb667c-4266-446f-abce-7527abe6be67 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2028.212929] env[63279]: DEBUG oslo_concurrency.lockutils [None req-2774b0bb-ce28-4633-a70c-66ac2aee4c3a tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.767s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2028.215507] env[63279]: DEBUG oslo_concurrency.lockutils [None req-edb6b700-e848-4325-a4d2-78dc8c4a27f3 tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 24.637s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2028.215799] env[63279]: DEBUG nova.objects.instance [None req-edb6b700-e848-4325-a4d2-78dc8c4a27f3 tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Lazy-loading 'resources' on Instance uuid 5d4be656-defe-4332-b97e-e88b107ca4a1 {{(pid=63279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2028.232334] env[63279]: INFO nova.scheduler.client.report [None req-2774b0bb-ce28-4633-a70c-66ac2aee4c3a tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Deleted allocations for instance ad435281-55a0-418a-8400-5c461a5c15ef [ 2028.267976] env[63279]: DEBUG nova.network.neutron [-] [instance: 79032b2a-74f7-4c6d-8f71-f848fe372ba2] Updating instance_info_cache with network_info: [] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2028.428598] env[63279]: DEBUG oslo_concurrency.lockutils [None req-968ae4c0-2363-46b4-8c8d-1bd213d554cd tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] Releasing lock "refresh_cache-5b5f87cb-cf35-418f-b5bd-b953524a285c" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2028.429132] env[63279]: DEBUG nova.compute.manager [None req-968ae4c0-2363-46b4-8c8d-1bd213d554cd tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] [instance: 5b5f87cb-cf35-418f-b5bd-b953524a285c] Instance network_info: |[{"id": "5576a5f4-6c18-428c-82f9-6cedc1a2b828", "address": "fa:16:3e:1a:a2:31", "network": {"id": "9cfd684f-63e8-44aa-8569-0ab02c790458", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-318205966-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4f07bad15ea5419cbecc5840b4e96d01", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": 
{"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dcf5c3f7-4e33-4f21-b323-3673930b789c", "external-id": "nsx-vlan-transportzone-983", "segmentation_id": 983, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5576a5f4-6c", "ovs_interfaceid": "5576a5f4-6c18-428c-82f9-6cedc1a2b828", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 2028.429625] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-968ae4c0-2363-46b4-8c8d-1bd213d554cd tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] [instance: 5b5f87cb-cf35-418f-b5bd-b953524a285c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:1a:a2:31', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'dcf5c3f7-4e33-4f21-b323-3673930b789c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '5576a5f4-6c18-428c-82f9-6cedc1a2b828', 'vif_model': 'vmxnet3'}] {{(pid=63279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2028.440050] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-968ae4c0-2363-46b4-8c8d-1bd213d554cd tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] Creating folder: Project (4f07bad15ea5419cbecc5840b4e96d01). Parent ref: group-v427491. {{(pid=63279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 2028.440403] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f1390db4-74e5-46f0-9888-a47f6ff692d5 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2028.455209] env[63279]: INFO nova.virt.vmwareapi.vm_util [None req-968ae4c0-2363-46b4-8c8d-1bd213d554cd tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] Created folder: Project (4f07bad15ea5419cbecc5840b4e96d01) in parent group-v427491. [ 2028.455451] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-968ae4c0-2363-46b4-8c8d-1bd213d554cd tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] Creating folder: Instances. Parent ref: group-v427648. {{(pid=63279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 2028.455937] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-aedb5101-1fae-4d98-95b3-d5b1df92f228 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2028.466231] env[63279]: DEBUG nova.compute.manager [None req-22a06ea0-118b-4a5a-86ce-f8246a2ea7e2 tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] [instance: 357f08c9-4de9-4b84-8384-6bf130872f40] Start destroying the instance on the hypervisor. 
{{(pid=63279) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2028.466457] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-22a06ea0-118b-4a5a-86ce-f8246a2ea7e2 tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] [instance: 357f08c9-4de9-4b84-8384-6bf130872f40] Destroying instance {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2028.466885] env[63279]: INFO nova.virt.vmwareapi.vm_util [None req-968ae4c0-2363-46b4-8c8d-1bd213d554cd tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] Created folder: Instances in parent group-v427648. [ 2028.467152] env[63279]: DEBUG oslo.service.loopingcall [None req-968ae4c0-2363-46b4-8c8d-1bd213d554cd tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2028.467931] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d91e31a7-3ecc-4ae6-b099-c9dd3a8a0481 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2028.471800] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5b5f87cb-cf35-418f-b5bd-b953524a285c] Creating VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2028.472072] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2d928c08-d098-4a87-972b-400a83cac03d {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2028.495827] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-22a06ea0-118b-4a5a-86ce-f8246a2ea7e2 tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] [instance: 357f08c9-4de9-4b84-8384-6bf130872f40] Powering off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2028.498462] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-84560d94-4ad3-45db-8e6e-f6d88e01a69d {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2028.500167] env[63279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2028.500167] env[63279]: value = "task-2087307" [ 2028.500167] env[63279]: _type = "Task" [ 2028.500167] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2028.506545] env[63279]: DEBUG oslo_vmware.api [None req-22a06ea0-118b-4a5a-86ce-f8246a2ea7e2 tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] Waiting for the task: (returnval){ [ 2028.506545] env[63279]: value = "task-2087308" [ 2028.506545] env[63279]: _type = "Task" [ 2028.506545] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2028.516491] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087307, 'name': CreateVM_Task} progress is 6%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2028.521903] env[63279]: DEBUG oslo_vmware.api [None req-22a06ea0-118b-4a5a-86ce-f8246a2ea7e2 tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] Task: {'id': task-2087308, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2028.568475] env[63279]: DEBUG oslo_concurrency.lockutils [None req-d6ed46b7-baf3-4c58-92b0-8ca9f050dbbc tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2028.694505] env[63279]: DEBUG nova.compute.manager [req-66995b76-3c6f-4f8c-adfa-e76dcf850787 req-4c515699-8caa-4281-9462-6b6d4814ae9f service nova] [instance: 5b5f87cb-cf35-418f-b5bd-b953524a285c] Received event network-vif-plugged-5576a5f4-6c18-428c-82f9-6cedc1a2b828 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2028.694814] env[63279]: DEBUG oslo_concurrency.lockutils [req-66995b76-3c6f-4f8c-adfa-e76dcf850787 req-4c515699-8caa-4281-9462-6b6d4814ae9f service nova] Acquiring lock "5b5f87cb-cf35-418f-b5bd-b953524a285c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2028.694959] env[63279]: DEBUG oslo_concurrency.lockutils [req-66995b76-3c6f-4f8c-adfa-e76dcf850787 req-4c515699-8caa-4281-9462-6b6d4814ae9f service nova] Lock "5b5f87cb-cf35-418f-b5bd-b953524a285c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2028.695423] env[63279]: DEBUG oslo_concurrency.lockutils [req-66995b76-3c6f-4f8c-adfa-e76dcf850787 req-4c515699-8caa-4281-9462-6b6d4814ae9f service nova] Lock "5b5f87cb-cf35-418f-b5bd-b953524a285c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2028.695799] env[63279]: DEBUG nova.compute.manager [req-66995b76-3c6f-4f8c-adfa-e76dcf850787 req-4c515699-8caa-4281-9462-6b6d4814ae9f service nova] [instance: 5b5f87cb-cf35-418f-b5bd-b953524a285c] No waiting events found dispatching network-vif-plugged-5576a5f4-6c18-428c-82f9-6cedc1a2b828 {{(pid=63279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 2028.696084] env[63279]: WARNING nova.compute.manager [req-66995b76-3c6f-4f8c-adfa-e76dcf850787 req-4c515699-8caa-4281-9462-6b6d4814ae9f service nova] [instance: 5b5f87cb-cf35-418f-b5bd-b953524a285c] Received unexpected event network-vif-plugged-5576a5f4-6c18-428c-82f9-6cedc1a2b828 for instance with vm_state building and task_state spawning. 
[ 2028.696084] env[63279]: DEBUG nova.compute.manager [req-66995b76-3c6f-4f8c-adfa-e76dcf850787 req-4c515699-8caa-4281-9462-6b6d4814ae9f service nova] [instance: 5b5f87cb-cf35-418f-b5bd-b953524a285c] Received event network-changed-5576a5f4-6c18-428c-82f9-6cedc1a2b828 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2028.696287] env[63279]: DEBUG nova.compute.manager [req-66995b76-3c6f-4f8c-adfa-e76dcf850787 req-4c515699-8caa-4281-9462-6b6d4814ae9f service nova] [instance: 5b5f87cb-cf35-418f-b5bd-b953524a285c] Refreshing instance network info cache due to event network-changed-5576a5f4-6c18-428c-82f9-6cedc1a2b828. {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11655}} [ 2028.696826] env[63279]: DEBUG oslo_concurrency.lockutils [req-66995b76-3c6f-4f8c-adfa-e76dcf850787 req-4c515699-8caa-4281-9462-6b6d4814ae9f service nova] Acquiring lock "refresh_cache-5b5f87cb-cf35-418f-b5bd-b953524a285c" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2028.696826] env[63279]: DEBUG oslo_concurrency.lockutils [req-66995b76-3c6f-4f8c-adfa-e76dcf850787 req-4c515699-8caa-4281-9462-6b6d4814ae9f service nova] Acquired lock "refresh_cache-5b5f87cb-cf35-418f-b5bd-b953524a285c" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2028.696969] env[63279]: DEBUG nova.network.neutron [req-66995b76-3c6f-4f8c-adfa-e76dcf850787 req-4c515699-8caa-4281-9462-6b6d4814ae9f service nova] [instance: 5b5f87cb-cf35-418f-b5bd-b953524a285c] Refreshing network info cache for port 5576a5f4-6c18-428c-82f9-6cedc1a2b828 {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2028.742498] env[63279]: DEBUG oslo_concurrency.lockutils [None req-2774b0bb-ce28-4633-a70c-66ac2aee4c3a tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Lock "ad435281-55a0-418a-8400-5c461a5c15ef" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 33.083s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2028.771612] env[63279]: INFO nova.compute.manager [-] [instance: 79032b2a-74f7-4c6d-8f71-f848fe372ba2] Took 1.04 seconds to deallocate network for instance. [ 2029.014592] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087307, 'name': CreateVM_Task, 'duration_secs': 0.485525} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2029.017589] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5b5f87cb-cf35-418f-b5bd-b953524a285c] Created VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2029.018586] env[63279]: DEBUG oslo_concurrency.lockutils [None req-968ae4c0-2363-46b4-8c8d-1bd213d554cd tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2029.018699] env[63279]: DEBUG oslo_concurrency.lockutils [None req-968ae4c0-2363-46b4-8c8d-1bd213d554cd tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2029.019069] env[63279]: DEBUG oslo_concurrency.lockutils [None req-968ae4c0-2363-46b4-8c8d-1bd213d554cd tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2029.022360] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bb646fd9-014d-4f3a-a53f-a6c3674d470e {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2029.024106] env[63279]: DEBUG oslo_vmware.api [None req-22a06ea0-118b-4a5a-86ce-f8246a2ea7e2 tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] Task: {'id': task-2087308, 'name': PowerOffVM_Task, 'duration_secs': 0.223535} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2029.024435] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-22a06ea0-118b-4a5a-86ce-f8246a2ea7e2 tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] [instance: 357f08c9-4de9-4b84-8384-6bf130872f40] Powered off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2029.024609] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-22a06ea0-118b-4a5a-86ce-f8246a2ea7e2 tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] [instance: 357f08c9-4de9-4b84-8384-6bf130872f40] Unregistering the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2029.025592] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a9a6f9be-b519-4920-9cd4-e0df484a6c95 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2029.028619] env[63279]: DEBUG oslo_vmware.api [None req-968ae4c0-2363-46b4-8c8d-1bd213d554cd tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] Waiting for the task: (returnval){ [ 2029.028619] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52a2fad8-fdfe-c6e3-8495-04a6e4f514ac" [ 2029.028619] env[63279]: _type = "Task" [ 2029.028619] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2029.040690] env[63279]: DEBUG oslo_vmware.api [None req-968ae4c0-2363-46b4-8c8d-1bd213d554cd tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52a2fad8-fdfe-c6e3-8495-04a6e4f514ac, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2029.147751] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-22a06ea0-118b-4a5a-86ce-f8246a2ea7e2 tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] [instance: 357f08c9-4de9-4b84-8384-6bf130872f40] Unregistered the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2029.148159] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-22a06ea0-118b-4a5a-86ce-f8246a2ea7e2 tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] [instance: 357f08c9-4de9-4b84-8384-6bf130872f40] Deleting contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2029.148278] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-22a06ea0-118b-4a5a-86ce-f8246a2ea7e2 tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] Deleting the datastore file [datastore1] 357f08c9-4de9-4b84-8384-6bf130872f40 {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2029.149627] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b630a417-e0e6-40aa-9ee2-b70f5ba571b2 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2029.155804] env[63279]: DEBUG oslo_vmware.api [None req-22a06ea0-118b-4a5a-86ce-f8246a2ea7e2 tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] Waiting for the task: (returnval){ [ 2029.155804] env[63279]: value = "task-2087310" [ 2029.155804] env[63279]: _type = "Task" [ 2029.155804] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2029.165029] env[63279]: DEBUG oslo_vmware.api [None req-22a06ea0-118b-4a5a-86ce-f8246a2ea7e2 tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] Task: {'id': task-2087310, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2029.234915] env[63279]: INFO nova.compute.manager [None req-147d4716-cd46-48dd-a8ea-f7700d3313fb tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] [instance: 6388f912-ae70-4e8f-b8e4-ceb02e0f8a51] Rebuilding instance [ 2029.278561] env[63279]: INFO nova.compute.manager [None req-8106d6b8-905a-4c2b-bdd4-1d1f32b348cc tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] [instance: 79032b2a-74f7-4c6d-8f71-f848fe372ba2] Instance disappeared during terminate [ 2029.278708] env[63279]: DEBUG oslo_concurrency.lockutils [None req-8106d6b8-905a-4c2b-bdd4-1d1f32b348cc tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] Lock "79032b2a-74f7-4c6d-8f71-f848fe372ba2" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 3.298s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2029.297704] env[63279]: DEBUG nova.compute.manager [None req-147d4716-cd46-48dd-a8ea-f7700d3313fb tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] [instance: 6388f912-ae70-4e8f-b8e4-ceb02e0f8a51] Checking state {{(pid=63279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2029.299643] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf2b8477-5a3a-42ec-baf8-be1740a09e0a {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2029.334288] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-783d02e2-d8c2-41c6-96b6-afe2cf542452 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2029.344622] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-615cd841-6882-4e32-8e33-00c66b699583 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2029.384432] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf4332df-6e82-4b18-9f33-87768f844508 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2029.395124] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3fc79f1-c025-4f90-802c-76d3836a7dc3 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2029.414331] env[63279]: DEBUG nova.compute.provider_tree [None req-edb6b700-e848-4325-a4d2-78dc8c4a27f3 tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Inventory has not changed in ProviderTree for provider: 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2029.543023] env[63279]: DEBUG oslo_vmware.api [None req-968ae4c0-2363-46b4-8c8d-1bd213d554cd tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52a2fad8-fdfe-c6e3-8495-04a6e4f514ac, 'name': 
SearchDatastore_Task, 'duration_secs': 0.015727} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2029.543023] env[63279]: DEBUG oslo_concurrency.lockutils [None req-968ae4c0-2363-46b4-8c8d-1bd213d554cd tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2029.543023] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-968ae4c0-2363-46b4-8c8d-1bd213d554cd tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] [instance: 5b5f87cb-cf35-418f-b5bd-b953524a285c] Processing image 30887889-e45b-4f67-8b3c-16216e594a90 {{(pid=63279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2029.543023] env[63279]: DEBUG oslo_concurrency.lockutils [None req-968ae4c0-2363-46b4-8c8d-1bd213d554cd tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2029.543459] env[63279]: DEBUG oslo_concurrency.lockutils [None req-968ae4c0-2363-46b4-8c8d-1bd213d554cd tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2029.543459] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-968ae4c0-2363-46b4-8c8d-1bd213d554cd tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2029.543459] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-34cab2b8-41ea-420d-b27b-78839735ede9 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2029.554642] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-968ae4c0-2363-46b4-8c8d-1bd213d554cd tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2029.554852] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-968ae4c0-2363-46b4-8c8d-1bd213d554cd tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2029.555832] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cc47fe76-3692-43dc-9005-3b2d380352d8 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2029.566155] env[63279]: DEBUG oslo_vmware.api [None req-968ae4c0-2363-46b4-8c8d-1bd213d554cd tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] Waiting for the task: (returnval){ [ 2029.566155] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52ddffc6-6714-0d9b-e3fb-bad001f46a21" [ 2029.566155] env[63279]: _type = "Task" [ 2029.566155] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2029.581172] env[63279]: DEBUG oslo_vmware.api [None req-968ae4c0-2363-46b4-8c8d-1bd213d554cd tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52ddffc6-6714-0d9b-e3fb-bad001f46a21, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2029.597335] env[63279]: DEBUG nova.network.neutron [req-66995b76-3c6f-4f8c-adfa-e76dcf850787 req-4c515699-8caa-4281-9462-6b6d4814ae9f service nova] [instance: 5b5f87cb-cf35-418f-b5bd-b953524a285c] Updated VIF entry in instance network info cache for port 5576a5f4-6c18-428c-82f9-6cedc1a2b828. {{(pid=63279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2029.597749] env[63279]: DEBUG nova.network.neutron [req-66995b76-3c6f-4f8c-adfa-e76dcf850787 req-4c515699-8caa-4281-9462-6b6d4814ae9f service nova] [instance: 5b5f87cb-cf35-418f-b5bd-b953524a285c] Updating instance_info_cache with network_info: [{"id": "5576a5f4-6c18-428c-82f9-6cedc1a2b828", "address": "fa:16:3e:1a:a2:31", "network": {"id": "9cfd684f-63e8-44aa-8569-0ab02c790458", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-318205966-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4f07bad15ea5419cbecc5840b4e96d01", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dcf5c3f7-4e33-4f21-b323-3673930b789c", "external-id": "nsx-vlan-transportzone-983", "segmentation_id": 983, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5576a5f4-6c", "ovs_interfaceid": "5576a5f4-6c18-428c-82f9-6cedc1a2b828", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2029.667613] env[63279]: DEBUG oslo_vmware.api [None req-22a06ea0-118b-4a5a-86ce-f8246a2ea7e2 tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] Task: {'id': task-2087310, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.239197} 
completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2029.668047] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-22a06ea0-118b-4a5a-86ce-f8246a2ea7e2 tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] Deleted the datastore file {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2029.668271] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-22a06ea0-118b-4a5a-86ce-f8246a2ea7e2 tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] [instance: 357f08c9-4de9-4b84-8384-6bf130872f40] Deleted contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2029.668512] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-22a06ea0-118b-4a5a-86ce-f8246a2ea7e2 tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] [instance: 357f08c9-4de9-4b84-8384-6bf130872f40] Instance destroyed {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2029.668823] env[63279]: INFO nova.compute.manager [None req-22a06ea0-118b-4a5a-86ce-f8246a2ea7e2 tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] [instance: 357f08c9-4de9-4b84-8384-6bf130872f40] Took 1.20 seconds to destroy the instance on the hypervisor. [ 2029.669149] env[63279]: DEBUG oslo.service.loopingcall [None req-22a06ea0-118b-4a5a-86ce-f8246a2ea7e2 tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2029.669425] env[63279]: DEBUG nova.compute.manager [-] [instance: 357f08c9-4de9-4b84-8384-6bf130872f40] Deallocating network for instance {{(pid=63279) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2029.669595] env[63279]: DEBUG nova.network.neutron [-] [instance: 357f08c9-4de9-4b84-8384-6bf130872f40] deallocate_for_instance() {{(pid=63279) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2029.921447] env[63279]: DEBUG nova.scheduler.client.report [None req-edb6b700-e848-4325-a4d2-78dc8c4a27f3 tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Inventory has not changed for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2030.077064] env[63279]: DEBUG oslo_vmware.api [None req-968ae4c0-2363-46b4-8c8d-1bd213d554cd tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52ddffc6-6714-0d9b-e3fb-bad001f46a21, 'name': SearchDatastore_Task, 'duration_secs': 0.019765} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2030.077891] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-aee73dda-91c9-42aa-a807-0c07fe3113e2 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2030.083938] env[63279]: DEBUG oslo_vmware.api [None req-968ae4c0-2363-46b4-8c8d-1bd213d554cd tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] Waiting for the task: (returnval){ [ 2030.083938] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]527ad14d-07f0-318e-2fd4-05b9b4206221" [ 2030.083938] env[63279]: _type = "Task" [ 2030.083938] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2030.092369] env[63279]: DEBUG oslo_vmware.api [None req-968ae4c0-2363-46b4-8c8d-1bd213d554cd tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]527ad14d-07f0-318e-2fd4-05b9b4206221, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2030.100916] env[63279]: DEBUG oslo_concurrency.lockutils [req-66995b76-3c6f-4f8c-adfa-e76dcf850787 req-4c515699-8caa-4281-9462-6b6d4814ae9f service nova] Releasing lock "refresh_cache-5b5f87cb-cf35-418f-b5bd-b953524a285c" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2030.101281] env[63279]: DEBUG nova.compute.manager [req-66995b76-3c6f-4f8c-adfa-e76dcf850787 req-4c515699-8caa-4281-9462-6b6d4814ae9f service nova] [instance: 8ccb4293-927a-45ba-82e9-9f1b4d5985cc] Received event network-vif-deleted-c2c5d3a4-c909-4508-97e5-c87eee8b7d50 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2030.321276] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-147d4716-cd46-48dd-a8ea-f7700d3313fb tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] [instance: 6388f912-ae70-4e8f-b8e4-ceb02e0f8a51] Powering off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2030.321648] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d0d377ca-9316-4a61-acc7-f91ff36220a2 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2030.328521] env[63279]: DEBUG oslo_vmware.api [None req-147d4716-cd46-48dd-a8ea-f7700d3313fb tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Waiting for the task: (returnval){ [ 2030.328521] env[63279]: value = "task-2087311" [ 2030.328521] env[63279]: _type = "Task" [ 2030.328521] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2030.338810] env[63279]: DEBUG oslo_vmware.api [None req-147d4716-cd46-48dd-a8ea-f7700d3313fb tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Task: {'id': task-2087311, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2030.427487] env[63279]: DEBUG oslo_concurrency.lockutils [None req-edb6b700-e848-4325-a4d2-78dc8c4a27f3 tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.212s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2030.431235] env[63279]: DEBUG oslo_concurrency.lockutils [None req-17f8ab0f-73cf-4901-a75b-de05d0a35bf6 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 23.944s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2030.432155] env[63279]: INFO nova.compute.claims [None req-17f8ab0f-73cf-4901-a75b-de05d0a35bf6 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: 7db0c32d-36a4-4452-bb07-06de0c93ab50] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2030.457701] env[63279]: DEBUG nova.network.neutron [-] [instance: 357f08c9-4de9-4b84-8384-6bf130872f40] Updating instance_info_cache with network_info: [] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2030.460879] env[63279]: INFO nova.scheduler.client.report [None req-edb6b700-e848-4325-a4d2-78dc8c4a27f3 tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Deleted allocations for instance 5d4be656-defe-4332-b97e-e88b107ca4a1 [ 2030.597252] env[63279]: DEBUG oslo_vmware.api [None req-968ae4c0-2363-46b4-8c8d-1bd213d554cd tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]527ad14d-07f0-318e-2fd4-05b9b4206221, 'name': SearchDatastore_Task, 'duration_secs': 0.031329} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2030.597252] env[63279]: DEBUG oslo_concurrency.lockutils [None req-968ae4c0-2363-46b4-8c8d-1bd213d554cd tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2030.597252] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-968ae4c0-2363-46b4-8c8d-1bd213d554cd tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] 5b5f87cb-cf35-418f-b5bd-b953524a285c/5b5f87cb-cf35-418f-b5bd-b953524a285c.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2030.597252] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-da84384f-3061-41f9-874a-86133b4ac5af {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2030.605044] env[63279]: DEBUG oslo_vmware.api [None req-968ae4c0-2363-46b4-8c8d-1bd213d554cd tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] Waiting for the task: (returnval){ [ 2030.605044] env[63279]: value = "task-2087312" [ 2030.605044] env[63279]: _type = "Task" [ 2030.605044] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2030.620070] env[63279]: DEBUG oslo_vmware.api [None req-968ae4c0-2363-46b4-8c8d-1bd213d554cd tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] Task: {'id': task-2087312, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2030.760946] env[63279]: DEBUG oslo_concurrency.lockutils [None req-106d6061-d6a3-483e-96fd-6e6c9f393126 tempest-ServersTestJSON-1697234190 tempest-ServersTestJSON-1697234190-project-member] Acquiring lock "c8b42e3b-b841-4b79-a4f3-ef62577d4902" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2030.761843] env[63279]: DEBUG oslo_concurrency.lockutils [None req-106d6061-d6a3-483e-96fd-6e6c9f393126 tempest-ServersTestJSON-1697234190 tempest-ServersTestJSON-1697234190-project-member] Lock "c8b42e3b-b841-4b79-a4f3-ef62577d4902" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2030.801012] env[63279]: DEBUG nova.compute.manager [req-375bbde9-c804-4306-b52d-cf5551749ada req-3b69385a-c3d1-4d5d-9b54-2af5f8bf7478 service nova] [instance: 0e12ab9b-a701-4e0f-9d96-939090f50494] Received event network-changed-d31aaccc-4f75-4bc4-898d-0f2680b13372 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2030.801242] env[63279]: DEBUG nova.compute.manager [req-375bbde9-c804-4306-b52d-cf5551749ada req-3b69385a-c3d1-4d5d-9b54-2af5f8bf7478 service nova] [instance: 0e12ab9b-a701-4e0f-9d96-939090f50494] Refreshing instance network info cache due to event network-changed-d31aaccc-4f75-4bc4-898d-0f2680b13372. {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11655}} [ 2030.801465] env[63279]: DEBUG oslo_concurrency.lockutils [req-375bbde9-c804-4306-b52d-cf5551749ada req-3b69385a-c3d1-4d5d-9b54-2af5f8bf7478 service nova] Acquiring lock "refresh_cache-0e12ab9b-a701-4e0f-9d96-939090f50494" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2030.801612] env[63279]: DEBUG oslo_concurrency.lockutils [req-375bbde9-c804-4306-b52d-cf5551749ada req-3b69385a-c3d1-4d5d-9b54-2af5f8bf7478 service nova] Acquired lock "refresh_cache-0e12ab9b-a701-4e0f-9d96-939090f50494" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2030.801778] env[63279]: DEBUG nova.network.neutron [req-375bbde9-c804-4306-b52d-cf5551749ada req-3b69385a-c3d1-4d5d-9b54-2af5f8bf7478 service nova] [instance: 0e12ab9b-a701-4e0f-9d96-939090f50494] Refreshing network info cache for port d31aaccc-4f75-4bc4-898d-0f2680b13372 {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2030.839417] env[63279]: DEBUG oslo_vmware.api [None req-147d4716-cd46-48dd-a8ea-f7700d3313fb tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Task: {'id': task-2087311, 'name': PowerOffVM_Task, 'duration_secs': 0.188526} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2030.842294] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-147d4716-cd46-48dd-a8ea-f7700d3313fb tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] [instance: 6388f912-ae70-4e8f-b8e4-ceb02e0f8a51] Powered off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2030.842294] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-147d4716-cd46-48dd-a8ea-f7700d3313fb tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] [instance: 6388f912-ae70-4e8f-b8e4-ceb02e0f8a51] Destroying instance {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2030.842294] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec71ce93-7295-4ac1-8ca4-c362629e4b39 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2030.848028] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-147d4716-cd46-48dd-a8ea-f7700d3313fb tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] [instance: 6388f912-ae70-4e8f-b8e4-ceb02e0f8a51] Unregistering the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2030.848114] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-983c7ea7-b65c-41df-9e11-08e041ad7f39 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2030.963516] env[63279]: INFO nova.compute.manager [-] [instance: 357f08c9-4de9-4b84-8384-6bf130872f40] Took 1.29 seconds to deallocate network for instance. 
[ 2030.971795] env[63279]: DEBUG oslo_concurrency.lockutils [None req-edb6b700-e848-4325-a4d2-78dc8c4a27f3 tempest-ListServerFiltersTestJSON-1458612431 tempest-ListServerFiltersTestJSON-1458612431-project-member] Lock "5d4be656-defe-4332-b97e-e88b107ca4a1" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 37.908s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2031.037453] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-147d4716-cd46-48dd-a8ea-f7700d3313fb tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] [instance: 6388f912-ae70-4e8f-b8e4-ceb02e0f8a51] Unregistered the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2031.037696] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-147d4716-cd46-48dd-a8ea-f7700d3313fb tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] [instance: 6388f912-ae70-4e8f-b8e4-ceb02e0f8a51] Deleting contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2031.037882] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-147d4716-cd46-48dd-a8ea-f7700d3313fb tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Deleting the datastore file [datastore1] 6388f912-ae70-4e8f-b8e4-ceb02e0f8a51 {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2031.038199] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7e627a82-0951-4e54-8752-3485c26685a5 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2031.044602] env[63279]: DEBUG oslo_vmware.api [None req-147d4716-cd46-48dd-a8ea-f7700d3313fb tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Waiting for the task: (returnval){ [ 2031.044602] env[63279]: value = "task-2087314" [ 2031.044602] env[63279]: _type = "Task" [ 2031.044602] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2031.053215] env[63279]: DEBUG oslo_vmware.api [None req-147d4716-cd46-48dd-a8ea-f7700d3313fb tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Task: {'id': task-2087314, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2031.116128] env[63279]: DEBUG oslo_vmware.api [None req-968ae4c0-2363-46b4-8c8d-1bd213d554cd tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] Task: {'id': task-2087312, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2031.472868] env[63279]: DEBUG oslo_concurrency.lockutils [None req-22a06ea0-118b-4a5a-86ce-f8246a2ea7e2 tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2031.566590] env[63279]: DEBUG oslo_vmware.api [None req-147d4716-cd46-48dd-a8ea-f7700d3313fb tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Task: {'id': task-2087314, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.314317} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2031.569390] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-147d4716-cd46-48dd-a8ea-f7700d3313fb tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Deleted the datastore file {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2031.569616] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-147d4716-cd46-48dd-a8ea-f7700d3313fb tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] [instance: 6388f912-ae70-4e8f-b8e4-ceb02e0f8a51] Deleted contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2031.569813] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-147d4716-cd46-48dd-a8ea-f7700d3313fb tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] [instance: 6388f912-ae70-4e8f-b8e4-ceb02e0f8a51] Instance destroyed {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2031.615919] env[63279]: DEBUG oslo_vmware.api [None req-968ae4c0-2363-46b4-8c8d-1bd213d554cd tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] Task: {'id': task-2087312, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.579749} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2031.617514] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-968ae4c0-2363-46b4-8c8d-1bd213d554cd tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] 5b5f87cb-cf35-418f-b5bd-b953524a285c/5b5f87cb-cf35-418f-b5bd-b953524a285c.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2031.617514] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-968ae4c0-2363-46b4-8c8d-1bd213d554cd tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] [instance: 5b5f87cb-cf35-418f-b5bd-b953524a285c] Extending root virtual disk to 1048576 {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2031.620061] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b19454a8-17d2-497e-9998-60aecb5b830a {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2031.628078] env[63279]: DEBUG oslo_vmware.api [None req-968ae4c0-2363-46b4-8c8d-1bd213d554cd tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] Waiting for the task: (returnval){ [ 2031.628078] env[63279]: value = "task-2087315" [ 2031.628078] env[63279]: _type = "Task" [ 2031.628078] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2031.636421] env[63279]: DEBUG oslo_vmware.api [None req-968ae4c0-2363-46b4-8c8d-1bd213d554cd tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] Task: {'id': task-2087315, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2031.759656] env[63279]: DEBUG nova.network.neutron [req-375bbde9-c804-4306-b52d-cf5551749ada req-3b69385a-c3d1-4d5d-9b54-2af5f8bf7478 service nova] [instance: 0e12ab9b-a701-4e0f-9d96-939090f50494] Updated VIF entry in instance network info cache for port d31aaccc-4f75-4bc4-898d-0f2680b13372. 
{{(pid=63279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2031.760108] env[63279]: DEBUG nova.network.neutron [req-375bbde9-c804-4306-b52d-cf5551749ada req-3b69385a-c3d1-4d5d-9b54-2af5f8bf7478 service nova] [instance: 0e12ab9b-a701-4e0f-9d96-939090f50494] Updating instance_info_cache with network_info: [{"id": "d31aaccc-4f75-4bc4-898d-0f2680b13372", "address": "fa:16:3e:ca:57:f6", "network": {"id": "26ed2848-6f14-4264-af0d-f08d62ab4413", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-659194361-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.139", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e931e0ddf1b840fdb8743fbdba05b28d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ccf76700-491b-4462-ab19-e6d3a9ff87ac", "external-id": "nsx-vlan-transportzone-956", "segmentation_id": 956, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd31aaccc-4f", "ovs_interfaceid": "d31aaccc-4f75-4bc4-898d-0f2680b13372", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2031.975191] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80025521-2382-48eb-a245-66e79f441632 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2031.985281] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b02722a0-4668-40f9-9de3-f1cab9584769 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2032.026115] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2022b2a-8110-44a4-9645-b0e2e452d657 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2032.035654] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-262a1c3c-d54b-4fb3-99f0-dd6e6aff1a7f {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2032.052529] env[63279]: DEBUG nova.compute.provider_tree [None req-17f8ab0f-73cf-4901-a75b-de05d0a35bf6 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Inventory has not changed in ProviderTree for provider: 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2032.138752] env[63279]: DEBUG oslo_vmware.api [None req-968ae4c0-2363-46b4-8c8d-1bd213d554cd tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] Task: {'id': task-2087315, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.063369} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2032.138899] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-968ae4c0-2363-46b4-8c8d-1bd213d554cd tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] [instance: 5b5f87cb-cf35-418f-b5bd-b953524a285c] Extended root virtual disk {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2032.139705] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-680d5197-a4f7-463e-882d-837d81e22187 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2032.170100] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-968ae4c0-2363-46b4-8c8d-1bd213d554cd tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] [instance: 5b5f87cb-cf35-418f-b5bd-b953524a285c] Reconfiguring VM instance instance-00000039 to attach disk [datastore1] 5b5f87cb-cf35-418f-b5bd-b953524a285c/5b5f87cb-cf35-418f-b5bd-b953524a285c.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2032.170449] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f78b1c14-88dd-4a94-b4d1-6464a6bc3a71 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2032.190677] env[63279]: DEBUG oslo_vmware.api [None req-968ae4c0-2363-46b4-8c8d-1bd213d554cd tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] Waiting for the task: (returnval){ [ 2032.190677] env[63279]: value = "task-2087316" [ 2032.190677] env[63279]: _type = "Task" [ 2032.190677] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2032.200266] env[63279]: DEBUG oslo_vmware.api [None req-968ae4c0-2363-46b4-8c8d-1bd213d554cd tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] Task: {'id': task-2087316, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2032.263986] env[63279]: DEBUG oslo_concurrency.lockutils [req-375bbde9-c804-4306-b52d-cf5551749ada req-3b69385a-c3d1-4d5d-9b54-2af5f8bf7478 service nova] Releasing lock "refresh_cache-0e12ab9b-a701-4e0f-9d96-939090f50494" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2032.264512] env[63279]: DEBUG nova.compute.manager [req-375bbde9-c804-4306-b52d-cf5551749ada req-3b69385a-c3d1-4d5d-9b54-2af5f8bf7478 service nova] [instance: 357f08c9-4de9-4b84-8384-6bf130872f40] Received event network-vif-deleted-4160b9e6-5e90-458c-bb0f-afc6be383dc1 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2032.505862] env[63279]: DEBUG oslo_vmware.rw_handles [None req-9d9741e8-1ae9-4073-930a-89163f440e1a tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52fb6da5-b4fe-c297-cca3-7a95159f9b54/disk-0.vmdk. 
{{(pid=63279) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 2032.506844] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-148d1a08-e7f4-43e6-8b45-341f0d3ed941 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2032.515701] env[63279]: DEBUG oslo_vmware.rw_handles [None req-9d9741e8-1ae9-4073-930a-89163f440e1a tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52fb6da5-b4fe-c297-cca3-7a95159f9b54/disk-0.vmdk is in state: ready. {{(pid=63279) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 2032.515988] env[63279]: ERROR oslo_vmware.rw_handles [None req-9d9741e8-1ae9-4073-930a-89163f440e1a tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52fb6da5-b4fe-c297-cca3-7a95159f9b54/disk-0.vmdk due to incomplete transfer. [ 2032.516242] env[63279]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-b870425b-cbec-4dad-a731-d5381f70f043 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2032.525349] env[63279]: DEBUG oslo_vmware.rw_handles [None req-9d9741e8-1ae9-4073-930a-89163f440e1a tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52fb6da5-b4fe-c297-cca3-7a95159f9b54/disk-0.vmdk. {{(pid=63279) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 2032.525564] env[63279]: DEBUG nova.virt.vmwareapi.images [None req-9d9741e8-1ae9-4073-930a-89163f440e1a tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] [instance: 64e92bfc-c0d0-4918-9ba2-45ffedbf7e39] Uploaded image cf3576cc-916a-43f4-926d-5dee7d5ab54a to the Glance image server {{(pid=63279) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 2032.527952] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-9d9741e8-1ae9-4073-930a-89163f440e1a tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] [instance: 64e92bfc-c0d0-4918-9ba2-45ffedbf7e39] Destroying the VM {{(pid=63279) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 2032.528862] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-b6145437-eb47-431d-b81c-87ffb19c5595 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2032.536211] env[63279]: DEBUG oslo_vmware.api [None req-9d9741e8-1ae9-4073-930a-89163f440e1a tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Waiting for the task: (returnval){ [ 2032.536211] env[63279]: value = "task-2087317" [ 2032.536211] env[63279]: _type = "Task" [ 2032.536211] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2032.550248] env[63279]: DEBUG oslo_vmware.api [None req-9d9741e8-1ae9-4073-930a-89163f440e1a tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Task: {'id': task-2087317, 'name': Destroy_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2032.557018] env[63279]: DEBUG nova.scheduler.client.report [None req-17f8ab0f-73cf-4901-a75b-de05d0a35bf6 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Inventory has not changed for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2032.601396] env[63279]: DEBUG nova.virt.hardware [None req-147d4716-cd46-48dd-a8ea-f7700d3313fb tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-13T17:30:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-13T17:30:01Z,direct_url=,disk_format='vmdk',id=30887889-e45b-4f67-8b3c-16216e594a90,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a935e86eee0a4c38adfe0367d2097a61',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-13T17:30:02Z,virtual_size=,visibility=), allow threads: False {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2032.602365] env[63279]: DEBUG nova.virt.hardware [None req-147d4716-cd46-48dd-a8ea-f7700d3313fb tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Flavor limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2032.602365] env[63279]: DEBUG nova.virt.hardware [None req-147d4716-cd46-48dd-a8ea-f7700d3313fb tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Image limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2032.602365] env[63279]: DEBUG nova.virt.hardware [None req-147d4716-cd46-48dd-a8ea-f7700d3313fb tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Flavor pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2032.602365] env[63279]: DEBUG nova.virt.hardware [None req-147d4716-cd46-48dd-a8ea-f7700d3313fb tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Image pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2032.602365] env[63279]: DEBUG nova.virt.hardware [None req-147d4716-cd46-48dd-a8ea-f7700d3313fb tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2032.602584] env[63279]: DEBUG 
nova.virt.hardware [None req-147d4716-cd46-48dd-a8ea-f7700d3313fb tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 2032.602649] env[63279]: DEBUG nova.virt.hardware [None req-147d4716-cd46-48dd-a8ea-f7700d3313fb tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 2032.602852] env[63279]: DEBUG nova.virt.hardware [None req-147d4716-cd46-48dd-a8ea-f7700d3313fb tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Got 1 possible topologies {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2032.603033] env[63279]: DEBUG nova.virt.hardware [None req-147d4716-cd46-48dd-a8ea-f7700d3313fb tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2032.603215] env[63279]: DEBUG nova.virt.hardware [None req-147d4716-cd46-48dd-a8ea-f7700d3313fb tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2032.604104] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-503bdcc7-c216-4b57-aae2-53aba27e6628 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2032.614198] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90ebca9a-2515-4c5d-94f0-dd266b7b7c09 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2032.630353] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-147d4716-cd46-48dd-a8ea-f7700d3313fb tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] [instance: 6388f912-ae70-4e8f-b8e4-ceb02e0f8a51] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:9a:65:be', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '678ebbe4-4c53-4eaf-a689-93981310f37d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'bc9f3899-95c1-4e79-b121-03c9a2c0bc44', 'vif_model': 'vmxnet3'}] {{(pid=63279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2032.639604] env[63279]: DEBUG oslo.service.loopingcall [None req-147d4716-cd46-48dd-a8ea-f7700d3313fb tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2032.639865] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6388f912-ae70-4e8f-b8e4-ceb02e0f8a51] Creating VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2032.640181] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ab3e078b-820f-443f-b922-2ebf9b2fee5d {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2032.661876] env[63279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2032.661876] env[63279]: value = "task-2087318" [ 2032.661876] env[63279]: _type = "Task" [ 2032.661876] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2032.669888] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087318, 'name': CreateVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2032.700910] env[63279]: DEBUG oslo_vmware.api [None req-968ae4c0-2363-46b4-8c8d-1bd213d554cd tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] Task: {'id': task-2087316, 'name': ReconfigVM_Task, 'duration_secs': 0.39886} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2032.701324] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-968ae4c0-2363-46b4-8c8d-1bd213d554cd tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] [instance: 5b5f87cb-cf35-418f-b5bd-b953524a285c] Reconfigured VM instance instance-00000039 to attach disk [datastore1] 5b5f87cb-cf35-418f-b5bd-b953524a285c/5b5f87cb-cf35-418f-b5bd-b953524a285c.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2032.702029] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ccdcecb1-f7a1-4f93-a87d-a9086ccad1ba {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2032.708640] env[63279]: DEBUG oslo_vmware.api [None req-968ae4c0-2363-46b4-8c8d-1bd213d554cd tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] Waiting for the task: (returnval){ [ 2032.708640] env[63279]: value = "task-2087319" [ 2032.708640] env[63279]: _type = "Task" [ 2032.708640] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2032.723167] env[63279]: DEBUG oslo_vmware.api [None req-968ae4c0-2363-46b4-8c8d-1bd213d554cd tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] Task: {'id': task-2087319, 'name': Rename_Task} progress is 5%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2033.047830] env[63279]: DEBUG oslo_vmware.api [None req-9d9741e8-1ae9-4073-930a-89163f440e1a tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Task: {'id': task-2087317, 'name': Destroy_Task, 'duration_secs': 0.501121} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2033.048263] env[63279]: INFO nova.virt.vmwareapi.vm_util [None req-9d9741e8-1ae9-4073-930a-89163f440e1a tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] [instance: 64e92bfc-c0d0-4918-9ba2-45ffedbf7e39] Destroyed the VM [ 2033.048516] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-9d9741e8-1ae9-4073-930a-89163f440e1a tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] [instance: 64e92bfc-c0d0-4918-9ba2-45ffedbf7e39] Deleting Snapshot of the VM instance {{(pid=63279) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 2033.048830] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-8ca145c4-e4c4-4585-9fd9-b13d8b3701bb {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2033.054987] env[63279]: DEBUG oslo_vmware.api [None req-9d9741e8-1ae9-4073-930a-89163f440e1a tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Waiting for the task: (returnval){ [ 2033.054987] env[63279]: value = "task-2087320" [ 2033.054987] env[63279]: _type = "Task" [ 2033.054987] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2033.059109] env[63279]: DEBUG oslo_concurrency.lockutils [None req-17f8ab0f-73cf-4901-a75b-de05d0a35bf6 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.629s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2033.060376] env[63279]: DEBUG nova.compute.manager [None req-17f8ab0f-73cf-4901-a75b-de05d0a35bf6 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: 7db0c32d-36a4-4452-bb07-06de0c93ab50] Start building networks asynchronously for instance. {{(pid=63279) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 2033.065475] env[63279]: DEBUG oslo_concurrency.lockutils [None req-dd23a8b6-623a-4385-a528-bdfe10dd648b tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 23.226s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2033.066291] env[63279]: DEBUG nova.objects.instance [None req-dd23a8b6-623a-4385-a528-bdfe10dd648b tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] Lazy-loading 'resources' on Instance uuid acf95fad-316c-4605-9008-24d4d7c05892 {{(pid=63279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2033.066845] env[63279]: DEBUG oslo_vmware.api [None req-9d9741e8-1ae9-4073-930a-89163f440e1a tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Task: {'id': task-2087320, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2033.174678] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087318, 'name': CreateVM_Task} progress is 25%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2033.220290] env[63279]: DEBUG oslo_vmware.api [None req-968ae4c0-2363-46b4-8c8d-1bd213d554cd tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] Task: {'id': task-2087319, 'name': Rename_Task, 'duration_secs': 0.150298} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2033.220683] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-968ae4c0-2363-46b4-8c8d-1bd213d554cd tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] [instance: 5b5f87cb-cf35-418f-b5bd-b953524a285c] Powering on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2033.220982] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b5e9f569-8072-40c3-9f22-0d8ba939bbd6 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2033.230023] env[63279]: DEBUG oslo_vmware.api [None req-968ae4c0-2363-46b4-8c8d-1bd213d554cd tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] Waiting for the task: (returnval){ [ 2033.230023] env[63279]: value = "task-2087321" [ 2033.230023] env[63279]: _type = "Task" [ 2033.230023] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2033.241856] env[63279]: DEBUG oslo_vmware.api [None req-968ae4c0-2363-46b4-8c8d-1bd213d554cd tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] Task: {'id': task-2087321, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2033.318156] env[63279]: DEBUG oslo_concurrency.lockutils [None req-531b773c-e5fb-4ac2-9085-d70799185be7 tempest-ServerShowV257Test-1874161788 tempest-ServerShowV257Test-1874161788-project-member] Acquiring lock "f7f88f1a-a81a-4208-88d7-6a264e642ab1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2033.318407] env[63279]: DEBUG oslo_concurrency.lockutils [None req-531b773c-e5fb-4ac2-9085-d70799185be7 tempest-ServerShowV257Test-1874161788 tempest-ServerShowV257Test-1874161788-project-member] Lock "f7f88f1a-a81a-4208-88d7-6a264e642ab1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2033.565925] env[63279]: DEBUG oslo_vmware.api [None req-9d9741e8-1ae9-4073-930a-89163f440e1a tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Task: {'id': task-2087320, 'name': RemoveSnapshot_Task, 'duration_secs': 0.488469} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2033.566998] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-9d9741e8-1ae9-4073-930a-89163f440e1a tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] [instance: 64e92bfc-c0d0-4918-9ba2-45ffedbf7e39] Deleted Snapshot of the VM instance {{(pid=63279) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 2033.567574] env[63279]: INFO nova.compute.manager [None req-9d9741e8-1ae9-4073-930a-89163f440e1a tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] [instance: 64e92bfc-c0d0-4918-9ba2-45ffedbf7e39] Took 13.82 seconds to snapshot the instance on the hypervisor. [ 2033.574467] env[63279]: DEBUG nova.compute.utils [None req-17f8ab0f-73cf-4901-a75b-de05d0a35bf6 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Using /dev/sd instead of None {{(pid=63279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2033.577722] env[63279]: DEBUG nova.compute.manager [None req-17f8ab0f-73cf-4901-a75b-de05d0a35bf6 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: 7db0c32d-36a4-4452-bb07-06de0c93ab50] Allocating IP information in the background. {{(pid=63279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 2033.577860] env[63279]: DEBUG nova.network.neutron [None req-17f8ab0f-73cf-4901-a75b-de05d0a35bf6 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: 7db0c32d-36a4-4452-bb07-06de0c93ab50] allocate_for_instance() {{(pid=63279) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2033.621882] env[63279]: DEBUG nova.policy [None req-17f8ab0f-73cf-4901-a75b-de05d0a35bf6 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '655d692da88947b89104e1f14f7d71f5', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a5a719a21fe248c49d0d0151d218866b', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63279) authorize /opt/stack/nova/nova/policy.py:192}} [ 2033.675227] env[63279]: DEBUG oslo_concurrency.lockutils [None req-1dd5613f-21e4-4e2c-ac2b-68bf8f869556 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Acquiring lock "64e92bfc-c0d0-4918-9ba2-45ffedbf7e39" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2033.675426] env[63279]: DEBUG oslo_concurrency.lockutils [None req-1dd5613f-21e4-4e2c-ac2b-68bf8f869556 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Lock "64e92bfc-c0d0-4918-9ba2-45ffedbf7e39" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2033.675658] env[63279]: DEBUG oslo_concurrency.lockutils [None req-1dd5613f-21e4-4e2c-ac2b-68bf8f869556 tempest-ImagesTestJSON-604336972 
tempest-ImagesTestJSON-604336972-project-member] Acquiring lock "64e92bfc-c0d0-4918-9ba2-45ffedbf7e39-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2033.675887] env[63279]: DEBUG oslo_concurrency.lockutils [None req-1dd5613f-21e4-4e2c-ac2b-68bf8f869556 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Lock "64e92bfc-c0d0-4918-9ba2-45ffedbf7e39-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2033.676087] env[63279]: DEBUG oslo_concurrency.lockutils [None req-1dd5613f-21e4-4e2c-ac2b-68bf8f869556 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Lock "64e92bfc-c0d0-4918-9ba2-45ffedbf7e39-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2033.681429] env[63279]: INFO nova.compute.manager [None req-1dd5613f-21e4-4e2c-ac2b-68bf8f869556 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] [instance: 64e92bfc-c0d0-4918-9ba2-45ffedbf7e39] Terminating instance [ 2033.689308] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087318, 'name': CreateVM_Task} progress is 25%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2033.741229] env[63279]: DEBUG oslo_vmware.api [None req-968ae4c0-2363-46b4-8c8d-1bd213d554cd tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] Task: {'id': task-2087321, 'name': PowerOnVM_Task, 'duration_secs': 0.502846} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2033.741554] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-968ae4c0-2363-46b4-8c8d-1bd213d554cd tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] [instance: 5b5f87cb-cf35-418f-b5bd-b953524a285c] Powered on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2033.741770] env[63279]: INFO nova.compute.manager [None req-968ae4c0-2363-46b4-8c8d-1bd213d554cd tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] [instance: 5b5f87cb-cf35-418f-b5bd-b953524a285c] Took 8.44 seconds to spawn the instance on the hypervisor. 
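The lock bookkeeping records above ("Acquiring lock ... by ...", "acquired ... :: waited Ns", '"released" ... :: held Ns') come from oslo.concurrency's lockutils wrappers, i.e. the "inner" and "lock" functions named in the trailers (lockutils.py:402/407/421 for the decorator, :310/313/331 for the context manager). A minimal sketch of that usage pattern follows; the lock names and the decorated function are illustrative placeholders, not Nova's actual code.

    # Assumed illustration of the oslo_concurrency.lockutils pattern behind
    # the Acquiring/acquired/released records; names are placeholders.
    from oslo_concurrency import lockutils

    @lockutils.synchronized('compute_resources')
    def update_usage():
        # Runs with the named in-process lock held; the decorator's wrapper
        # ("inner" in the log trailers) logs how long the caller waited for
        # the lock and how long it was held.
        pass

    # The same semantics are available as a context manager, which is what
    # produces the records pointing at lockutils.py:310/313/331 above.
    with lockutils.lock('refresh_cache-example-instance-uuid'):
        update_usage()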
[ 2033.741955] env[63279]: DEBUG nova.compute.manager [None req-968ae4c0-2363-46b4-8c8d-1bd213d554cd tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] [instance: 5b5f87cb-cf35-418f-b5bd-b953524a285c] Checking state {{(pid=63279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2033.743556] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85f5a332-ba54-401a-978e-2bc9a4de15e1 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2034.017607] env[63279]: DEBUG nova.network.neutron [None req-17f8ab0f-73cf-4901-a75b-de05d0a35bf6 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: 7db0c32d-36a4-4452-bb07-06de0c93ab50] Successfully created port: 5518a04b-dc37-4dc2-89d4-059d6e54f634 {{(pid=63279) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2034.077874] env[63279]: DEBUG nova.compute.manager [None req-17f8ab0f-73cf-4901-a75b-de05d0a35bf6 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: 7db0c32d-36a4-4452-bb07-06de0c93ab50] Start building block device mappings for instance. {{(pid=63279) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 2034.081128] env[63279]: DEBUG nova.compute.manager [None req-9d9741e8-1ae9-4073-930a-89163f440e1a tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] [instance: 64e92bfc-c0d0-4918-9ba2-45ffedbf7e39] Instance disappeared during snapshot {{(pid=63279) _snapshot_instance /opt/stack/nova/nova/compute/manager.py:4595}} [ 2034.098647] env[63279]: DEBUG nova.compute.manager [None req-9d9741e8-1ae9-4073-930a-89163f440e1a tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Image not found during clean up cf3576cc-916a-43f4-926d-5dee7d5ab54a {{(pid=63279) _snapshot_instance /opt/stack/nova/nova/compute/manager.py:4601}} [ 2034.107888] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb22d1b4-b2ac-4c90-ac8d-3477873dd628 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2034.123869] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6b43577-da76-4970-a8e4-caee70e6f8d8 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2034.157178] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-674ea033-dd9f-489d-9fc8-aa076ac4654f {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2034.164977] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84d38ea0-dfa2-4043-9a14-1986f7e3ff2b {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2034.178293] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087318, 'name': CreateVM_Task} progress is 25%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2034.186674] env[63279]: DEBUG nova.compute.provider_tree [None req-dd23a8b6-623a-4385-a528-bdfe10dd648b tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] Inventory has not changed in ProviderTree for provider: 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2034.192481] env[63279]: DEBUG nova.compute.manager [None req-1dd5613f-21e4-4e2c-ac2b-68bf8f869556 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] [instance: 64e92bfc-c0d0-4918-9ba2-45ffedbf7e39] Start destroying the instance on the hypervisor. {{(pid=63279) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2034.192625] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-1dd5613f-21e4-4e2c-ac2b-68bf8f869556 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] [instance: 64e92bfc-c0d0-4918-9ba2-45ffedbf7e39] Destroying instance {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2034.193490] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0f6956b-bebe-4fb0-b600-dc2b12e28e65 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2034.201532] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-1dd5613f-21e4-4e2c-ac2b-68bf8f869556 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] [instance: 64e92bfc-c0d0-4918-9ba2-45ffedbf7e39] Unregistering the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2034.201776] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-98971954-d326-4767-bef1-6ce255786fc3 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2034.268370] env[63279]: INFO nova.compute.manager [None req-968ae4c0-2363-46b4-8c8d-1bd213d554cd tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] [instance: 5b5f87cb-cf35-418f-b5bd-b953524a285c] Took 47.20 seconds to build instance. [ 2034.299126] env[63279]: DEBUG oslo_concurrency.lockutils [None req-8600dfea-b562-47c8-be02-b00ae841b764 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Acquiring lock "f2a68d73-49d6-4b38-aff1-c2eb850f2ca6" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2034.299126] env[63279]: DEBUG oslo_concurrency.lockutils [None req-8600dfea-b562-47c8-be02-b00ae841b764 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Lock "f2a68d73-49d6-4b38-aff1-c2eb850f2ca6" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2034.679345] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087318, 'name': CreateVM_Task} progress is 25%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2034.690911] env[63279]: DEBUG nova.scheduler.client.report [None req-dd23a8b6-623a-4385-a528-bdfe10dd648b tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] Inventory has not changed for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2034.771431] env[63279]: DEBUG oslo_concurrency.lockutils [None req-968ae4c0-2363-46b4-8c8d-1bd213d554cd tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] Lock "5b5f87cb-cf35-418f-b5bd-b953524a285c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 73.225s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2035.091164] env[63279]: DEBUG nova.compute.manager [None req-17f8ab0f-73cf-4901-a75b-de05d0a35bf6 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: 7db0c32d-36a4-4452-bb07-06de0c93ab50] Start spawning the instance on the hypervisor. {{(pid=63279) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 2035.121668] env[63279]: DEBUG nova.virt.hardware [None req-17f8ab0f-73cf-4901-a75b-de05d0a35bf6 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-13T17:30:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-13T17:30:01Z,direct_url=,disk_format='vmdk',id=30887889-e45b-4f67-8b3c-16216e594a90,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a935e86eee0a4c38adfe0367d2097a61',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-13T17:30:02Z,virtual_size=,visibility=), allow threads: False {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2035.121921] env[63279]: DEBUG nova.virt.hardware [None req-17f8ab0f-73cf-4901-a75b-de05d0a35bf6 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Flavor limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2035.122171] env[63279]: DEBUG nova.virt.hardware [None req-17f8ab0f-73cf-4901-a75b-de05d0a35bf6 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Image limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2035.122382] env[63279]: DEBUG nova.virt.hardware [None req-17f8ab0f-73cf-4901-a75b-de05d0a35bf6 tempest-DeleteServersTestJSON-2126661104 
tempest-DeleteServersTestJSON-2126661104-project-member] Flavor pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2035.122530] env[63279]: DEBUG nova.virt.hardware [None req-17f8ab0f-73cf-4901-a75b-de05d0a35bf6 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Image pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2035.122680] env[63279]: DEBUG nova.virt.hardware [None req-17f8ab0f-73cf-4901-a75b-de05d0a35bf6 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2035.122918] env[63279]: DEBUG nova.virt.hardware [None req-17f8ab0f-73cf-4901-a75b-de05d0a35bf6 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 2035.124017] env[63279]: DEBUG nova.virt.hardware [None req-17f8ab0f-73cf-4901-a75b-de05d0a35bf6 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 2035.124017] env[63279]: DEBUG nova.virt.hardware [None req-17f8ab0f-73cf-4901-a75b-de05d0a35bf6 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Got 1 possible topologies {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2035.124017] env[63279]: DEBUG nova.virt.hardware [None req-17f8ab0f-73cf-4901-a75b-de05d0a35bf6 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2035.124017] env[63279]: DEBUG nova.virt.hardware [None req-17f8ab0f-73cf-4901-a75b-de05d0a35bf6 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2035.124496] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17f02e97-5804-4fe5-a2f8-cef68a473ea3 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2035.133363] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84a8bcb1-70af-40ad-bbe2-1b5a85bd9f65 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2035.180167] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087318, 'name': CreateVM_Task} progress is 25%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2035.196250] env[63279]: DEBUG oslo_concurrency.lockutils [None req-dd23a8b6-623a-4385-a528-bdfe10dd648b tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.131s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2035.200972] env[63279]: DEBUG oslo_concurrency.lockutils [None req-5ccf4cc3-01a1-4613-8c3b-fa276e803cd1 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 24.755s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2035.200972] env[63279]: DEBUG nova.objects.instance [None req-5ccf4cc3-01a1-4613-8c3b-fa276e803cd1 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Lazy-loading 'resources' on Instance uuid 32789822-cb54-43e7-beae-b5ed3002f4ad {{(pid=63279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2035.228189] env[63279]: INFO nova.scheduler.client.report [None req-dd23a8b6-623a-4385-a528-bdfe10dd648b tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] Deleted allocations for instance acf95fad-316c-4605-9008-24d4d7c05892 [ 2035.276452] env[63279]: DEBUG nova.compute.manager [None req-09490af0-3c5b-4db1-a4ea-0615d5cbf91e tempest-ServerAddressesTestJSON-1242830333 tempest-ServerAddressesTestJSON-1242830333-project-member] [instance: 246f0945-7290-4cb7-a982-b17cb1573002] Starting instance... 
{{(pid=63279) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 2035.512523] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-1dd5613f-21e4-4e2c-ac2b-68bf8f869556 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] [instance: 64e92bfc-c0d0-4918-9ba2-45ffedbf7e39] Unregistered the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2035.512754] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-1dd5613f-21e4-4e2c-ac2b-68bf8f869556 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] [instance: 64e92bfc-c0d0-4918-9ba2-45ffedbf7e39] Deleting contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2035.512941] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-1dd5613f-21e4-4e2c-ac2b-68bf8f869556 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Deleting the datastore file [datastore1] 64e92bfc-c0d0-4918-9ba2-45ffedbf7e39 {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2035.513236] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-68e04b1f-4a87-4c6a-ba0b-996fd6fec418 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2035.520068] env[63279]: DEBUG oslo_vmware.api [None req-1dd5613f-21e4-4e2c-ac2b-68bf8f869556 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Waiting for the task: (returnval){ [ 2035.520068] env[63279]: value = "task-2087323" [ 2035.520068] env[63279]: _type = "Task" [ 2035.520068] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2035.530364] env[63279]: DEBUG oslo_vmware.api [None req-1dd5613f-21e4-4e2c-ac2b-68bf8f869556 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Task: {'id': task-2087323, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2035.685208] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087318, 'name': CreateVM_Task, 'duration_secs': 2.956812} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2035.685208] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6388f912-ae70-4e8f-b8e4-ceb02e0f8a51] Created VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2035.685208] env[63279]: DEBUG oslo_concurrency.lockutils [None req-147d4716-cd46-48dd-a8ea-f7700d3313fb tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2035.685208] env[63279]: DEBUG oslo_concurrency.lockutils [None req-147d4716-cd46-48dd-a8ea-f7700d3313fb tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2035.685208] env[63279]: DEBUG oslo_concurrency.lockutils [None req-147d4716-cd46-48dd-a8ea-f7700d3313fb tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2035.685208] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a7756a27-94f8-4912-88e8-6624fe1a9bdc {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2035.690604] env[63279]: DEBUG oslo_vmware.api [None req-147d4716-cd46-48dd-a8ea-f7700d3313fb tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Waiting for the task: (returnval){ [ 2035.690604] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52b34273-a45a-2495-2f09-c2b60698ffcd" [ 2035.690604] env[63279]: _type = "Task" [ 2035.690604] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2035.700748] env[63279]: DEBUG oslo_vmware.api [None req-147d4716-cd46-48dd-a8ea-f7700d3313fb tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52b34273-a45a-2495-2f09-c2b60698ffcd, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2035.741702] env[63279]: DEBUG oslo_concurrency.lockutils [None req-dd23a8b6-623a-4385-a528-bdfe10dd648b tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] Lock "acf95fad-316c-4605-9008-24d4d7c05892" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 31.598s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2035.802537] env[63279]: DEBUG oslo_concurrency.lockutils [None req-09490af0-3c5b-4db1-a4ea-0615d5cbf91e tempest-ServerAddressesTestJSON-1242830333 tempest-ServerAddressesTestJSON-1242830333-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2035.854840] env[63279]: DEBUG nova.compute.manager [req-d069eeb4-00e2-4ea6-b3b3-8c87d46198b4 req-098be670-4e2b-46cc-9d2b-9c3157280cf7 service nova] [instance: 5b5f87cb-cf35-418f-b5bd-b953524a285c] Received event network-changed-5576a5f4-6c18-428c-82f9-6cedc1a2b828 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2035.854840] env[63279]: DEBUG nova.compute.manager [req-d069eeb4-00e2-4ea6-b3b3-8c87d46198b4 req-098be670-4e2b-46cc-9d2b-9c3157280cf7 service nova] [instance: 5b5f87cb-cf35-418f-b5bd-b953524a285c] Refreshing instance network info cache due to event network-changed-5576a5f4-6c18-428c-82f9-6cedc1a2b828. {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11655}} [ 2035.854840] env[63279]: DEBUG oslo_concurrency.lockutils [req-d069eeb4-00e2-4ea6-b3b3-8c87d46198b4 req-098be670-4e2b-46cc-9d2b-9c3157280cf7 service nova] Acquiring lock "refresh_cache-5b5f87cb-cf35-418f-b5bd-b953524a285c" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2035.854840] env[63279]: DEBUG oslo_concurrency.lockutils [req-d069eeb4-00e2-4ea6-b3b3-8c87d46198b4 req-098be670-4e2b-46cc-9d2b-9c3157280cf7 service nova] Acquired lock "refresh_cache-5b5f87cb-cf35-418f-b5bd-b953524a285c" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2035.854840] env[63279]: DEBUG nova.network.neutron [req-d069eeb4-00e2-4ea6-b3b3-8c87d46198b4 req-098be670-4e2b-46cc-9d2b-9c3157280cf7 service nova] [instance: 5b5f87cb-cf35-418f-b5bd-b953524a285c] Refreshing network info cache for port 5576a5f4-6c18-428c-82f9-6cedc1a2b828 {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2036.029170] env[63279]: DEBUG oslo_vmware.api [None req-1dd5613f-21e4-4e2c-ac2b-68bf8f869556 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Task: {'id': task-2087323, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.204717} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2036.029280] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-1dd5613f-21e4-4e2c-ac2b-68bf8f869556 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Deleted the datastore file {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2036.029440] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-1dd5613f-21e4-4e2c-ac2b-68bf8f869556 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] [instance: 64e92bfc-c0d0-4918-9ba2-45ffedbf7e39] Deleted contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2036.029623] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-1dd5613f-21e4-4e2c-ac2b-68bf8f869556 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] [instance: 64e92bfc-c0d0-4918-9ba2-45ffedbf7e39] Instance destroyed {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2036.029798] env[63279]: INFO nova.compute.manager [None req-1dd5613f-21e4-4e2c-ac2b-68bf8f869556 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] [instance: 64e92bfc-c0d0-4918-9ba2-45ffedbf7e39] Took 1.84 seconds to destroy the instance on the hypervisor. [ 2036.030050] env[63279]: DEBUG oslo.service.loopingcall [None req-1dd5613f-21e4-4e2c-ac2b-68bf8f869556 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2036.032429] env[63279]: DEBUG nova.compute.manager [-] [instance: 64e92bfc-c0d0-4918-9ba2-45ffedbf7e39] Deallocating network for instance {{(pid=63279) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2036.032532] env[63279]: DEBUG nova.network.neutron [-] [instance: 64e92bfc-c0d0-4918-9ba2-45ffedbf7e39] deallocate_for_instance() {{(pid=63279) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2036.178966] env[63279]: DEBUG oslo_concurrency.lockutils [None req-9f005d43-c531-46f0-8304-6f1be0a2b60b tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] Acquiring lock "5bb445d3-1b12-4a1b-ad2a-cbc929b13aee" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2036.179262] env[63279]: DEBUG oslo_concurrency.lockutils [None req-9f005d43-c531-46f0-8304-6f1be0a2b60b tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] Lock "5bb445d3-1b12-4a1b-ad2a-cbc929b13aee" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2036.179473] env[63279]: DEBUG oslo_concurrency.lockutils [None req-9f005d43-c531-46f0-8304-6f1be0a2b60b tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] Acquiring lock "5bb445d3-1b12-4a1b-ad2a-cbc929b13aee-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63279) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2036.179662] env[63279]: DEBUG oslo_concurrency.lockutils [None req-9f005d43-c531-46f0-8304-6f1be0a2b60b tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] Lock "5bb445d3-1b12-4a1b-ad2a-cbc929b13aee-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2036.179832] env[63279]: DEBUG oslo_concurrency.lockutils [None req-9f005d43-c531-46f0-8304-6f1be0a2b60b tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] Lock "5bb445d3-1b12-4a1b-ad2a-cbc929b13aee-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2036.182501] env[63279]: INFO nova.compute.manager [None req-9f005d43-c531-46f0-8304-6f1be0a2b60b tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] [instance: 5bb445d3-1b12-4a1b-ad2a-cbc929b13aee] Terminating instance [ 2036.203851] env[63279]: DEBUG nova.compute.manager [req-ce5fce8e-a0f1-4776-9f33-9a4107da03ec req-4e7f293a-8015-42e0-8f4d-c32c4e7c1a75 service nova] [instance: 7db0c32d-36a4-4452-bb07-06de0c93ab50] Received event network-vif-plugged-5518a04b-dc37-4dc2-89d4-059d6e54f634 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2036.203851] env[63279]: DEBUG oslo_concurrency.lockutils [req-ce5fce8e-a0f1-4776-9f33-9a4107da03ec req-4e7f293a-8015-42e0-8f4d-c32c4e7c1a75 service nova] Acquiring lock "7db0c32d-36a4-4452-bb07-06de0c93ab50-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2036.204053] env[63279]: DEBUG oslo_concurrency.lockutils [req-ce5fce8e-a0f1-4776-9f33-9a4107da03ec req-4e7f293a-8015-42e0-8f4d-c32c4e7c1a75 service nova] Lock "7db0c32d-36a4-4452-bb07-06de0c93ab50-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2036.204229] env[63279]: DEBUG oslo_concurrency.lockutils [req-ce5fce8e-a0f1-4776-9f33-9a4107da03ec req-4e7f293a-8015-42e0-8f4d-c32c4e7c1a75 service nova] Lock "7db0c32d-36a4-4452-bb07-06de0c93ab50-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2036.204695] env[63279]: DEBUG nova.compute.manager [req-ce5fce8e-a0f1-4776-9f33-9a4107da03ec req-4e7f293a-8015-42e0-8f4d-c32c4e7c1a75 service nova] [instance: 7db0c32d-36a4-4452-bb07-06de0c93ab50] No waiting events found dispatching network-vif-plugged-5518a04b-dc37-4dc2-89d4-059d6e54f634 {{(pid=63279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 2036.204695] env[63279]: WARNING nova.compute.manager [req-ce5fce8e-a0f1-4776-9f33-9a4107da03ec req-4e7f293a-8015-42e0-8f4d-c32c4e7c1a75 service nova] [instance: 7db0c32d-36a4-4452-bb07-06de0c93ab50] Received unexpected event network-vif-plugged-5518a04b-dc37-4dc2-89d4-059d6e54f634 for instance 
with vm_state building and task_state spawning. [ 2036.207086] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78d379e5-0724-48c4-a9d8-3e6889ef26eb {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2036.215885] env[63279]: DEBUG oslo_vmware.api [None req-147d4716-cd46-48dd-a8ea-f7700d3313fb tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52b34273-a45a-2495-2f09-c2b60698ffcd, 'name': SearchDatastore_Task, 'duration_secs': 0.019533} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2036.216501] env[63279]: DEBUG oslo_concurrency.lockutils [None req-147d4716-cd46-48dd-a8ea-f7700d3313fb tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2036.216717] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-147d4716-cd46-48dd-a8ea-f7700d3313fb tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] [instance: 6388f912-ae70-4e8f-b8e4-ceb02e0f8a51] Processing image 30887889-e45b-4f67-8b3c-16216e594a90 {{(pid=63279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2036.216996] env[63279]: DEBUG oslo_concurrency.lockutils [None req-147d4716-cd46-48dd-a8ea-f7700d3313fb tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2036.217161] env[63279]: DEBUG oslo_concurrency.lockutils [None req-147d4716-cd46-48dd-a8ea-f7700d3313fb tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2036.217340] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-147d4716-cd46-48dd-a8ea-f7700d3313fb tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2036.217583] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9b05ac34-9bd0-442f-8b4b-4aa1b500986f {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2036.222797] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-717e3801-afbc-4ff9-9ab1-109b10ecf575 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2036.228436] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-147d4716-cd46-48dd-a8ea-f7700d3313fb tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] 
Created directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2036.228699] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-147d4716-cd46-48dd-a8ea-f7700d3313fb tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2036.259264] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1a560cb2-37bc-4006-b6e8-c2eabe238a49 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2036.266170] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72627578-2373-4fdd-b406-b3c2a3ee6940 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2036.273123] env[63279]: DEBUG nova.network.neutron [None req-17f8ab0f-73cf-4901-a75b-de05d0a35bf6 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: 7db0c32d-36a4-4452-bb07-06de0c93ab50] Successfully updated port: 5518a04b-dc37-4dc2-89d4-059d6e54f634 {{(pid=63279) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2036.276502] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-739c36a0-5bbe-43bc-bf55-45ed3abc167e {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2036.280811] env[63279]: DEBUG oslo_vmware.api [None req-147d4716-cd46-48dd-a8ea-f7700d3313fb tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Waiting for the task: (returnval){ [ 2036.280811] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52f4a1c0-47da-d4e6-8c99-81b443f5edbb" [ 2036.280811] env[63279]: _type = "Task" [ 2036.280811] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2036.295295] env[63279]: DEBUG nova.compute.provider_tree [None req-5ccf4cc3-01a1-4613-8c3b-fa276e803cd1 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Updating inventory in ProviderTree for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2036.302323] env[63279]: DEBUG oslo_vmware.api [None req-147d4716-cd46-48dd-a8ea-f7700d3313fb tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52f4a1c0-47da-d4e6-8c99-81b443f5edbb, 'name': SearchDatastore_Task, 'duration_secs': 0.012035} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2036.303114] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6d960cdf-39c7-4ff0-b07c-e96929f91410 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2036.309474] env[63279]: DEBUG oslo_vmware.api [None req-147d4716-cd46-48dd-a8ea-f7700d3313fb tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Waiting for the task: (returnval){ [ 2036.309474] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52c35459-9868-f4b6-ec4d-705fdd66855f" [ 2036.309474] env[63279]: _type = "Task" [ 2036.309474] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2036.317869] env[63279]: DEBUG oslo_vmware.api [None req-147d4716-cd46-48dd-a8ea-f7700d3313fb tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52c35459-9868-f4b6-ec4d-705fdd66855f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2036.691936] env[63279]: DEBUG nova.compute.manager [None req-9f005d43-c531-46f0-8304-6f1be0a2b60b tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] [instance: 5bb445d3-1b12-4a1b-ad2a-cbc929b13aee] Start destroying the instance on the hypervisor. {{(pid=63279) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2036.692585] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-9f005d43-c531-46f0-8304-6f1be0a2b60b tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] [instance: 5bb445d3-1b12-4a1b-ad2a-cbc929b13aee] Destroying instance {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2036.693603] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13bf415c-8834-4b64-b1c6-da7744f34ae3 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2036.704981] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-9f005d43-c531-46f0-8304-6f1be0a2b60b tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] [instance: 5bb445d3-1b12-4a1b-ad2a-cbc929b13aee] Powering off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2036.705630] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5892083e-adf2-4e84-8c49-eebac10fd03b {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2036.714231] env[63279]: DEBUG oslo_vmware.api [None req-9f005d43-c531-46f0-8304-6f1be0a2b60b tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] Waiting for the task: (returnval){ [ 2036.714231] env[63279]: value = "task-2087324" [ 2036.714231] env[63279]: _type = "Task" [ 2036.714231] env[63279]: } to complete. 
{{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2036.724338] env[63279]: DEBUG oslo_vmware.api [None req-9f005d43-c531-46f0-8304-6f1be0a2b60b tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] Task: {'id': task-2087324, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2036.784187] env[63279]: DEBUG oslo_concurrency.lockutils [None req-17f8ab0f-73cf-4901-a75b-de05d0a35bf6 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Acquiring lock "refresh_cache-7db0c32d-36a4-4452-bb07-06de0c93ab50" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2036.784187] env[63279]: DEBUG oslo_concurrency.lockutils [None req-17f8ab0f-73cf-4901-a75b-de05d0a35bf6 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Acquired lock "refresh_cache-7db0c32d-36a4-4452-bb07-06de0c93ab50" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2036.784187] env[63279]: DEBUG nova.network.neutron [None req-17f8ab0f-73cf-4901-a75b-de05d0a35bf6 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: 7db0c32d-36a4-4452-bb07-06de0c93ab50] Building network info cache for instance {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2036.823334] env[63279]: DEBUG oslo_vmware.api [None req-147d4716-cd46-48dd-a8ea-f7700d3313fb tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52c35459-9868-f4b6-ec4d-705fdd66855f, 'name': SearchDatastore_Task, 'duration_secs': 0.009452} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2036.823334] env[63279]: DEBUG oslo_concurrency.lockutils [None req-147d4716-cd46-48dd-a8ea-f7700d3313fb tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2036.823334] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-147d4716-cd46-48dd-a8ea-f7700d3313fb tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] 6388f912-ae70-4e8f-b8e4-ceb02e0f8a51/6388f912-ae70-4e8f-b8e4-ceb02e0f8a51.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2036.823334] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9b0e1adc-1fb7-4386-bc08-09a6e7a358a9 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2036.826779] env[63279]: ERROR nova.scheduler.client.report [None req-5ccf4cc3-01a1-4613-8c3b-fa276e803cd1 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] [req-b5506dce-5c81-43d7-a0d8-e8f310e71bbf] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 0ba7c625-a0fc-4d3c-b804-196d00f00137. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-b5506dce-5c81-43d7-a0d8-e8f310e71bbf"}]} [ 2036.830856] env[63279]: DEBUG oslo_vmware.api [None req-147d4716-cd46-48dd-a8ea-f7700d3313fb tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Waiting for the task: (returnval){ [ 2036.830856] env[63279]: value = "task-2087325" [ 2036.830856] env[63279]: _type = "Task" [ 2036.830856] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2036.839857] env[63279]: DEBUG oslo_vmware.api [None req-147d4716-cd46-48dd-a8ea-f7700d3313fb tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Task: {'id': task-2087325, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2036.851859] env[63279]: DEBUG nova.scheduler.client.report [None req-5ccf4cc3-01a1-4613-8c3b-fa276e803cd1 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Refreshing inventories for resource provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 2036.870043] env[63279]: DEBUG nova.scheduler.client.report [None req-5ccf4cc3-01a1-4613-8c3b-fa276e803cd1 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Updating ProviderTree inventory for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 2036.870043] env[63279]: DEBUG nova.compute.provider_tree [None req-5ccf4cc3-01a1-4613-8c3b-fa276e803cd1 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Updating inventory in ProviderTree for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2036.885708] env[63279]: DEBUG nova.scheduler.client.report [None req-5ccf4cc3-01a1-4613-8c3b-fa276e803cd1 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Refreshing aggregate associations for resource provider 0ba7c625-a0fc-4d3c-b804-196d00f00137, aggregates: None {{(pid=63279) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 2036.910205] env[63279]: DEBUG nova.scheduler.client.report [None req-5ccf4cc3-01a1-4613-8c3b-fa276e803cd1 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Refreshing trait associations for resource provider 0ba7c625-a0fc-4d3c-b804-196d00f00137, traits: COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK {{(pid=63279) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 2036.939665] env[63279]: DEBUG nova.network.neutron [req-d069eeb4-00e2-4ea6-b3b3-8c87d46198b4 req-098be670-4e2b-46cc-9d2b-9c3157280cf7 service nova] [instance: 5b5f87cb-cf35-418f-b5bd-b953524a285c] Updated VIF entry in instance network info cache for port 5576a5f4-6c18-428c-82f9-6cedc1a2b828. 
{{(pid=63279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2036.939665] env[63279]: DEBUG nova.network.neutron [req-d069eeb4-00e2-4ea6-b3b3-8c87d46198b4 req-098be670-4e2b-46cc-9d2b-9c3157280cf7 service nova] [instance: 5b5f87cb-cf35-418f-b5bd-b953524a285c] Updating instance_info_cache with network_info: [{"id": "5576a5f4-6c18-428c-82f9-6cedc1a2b828", "address": "fa:16:3e:1a:a2:31", "network": {"id": "9cfd684f-63e8-44aa-8569-0ab02c790458", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-318205966-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4f07bad15ea5419cbecc5840b4e96d01", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dcf5c3f7-4e33-4f21-b323-3673930b789c", "external-id": "nsx-vlan-transportzone-983", "segmentation_id": 983, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5576a5f4-6c", "ovs_interfaceid": "5576a5f4-6c18-428c-82f9-6cedc1a2b828", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2037.225049] env[63279]: DEBUG oslo_vmware.api [None req-9f005d43-c531-46f0-8304-6f1be0a2b60b tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] Task: {'id': task-2087324, 'name': PowerOffVM_Task, 'duration_secs': 0.220628} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2037.228706] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-9f005d43-c531-46f0-8304-6f1be0a2b60b tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] [instance: 5bb445d3-1b12-4a1b-ad2a-cbc929b13aee] Powered off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2037.228706] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-9f005d43-c531-46f0-8304-6f1be0a2b60b tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] [instance: 5bb445d3-1b12-4a1b-ad2a-cbc929b13aee] Unregistering the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2037.228706] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-98bb03b0-1680-4f3c-a6ae-96f968216073 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2037.238888] env[63279]: DEBUG nova.network.neutron [-] [instance: 64e92bfc-c0d0-4918-9ba2-45ffedbf7e39] Updating instance_info_cache with network_info: [] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2037.322969] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-9f005d43-c531-46f0-8304-6f1be0a2b60b tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] [instance: 5bb445d3-1b12-4a1b-ad2a-cbc929b13aee] Unregistered the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2037.323209] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-9f005d43-c531-46f0-8304-6f1be0a2b60b tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] [instance: 5bb445d3-1b12-4a1b-ad2a-cbc929b13aee] Deleting contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2037.323392] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-9f005d43-c531-46f0-8304-6f1be0a2b60b tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] Deleting the datastore file [datastore1] 5bb445d3-1b12-4a1b-ad2a-cbc929b13aee {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2037.324155] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4fb928e5-a359-4b9a-927c-4cdf0e47cee8 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2037.330221] env[63279]: DEBUG oslo_vmware.api [None req-9f005d43-c531-46f0-8304-6f1be0a2b60b tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] Waiting for the task: (returnval){ [ 2037.330221] env[63279]: value = "task-2087327" [ 2037.330221] env[63279]: _type = "Task" [ 2037.330221] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2037.343674] env[63279]: DEBUG oslo_vmware.api [None req-9f005d43-c531-46f0-8304-6f1be0a2b60b tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] Task: {'id': task-2087327, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2037.348163] env[63279]: DEBUG oslo_vmware.api [None req-147d4716-cd46-48dd-a8ea-f7700d3313fb tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Task: {'id': task-2087325, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2037.351831] env[63279]: DEBUG nova.network.neutron [None req-17f8ab0f-73cf-4901-a75b-de05d0a35bf6 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: 7db0c32d-36a4-4452-bb07-06de0c93ab50] Instance cache missing network info. {{(pid=63279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2037.451832] env[63279]: DEBUG oslo_concurrency.lockutils [req-d069eeb4-00e2-4ea6-b3b3-8c87d46198b4 req-098be670-4e2b-46cc-9d2b-9c3157280cf7 service nova] Releasing lock "refresh_cache-5b5f87cb-cf35-418f-b5bd-b953524a285c" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2037.471040] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c5e79a2-6927-4d63-ae6b-78dcd2b3ff88 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2037.478506] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a66ec0c-aa6a-40f7-b77e-3a7c85984750 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2037.516971] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93ccdf21-b035-4b48-87a0-20a1e8918d11 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2037.525792] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74f4b49d-d7d5-4608-b19b-00e46d8b1250 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2037.546439] env[63279]: DEBUG nova.compute.provider_tree [None req-5ccf4cc3-01a1-4613-8c3b-fa276e803cd1 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Updating inventory in ProviderTree for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2037.665421] env[63279]: DEBUG nova.network.neutron [None req-17f8ab0f-73cf-4901-a75b-de05d0a35bf6 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: 7db0c32d-36a4-4452-bb07-06de0c93ab50] Updating instance_info_cache with network_info: [{"id": "5518a04b-dc37-4dc2-89d4-059d6e54f634", "address": "fa:16:3e:ea:b5:a6", "network": {"id": "e2728625-1c28-407c-946b-97923d57b409", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1735124510-network", "subnets": [{"cidr": 
"192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a5a719a21fe248c49d0d0151d218866b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a9abd00f-2cea-40f8-9804-a56b6431192d", "external-id": "nsx-vlan-transportzone-639", "segmentation_id": 639, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5518a04b-dc", "ovs_interfaceid": "5518a04b-dc37-4dc2-89d4-059d6e54f634", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2037.743754] env[63279]: INFO nova.compute.manager [-] [instance: 64e92bfc-c0d0-4918-9ba2-45ffedbf7e39] Took 1.71 seconds to deallocate network for instance. [ 2037.843789] env[63279]: DEBUG oslo_vmware.api [None req-9f005d43-c531-46f0-8304-6f1be0a2b60b tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] Task: {'id': task-2087327, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2037.847016] env[63279]: DEBUG oslo_vmware.api [None req-147d4716-cd46-48dd-a8ea-f7700d3313fb tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Task: {'id': task-2087325, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.866163} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2037.847536] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-147d4716-cd46-48dd-a8ea-f7700d3313fb tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] 6388f912-ae70-4e8f-b8e4-ceb02e0f8a51/6388f912-ae70-4e8f-b8e4-ceb02e0f8a51.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2037.847757] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-147d4716-cd46-48dd-a8ea-f7700d3313fb tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] [instance: 6388f912-ae70-4e8f-b8e4-ceb02e0f8a51] Extending root virtual disk to 1048576 {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2037.848069] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b3efd880-519d-45eb-859b-c8c293022a32 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2037.854490] env[63279]: DEBUG oslo_vmware.api [None req-147d4716-cd46-48dd-a8ea-f7700d3313fb tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Waiting for the task: (returnval){ [ 2037.854490] env[63279]: value = "task-2087328" [ 2037.854490] env[63279]: _type = "Task" [ 2037.854490] env[63279]: } to complete. 
{{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2037.862517] env[63279]: DEBUG oslo_vmware.api [None req-147d4716-cd46-48dd-a8ea-f7700d3313fb tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Task: {'id': task-2087328, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2038.079046] env[63279]: DEBUG nova.scheduler.client.report [None req-5ccf4cc3-01a1-4613-8c3b-fa276e803cd1 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Updated inventory for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 with generation 89 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 2038.079046] env[63279]: DEBUG nova.compute.provider_tree [None req-5ccf4cc3-01a1-4613-8c3b-fa276e803cd1 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Updating resource provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 generation from 89 to 90 during operation: update_inventory {{(pid=63279) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 2038.079046] env[63279]: DEBUG nova.compute.provider_tree [None req-5ccf4cc3-01a1-4613-8c3b-fa276e803cd1 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Updating inventory in ProviderTree for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2038.170350] env[63279]: DEBUG oslo_concurrency.lockutils [None req-17f8ab0f-73cf-4901-a75b-de05d0a35bf6 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Releasing lock "refresh_cache-7db0c32d-36a4-4452-bb07-06de0c93ab50" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2038.170469] env[63279]: DEBUG nova.compute.manager [None req-17f8ab0f-73cf-4901-a75b-de05d0a35bf6 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: 7db0c32d-36a4-4452-bb07-06de0c93ab50] Instance network_info: |[{"id": "5518a04b-dc37-4dc2-89d4-059d6e54f634", "address": "fa:16:3e:ea:b5:a6", "network": {"id": "e2728625-1c28-407c-946b-97923d57b409", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1735124510-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], 
"version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a5a719a21fe248c49d0d0151d218866b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a9abd00f-2cea-40f8-9804-a56b6431192d", "external-id": "nsx-vlan-transportzone-639", "segmentation_id": 639, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5518a04b-dc", "ovs_interfaceid": "5518a04b-dc37-4dc2-89d4-059d6e54f634", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 2038.171356] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-17f8ab0f-73cf-4901-a75b-de05d0a35bf6 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: 7db0c32d-36a4-4452-bb07-06de0c93ab50] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ea:b5:a6', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a9abd00f-2cea-40f8-9804-a56b6431192d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '5518a04b-dc37-4dc2-89d4-059d6e54f634', 'vif_model': 'vmxnet3'}] {{(pid=63279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2038.181862] env[63279]: DEBUG oslo.service.loopingcall [None req-17f8ab0f-73cf-4901-a75b-de05d0a35bf6 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2038.181862] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7db0c32d-36a4-4452-bb07-06de0c93ab50] Creating VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2038.181862] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8429b6ca-6dce-40dc-aa8d-be2113075284 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2038.203904] env[63279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2038.203904] env[63279]: value = "task-2087329" [ 2038.203904] env[63279]: _type = "Task" [ 2038.203904] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2038.213844] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087329, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2038.250988] env[63279]: DEBUG oslo_concurrency.lockutils [None req-1dd5613f-21e4-4e2c-ac2b-68bf8f869556 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2038.341237] env[63279]: DEBUG nova.compute.manager [req-f3b5d71a-0776-4416-b0a5-547455c2b2fe req-ab3b3b59-8e70-487c-bcb2-77afc7c1b553 service nova] [instance: 7db0c32d-36a4-4452-bb07-06de0c93ab50] Received event network-changed-5518a04b-dc37-4dc2-89d4-059d6e54f634 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2038.341589] env[63279]: DEBUG nova.compute.manager [req-f3b5d71a-0776-4416-b0a5-547455c2b2fe req-ab3b3b59-8e70-487c-bcb2-77afc7c1b553 service nova] [instance: 7db0c32d-36a4-4452-bb07-06de0c93ab50] Refreshing instance network info cache due to event network-changed-5518a04b-dc37-4dc2-89d4-059d6e54f634. {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11655}} [ 2038.342978] env[63279]: DEBUG oslo_concurrency.lockutils [req-f3b5d71a-0776-4416-b0a5-547455c2b2fe req-ab3b3b59-8e70-487c-bcb2-77afc7c1b553 service nova] Acquiring lock "refresh_cache-7db0c32d-36a4-4452-bb07-06de0c93ab50" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2038.342978] env[63279]: DEBUG oslo_concurrency.lockutils [req-f3b5d71a-0776-4416-b0a5-547455c2b2fe req-ab3b3b59-8e70-487c-bcb2-77afc7c1b553 service nova] Acquired lock "refresh_cache-7db0c32d-36a4-4452-bb07-06de0c93ab50" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2038.342978] env[63279]: DEBUG nova.network.neutron [req-f3b5d71a-0776-4416-b0a5-547455c2b2fe req-ab3b3b59-8e70-487c-bcb2-77afc7c1b553 service nova] [instance: 7db0c32d-36a4-4452-bb07-06de0c93ab50] Refreshing network info cache for port 5518a04b-dc37-4dc2-89d4-059d6e54f634 {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2038.349066] env[63279]: DEBUG oslo_vmware.api [None req-9f005d43-c531-46f0-8304-6f1be0a2b60b tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] Task: {'id': task-2087327, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.578805} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2038.349596] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-9f005d43-c531-46f0-8304-6f1be0a2b60b tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] Deleted the datastore file {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2038.349788] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-9f005d43-c531-46f0-8304-6f1be0a2b60b tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] [instance: 5bb445d3-1b12-4a1b-ad2a-cbc929b13aee] Deleted contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2038.349976] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-9f005d43-c531-46f0-8304-6f1be0a2b60b tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] [instance: 5bb445d3-1b12-4a1b-ad2a-cbc929b13aee] Instance destroyed {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2038.350206] env[63279]: INFO nova.compute.manager [None req-9f005d43-c531-46f0-8304-6f1be0a2b60b tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] [instance: 5bb445d3-1b12-4a1b-ad2a-cbc929b13aee] Took 1.66 seconds to destroy the instance on the hypervisor. [ 2038.351352] env[63279]: DEBUG oslo.service.loopingcall [None req-9f005d43-c531-46f0-8304-6f1be0a2b60b tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2038.351593] env[63279]: DEBUG nova.compute.manager [-] [instance: 5bb445d3-1b12-4a1b-ad2a-cbc929b13aee] Deallocating network for instance {{(pid=63279) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2038.351692] env[63279]: DEBUG nova.network.neutron [-] [instance: 5bb445d3-1b12-4a1b-ad2a-cbc929b13aee] deallocate_for_instance() {{(pid=63279) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2038.366668] env[63279]: DEBUG oslo_vmware.api [None req-147d4716-cd46-48dd-a8ea-f7700d3313fb tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Task: {'id': task-2087328, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.153855} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2038.366951] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-147d4716-cd46-48dd-a8ea-f7700d3313fb tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] [instance: 6388f912-ae70-4e8f-b8e4-ceb02e0f8a51] Extended root virtual disk {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2038.368025] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd5c0c76-b3a5-4e1b-be80-8c4f82b27adb {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2038.400099] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-147d4716-cd46-48dd-a8ea-f7700d3313fb tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] [instance: 6388f912-ae70-4e8f-b8e4-ceb02e0f8a51] Reconfiguring VM instance instance-00000008 to attach disk [datastore1] 6388f912-ae70-4e8f-b8e4-ceb02e0f8a51/6388f912-ae70-4e8f-b8e4-ceb02e0f8a51.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2038.403407] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7400f80d-abec-4b15-96ed-f2c5484f0ffa {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2038.425942] env[63279]: DEBUG oslo_vmware.api [None req-147d4716-cd46-48dd-a8ea-f7700d3313fb tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Waiting for the task: (returnval){ [ 2038.425942] env[63279]: value = "task-2087330" [ 2038.425942] env[63279]: _type = "Task" [ 2038.425942] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2038.435019] env[63279]: DEBUG oslo_vmware.api [None req-147d4716-cd46-48dd-a8ea-f7700d3313fb tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Task: {'id': task-2087330, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2038.588819] env[63279]: DEBUG oslo_concurrency.lockutils [None req-5ccf4cc3-01a1-4613-8c3b-fa276e803cd1 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 3.389s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2038.591412] env[63279]: DEBUG oslo_concurrency.lockutils [None req-80ef8ca7-1c6d-429c-800c-69ee59053ed2 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 26.259s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2038.593044] env[63279]: INFO nova.compute.claims [None req-80ef8ca7-1c6d-429c-800c-69ee59053ed2 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: b981ac83-6c23-4d44-bd28-12da30d746bd] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2038.634745] env[63279]: INFO nova.scheduler.client.report [None req-5ccf4cc3-01a1-4613-8c3b-fa276e803cd1 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Deleted allocations for instance 32789822-cb54-43e7-beae-b5ed3002f4ad [ 2038.715357] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087329, 'name': CreateVM_Task, 'duration_secs': 0.396809} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2038.716165] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7db0c32d-36a4-4452-bb07-06de0c93ab50] Created VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2038.717041] env[63279]: DEBUG oslo_concurrency.lockutils [None req-17f8ab0f-73cf-4901-a75b-de05d0a35bf6 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2038.717178] env[63279]: DEBUG oslo_concurrency.lockutils [None req-17f8ab0f-73cf-4901-a75b-de05d0a35bf6 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2038.717774] env[63279]: DEBUG oslo_concurrency.lockutils [None req-17f8ab0f-73cf-4901-a75b-de05d0a35bf6 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2038.717990] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5d466303-5ff9-4d75-97b5-09d6799cb60f {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2038.723345] 
env[63279]: DEBUG oslo_vmware.api [None req-17f8ab0f-73cf-4901-a75b-de05d0a35bf6 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Waiting for the task: (returnval){ [ 2038.723345] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]524a1869-aef1-e3ef-2af4-52dc75bf922b" [ 2038.723345] env[63279]: _type = "Task" [ 2038.723345] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2038.731869] env[63279]: DEBUG oslo_vmware.api [None req-17f8ab0f-73cf-4901-a75b-de05d0a35bf6 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]524a1869-aef1-e3ef-2af4-52dc75bf922b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2038.936432] env[63279]: DEBUG oslo_vmware.api [None req-147d4716-cd46-48dd-a8ea-f7700d3313fb tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Task: {'id': task-2087330, 'name': ReconfigVM_Task, 'duration_secs': 0.309579} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2038.936713] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-147d4716-cd46-48dd-a8ea-f7700d3313fb tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] [instance: 6388f912-ae70-4e8f-b8e4-ceb02e0f8a51] Reconfigured VM instance instance-00000008 to attach disk [datastore1] 6388f912-ae70-4e8f-b8e4-ceb02e0f8a51/6388f912-ae70-4e8f-b8e4-ceb02e0f8a51.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2038.937347] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-7256f13b-ebe4-49b7-9769-bb8cf000294e {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2038.946472] env[63279]: DEBUG oslo_vmware.api [None req-147d4716-cd46-48dd-a8ea-f7700d3313fb tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Waiting for the task: (returnval){ [ 2038.946472] env[63279]: value = "task-2087331" [ 2038.946472] env[63279]: _type = "Task" [ 2038.946472] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2038.954876] env[63279]: DEBUG oslo_vmware.api [None req-147d4716-cd46-48dd-a8ea-f7700d3313fb tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Task: {'id': task-2087331, 'name': Rename_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2039.141869] env[63279]: DEBUG oslo_concurrency.lockutils [None req-5ccf4cc3-01a1-4613-8c3b-fa276e803cd1 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Lock "32789822-cb54-43e7-beae-b5ed3002f4ad" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 32.545s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2039.144756] env[63279]: DEBUG nova.network.neutron [req-f3b5d71a-0776-4416-b0a5-547455c2b2fe req-ab3b3b59-8e70-487c-bcb2-77afc7c1b553 service nova] [instance: 7db0c32d-36a4-4452-bb07-06de0c93ab50] Updated VIF entry in instance network info cache for port 5518a04b-dc37-4dc2-89d4-059d6e54f634. {{(pid=63279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2039.144756] env[63279]: DEBUG nova.network.neutron [req-f3b5d71a-0776-4416-b0a5-547455c2b2fe req-ab3b3b59-8e70-487c-bcb2-77afc7c1b553 service nova] [instance: 7db0c32d-36a4-4452-bb07-06de0c93ab50] Updating instance_info_cache with network_info: [{"id": "5518a04b-dc37-4dc2-89d4-059d6e54f634", "address": "fa:16:3e:ea:b5:a6", "network": {"id": "e2728625-1c28-407c-946b-97923d57b409", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1735124510-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a5a719a21fe248c49d0d0151d218866b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a9abd00f-2cea-40f8-9804-a56b6431192d", "external-id": "nsx-vlan-transportzone-639", "segmentation_id": 639, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5518a04b-dc", "ovs_interfaceid": "5518a04b-dc37-4dc2-89d4-059d6e54f634", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2039.203135] env[63279]: DEBUG nova.network.neutron [-] [instance: 5bb445d3-1b12-4a1b-ad2a-cbc929b13aee] Updating instance_info_cache with network_info: [] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2039.234317] env[63279]: DEBUG oslo_vmware.api [None req-17f8ab0f-73cf-4901-a75b-de05d0a35bf6 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]524a1869-aef1-e3ef-2af4-52dc75bf922b, 'name': SearchDatastore_Task, 'duration_secs': 0.042094} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2039.234774] env[63279]: DEBUG oslo_concurrency.lockutils [None req-17f8ab0f-73cf-4901-a75b-de05d0a35bf6 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2039.234897] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-17f8ab0f-73cf-4901-a75b-de05d0a35bf6 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: 7db0c32d-36a4-4452-bb07-06de0c93ab50] Processing image 30887889-e45b-4f67-8b3c-16216e594a90 {{(pid=63279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2039.235155] env[63279]: DEBUG oslo_concurrency.lockutils [None req-17f8ab0f-73cf-4901-a75b-de05d0a35bf6 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2039.235878] env[63279]: DEBUG oslo_concurrency.lockutils [None req-17f8ab0f-73cf-4901-a75b-de05d0a35bf6 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2039.235878] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-17f8ab0f-73cf-4901-a75b-de05d0a35bf6 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2039.235878] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7c9bc8e7-b81e-4d21-9d0a-7915285c05c1 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2039.246057] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-17f8ab0f-73cf-4901-a75b-de05d0a35bf6 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2039.246057] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-17f8ab0f-73cf-4901-a75b-de05d0a35bf6 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2039.248202] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a72f0470-dfa9-486c-a881-4d99e2c5baa2 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2039.253213] env[63279]: DEBUG oslo_vmware.api [None req-17f8ab0f-73cf-4901-a75b-de05d0a35bf6 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Waiting for the task: (returnval){ [ 2039.253213] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52f58632-5ae2-b6d5-44aa-ae80b80bf79f" [ 2039.253213] env[63279]: _type = "Task" [ 2039.253213] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2039.260865] env[63279]: DEBUG oslo_vmware.api [None req-17f8ab0f-73cf-4901-a75b-de05d0a35bf6 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52f58632-5ae2-b6d5-44aa-ae80b80bf79f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2039.459608] env[63279]: DEBUG oslo_vmware.api [None req-147d4716-cd46-48dd-a8ea-f7700d3313fb tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Task: {'id': task-2087331, 'name': Rename_Task} progress is 99%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2039.646694] env[63279]: DEBUG oslo_concurrency.lockutils [req-f3b5d71a-0776-4416-b0a5-547455c2b2fe req-ab3b3b59-8e70-487c-bcb2-77afc7c1b553 service nova] Releasing lock "refresh_cache-7db0c32d-36a4-4452-bb07-06de0c93ab50" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2039.646961] env[63279]: DEBUG nova.compute.manager [req-f3b5d71a-0776-4416-b0a5-547455c2b2fe req-ab3b3b59-8e70-487c-bcb2-77afc7c1b553 service nova] [instance: 64e92bfc-c0d0-4918-9ba2-45ffedbf7e39] Received event network-vif-deleted-a84d472c-4f07-4a61-8eca-135f97267755 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2039.647170] env[63279]: DEBUG nova.compute.manager [req-f3b5d71a-0776-4416-b0a5-547455c2b2fe req-ab3b3b59-8e70-487c-bcb2-77afc7c1b553 service nova] [instance: 5b5f87cb-cf35-418f-b5bd-b953524a285c] Received event network-changed-5576a5f4-6c18-428c-82f9-6cedc1a2b828 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2039.647338] env[63279]: DEBUG nova.compute.manager [req-f3b5d71a-0776-4416-b0a5-547455c2b2fe req-ab3b3b59-8e70-487c-bcb2-77afc7c1b553 service nova] [instance: 5b5f87cb-cf35-418f-b5bd-b953524a285c] Refreshing instance network info cache due to event network-changed-5576a5f4-6c18-428c-82f9-6cedc1a2b828. 
{{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11655}} [ 2039.647551] env[63279]: DEBUG oslo_concurrency.lockutils [req-f3b5d71a-0776-4416-b0a5-547455c2b2fe req-ab3b3b59-8e70-487c-bcb2-77afc7c1b553 service nova] Acquiring lock "refresh_cache-5b5f87cb-cf35-418f-b5bd-b953524a285c" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2039.647694] env[63279]: DEBUG oslo_concurrency.lockutils [req-f3b5d71a-0776-4416-b0a5-547455c2b2fe req-ab3b3b59-8e70-487c-bcb2-77afc7c1b553 service nova] Acquired lock "refresh_cache-5b5f87cb-cf35-418f-b5bd-b953524a285c" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2039.647859] env[63279]: DEBUG nova.network.neutron [req-f3b5d71a-0776-4416-b0a5-547455c2b2fe req-ab3b3b59-8e70-487c-bcb2-77afc7c1b553 service nova] [instance: 5b5f87cb-cf35-418f-b5bd-b953524a285c] Refreshing network info cache for port 5576a5f4-6c18-428c-82f9-6cedc1a2b828 {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2039.705650] env[63279]: INFO nova.compute.manager [-] [instance: 5bb445d3-1b12-4a1b-ad2a-cbc929b13aee] Took 1.35 seconds to deallocate network for instance. [ 2039.765145] env[63279]: DEBUG oslo_vmware.api [None req-17f8ab0f-73cf-4901-a75b-de05d0a35bf6 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52f58632-5ae2-b6d5-44aa-ae80b80bf79f, 'name': SearchDatastore_Task, 'duration_secs': 0.030354} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2039.768733] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fa5250e7-d298-4f2c-9dab-2fccddf59446 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2039.773920] env[63279]: DEBUG oslo_vmware.api [None req-17f8ab0f-73cf-4901-a75b-de05d0a35bf6 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Waiting for the task: (returnval){ [ 2039.773920] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52999a0c-8fb7-e6bb-e359-92650323f83b" [ 2039.773920] env[63279]: _type = "Task" [ 2039.773920] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2039.784820] env[63279]: DEBUG oslo_vmware.api [None req-17f8ab0f-73cf-4901-a75b-de05d0a35bf6 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52999a0c-8fb7-e6bb-e359-92650323f83b, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2039.816046] env[63279]: DEBUG oslo_concurrency.lockutils [None req-2fc734b0-6cb8-42f2-afc5-bfa1456ea58d tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Acquiring lock "fcdd3076-2b53-4850-9730-2f877e2cabfd" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2039.816153] env[63279]: DEBUG oslo_concurrency.lockutils [None req-2fc734b0-6cb8-42f2-afc5-bfa1456ea58d tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Lock "fcdd3076-2b53-4850-9730-2f877e2cabfd" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2039.958579] env[63279]: DEBUG oslo_vmware.api [None req-147d4716-cd46-48dd-a8ea-f7700d3313fb tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Task: {'id': task-2087331, 'name': Rename_Task} progress is 99%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2040.079010] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-335594c2-113e-448a-804e-a465cb7a6dec {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2040.088045] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a205ccb-782f-4b1c-8ecd-240690c97e1e {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2040.121204] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c8b490a-f9a3-4e93-8f4b-9167ae32b38e {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2040.126740] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39ad6f08-2ac8-4a1a-bb8a-e404006cfb5d {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2040.141325] env[63279]: DEBUG nova.compute.provider_tree [None req-80ef8ca7-1c6d-429c-800c-69ee59053ed2 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Inventory has not changed in ProviderTree for provider: 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2040.216753] env[63279]: DEBUG oslo_concurrency.lockutils [None req-9f005d43-c531-46f0-8304-6f1be0a2b60b tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2040.287547] env[63279]: DEBUG oslo_vmware.api [None req-17f8ab0f-73cf-4901-a75b-de05d0a35bf6 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Task: {'id': 
session[527edb51-86b9-2668-51b3-676f2d4ed098]52999a0c-8fb7-e6bb-e359-92650323f83b, 'name': SearchDatastore_Task, 'duration_secs': 0.028139} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2040.287883] env[63279]: DEBUG oslo_concurrency.lockutils [None req-17f8ab0f-73cf-4901-a75b-de05d0a35bf6 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2040.288224] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-17f8ab0f-73cf-4901-a75b-de05d0a35bf6 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] 7db0c32d-36a4-4452-bb07-06de0c93ab50/7db0c32d-36a4-4452-bb07-06de0c93ab50.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2040.288539] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-fee9e8bf-cdba-4e69-a1e8-0a20f4ba5b55 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2040.295346] env[63279]: DEBUG oslo_vmware.api [None req-17f8ab0f-73cf-4901-a75b-de05d0a35bf6 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Waiting for the task: (returnval){ [ 2040.295346] env[63279]: value = "task-2087332" [ 2040.295346] env[63279]: _type = "Task" [ 2040.295346] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2040.305321] env[63279]: DEBUG oslo_vmware.api [None req-17f8ab0f-73cf-4901-a75b-de05d0a35bf6 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Task: {'id': task-2087332, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2040.371118] env[63279]: DEBUG nova.compute.manager [req-c07d8afa-b084-45ac-8840-6bb6996f014c req-484fdd31-41b2-4a52-95da-5cae902b17bd service nova] [instance: 5bb445d3-1b12-4a1b-ad2a-cbc929b13aee] Received event network-vif-deleted-8bca335c-50ae-4ba0-9cde-a8d640c633e1 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2040.398257] env[63279]: DEBUG nova.network.neutron [req-f3b5d71a-0776-4416-b0a5-547455c2b2fe req-ab3b3b59-8e70-487c-bcb2-77afc7c1b553 service nova] [instance: 5b5f87cb-cf35-418f-b5bd-b953524a285c] Updated VIF entry in instance network info cache for port 5576a5f4-6c18-428c-82f9-6cedc1a2b828. 
{{(pid=63279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2040.398754] env[63279]: DEBUG nova.network.neutron [req-f3b5d71a-0776-4416-b0a5-547455c2b2fe req-ab3b3b59-8e70-487c-bcb2-77afc7c1b553 service nova] [instance: 5b5f87cb-cf35-418f-b5bd-b953524a285c] Updating instance_info_cache with network_info: [{"id": "5576a5f4-6c18-428c-82f9-6cedc1a2b828", "address": "fa:16:3e:1a:a2:31", "network": {"id": "9cfd684f-63e8-44aa-8569-0ab02c790458", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-318205966-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4f07bad15ea5419cbecc5840b4e96d01", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dcf5c3f7-4e33-4f21-b323-3673930b789c", "external-id": "nsx-vlan-transportzone-983", "segmentation_id": 983, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5576a5f4-6c", "ovs_interfaceid": "5576a5f4-6c18-428c-82f9-6cedc1a2b828", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2040.458078] env[63279]: DEBUG oslo_vmware.api [None req-147d4716-cd46-48dd-a8ea-f7700d3313fb tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Task: {'id': task-2087331, 'name': Rename_Task, 'duration_secs': 1.148366} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2040.458387] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-147d4716-cd46-48dd-a8ea-f7700d3313fb tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] [instance: 6388f912-ae70-4e8f-b8e4-ceb02e0f8a51] Powering on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2040.458655] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6df857c4-cab6-48b3-863a-422d6b469e69 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2040.464552] env[63279]: DEBUG oslo_vmware.api [None req-147d4716-cd46-48dd-a8ea-f7700d3313fb tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Waiting for the task: (returnval){ [ 2040.464552] env[63279]: value = "task-2087333" [ 2040.464552] env[63279]: _type = "Task" [ 2040.464552] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2040.472115] env[63279]: DEBUG oslo_vmware.api [None req-147d4716-cd46-48dd-a8ea-f7700d3313fb tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Task: {'id': task-2087333, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2040.645028] env[63279]: DEBUG nova.scheduler.client.report [None req-80ef8ca7-1c6d-429c-800c-69ee59053ed2 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Inventory has not changed for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2040.808638] env[63279]: DEBUG oslo_vmware.api [None req-17f8ab0f-73cf-4901-a75b-de05d0a35bf6 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Task: {'id': task-2087332, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.499424} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2040.811683] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-17f8ab0f-73cf-4901-a75b-de05d0a35bf6 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] 7db0c32d-36a4-4452-bb07-06de0c93ab50/7db0c32d-36a4-4452-bb07-06de0c93ab50.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2040.811975] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-17f8ab0f-73cf-4901-a75b-de05d0a35bf6 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: 7db0c32d-36a4-4452-bb07-06de0c93ab50] Extending root virtual disk to 1048576 {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2040.812305] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-fb526272-1a2f-4397-b058-673390054588 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2040.822989] env[63279]: DEBUG oslo_vmware.api [None req-17f8ab0f-73cf-4901-a75b-de05d0a35bf6 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Waiting for the task: (returnval){ [ 2040.822989] env[63279]: value = "task-2087334" [ 2040.822989] env[63279]: _type = "Task" [ 2040.822989] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2040.836171] env[63279]: DEBUG oslo_vmware.api [None req-17f8ab0f-73cf-4901-a75b-de05d0a35bf6 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Task: {'id': task-2087334, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2040.856683] env[63279]: DEBUG oslo_concurrency.lockutils [None req-5e3f5540-3a91-4676-9ed7-83f8c35479b1 tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] Acquiring lock "ee1b4746-49ac-425c-8219-4d54cb34abe0" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2040.857064] env[63279]: DEBUG oslo_concurrency.lockutils [None req-5e3f5540-3a91-4676-9ed7-83f8c35479b1 tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] Lock "ee1b4746-49ac-425c-8219-4d54cb34abe0" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2040.901408] env[63279]: DEBUG oslo_concurrency.lockutils [req-f3b5d71a-0776-4416-b0a5-547455c2b2fe req-ab3b3b59-8e70-487c-bcb2-77afc7c1b553 service nova] Releasing lock "refresh_cache-5b5f87cb-cf35-418f-b5bd-b953524a285c" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2040.974395] env[63279]: DEBUG oslo_vmware.api [None req-147d4716-cd46-48dd-a8ea-f7700d3313fb tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Task: {'id': task-2087333, 'name': PowerOnVM_Task, 'duration_secs': 0.506733} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2040.974818] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-147d4716-cd46-48dd-a8ea-f7700d3313fb tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] [instance: 6388f912-ae70-4e8f-b8e4-ceb02e0f8a51] Powered on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2040.974889] env[63279]: DEBUG nova.compute.manager [None req-147d4716-cd46-48dd-a8ea-f7700d3313fb tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] [instance: 6388f912-ae70-4e8f-b8e4-ceb02e0f8a51] Checking state {{(pid=63279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2040.977351] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f720ab7-dfa2-4bc1-abe0-dd0c2cc41289 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2041.151289] env[63279]: DEBUG oslo_concurrency.lockutils [None req-80ef8ca7-1c6d-429c-800c-69ee59053ed2 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.559s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2041.151777] env[63279]: DEBUG nova.compute.manager [None req-80ef8ca7-1c6d-429c-800c-69ee59053ed2 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: b981ac83-6c23-4d44-bd28-12da30d746bd] Start building networks asynchronously for instance. 
{{(pid=63279) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 2041.154669] env[63279]: DEBUG oslo_concurrency.lockutils [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 28.188s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2041.156663] env[63279]: INFO nova.compute.claims [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] [instance: 795560b4-ccdc-4012-8130-042dcb94085f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2041.339027] env[63279]: DEBUG oslo_vmware.api [None req-17f8ab0f-73cf-4901-a75b-de05d0a35bf6 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Task: {'id': task-2087334, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.072913} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2041.339027] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-17f8ab0f-73cf-4901-a75b-de05d0a35bf6 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: 7db0c32d-36a4-4452-bb07-06de0c93ab50] Extended root virtual disk {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2041.339027] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b01aefcb-b213-421a-ab9b-ddcc16f7ce40 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2041.363021] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-17f8ab0f-73cf-4901-a75b-de05d0a35bf6 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: 7db0c32d-36a4-4452-bb07-06de0c93ab50] Reconfiguring VM instance instance-0000003a to attach disk [datastore1] 7db0c32d-36a4-4452-bb07-06de0c93ab50/7db0c32d-36a4-4452-bb07-06de0c93ab50.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2041.363407] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-00aa585c-f44c-43db-bc7b-7b7067ae804f {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2041.384983] env[63279]: DEBUG oslo_vmware.api [None req-17f8ab0f-73cf-4901-a75b-de05d0a35bf6 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Waiting for the task: (returnval){ [ 2041.384983] env[63279]: value = "task-2087335" [ 2041.384983] env[63279]: _type = "Task" [ 2041.384983] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2041.395817] env[63279]: DEBUG oslo_vmware.api [None req-17f8ab0f-73cf-4901-a75b-de05d0a35bf6 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Task: {'id': task-2087335, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2041.491430] env[63279]: DEBUG oslo_concurrency.lockutils [None req-147d4716-cd46-48dd-a8ea-f7700d3313fb tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2041.661324] env[63279]: DEBUG nova.compute.utils [None req-80ef8ca7-1c6d-429c-800c-69ee59053ed2 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Using /dev/sd instead of None {{(pid=63279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2041.663018] env[63279]: DEBUG nova.compute.manager [None req-80ef8ca7-1c6d-429c-800c-69ee59053ed2 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: b981ac83-6c23-4d44-bd28-12da30d746bd] Allocating IP information in the background. {{(pid=63279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 2041.663112] env[63279]: DEBUG nova.network.neutron [None req-80ef8ca7-1c6d-429c-800c-69ee59053ed2 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: b981ac83-6c23-4d44-bd28-12da30d746bd] allocate_for_instance() {{(pid=63279) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2041.728559] env[63279]: DEBUG nova.policy [None req-80ef8ca7-1c6d-429c-800c-69ee59053ed2 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ab1648b445ab458d92404e3a5ddb8619', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7c98de1240f14b058e58f6a707096ef1', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63279) authorize /opt/stack/nova/nova/policy.py:192}} [ 2041.896192] env[63279]: DEBUG oslo_vmware.api [None req-17f8ab0f-73cf-4901-a75b-de05d0a35bf6 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Task: {'id': task-2087335, 'name': ReconfigVM_Task, 'duration_secs': 0.322439} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2041.896493] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-17f8ab0f-73cf-4901-a75b-de05d0a35bf6 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: 7db0c32d-36a4-4452-bb07-06de0c93ab50] Reconfigured VM instance instance-0000003a to attach disk [datastore1] 7db0c32d-36a4-4452-bb07-06de0c93ab50/7db0c32d-36a4-4452-bb07-06de0c93ab50.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2041.897161] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-240b9d60-538b-45cf-a550-29affecebc97 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2041.903292] env[63279]: DEBUG oslo_vmware.api [None req-17f8ab0f-73cf-4901-a75b-de05d0a35bf6 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Waiting for the task: (returnval){ [ 2041.903292] env[63279]: value = "task-2087336" [ 2041.903292] env[63279]: _type = "Task" [ 2041.903292] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2041.911156] env[63279]: DEBUG oslo_vmware.api [None req-17f8ab0f-73cf-4901-a75b-de05d0a35bf6 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Task: {'id': task-2087336, 'name': Rename_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2042.069731] env[63279]: DEBUG nova.network.neutron [None req-80ef8ca7-1c6d-429c-800c-69ee59053ed2 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: b981ac83-6c23-4d44-bd28-12da30d746bd] Successfully created port: 61682992-df73-44ba-b302-ba8e00c82f95 {{(pid=63279) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2042.168097] env[63279]: DEBUG nova.compute.manager [None req-80ef8ca7-1c6d-429c-800c-69ee59053ed2 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: b981ac83-6c23-4d44-bd28-12da30d746bd] Start building block device mappings for instance. 
{{(pid=63279) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 2042.202802] env[63279]: DEBUG oslo_concurrency.lockutils [None req-f8f6e0d5-0b27-4027-979b-11e64063ee2d tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Acquiring lock "58392790-b297-4894-8d81-e5cbda69872b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2042.203072] env[63279]: DEBUG oslo_concurrency.lockutils [None req-f8f6e0d5-0b27-4027-979b-11e64063ee2d tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Lock "58392790-b297-4894-8d81-e5cbda69872b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2042.203349] env[63279]: DEBUG oslo_concurrency.lockutils [None req-f8f6e0d5-0b27-4027-979b-11e64063ee2d tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Acquiring lock "58392790-b297-4894-8d81-e5cbda69872b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2042.203474] env[63279]: DEBUG oslo_concurrency.lockutils [None req-f8f6e0d5-0b27-4027-979b-11e64063ee2d tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Lock "58392790-b297-4894-8d81-e5cbda69872b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2042.203645] env[63279]: DEBUG oslo_concurrency.lockutils [None req-f8f6e0d5-0b27-4027-979b-11e64063ee2d tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Lock "58392790-b297-4894-8d81-e5cbda69872b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2042.206531] env[63279]: INFO nova.compute.manager [None req-f8f6e0d5-0b27-4027-979b-11e64063ee2d tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] [instance: 58392790-b297-4894-8d81-e5cbda69872b] Terminating instance [ 2042.415697] env[63279]: DEBUG oslo_vmware.api [None req-17f8ab0f-73cf-4901-a75b-de05d0a35bf6 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Task: {'id': task-2087336, 'name': Rename_Task, 'duration_secs': 0.137528} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2042.416051] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-17f8ab0f-73cf-4901-a75b-de05d0a35bf6 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: 7db0c32d-36a4-4452-bb07-06de0c93ab50] Powering on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2042.416938] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b830e62e-3cc3-4de7-b33c-ecad36becc7a {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2042.423690] env[63279]: DEBUG oslo_vmware.api [None req-17f8ab0f-73cf-4901-a75b-de05d0a35bf6 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Waiting for the task: (returnval){ [ 2042.423690] env[63279]: value = "task-2087337" [ 2042.423690] env[63279]: _type = "Task" [ 2042.423690] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2042.433201] env[63279]: DEBUG oslo_vmware.api [None req-17f8ab0f-73cf-4901-a75b-de05d0a35bf6 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Task: {'id': task-2087337, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2042.639168] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ada443ed-4e2f-43fb-899e-c1db8e16cd03 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2042.647035] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53fc5232-e485-4ed1-b9a8-2503f3432481 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2043.384421] env[63279]: DEBUG nova.compute.manager [None req-f8f6e0d5-0b27-4027-979b-11e64063ee2d tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] [instance: 58392790-b297-4894-8d81-e5cbda69872b] Start destroying the instance on the hypervisor. 
{{(pid=63279) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2043.384421] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-f8f6e0d5-0b27-4027-979b-11e64063ee2d tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] [instance: 58392790-b297-4894-8d81-e5cbda69872b] Destroying instance {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2043.388018] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2043.388018] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b7f5969-db50-44ad-afb4-1560fc9dac28 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2043.392256] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2043.394186] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b09f374f-5509-4eac-b356-e5362471c87b {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2043.407663] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-f8f6e0d5-0b27-4027-979b-11e64063ee2d tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] [instance: 58392790-b297-4894-8d81-e5cbda69872b] Powering off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2043.407966] env[63279]: DEBUG oslo_vmware.api [None req-17f8ab0f-73cf-4901-a75b-de05d0a35bf6 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Task: {'id': task-2087337, 'name': PowerOnVM_Task, 'duration_secs': 0.462726} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2043.408265] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-74cc2d58-b963-4362-b7ed-c423e513bd6f {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2043.413226] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-17f8ab0f-73cf-4901-a75b-de05d0a35bf6 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: 7db0c32d-36a4-4452-bb07-06de0c93ab50] Powered on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2043.413226] env[63279]: INFO nova.compute.manager [None req-17f8ab0f-73cf-4901-a75b-de05d0a35bf6 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: 7db0c32d-36a4-4452-bb07-06de0c93ab50] Took 8.32 seconds to spawn the instance on the hypervisor. 
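The records above and below repeat the same poll-until-complete pattern: a vCenter task is created (CreateVM_Task, ReconfigVM_Task, Rename_Task, PowerOnVM_Task, ...), the driver logs "Waiting for the task", polls it while reporting "progress is N%", and finally logs "completed successfully" with a duration. The following is a minimal, self-contained Python sketch of that polling loop for illustration only; it is not the oslo.vmware implementation, and fetch_task_state, TaskTimeout, and the (state, progress, error) tuple it assumes are hypothetical.

    # Illustrative sketch of the poll-until-complete pattern reflected by the
    # "Waiting for the task ... progress is N% ... completed successfully"
    # records in this log. Not the oslo.vmware API; fetch_task_state() and
    # TaskTimeout are hypothetical stand-ins.
    import time


    class TaskTimeout(Exception):
        """Raised when a task does not finish within the allowed time."""


    def wait_for_task(task_id, fetch_task_state, interval=0.5, timeout=300.0):
        """Poll fetch_task_state(task_id) until it reports success or error.

        fetch_task_state is assumed to return a (state, progress, error)
        tuple, where state is one of 'queued', 'running', 'success', 'error'.
        """
        deadline = time.monotonic() + timeout
        while time.monotonic() < deadline:
            state, progress, error = fetch_task_state(task_id)
            print(f"Task {task_id}: {state}, progress is {progress}%")
            if state == "success":
                return
            if state == "error":
                raise RuntimeError(f"Task {task_id} failed: {error}")
            time.sleep(interval)  # back off before polling again
        raise TaskTimeout(f"Task {task_id} did not complete within {timeout}s")


    # Example usage with a fake task source that succeeds on the third poll:
    if __name__ == "__main__":
        polls = iter([("running", 0, None), ("running", 99, None),
                      ("success", 100, None)])
        wait_for_task("task-0000001", lambda _id: next(polls), interval=0.0)

The sketch prints one line per poll, mirroring the "progress is N%" entries seen for each task above; the real driver additionally logs the elapsed duration_secs once the task result is returned.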
[ 2043.413226] env[63279]: DEBUG nova.compute.manager [None req-17f8ab0f-73cf-4901-a75b-de05d0a35bf6 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: 7db0c32d-36a4-4452-bb07-06de0c93ab50] Checking state {{(pid=63279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2043.413226] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87e8633a-cb1e-41b6-9a1e-0cae92c8acfe {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2043.415271] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f31a4397-f03f-43a9-9682-c1753c86486c {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2043.432723] env[63279]: DEBUG nova.compute.provider_tree [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] Updating inventory in ProviderTree for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2043.434098] env[63279]: DEBUG oslo_vmware.api [None req-f8f6e0d5-0b27-4027-979b-11e64063ee2d tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Waiting for the task: (returnval){ [ 2043.434098] env[63279]: value = "task-2087338" [ 2043.434098] env[63279]: _type = "Task" [ 2043.434098] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2043.443811] env[63279]: DEBUG oslo_vmware.api [None req-f8f6e0d5-0b27-4027-979b-11e64063ee2d tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Task: {'id': task-2087338, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2043.887426] env[63279]: DEBUG nova.compute.manager [None req-80ef8ca7-1c6d-429c-800c-69ee59053ed2 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: b981ac83-6c23-4d44-bd28-12da30d746bd] Start spawning the instance on the hypervisor. 
{{(pid=63279) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 2043.903120] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2043.903120] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Starting heal instance info cache {{(pid=63279) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10447}} [ 2043.917581] env[63279]: DEBUG nova.virt.hardware [None req-80ef8ca7-1c6d-429c-800c-69ee59053ed2 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-13T17:30:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-13T17:30:01Z,direct_url=,disk_format='vmdk',id=30887889-e45b-4f67-8b3c-16216e594a90,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a935e86eee0a4c38adfe0367d2097a61',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-13T17:30:02Z,virtual_size=,visibility=), allow threads: False {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2043.917910] env[63279]: DEBUG nova.virt.hardware [None req-80ef8ca7-1c6d-429c-800c-69ee59053ed2 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Flavor limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2043.917984] env[63279]: DEBUG nova.virt.hardware [None req-80ef8ca7-1c6d-429c-800c-69ee59053ed2 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Image limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2043.918178] env[63279]: DEBUG nova.virt.hardware [None req-80ef8ca7-1c6d-429c-800c-69ee59053ed2 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Flavor pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2043.918333] env[63279]: DEBUG nova.virt.hardware [None req-80ef8ca7-1c6d-429c-800c-69ee59053ed2 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Image pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2043.918483] env[63279]: DEBUG nova.virt.hardware [None req-80ef8ca7-1c6d-429c-800c-69ee59053ed2 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2043.918705] env[63279]: DEBUG nova.virt.hardware [None req-80ef8ca7-1c6d-429c-800c-69ee59053ed2 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Topology preferred 
VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 2043.918880] env[63279]: DEBUG nova.virt.hardware [None req-80ef8ca7-1c6d-429c-800c-69ee59053ed2 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 2043.919088] env[63279]: DEBUG nova.virt.hardware [None req-80ef8ca7-1c6d-429c-800c-69ee59053ed2 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Got 1 possible topologies {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2043.919273] env[63279]: DEBUG nova.virt.hardware [None req-80ef8ca7-1c6d-429c-800c-69ee59053ed2 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2043.919453] env[63279]: DEBUG nova.virt.hardware [None req-80ef8ca7-1c6d-429c-800c-69ee59053ed2 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2043.920374] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61fe8ac9-b982-44c9-9ba2-14c538fc4862 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2043.933022] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fceb52f9-b46e-40a9-81cd-7e0af22dfbb3 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2043.949284] env[63279]: INFO nova.compute.manager [None req-17f8ab0f-73cf-4901-a75b-de05d0a35bf6 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: 7db0c32d-36a4-4452-bb07-06de0c93ab50] Took 37.48 seconds to build instance. [ 2043.964686] env[63279]: DEBUG oslo_vmware.api [None req-f8f6e0d5-0b27-4027-979b-11e64063ee2d tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Task: {'id': task-2087338, 'name': PowerOffVM_Task, 'duration_secs': 0.243369} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2043.966067] env[63279]: ERROR nova.scheduler.client.report [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] [req-6f1da1db-cc4c-421b-87e5-3f329a0af7f6] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 0ba7c625-a0fc-4d3c-b804-196d00f00137. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-6f1da1db-cc4c-421b-87e5-3f329a0af7f6"}]} [ 2043.966067] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-f8f6e0d5-0b27-4027-979b-11e64063ee2d tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] [instance: 58392790-b297-4894-8d81-e5cbda69872b] Powered off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2043.966067] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-f8f6e0d5-0b27-4027-979b-11e64063ee2d tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] [instance: 58392790-b297-4894-8d81-e5cbda69872b] Unregistering the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2043.969078] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-84198fc0-fca5-4d5f-8eca-0150814e7c93 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2043.986312] env[63279]: DEBUG nova.scheduler.client.report [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] Refreshing inventories for resource provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 2044.001871] env[63279]: DEBUG nova.scheduler.client.report [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] Updating ProviderTree inventory for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 2044.002128] env[63279]: DEBUG nova.compute.provider_tree [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] Updating inventory in ProviderTree for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2044.013753] env[63279]: DEBUG nova.scheduler.client.report [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] Refreshing aggregate associations for resource provider 0ba7c625-a0fc-4d3c-b804-196d00f00137, aggregates: None {{(pid=63279) _refresh_associations 
/opt/stack/nova/nova/scheduler/client/report.py:831}} [ 2044.036172] env[63279]: DEBUG nova.scheduler.client.report [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] Refreshing trait associations for resource provider 0ba7c625-a0fc-4d3c-b804-196d00f00137, traits: COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK {{(pid=63279) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 2044.451097] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Acquiring lock "refresh_cache-5656c853-ac83-47be-83c4-979a9e87ab91" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2044.451368] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Acquired lock "refresh_cache-5656c853-ac83-47be-83c4-979a9e87ab91" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2044.451423] env[63279]: DEBUG nova.network.neutron [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] [instance: 5656c853-ac83-47be-83c4-979a9e87ab91] Forcefully refreshing network info cache for instance {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}} [ 2044.452799] env[63279]: DEBUG oslo_concurrency.lockutils [None req-17f8ab0f-73cf-4901-a75b-de05d0a35bf6 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Lock "7db0c32d-36a4-4452-bb07-06de0c93ab50" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 76.188s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2044.456206] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa9676bc-63b6-4ca9-90d0-99de932af625 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2044.465014] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10d2cb03-cc1d-446f-8ecf-85745cfe4522 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2044.495987] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-937be264-14e3-4148-9246-5c2027a9992c {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2044.504546] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09bd7511-7c0c-4d25-9db8-d05b522409a6 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2044.520029] env[63279]: DEBUG nova.compute.provider_tree [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] Updating inventory in ProviderTree for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 
'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2044.960274] env[63279]: DEBUG nova.compute.manager [None req-106d6061-d6a3-483e-96fd-6e6c9f393126 tempest-ServersTestJSON-1697234190 tempest-ServersTestJSON-1697234190-project-member] [instance: c8b42e3b-b841-4b79-a4f3-ef62577d4902] Starting instance... {{(pid=63279) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 2045.052284] env[63279]: DEBUG nova.scheduler.client.report [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] Updated inventory for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 with generation 92 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 2045.052558] env[63279]: DEBUG nova.compute.provider_tree [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] Updating resource provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 generation from 92 to 93 during operation: update_inventory {{(pid=63279) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 2045.052741] env[63279]: DEBUG nova.compute.provider_tree [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] Updating inventory in ProviderTree for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2045.216649] env[63279]: DEBUG oslo_concurrency.lockutils [None req-23acf72a-0ef4-4dcd-a4d1-bf58f42f4af4 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Acquiring lock "7db0c32d-36a4-4452-bb07-06de0c93ab50" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2045.216893] env[63279]: DEBUG oslo_concurrency.lockutils [None req-23acf72a-0ef4-4dcd-a4d1-bf58f42f4af4 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Lock "7db0c32d-36a4-4452-bb07-06de0c93ab50" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2045.484605] env[63279]: DEBUG oslo_concurrency.lockutils [None 
req-106d6061-d6a3-483e-96fd-6e6c9f393126 tempest-ServersTestJSON-1697234190 tempest-ServersTestJSON-1697234190-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2045.558024] env[63279]: DEBUG oslo_concurrency.lockutils [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 4.403s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2045.558545] env[63279]: DEBUG nova.compute.manager [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] [instance: 795560b4-ccdc-4012-8130-042dcb94085f] Start building networks asynchronously for instance. {{(pid=63279) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 2045.561298] env[63279]: DEBUG oslo_concurrency.lockutils [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 31.482s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2045.564309] env[63279]: INFO nova.compute.claims [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] [instance: cd00cb0e-30e5-4a0c-8612-ea92e5e32edd] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2045.710306] env[63279]: DEBUG nova.network.neutron [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] [instance: 5656c853-ac83-47be-83c4-979a9e87ab91] Updating instance_info_cache with network_info: [{"id": "2cc4a33a-bd88-4aec-a588-7c821bebf971", "address": "fa:16:3e:de:77:2e", "network": {"id": "1e875730-8e0e-4907-a6d2-776025ed7ab9", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "a935e86eee0a4c38adfe0367d2097a61", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "32028d02-abaa-4071-bc65-1460f5c772a8", "external-id": "nsx-vlan-transportzone-558", "segmentation_id": 558, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2cc4a33a-bd", "ovs_interfaceid": "2cc4a33a-bd88-4aec-a588-7c821bebf971", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2045.720776] env[63279]: DEBUG nova.compute.utils [None req-23acf72a-0ef4-4dcd-a4d1-bf58f42f4af4 tempest-DeleteServersTestJSON-2126661104 
tempest-DeleteServersTestJSON-2126661104-project-member] Using /dev/sd instead of None {{(pid=63279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2046.069750] env[63279]: DEBUG nova.compute.utils [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] Using /dev/sd instead of None {{(pid=63279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2046.071435] env[63279]: DEBUG nova.compute.manager [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] [instance: 795560b4-ccdc-4012-8130-042dcb94085f] Allocating IP information in the background. {{(pid=63279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 2046.071547] env[63279]: DEBUG nova.network.neutron [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] [instance: 795560b4-ccdc-4012-8130-042dcb94085f] allocate_for_instance() {{(pid=63279) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2046.114431] env[63279]: DEBUG nova.policy [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '26c5a007336541009b326b7ba2ee272a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'eddf7a9795f344c3be977b267944f1e7', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63279) authorize /opt/stack/nova/nova/policy.py:192}} [ 2046.214041] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Releasing lock "refresh_cache-5656c853-ac83-47be-83c4-979a9e87ab91" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2046.214041] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] [instance: 5656c853-ac83-47be-83c4-979a9e87ab91] Updated the network info_cache for instance {{(pid=63279) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10518}} [ 2046.215549] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2046.215549] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2046.215549] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2046.215549] env[63279]: DEBUG oslo_service.periodic_task [None 
req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2046.215549] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2046.215549] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2046.215820] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=63279) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11066}} [ 2046.215820] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager.update_available_resource {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2046.223277] env[63279]: DEBUG oslo_concurrency.lockutils [None req-23acf72a-0ef4-4dcd-a4d1-bf58f42f4af4 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Lock "7db0c32d-36a4-4452-bb07-06de0c93ab50" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.006s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2046.381736] env[63279]: DEBUG nova.network.neutron [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] [instance: 795560b4-ccdc-4012-8130-042dcb94085f] Successfully created port: d7d164bc-29f6-4434-8980-357d7e34c723 {{(pid=63279) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2046.576395] env[63279]: DEBUG nova.compute.manager [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] [instance: 795560b4-ccdc-4012-8130-042dcb94085f] Start building block device mappings for instance. 
{{(pid=63279) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 2046.718936] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2047.024100] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65d52a6a-6662-4add-9391-31e8caed8b82 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2047.031892] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66ffaaad-4191-4567-b50d-90f7952f729f {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2047.062412] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-915ec062-dfb7-4f2c-b7c8-5e0d6f3147ec {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2047.071358] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94cee4e6-5bd2-4d89-b5f8-bb6dd304dd4b {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2047.087705] env[63279]: DEBUG nova.compute.provider_tree [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] Inventory has not changed in ProviderTree for provider: 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2047.292986] env[63279]: DEBUG oslo_concurrency.lockutils [None req-23acf72a-0ef4-4dcd-a4d1-bf58f42f4af4 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Acquiring lock "7db0c32d-36a4-4452-bb07-06de0c93ab50" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2047.293298] env[63279]: DEBUG oslo_concurrency.lockutils [None req-23acf72a-0ef4-4dcd-a4d1-bf58f42f4af4 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Lock "7db0c32d-36a4-4452-bb07-06de0c93ab50" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2047.293610] env[63279]: INFO nova.compute.manager [None req-23acf72a-0ef4-4dcd-a4d1-bf58f42f4af4 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: 7db0c32d-36a4-4452-bb07-06de0c93ab50] Attaching volume 6c280eb9-3e63-43e5-841d-7866ee508507 to /dev/sdb [ 2047.326010] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd7c1ecd-85be-4d91-a7da-cbe53bd1d788 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2047.334284] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b215f6b5-84f1-4bf6-a5d2-e236b580168f {{(pid=63279) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2047.347486] env[63279]: DEBUG nova.virt.block_device [None req-23acf72a-0ef4-4dcd-a4d1-bf58f42f4af4 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: 7db0c32d-36a4-4452-bb07-06de0c93ab50] Updating existing volume attachment record: 8621eee3-06e8-45ea-8db8-8314cb04fa96 {{(pid=63279) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 2047.594652] env[63279]: DEBUG nova.scheduler.client.report [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] Inventory has not changed for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2047.598632] env[63279]: DEBUG nova.compute.manager [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] [instance: 795560b4-ccdc-4012-8130-042dcb94085f] Start spawning the instance on the hypervisor. {{(pid=63279) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 2047.626675] env[63279]: DEBUG nova.virt.hardware [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-13T17:30:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-13T17:30:01Z,direct_url=,disk_format='vmdk',id=30887889-e45b-4f67-8b3c-16216e594a90,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a935e86eee0a4c38adfe0367d2097a61',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-13T17:30:02Z,virtual_size=,visibility=), allow threads: False {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2047.626927] env[63279]: DEBUG nova.virt.hardware [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] Flavor limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2047.627100] env[63279]: DEBUG nova.virt.hardware [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] Image limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2047.627293] env[63279]: DEBUG nova.virt.hardware [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 
tempest-ListServersNegativeTestJSON-168309111-project-member] Flavor pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2047.627445] env[63279]: DEBUG nova.virt.hardware [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] Image pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2047.627597] env[63279]: DEBUG nova.virt.hardware [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2047.627835] env[63279]: DEBUG nova.virt.hardware [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 2047.628026] env[63279]: DEBUG nova.virt.hardware [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 2047.628223] env[63279]: DEBUG nova.virt.hardware [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] Got 1 possible topologies {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2047.628402] env[63279]: DEBUG nova.virt.hardware [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2047.628591] env[63279]: DEBUG nova.virt.hardware [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2047.629515] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7223f00d-b0f4-4b7d-b6f6-9f9e22267629 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2047.639105] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2fb6216-d131-481d-826e-5370e24216dd {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2048.102878] env[63279]: DEBUG oslo_concurrency.lockutils [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.541s 
{{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2048.104936] env[63279]: DEBUG nova.compute.manager [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] [instance: cd00cb0e-30e5-4a0c-8612-ea92e5e32edd] Start building networks asynchronously for instance. {{(pid=63279) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 2048.108867] env[63279]: DEBUG oslo_concurrency.lockutils [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 33.419s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2048.110388] env[63279]: INFO nova.compute.claims [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] [instance: fd9b1666-8e06-4ed0-9187-05a40e136a1d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2048.621135] env[63279]: DEBUG nova.compute.utils [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] Using /dev/sd instead of None {{(pid=63279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2048.622928] env[63279]: DEBUG nova.compute.manager [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] [instance: cd00cb0e-30e5-4a0c-8612-ea92e5e32edd] Allocating IP information in the background. 
{{(pid=63279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 2048.622928] env[63279]: DEBUG nova.network.neutron [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] [instance: cd00cb0e-30e5-4a0c-8612-ea92e5e32edd] allocate_for_instance() {{(pid=63279) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2048.687969] env[63279]: DEBUG nova.policy [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '26c5a007336541009b326b7ba2ee272a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'eddf7a9795f344c3be977b267944f1e7', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63279) authorize /opt/stack/nova/nova/policy.py:192}} [ 2048.707795] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-f8f6e0d5-0b27-4027-979b-11e64063ee2d tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] [instance: 58392790-b297-4894-8d81-e5cbda69872b] Unregistered the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2048.708044] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-f8f6e0d5-0b27-4027-979b-11e64063ee2d tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] [instance: 58392790-b297-4894-8d81-e5cbda69872b] Deleting contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2048.708239] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-f8f6e0d5-0b27-4027-979b-11e64063ee2d tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Deleting the datastore file [datastore1] 58392790-b297-4894-8d81-e5cbda69872b {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2048.708524] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-27e85f7e-8e60-410c-8306-b9d0c6ab9da6 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2048.715994] env[63279]: DEBUG oslo_vmware.api [None req-f8f6e0d5-0b27-4027-979b-11e64063ee2d tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Waiting for the task: (returnval){ [ 2048.715994] env[63279]: value = "task-2087343" [ 2048.715994] env[63279]: _type = "Task" [ 2048.715994] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2048.724536] env[63279]: DEBUG oslo_vmware.api [None req-f8f6e0d5-0b27-4027-979b-11e64063ee2d tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Task: {'id': task-2087343, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2049.083705] env[63279]: DEBUG nova.network.neutron [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] [instance: cd00cb0e-30e5-4a0c-8612-ea92e5e32edd] Successfully created port: 4aff58e1-ec9c-4691-a371-76a7d2e41303 {{(pid=63279) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2049.126618] env[63279]: DEBUG nova.compute.manager [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] [instance: cd00cb0e-30e5-4a0c-8612-ea92e5e32edd] Start building block device mappings for instance. {{(pid=63279) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 2049.230261] env[63279]: DEBUG oslo_vmware.api [None req-f8f6e0d5-0b27-4027-979b-11e64063ee2d tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Task: {'id': task-2087343, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.151907} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2049.230532] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-f8f6e0d5-0b27-4027-979b-11e64063ee2d tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Deleted the datastore file {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2049.230779] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-f8f6e0d5-0b27-4027-979b-11e64063ee2d tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] [instance: 58392790-b297-4894-8d81-e5cbda69872b] Deleted contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2049.231211] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-f8f6e0d5-0b27-4027-979b-11e64063ee2d tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] [instance: 58392790-b297-4894-8d81-e5cbda69872b] Instance destroyed {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2049.231211] env[63279]: INFO nova.compute.manager [None req-f8f6e0d5-0b27-4027-979b-11e64063ee2d tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] [instance: 58392790-b297-4894-8d81-e5cbda69872b] Took 5.85 seconds to destroy the instance on the hypervisor. [ 2049.231393] env[63279]: DEBUG oslo.service.loopingcall [None req-f8f6e0d5-0b27-4027-979b-11e64063ee2d tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2049.231611] env[63279]: DEBUG nova.compute.manager [-] [instance: 58392790-b297-4894-8d81-e5cbda69872b] Deallocating network for instance {{(pid=63279) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2049.231707] env[63279]: DEBUG nova.network.neutron [-] [instance: 58392790-b297-4894-8d81-e5cbda69872b] deallocate_for_instance() {{(pid=63279) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2049.640336] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6eb06d4-102d-46f9-ab72-7b91a3434336 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2049.647786] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd725c54-ecb6-4d05-8011-b349017bc4eb {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2049.682234] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4ab6f03-d975-42e9-a763-adbefcc34e69 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2049.687102] env[63279]: DEBUG nova.compute.manager [req-17ccfad8-4aa6-4fdf-a9bf-92af1d86855d req-4c9ef380-46a4-40cd-9cc4-73669b52e13f service nova] [instance: b981ac83-6c23-4d44-bd28-12da30d746bd] Received event network-vif-plugged-61682992-df73-44ba-b302-ba8e00c82f95 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2049.687340] env[63279]: DEBUG oslo_concurrency.lockutils [req-17ccfad8-4aa6-4fdf-a9bf-92af1d86855d req-4c9ef380-46a4-40cd-9cc4-73669b52e13f service nova] Acquiring lock "b981ac83-6c23-4d44-bd28-12da30d746bd-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2049.687556] env[63279]: DEBUG oslo_concurrency.lockutils [req-17ccfad8-4aa6-4fdf-a9bf-92af1d86855d req-4c9ef380-46a4-40cd-9cc4-73669b52e13f service nova] Lock "b981ac83-6c23-4d44-bd28-12da30d746bd-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2049.687732] env[63279]: DEBUG oslo_concurrency.lockutils [req-17ccfad8-4aa6-4fdf-a9bf-92af1d86855d req-4c9ef380-46a4-40cd-9cc4-73669b52e13f service nova] Lock "b981ac83-6c23-4d44-bd28-12da30d746bd-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2049.687902] env[63279]: DEBUG nova.compute.manager [req-17ccfad8-4aa6-4fdf-a9bf-92af1d86855d req-4c9ef380-46a4-40cd-9cc4-73669b52e13f service nova] [instance: b981ac83-6c23-4d44-bd28-12da30d746bd] No waiting events found dispatching network-vif-plugged-61682992-df73-44ba-b302-ba8e00c82f95 {{(pid=63279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 2049.688081] env[63279]: WARNING nova.compute.manager [req-17ccfad8-4aa6-4fdf-a9bf-92af1d86855d req-4c9ef380-46a4-40cd-9cc4-73669b52e13f service nova] [instance: b981ac83-6c23-4d44-bd28-12da30d746bd] Received unexpected event 
network-vif-plugged-61682992-df73-44ba-b302-ba8e00c82f95 for instance with vm_state building and task_state spawning. [ 2049.694697] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78854f4b-8489-4935-abbd-2f25f1b8a391 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2049.709905] env[63279]: DEBUG nova.compute.provider_tree [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] Updating inventory in ProviderTree for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2049.764567] env[63279]: DEBUG nova.compute.manager [req-b3d9c21d-64e3-4e5f-a7c4-fc99001bad34 req-8431614a-03c5-4633-8ec2-85c6931332af service nova] [instance: 58392790-b297-4894-8d81-e5cbda69872b] Received event network-vif-deleted-23e2695e-f865-4e3f-9f26-d5bd599cf889 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2049.764947] env[63279]: INFO nova.compute.manager [req-b3d9c21d-64e3-4e5f-a7c4-fc99001bad34 req-8431614a-03c5-4633-8ec2-85c6931332af service nova] [instance: 58392790-b297-4894-8d81-e5cbda69872b] Neutron deleted interface 23e2695e-f865-4e3f-9f26-d5bd599cf889; detaching it from the instance and deleting it from the info cache [ 2049.764947] env[63279]: DEBUG nova.network.neutron [req-b3d9c21d-64e3-4e5f-a7c4-fc99001bad34 req-8431614a-03c5-4633-8ec2-85c6931332af service nova] [instance: 58392790-b297-4894-8d81-e5cbda69872b] Updating instance_info_cache with network_info: [] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2049.895500] env[63279]: DEBUG nova.network.neutron [None req-80ef8ca7-1c6d-429c-800c-69ee59053ed2 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: b981ac83-6c23-4d44-bd28-12da30d746bd] Successfully updated port: 61682992-df73-44ba-b302-ba8e00c82f95 {{(pid=63279) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2050.097015] env[63279]: DEBUG nova.network.neutron [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] [instance: 795560b4-ccdc-4012-8130-042dcb94085f] Successfully updated port: d7d164bc-29f6-4434-8980-357d7e34c723 {{(pid=63279) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2050.144663] env[63279]: DEBUG nova.compute.manager [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] [instance: cd00cb0e-30e5-4a0c-8612-ea92e5e32edd] Start spawning the instance on the hypervisor. 
{{(pid=63279) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 2050.146786] env[63279]: DEBUG nova.network.neutron [-] [instance: 58392790-b297-4894-8d81-e5cbda69872b] Updating instance_info_cache with network_info: [] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2050.177799] env[63279]: DEBUG nova.virt.hardware [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-13T17:30:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-13T17:30:01Z,direct_url=,disk_format='vmdk',id=30887889-e45b-4f67-8b3c-16216e594a90,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a935e86eee0a4c38adfe0367d2097a61',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-13T17:30:02Z,virtual_size=,visibility=), allow threads: False {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2050.178062] env[63279]: DEBUG nova.virt.hardware [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] Flavor limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2050.178229] env[63279]: DEBUG nova.virt.hardware [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] Image limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2050.178420] env[63279]: DEBUG nova.virt.hardware [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] Flavor pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2050.178570] env[63279]: DEBUG nova.virt.hardware [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] Image pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2050.178723] env[63279]: DEBUG nova.virt.hardware [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2050.178952] env[63279]: DEBUG nova.virt.hardware [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 2050.179146] env[63279]: DEBUG nova.virt.hardware [None 
req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 2050.179334] env[63279]: DEBUG nova.virt.hardware [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] Got 1 possible topologies {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2050.179504] env[63279]: DEBUG nova.virt.hardware [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2050.179686] env[63279]: DEBUG nova.virt.hardware [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2050.180902] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8af55fb-a84a-4c72-8ef2-f735312cf305 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2050.189768] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e2edcab-634b-47a2-9f61-5aead72d5bb0 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2050.242102] env[63279]: DEBUG nova.scheduler.client.report [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] Updated inventory for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 with generation 93 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 2050.242387] env[63279]: DEBUG nova.compute.provider_tree [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] Updating resource provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 generation from 93 to 94 during operation: update_inventory {{(pid=63279) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 2050.242577] env[63279]: DEBUG nova.compute.provider_tree [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] Updating inventory in ProviderTree for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 
196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2050.267741] env[63279]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-be1735f1-473c-467d-9fef-a601096d79a1 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2050.279008] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f8278cf-0246-4432-b445-619794349d77 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2050.311478] env[63279]: DEBUG nova.compute.manager [req-b3d9c21d-64e3-4e5f-a7c4-fc99001bad34 req-8431614a-03c5-4633-8ec2-85c6931332af service nova] [instance: 58392790-b297-4894-8d81-e5cbda69872b] Detach interface failed, port_id=23e2695e-f865-4e3f-9f26-d5bd599cf889, reason: Instance 58392790-b297-4894-8d81-e5cbda69872b could not be found. {{(pid=63279) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11484}} [ 2050.398431] env[63279]: DEBUG oslo_concurrency.lockutils [None req-80ef8ca7-1c6d-429c-800c-69ee59053ed2 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Acquiring lock "refresh_cache-b981ac83-6c23-4d44-bd28-12da30d746bd" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2050.398621] env[63279]: DEBUG oslo_concurrency.lockutils [None req-80ef8ca7-1c6d-429c-800c-69ee59053ed2 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Acquired lock "refresh_cache-b981ac83-6c23-4d44-bd28-12da30d746bd" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2050.398774] env[63279]: DEBUG nova.network.neutron [None req-80ef8ca7-1c6d-429c-800c-69ee59053ed2 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: b981ac83-6c23-4d44-bd28-12da30d746bd] Building network info cache for instance {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2050.600267] env[63279]: DEBUG oslo_concurrency.lockutils [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] Acquiring lock "refresh_cache-795560b4-ccdc-4012-8130-042dcb94085f" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2050.600462] env[63279]: DEBUG oslo_concurrency.lockutils [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] Acquired lock "refresh_cache-795560b4-ccdc-4012-8130-042dcb94085f" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2050.600626] env[63279]: DEBUG nova.network.neutron [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] [instance: 795560b4-ccdc-4012-8130-042dcb94085f] Building network info cache for instance {{(pid=63279) _get_instance_nw_info 
/opt/stack/nova/nova/network/neutron.py:2067}} [ 2050.649413] env[63279]: INFO nova.compute.manager [-] [instance: 58392790-b297-4894-8d81-e5cbda69872b] Took 1.42 seconds to deallocate network for instance. [ 2050.751077] env[63279]: DEBUG oslo_concurrency.lockutils [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.640s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2050.751077] env[63279]: DEBUG nova.compute.manager [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] [instance: fd9b1666-8e06-4ed0-9187-05a40e136a1d] Start building networks asynchronously for instance. {{(pid=63279) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 2050.752573] env[63279]: DEBUG oslo_concurrency.lockutils [None req-203ba856-8750-4748-8a31-13e38e82c1be tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 35.504s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2050.754966] env[63279]: INFO nova.compute.claims [None req-203ba856-8750-4748-8a31-13e38e82c1be tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] [instance: 6699de0a-b3f8-4d84-9c9b-d0f6899a606e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2050.948178] env[63279]: DEBUG nova.network.neutron [None req-80ef8ca7-1c6d-429c-800c-69ee59053ed2 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: b981ac83-6c23-4d44-bd28-12da30d746bd] Instance cache missing network info. 
{{(pid=63279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2050.967220] env[63279]: DEBUG nova.network.neutron [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] [instance: cd00cb0e-30e5-4a0c-8612-ea92e5e32edd] Successfully updated port: 4aff58e1-ec9c-4691-a371-76a7d2e41303 {{(pid=63279) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2051.122580] env[63279]: DEBUG nova.network.neutron [None req-80ef8ca7-1c6d-429c-800c-69ee59053ed2 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: b981ac83-6c23-4d44-bd28-12da30d746bd] Updating instance_info_cache with network_info: [{"id": "61682992-df73-44ba-b302-ba8e00c82f95", "address": "fa:16:3e:32:be:0b", "network": {"id": "d673eabe-0173-4c69-9163-88f4d673ca51", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1798523777-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7c98de1240f14b058e58f6a707096ef1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a555680e-4721-4509-97e4-ced9dc17c13e", "external-id": "nsx-vlan-transportzone-4", "segmentation_id": 4, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap61682992-df", "ovs_interfaceid": "61682992-df73-44ba-b302-ba8e00c82f95", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2051.152657] env[63279]: DEBUG nova.network.neutron [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] [instance: 795560b4-ccdc-4012-8130-042dcb94085f] Instance cache missing network info. {{(pid=63279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2051.155990] env[63279]: DEBUG oslo_concurrency.lockutils [None req-f8f6e0d5-0b27-4027-979b-11e64063ee2d tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2051.262142] env[63279]: DEBUG nova.compute.utils [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] Using /dev/sd instead of None {{(pid=63279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2051.268027] env[63279]: DEBUG nova.compute.manager [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] [instance: fd9b1666-8e06-4ed0-9187-05a40e136a1d] Allocating IP information in the background. 
{{(pid=63279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 2051.268198] env[63279]: DEBUG nova.network.neutron [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] [instance: fd9b1666-8e06-4ed0-9187-05a40e136a1d] allocate_for_instance() {{(pid=63279) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2051.307978] env[63279]: DEBUG nova.policy [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '26c5a007336541009b326b7ba2ee272a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'eddf7a9795f344c3be977b267944f1e7', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63279) authorize /opt/stack/nova/nova/policy.py:192}} [ 2051.345809] env[63279]: DEBUG nova.network.neutron [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] [instance: 795560b4-ccdc-4012-8130-042dcb94085f] Updating instance_info_cache with network_info: [{"id": "d7d164bc-29f6-4434-8980-357d7e34c723", "address": "fa:16:3e:1a:99:ed", "network": {"id": "61fe1b5f-85f8-4556-ada4-11ea7683f1a7", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-858560732-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "eddf7a9795f344c3be977b267944f1e7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2b931c4c-f73c-4fbd-9c9f-0270834cc69e", "external-id": "nsx-vlan-transportzone-177", "segmentation_id": 177, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd7d164bc-29", "ovs_interfaceid": "d7d164bc-29f6-4434-8980-357d7e34c723", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2051.470742] env[63279]: DEBUG oslo_concurrency.lockutils [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] Acquiring lock "refresh_cache-cd00cb0e-30e5-4a0c-8612-ea92e5e32edd" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2051.470742] env[63279]: DEBUG oslo_concurrency.lockutils [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] Acquired lock "refresh_cache-cd00cb0e-30e5-4a0c-8612-ea92e5e32edd" {{(pid=63279) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2051.470742] env[63279]: DEBUG nova.network.neutron [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] [instance: cd00cb0e-30e5-4a0c-8612-ea92e5e32edd] Building network info cache for instance {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2051.627251] env[63279]: DEBUG oslo_concurrency.lockutils [None req-80ef8ca7-1c6d-429c-800c-69ee59053ed2 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Releasing lock "refresh_cache-b981ac83-6c23-4d44-bd28-12da30d746bd" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2051.627251] env[63279]: DEBUG nova.compute.manager [None req-80ef8ca7-1c6d-429c-800c-69ee59053ed2 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: b981ac83-6c23-4d44-bd28-12da30d746bd] Instance network_info: |[{"id": "61682992-df73-44ba-b302-ba8e00c82f95", "address": "fa:16:3e:32:be:0b", "network": {"id": "d673eabe-0173-4c69-9163-88f4d673ca51", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1798523777-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7c98de1240f14b058e58f6a707096ef1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a555680e-4721-4509-97e4-ced9dc17c13e", "external-id": "nsx-vlan-transportzone-4", "segmentation_id": 4, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap61682992-df", "ovs_interfaceid": "61682992-df73-44ba-b302-ba8e00c82f95", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 2051.627371] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-80ef8ca7-1c6d-429c-800c-69ee59053ed2 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: b981ac83-6c23-4d44-bd28-12da30d746bd] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:32:be:0b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a555680e-4721-4509-97e4-ced9dc17c13e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '61682992-df73-44ba-b302-ba8e00c82f95', 'vif_model': 'vmxnet3'}] {{(pid=63279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2051.634191] env[63279]: DEBUG oslo.service.loopingcall [None req-80ef8ca7-1c6d-429c-800c-69ee59053ed2 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2051.634405] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b981ac83-6c23-4d44-bd28-12da30d746bd] Creating VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2051.634628] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7234000c-47f5-4423-a7ad-9fdf9b354f0f {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2051.655900] env[63279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2051.655900] env[63279]: value = "task-2087345" [ 2051.655900] env[63279]: _type = "Task" [ 2051.655900] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2051.663119] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087345, 'name': CreateVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2051.704323] env[63279]: DEBUG nova.network.neutron [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] [instance: fd9b1666-8e06-4ed0-9187-05a40e136a1d] Successfully created port: c7c4391f-94e6-42ba-9742-b3c94c1a79dc {{(pid=63279) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2051.776979] env[63279]: DEBUG nova.compute.manager [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] [instance: fd9b1666-8e06-4ed0-9187-05a40e136a1d] Start building block device mappings for instance. {{(pid=63279) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 2051.813374] env[63279]: DEBUG nova.compute.manager [req-afbf1c72-4856-4743-85be-469c148e9325 req-3b95187a-6e58-4a6c-b266-2884099b80c0 service nova] [instance: b981ac83-6c23-4d44-bd28-12da30d746bd] Received event network-changed-61682992-df73-44ba-b302-ba8e00c82f95 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2051.813374] env[63279]: DEBUG nova.compute.manager [req-afbf1c72-4856-4743-85be-469c148e9325 req-3b95187a-6e58-4a6c-b266-2884099b80c0 service nova] [instance: b981ac83-6c23-4d44-bd28-12da30d746bd] Refreshing instance network info cache due to event network-changed-61682992-df73-44ba-b302-ba8e00c82f95. 
{{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11655}} [ 2051.813374] env[63279]: DEBUG oslo_concurrency.lockutils [req-afbf1c72-4856-4743-85be-469c148e9325 req-3b95187a-6e58-4a6c-b266-2884099b80c0 service nova] Acquiring lock "refresh_cache-b981ac83-6c23-4d44-bd28-12da30d746bd" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2051.813716] env[63279]: DEBUG oslo_concurrency.lockutils [req-afbf1c72-4856-4743-85be-469c148e9325 req-3b95187a-6e58-4a6c-b266-2884099b80c0 service nova] Acquired lock "refresh_cache-b981ac83-6c23-4d44-bd28-12da30d746bd" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2051.813716] env[63279]: DEBUG nova.network.neutron [req-afbf1c72-4856-4743-85be-469c148e9325 req-3b95187a-6e58-4a6c-b266-2884099b80c0 service nova] [instance: b981ac83-6c23-4d44-bd28-12da30d746bd] Refreshing network info cache for port 61682992-df73-44ba-b302-ba8e00c82f95 {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2051.848545] env[63279]: DEBUG oslo_concurrency.lockutils [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] Releasing lock "refresh_cache-795560b4-ccdc-4012-8130-042dcb94085f" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2051.848855] env[63279]: DEBUG nova.compute.manager [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] [instance: 795560b4-ccdc-4012-8130-042dcb94085f] Instance network_info: |[{"id": "d7d164bc-29f6-4434-8980-357d7e34c723", "address": "fa:16:3e:1a:99:ed", "network": {"id": "61fe1b5f-85f8-4556-ada4-11ea7683f1a7", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-858560732-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "eddf7a9795f344c3be977b267944f1e7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2b931c4c-f73c-4fbd-9c9f-0270834cc69e", "external-id": "nsx-vlan-transportzone-177", "segmentation_id": 177, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd7d164bc-29", "ovs_interfaceid": "d7d164bc-29f6-4434-8980-357d7e34c723", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 2051.849970] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] [instance: 795560b4-ccdc-4012-8130-042dcb94085f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:1a:99:ed', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '2b931c4c-f73c-4fbd-9c9f-0270834cc69e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 
'iface_id': 'd7d164bc-29f6-4434-8980-357d7e34c723', 'vif_model': 'vmxnet3'}] {{(pid=63279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2051.857450] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] Creating folder: Project (eddf7a9795f344c3be977b267944f1e7). Parent ref: group-v427491. {{(pid=63279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 2051.857608] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7db01bb6-572f-4a9b-9594-86a81ef91e8d {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2051.870103] env[63279]: INFO nova.virt.vmwareapi.vm_util [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] Created folder: Project (eddf7a9795f344c3be977b267944f1e7) in parent group-v427491. [ 2051.870337] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] Creating folder: Instances. Parent ref: group-v427656. {{(pid=63279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 2051.870578] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ad251039-2aa8-4f6d-b5f6-9c910fcbfa43 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2051.882582] env[63279]: INFO nova.virt.vmwareapi.vm_util [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] Created folder: Instances in parent group-v427656. [ 2051.882582] env[63279]: DEBUG oslo.service.loopingcall [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2051.882582] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 795560b4-ccdc-4012-8130-042dcb94085f] Creating VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2051.882760] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a1e03608-74dc-4804-b28f-ef1d244523eb {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2051.900707] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-23acf72a-0ef4-4dcd-a4d1-bf58f42f4af4 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: 7db0c32d-36a4-4452-bb07-06de0c93ab50] Volume attach. 
Driver type: vmdk {{(pid=63279) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 2051.900941] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-23acf72a-0ef4-4dcd-a4d1-bf58f42f4af4 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: 7db0c32d-36a4-4452-bb07-06de0c93ab50] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-427654', 'volume_id': '6c280eb9-3e63-43e5-841d-7866ee508507', 'name': 'volume-6c280eb9-3e63-43e5-841d-7866ee508507', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '7db0c32d-36a4-4452-bb07-06de0c93ab50', 'attached_at': '', 'detached_at': '', 'volume_id': '6c280eb9-3e63-43e5-841d-7866ee508507', 'serial': '6c280eb9-3e63-43e5-841d-7866ee508507'} {{(pid=63279) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 2051.902290] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fead84d8-dba3-4168-8a65-90d3531889b7 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2051.906409] env[63279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2051.906409] env[63279]: value = "task-2087348" [ 2051.906409] env[63279]: _type = "Task" [ 2051.906409] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2051.923229] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4df2b6ab-64d7-4518-82e6-fa73bd4e7c0a {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2051.929196] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087348, 'name': CreateVM_Task} progress is 6%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2051.954548] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-23acf72a-0ef4-4dcd-a4d1-bf58f42f4af4 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: 7db0c32d-36a4-4452-bb07-06de0c93ab50] Reconfiguring VM instance instance-0000003a to attach disk [datastore1] volume-6c280eb9-3e63-43e5-841d-7866ee508507/volume-6c280eb9-3e63-43e5-841d-7866ee508507.vmdk or device None with type thin {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2051.958659] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4207d0d5-956e-4ec4-914e-54ffecfcd41a {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2051.973512] env[63279]: DEBUG nova.compute.manager [req-decc00b4-aab1-48d9-829b-d93f1cded5ab req-fb1a026f-5568-42cb-bf77-e0031905dbaf service nova] [instance: cd00cb0e-30e5-4a0c-8612-ea92e5e32edd] Received event network-vif-plugged-4aff58e1-ec9c-4691-a371-76a7d2e41303 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2051.973751] env[63279]: DEBUG oslo_concurrency.lockutils [req-decc00b4-aab1-48d9-829b-d93f1cded5ab req-fb1a026f-5568-42cb-bf77-e0031905dbaf service nova] Acquiring lock "cd00cb0e-30e5-4a0c-8612-ea92e5e32edd-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2051.974342] env[63279]: DEBUG oslo_concurrency.lockutils [req-decc00b4-aab1-48d9-829b-d93f1cded5ab req-fb1a026f-5568-42cb-bf77-e0031905dbaf service nova] Lock "cd00cb0e-30e5-4a0c-8612-ea92e5e32edd-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2051.974678] env[63279]: DEBUG oslo_concurrency.lockutils [req-decc00b4-aab1-48d9-829b-d93f1cded5ab req-fb1a026f-5568-42cb-bf77-e0031905dbaf service nova] Lock "cd00cb0e-30e5-4a0c-8612-ea92e5e32edd-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.001s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2051.974745] env[63279]: DEBUG nova.compute.manager [req-decc00b4-aab1-48d9-829b-d93f1cded5ab req-fb1a026f-5568-42cb-bf77-e0031905dbaf service nova] [instance: cd00cb0e-30e5-4a0c-8612-ea92e5e32edd] No waiting events found dispatching network-vif-plugged-4aff58e1-ec9c-4691-a371-76a7d2e41303 {{(pid=63279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 2051.974895] env[63279]: WARNING nova.compute.manager [req-decc00b4-aab1-48d9-829b-d93f1cded5ab req-fb1a026f-5568-42cb-bf77-e0031905dbaf service nova] [instance: cd00cb0e-30e5-4a0c-8612-ea92e5e32edd] Received unexpected event network-vif-plugged-4aff58e1-ec9c-4691-a371-76a7d2e41303 for instance with vm_state building and task_state spawning. 
[ 2051.975067] env[63279]: DEBUG nova.compute.manager [req-decc00b4-aab1-48d9-829b-d93f1cded5ab req-fb1a026f-5568-42cb-bf77-e0031905dbaf service nova] [instance: cd00cb0e-30e5-4a0c-8612-ea92e5e32edd] Received event network-changed-4aff58e1-ec9c-4691-a371-76a7d2e41303 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2051.975248] env[63279]: DEBUG nova.compute.manager [req-decc00b4-aab1-48d9-829b-d93f1cded5ab req-fb1a026f-5568-42cb-bf77-e0031905dbaf service nova] [instance: cd00cb0e-30e5-4a0c-8612-ea92e5e32edd] Refreshing instance network info cache due to event network-changed-4aff58e1-ec9c-4691-a371-76a7d2e41303. {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11655}} [ 2051.975429] env[63279]: DEBUG oslo_concurrency.lockutils [req-decc00b4-aab1-48d9-829b-d93f1cded5ab req-fb1a026f-5568-42cb-bf77-e0031905dbaf service nova] Acquiring lock "refresh_cache-cd00cb0e-30e5-4a0c-8612-ea92e5e32edd" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2051.984901] env[63279]: DEBUG oslo_vmware.api [None req-23acf72a-0ef4-4dcd-a4d1-bf58f42f4af4 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Waiting for the task: (returnval){ [ 2051.984901] env[63279]: value = "task-2087349" [ 2051.984901] env[63279]: _type = "Task" [ 2051.984901] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2051.996948] env[63279]: DEBUG oslo_vmware.api [None req-23acf72a-0ef4-4dcd-a4d1-bf58f42f4af4 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Task: {'id': task-2087349, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2052.022688] env[63279]: DEBUG nova.network.neutron [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] [instance: cd00cb0e-30e5-4a0c-8612-ea92e5e32edd] Instance cache missing network info. {{(pid=63279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2052.170379] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087345, 'name': CreateVM_Task} progress is 99%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2052.240356] env[63279]: DEBUG nova.network.neutron [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] [instance: cd00cb0e-30e5-4a0c-8612-ea92e5e32edd] Updating instance_info_cache with network_info: [{"id": "4aff58e1-ec9c-4691-a371-76a7d2e41303", "address": "fa:16:3e:90:2f:aa", "network": {"id": "61fe1b5f-85f8-4556-ada4-11ea7683f1a7", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-858560732-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "eddf7a9795f344c3be977b267944f1e7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2b931c4c-f73c-4fbd-9c9f-0270834cc69e", "external-id": "nsx-vlan-transportzone-177", "segmentation_id": 177, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4aff58e1-ec", "ovs_interfaceid": "4aff58e1-ec9c-4691-a371-76a7d2e41303", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2052.391163] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70b45572-f646-46cf-8b52-064afe1dbd09 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2052.403269] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42df58f1-c791-403c-87f2-29ab50191c1e {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2052.436300] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45db350d-7cf7-4afa-9a88-9316302fad72 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2052.442030] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087348, 'name': CreateVM_Task, 'duration_secs': 0.352763} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2052.442220] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 795560b4-ccdc-4012-8130-042dcb94085f] Created VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2052.442928] env[63279]: DEBUG oslo_concurrency.lockutils [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2052.443113] env[63279]: DEBUG oslo_concurrency.lockutils [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2052.443481] env[63279]: DEBUG oslo_concurrency.lockutils [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2052.445499] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-48a061a8-6d56-4d69-8a4a-99cbf8aa063a {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2052.448030] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d691e84-4e8f-483a-80e1-c396f612e429 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2052.461468] env[63279]: DEBUG nova.compute.provider_tree [None req-203ba856-8750-4748-8a31-13e38e82c1be tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] Inventory has not changed in ProviderTree for provider: 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2052.463899] env[63279]: DEBUG oslo_vmware.api [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] Waiting for the task: (returnval){ [ 2052.463899] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52611fcf-3c8c-a0f9-6770-0a01be90f586" [ 2052.463899] env[63279]: _type = "Task" [ 2052.463899] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2052.473063] env[63279]: DEBUG oslo_vmware.api [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52611fcf-3c8c-a0f9-6770-0a01be90f586, 'name': SearchDatastore_Task, 'duration_secs': 0.00979} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2052.473354] env[63279]: DEBUG oslo_concurrency.lockutils [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2052.473595] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] [instance: 795560b4-ccdc-4012-8130-042dcb94085f] Processing image 30887889-e45b-4f67-8b3c-16216e594a90 {{(pid=63279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2052.473856] env[63279]: DEBUG oslo_concurrency.lockutils [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2052.474045] env[63279]: DEBUG oslo_concurrency.lockutils [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2052.474203] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2052.474452] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-689363fe-c11f-4d65-ab2a-4b3902c2e166 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2052.482854] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2052.483116] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2052.483741] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d2e1399a-e644-4b71-8df1-b87965605db3 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2052.491887] env[63279]: DEBUG oslo_vmware.api [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] Waiting for the task: (returnval){ [ 2052.491887] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52d710b9-db72-d4cf-d859-bcae54f7d33b" [ 2052.491887] env[63279]: _type = "Task" [ 2052.491887] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2052.495167] env[63279]: DEBUG oslo_vmware.api [None req-23acf72a-0ef4-4dcd-a4d1-bf58f42f4af4 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Task: {'id': task-2087349, 'name': ReconfigVM_Task, 'duration_secs': 0.349443} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2052.498477] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-23acf72a-0ef4-4dcd-a4d1-bf58f42f4af4 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: 7db0c32d-36a4-4452-bb07-06de0c93ab50] Reconfigured VM instance instance-0000003a to attach disk [datastore1] volume-6c280eb9-3e63-43e5-841d-7866ee508507/volume-6c280eb9-3e63-43e5-841d-7866ee508507.vmdk or device None with type thin {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2052.502828] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d93beed5-4140-4688-85cd-2b92ebb6118b {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2052.522991] env[63279]: DEBUG oslo_vmware.api [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52d710b9-db72-d4cf-d859-bcae54f7d33b, 'name': SearchDatastore_Task, 'duration_secs': 0.0093} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2052.523308] env[63279]: DEBUG oslo_vmware.api [None req-23acf72a-0ef4-4dcd-a4d1-bf58f42f4af4 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Waiting for the task: (returnval){ [ 2052.523308] env[63279]: value = "task-2087350" [ 2052.523308] env[63279]: _type = "Task" [ 2052.523308] env[63279]: } to complete. 
{{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2052.524077] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ead35817-a880-47c5-a5d4-0ee075707f8c {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2052.533682] env[63279]: DEBUG oslo_vmware.api [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] Waiting for the task: (returnval){ [ 2052.533682] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]523f93aa-c299-06d2-15b1-bfe7e17bc1ed" [ 2052.533682] env[63279]: _type = "Task" [ 2052.533682] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2052.536742] env[63279]: DEBUG oslo_vmware.api [None req-23acf72a-0ef4-4dcd-a4d1-bf58f42f4af4 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Task: {'id': task-2087350, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2052.546144] env[63279]: DEBUG oslo_vmware.api [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]523f93aa-c299-06d2-15b1-bfe7e17bc1ed, 'name': SearchDatastore_Task, 'duration_secs': 0.009276} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2052.546418] env[63279]: DEBUG oslo_concurrency.lockutils [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2052.546696] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] 795560b4-ccdc-4012-8130-042dcb94085f/795560b4-ccdc-4012-8130-042dcb94085f.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2052.547330] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e2324d7a-57d3-4af5-8143-a7d92b0792c7 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2052.553680] env[63279]: DEBUG oslo_vmware.api [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] Waiting for the task: (returnval){ [ 2052.553680] env[63279]: value = "task-2087351" [ 2052.553680] env[63279]: _type = "Task" [ 2052.553680] env[63279]: } to complete. 
{{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2052.561131] env[63279]: DEBUG oslo_vmware.api [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] Task: {'id': task-2087351, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2052.628790] env[63279]: DEBUG nova.network.neutron [req-afbf1c72-4856-4743-85be-469c148e9325 req-3b95187a-6e58-4a6c-b266-2884099b80c0 service nova] [instance: b981ac83-6c23-4d44-bd28-12da30d746bd] Updated VIF entry in instance network info cache for port 61682992-df73-44ba-b302-ba8e00c82f95. {{(pid=63279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2052.629332] env[63279]: DEBUG nova.network.neutron [req-afbf1c72-4856-4743-85be-469c148e9325 req-3b95187a-6e58-4a6c-b266-2884099b80c0 service nova] [instance: b981ac83-6c23-4d44-bd28-12da30d746bd] Updating instance_info_cache with network_info: [{"id": "61682992-df73-44ba-b302-ba8e00c82f95", "address": "fa:16:3e:32:be:0b", "network": {"id": "d673eabe-0173-4c69-9163-88f4d673ca51", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1798523777-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7c98de1240f14b058e58f6a707096ef1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a555680e-4721-4509-97e4-ced9dc17c13e", "external-id": "nsx-vlan-transportzone-4", "segmentation_id": 4, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap61682992-df", "ovs_interfaceid": "61682992-df73-44ba-b302-ba8e00c82f95", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2052.667299] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087345, 'name': CreateVM_Task, 'duration_secs': 0.524869} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2052.667299] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b981ac83-6c23-4d44-bd28-12da30d746bd] Created VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2052.667626] env[63279]: DEBUG oslo_concurrency.lockutils [None req-80ef8ca7-1c6d-429c-800c-69ee59053ed2 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2052.667720] env[63279]: DEBUG oslo_concurrency.lockutils [None req-80ef8ca7-1c6d-429c-800c-69ee59053ed2 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2052.668733] env[63279]: DEBUG oslo_concurrency.lockutils [None req-80ef8ca7-1c6d-429c-800c-69ee59053ed2 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2052.668733] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fd2b8d1e-8383-4bd2-9327-978d17f1b11a {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2052.672747] env[63279]: DEBUG oslo_vmware.api [None req-80ef8ca7-1c6d-429c-800c-69ee59053ed2 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Waiting for the task: (returnval){ [ 2052.672747] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52a7a579-212f-365c-4790-7b0741e327d1" [ 2052.672747] env[63279]: _type = "Task" [ 2052.672747] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2052.680640] env[63279]: DEBUG oslo_vmware.api [None req-80ef8ca7-1c6d-429c-800c-69ee59053ed2 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52a7a579-212f-365c-4790-7b0741e327d1, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2052.745243] env[63279]: DEBUG oslo_concurrency.lockutils [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] Releasing lock "refresh_cache-cd00cb0e-30e5-4a0c-8612-ea92e5e32edd" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2052.745610] env[63279]: DEBUG nova.compute.manager [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] [instance: cd00cb0e-30e5-4a0c-8612-ea92e5e32edd] Instance network_info: |[{"id": "4aff58e1-ec9c-4691-a371-76a7d2e41303", "address": "fa:16:3e:90:2f:aa", "network": {"id": "61fe1b5f-85f8-4556-ada4-11ea7683f1a7", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-858560732-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "eddf7a9795f344c3be977b267944f1e7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2b931c4c-f73c-4fbd-9c9f-0270834cc69e", "external-id": "nsx-vlan-transportzone-177", "segmentation_id": 177, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4aff58e1-ec", "ovs_interfaceid": "4aff58e1-ec9c-4691-a371-76a7d2e41303", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 2052.745971] env[63279]: DEBUG oslo_concurrency.lockutils [req-decc00b4-aab1-48d9-829b-d93f1cded5ab req-fb1a026f-5568-42cb-bf77-e0031905dbaf service nova] Acquired lock "refresh_cache-cd00cb0e-30e5-4a0c-8612-ea92e5e32edd" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2052.746135] env[63279]: DEBUG nova.network.neutron [req-decc00b4-aab1-48d9-829b-d93f1cded5ab req-fb1a026f-5568-42cb-bf77-e0031905dbaf service nova] [instance: cd00cb0e-30e5-4a0c-8612-ea92e5e32edd] Refreshing network info cache for port 4aff58e1-ec9c-4691-a371-76a7d2e41303 {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2052.747380] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] [instance: cd00cb0e-30e5-4a0c-8612-ea92e5e32edd] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:90:2f:aa', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '2b931c4c-f73c-4fbd-9c9f-0270834cc69e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '4aff58e1-ec9c-4691-a371-76a7d2e41303', 'vif_model': 'vmxnet3'}] {{(pid=63279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2052.755030] env[63279]: DEBUG oslo.service.loopingcall [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 
tempest-ListServersNegativeTestJSON-168309111-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2052.758240] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cd00cb0e-30e5-4a0c-8612-ea92e5e32edd] Creating VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2052.758772] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f36cb81e-208e-4448-bc68-e70b3ab6eae4 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2052.781022] env[63279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2052.781022] env[63279]: value = "task-2087352" [ 2052.781022] env[63279]: _type = "Task" [ 2052.781022] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2052.789804] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087352, 'name': CreateVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2052.792530] env[63279]: DEBUG nova.compute.manager [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] [instance: fd9b1666-8e06-4ed0-9187-05a40e136a1d] Start spawning the instance on the hypervisor. {{(pid=63279) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 2052.823874] env[63279]: DEBUG nova.virt.hardware [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-13T17:30:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-13T17:30:01Z,direct_url=,disk_format='vmdk',id=30887889-e45b-4f67-8b3c-16216e594a90,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a935e86eee0a4c38adfe0367d2097a61',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-13T17:30:02Z,virtual_size=,visibility=), allow threads: False {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2052.824144] env[63279]: DEBUG nova.virt.hardware [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] Flavor limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2052.824285] env[63279]: DEBUG nova.virt.hardware [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] Image limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2052.824472] env[63279]: DEBUG nova.virt.hardware [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 
tempest-ListServersNegativeTestJSON-168309111-project-member] Flavor pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2052.824624] env[63279]: DEBUG nova.virt.hardware [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] Image pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2052.824773] env[63279]: DEBUG nova.virt.hardware [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2052.824986] env[63279]: DEBUG nova.virt.hardware [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 2052.825170] env[63279]: DEBUG nova.virt.hardware [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 2052.825329] env[63279]: DEBUG nova.virt.hardware [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] Got 1 possible topologies {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2052.825498] env[63279]: DEBUG nova.virt.hardware [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2052.825672] env[63279]: DEBUG nova.virt.hardware [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2052.826952] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40d5eb70-4e3a-49a7-b09e-4c66498978d3 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2052.835962] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ffd8016-5b27-4c37-bff0-86b8181d84c2 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2052.965800] env[63279]: DEBUG nova.scheduler.client.report [None req-203ba856-8750-4748-8a31-13e38e82c1be tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] Inventory has not changed for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 
'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2053.040336] env[63279]: DEBUG oslo_vmware.api [None req-23acf72a-0ef4-4dcd-a4d1-bf58f42f4af4 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Task: {'id': task-2087350, 'name': ReconfigVM_Task, 'duration_secs': 0.140065} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2053.042831] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-23acf72a-0ef4-4dcd-a4d1-bf58f42f4af4 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: 7db0c32d-36a4-4452-bb07-06de0c93ab50] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-427654', 'volume_id': '6c280eb9-3e63-43e5-841d-7866ee508507', 'name': 'volume-6c280eb9-3e63-43e5-841d-7866ee508507', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '7db0c32d-36a4-4452-bb07-06de0c93ab50', 'attached_at': '', 'detached_at': '', 'volume_id': '6c280eb9-3e63-43e5-841d-7866ee508507', 'serial': '6c280eb9-3e63-43e5-841d-7866ee508507'} {{(pid=63279) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 2053.066905] env[63279]: DEBUG oslo_vmware.api [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] Task: {'id': task-2087351, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.504027} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2053.068323] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] 795560b4-ccdc-4012-8130-042dcb94085f/795560b4-ccdc-4012-8130-042dcb94085f.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2053.068544] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] [instance: 795560b4-ccdc-4012-8130-042dcb94085f] Extending root virtual disk to 1048576 {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2053.071937] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a53d3922-a9ea-4eda-b528-770b8615455c {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2053.081255] env[63279]: DEBUG oslo_vmware.api [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] Waiting for the task: (returnval){ [ 2053.081255] env[63279]: value = "task-2087353" [ 2053.081255] env[63279]: _type = "Task" [ 2053.081255] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2053.093223] env[63279]: DEBUG oslo_vmware.api [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] Task: {'id': task-2087353, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2053.094367] env[63279]: DEBUG nova.network.neutron [req-decc00b4-aab1-48d9-829b-d93f1cded5ab req-fb1a026f-5568-42cb-bf77-e0031905dbaf service nova] [instance: cd00cb0e-30e5-4a0c-8612-ea92e5e32edd] Updated VIF entry in instance network info cache for port 4aff58e1-ec9c-4691-a371-76a7d2e41303. 
{{(pid=63279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2053.094762] env[63279]: DEBUG nova.network.neutron [req-decc00b4-aab1-48d9-829b-d93f1cded5ab req-fb1a026f-5568-42cb-bf77-e0031905dbaf service nova] [instance: cd00cb0e-30e5-4a0c-8612-ea92e5e32edd] Updating instance_info_cache with network_info: [{"id": "4aff58e1-ec9c-4691-a371-76a7d2e41303", "address": "fa:16:3e:90:2f:aa", "network": {"id": "61fe1b5f-85f8-4556-ada4-11ea7683f1a7", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-858560732-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "eddf7a9795f344c3be977b267944f1e7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2b931c4c-f73c-4fbd-9c9f-0270834cc69e", "external-id": "nsx-vlan-transportzone-177", "segmentation_id": 177, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4aff58e1-ec", "ovs_interfaceid": "4aff58e1-ec9c-4691-a371-76a7d2e41303", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2053.133117] env[63279]: DEBUG oslo_concurrency.lockutils [req-afbf1c72-4856-4743-85be-469c148e9325 req-3b95187a-6e58-4a6c-b266-2884099b80c0 service nova] Releasing lock "refresh_cache-b981ac83-6c23-4d44-bd28-12da30d746bd" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2053.133117] env[63279]: DEBUG nova.compute.manager [req-afbf1c72-4856-4743-85be-469c148e9325 req-3b95187a-6e58-4a6c-b266-2884099b80c0 service nova] [instance: 795560b4-ccdc-4012-8130-042dcb94085f] Received event network-vif-plugged-d7d164bc-29f6-4434-8980-357d7e34c723 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2053.133117] env[63279]: DEBUG oslo_concurrency.lockutils [req-afbf1c72-4856-4743-85be-469c148e9325 req-3b95187a-6e58-4a6c-b266-2884099b80c0 service nova] Acquiring lock "795560b4-ccdc-4012-8130-042dcb94085f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2053.133117] env[63279]: DEBUG oslo_concurrency.lockutils [req-afbf1c72-4856-4743-85be-469c148e9325 req-3b95187a-6e58-4a6c-b266-2884099b80c0 service nova] Lock "795560b4-ccdc-4012-8130-042dcb94085f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2053.133371] env[63279]: DEBUG oslo_concurrency.lockutils [req-afbf1c72-4856-4743-85be-469c148e9325 req-3b95187a-6e58-4a6c-b266-2884099b80c0 service nova] Lock "795560b4-ccdc-4012-8130-042dcb94085f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2053.133440] env[63279]: 
DEBUG nova.compute.manager [req-afbf1c72-4856-4743-85be-469c148e9325 req-3b95187a-6e58-4a6c-b266-2884099b80c0 service nova] [instance: 795560b4-ccdc-4012-8130-042dcb94085f] No waiting events found dispatching network-vif-plugged-d7d164bc-29f6-4434-8980-357d7e34c723 {{(pid=63279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 2053.133629] env[63279]: WARNING nova.compute.manager [req-afbf1c72-4856-4743-85be-469c148e9325 req-3b95187a-6e58-4a6c-b266-2884099b80c0 service nova] [instance: 795560b4-ccdc-4012-8130-042dcb94085f] Received unexpected event network-vif-plugged-d7d164bc-29f6-4434-8980-357d7e34c723 for instance with vm_state building and task_state spawning. [ 2053.133832] env[63279]: DEBUG nova.compute.manager [req-afbf1c72-4856-4743-85be-469c148e9325 req-3b95187a-6e58-4a6c-b266-2884099b80c0 service nova] [instance: 795560b4-ccdc-4012-8130-042dcb94085f] Received event network-changed-d7d164bc-29f6-4434-8980-357d7e34c723 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2053.133990] env[63279]: DEBUG nova.compute.manager [req-afbf1c72-4856-4743-85be-469c148e9325 req-3b95187a-6e58-4a6c-b266-2884099b80c0 service nova] [instance: 795560b4-ccdc-4012-8130-042dcb94085f] Refreshing instance network info cache due to event network-changed-d7d164bc-29f6-4434-8980-357d7e34c723. {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11655}} [ 2053.134198] env[63279]: DEBUG oslo_concurrency.lockutils [req-afbf1c72-4856-4743-85be-469c148e9325 req-3b95187a-6e58-4a6c-b266-2884099b80c0 service nova] Acquiring lock "refresh_cache-795560b4-ccdc-4012-8130-042dcb94085f" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2053.134338] env[63279]: DEBUG oslo_concurrency.lockutils [req-afbf1c72-4856-4743-85be-469c148e9325 req-3b95187a-6e58-4a6c-b266-2884099b80c0 service nova] Acquired lock "refresh_cache-795560b4-ccdc-4012-8130-042dcb94085f" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2053.134491] env[63279]: DEBUG nova.network.neutron [req-afbf1c72-4856-4743-85be-469c148e9325 req-3b95187a-6e58-4a6c-b266-2884099b80c0 service nova] [instance: 795560b4-ccdc-4012-8130-042dcb94085f] Refreshing network info cache for port d7d164bc-29f6-4434-8980-357d7e34c723 {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2053.186025] env[63279]: DEBUG oslo_vmware.api [None req-80ef8ca7-1c6d-429c-800c-69ee59053ed2 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52a7a579-212f-365c-4790-7b0741e327d1, 'name': SearchDatastore_Task, 'duration_secs': 0.009231} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2053.186025] env[63279]: DEBUG oslo_concurrency.lockutils [None req-80ef8ca7-1c6d-429c-800c-69ee59053ed2 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2053.186025] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-80ef8ca7-1c6d-429c-800c-69ee59053ed2 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: b981ac83-6c23-4d44-bd28-12da30d746bd] Processing image 30887889-e45b-4f67-8b3c-16216e594a90 {{(pid=63279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2053.186025] env[63279]: DEBUG oslo_concurrency.lockutils [None req-80ef8ca7-1c6d-429c-800c-69ee59053ed2 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2053.186421] env[63279]: DEBUG oslo_concurrency.lockutils [None req-80ef8ca7-1c6d-429c-800c-69ee59053ed2 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2053.186421] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-80ef8ca7-1c6d-429c-800c-69ee59053ed2 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2053.186421] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5dd70a6c-7110-4f82-a016-eefc0a310c33 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2053.194327] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-80ef8ca7-1c6d-429c-800c-69ee59053ed2 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2053.194505] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-80ef8ca7-1c6d-429c-800c-69ee59053ed2 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2053.195224] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-18680868-e433-47a9-8207-fc1c8e996b1f {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2053.200532] env[63279]: DEBUG oslo_vmware.api [None req-80ef8ca7-1c6d-429c-800c-69ee59053ed2 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Waiting for the task: (returnval){ [ 2053.200532] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]5234c11e-85d8-56af-161a-b924a57e4c67" [ 2053.200532] env[63279]: _type = "Task" [ 2053.200532] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2053.209399] env[63279]: DEBUG oslo_vmware.api [None req-80ef8ca7-1c6d-429c-800c-69ee59053ed2 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]5234c11e-85d8-56af-161a-b924a57e4c67, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2053.295020] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087352, 'name': CreateVM_Task} progress is 99%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2053.363815] env[63279]: DEBUG nova.network.neutron [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] [instance: fd9b1666-8e06-4ed0-9187-05a40e136a1d] Successfully updated port: c7c4391f-94e6-42ba-9742-b3c94c1a79dc {{(pid=63279) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2053.474931] env[63279]: DEBUG oslo_concurrency.lockutils [None req-203ba856-8750-4748-8a31-13e38e82c1be tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.722s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2053.475715] env[63279]: DEBUG nova.compute.manager [None req-203ba856-8750-4748-8a31-13e38e82c1be tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] [instance: 6699de0a-b3f8-4d84-9c9b-d0f6899a606e] Start building networks asynchronously for instance. 
{{(pid=63279) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 2053.479724] env[63279]: DEBUG oslo_concurrency.lockutils [None req-0b9b77cd-0839-40e4-8a61-bde483c6a590 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 36.243s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2053.482495] env[63279]: INFO nova.compute.claims [None req-0b9b77cd-0839-40e4-8a61-bde483c6a590 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] [instance: a15141bc-424d-48ca-a6d5-c859a3639a0b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2053.577019] env[63279]: DEBUG oslo_concurrency.lockutils [None req-0abf34c8-85b0-43dd-ac7c-864a19d7c008 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Acquiring lock "2f5e22f6-ba70-4848-965b-eb1553115323" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2053.577019] env[63279]: DEBUG oslo_concurrency.lockutils [None req-0abf34c8-85b0-43dd-ac7c-864a19d7c008 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Lock "2f5e22f6-ba70-4848-965b-eb1553115323" acquired by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2053.577019] env[63279]: INFO nova.compute.manager [None req-0abf34c8-85b0-43dd-ac7c-864a19d7c008 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] [instance: 2f5e22f6-ba70-4848-965b-eb1553115323] Shelving [ 2053.589056] env[63279]: DEBUG oslo_vmware.api [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] Task: {'id': task-2087353, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.070126} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2053.590051] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] [instance: 795560b4-ccdc-4012-8130-042dcb94085f] Extended root virtual disk {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2053.590376] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87481a19-4391-4e73-925a-c660c2c93378 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2053.604932] env[63279]: DEBUG oslo_concurrency.lockutils [req-decc00b4-aab1-48d9-829b-d93f1cded5ab req-fb1a026f-5568-42cb-bf77-e0031905dbaf service nova] Releasing lock "refresh_cache-cd00cb0e-30e5-4a0c-8612-ea92e5e32edd" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2053.613658] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] [instance: 795560b4-ccdc-4012-8130-042dcb94085f] Reconfiguring VM instance instance-0000003c to attach disk [datastore1] 795560b4-ccdc-4012-8130-042dcb94085f/795560b4-ccdc-4012-8130-042dcb94085f.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2053.614198] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-abce11a3-b8f2-4a43-a5ce-ab1ad84529c3 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2053.633363] env[63279]: DEBUG oslo_vmware.api [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] Waiting for the task: (returnval){ [ 2053.633363] env[63279]: value = "task-2087354" [ 2053.633363] env[63279]: _type = "Task" [ 2053.633363] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2053.643565] env[63279]: DEBUG oslo_vmware.api [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] Task: {'id': task-2087354, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2053.710263] env[63279]: DEBUG oslo_vmware.api [None req-80ef8ca7-1c6d-429c-800c-69ee59053ed2 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]5234c11e-85d8-56af-161a-b924a57e4c67, 'name': SearchDatastore_Task, 'duration_secs': 0.009866} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2053.711176] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e0356d47-775a-4157-8e44-0826303bcc18 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2053.716812] env[63279]: DEBUG oslo_vmware.api [None req-80ef8ca7-1c6d-429c-800c-69ee59053ed2 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Waiting for the task: (returnval){ [ 2053.716812] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]523bf5ae-f5a6-fb80-4542-24ced7ea38ff" [ 2053.716812] env[63279]: _type = "Task" [ 2053.716812] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2053.727009] env[63279]: DEBUG oslo_vmware.api [None req-80ef8ca7-1c6d-429c-800c-69ee59053ed2 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]523bf5ae-f5a6-fb80-4542-24ced7ea38ff, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2053.798670] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087352, 'name': CreateVM_Task, 'duration_secs': 0.587671} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2053.799022] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cd00cb0e-30e5-4a0c-8612-ea92e5e32edd] Created VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2053.799565] env[63279]: DEBUG oslo_concurrency.lockutils [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2053.799724] env[63279]: DEBUG oslo_concurrency.lockutils [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2053.800047] env[63279]: DEBUG oslo_concurrency.lockutils [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2053.801053] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-73e052fe-71e6-4b22-bd35-3486c80e88cd {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2053.805648] env[63279]: DEBUG oslo_vmware.api [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 
tempest-ListServersNegativeTestJSON-168309111-project-member] Waiting for the task: (returnval){ [ 2053.805648] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52902aa1-3ea5-ba03-618d-4c48c62a5bda" [ 2053.805648] env[63279]: _type = "Task" [ 2053.805648] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2053.813574] env[63279]: DEBUG oslo_vmware.api [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52902aa1-3ea5-ba03-618d-4c48c62a5bda, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2053.867507] env[63279]: DEBUG oslo_concurrency.lockutils [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] Acquiring lock "refresh_cache-fd9b1666-8e06-4ed0-9187-05a40e136a1d" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2053.867646] env[63279]: DEBUG oslo_concurrency.lockutils [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] Acquired lock "refresh_cache-fd9b1666-8e06-4ed0-9187-05a40e136a1d" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2053.867884] env[63279]: DEBUG nova.network.neutron [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] [instance: fd9b1666-8e06-4ed0-9187-05a40e136a1d] Building network info cache for instance {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2053.989867] env[63279]: DEBUG nova.compute.utils [None req-203ba856-8750-4748-8a31-13e38e82c1be tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] Using /dev/sd instead of None {{(pid=63279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2053.991752] env[63279]: DEBUG nova.compute.manager [None req-203ba856-8750-4748-8a31-13e38e82c1be tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] [instance: 6699de0a-b3f8-4d84-9c9b-d0f6899a606e] Not allocating networking since 'none' was specified. 
{{(pid=63279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 2054.040842] env[63279]: DEBUG nova.compute.manager [req-a8bac9aa-e0f6-4e7d-ae2a-35c621082d6e req-d8a49809-e6db-410d-ab21-c2e2b4a4a7db service nova] [instance: fd9b1666-8e06-4ed0-9187-05a40e136a1d] Received event network-vif-plugged-c7c4391f-94e6-42ba-9742-b3c94c1a79dc {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2054.041089] env[63279]: DEBUG oslo_concurrency.lockutils [req-a8bac9aa-e0f6-4e7d-ae2a-35c621082d6e req-d8a49809-e6db-410d-ab21-c2e2b4a4a7db service nova] Acquiring lock "fd9b1666-8e06-4ed0-9187-05a40e136a1d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2054.041306] env[63279]: DEBUG oslo_concurrency.lockutils [req-a8bac9aa-e0f6-4e7d-ae2a-35c621082d6e req-d8a49809-e6db-410d-ab21-c2e2b4a4a7db service nova] Lock "fd9b1666-8e06-4ed0-9187-05a40e136a1d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2054.041480] env[63279]: DEBUG oslo_concurrency.lockutils [req-a8bac9aa-e0f6-4e7d-ae2a-35c621082d6e req-d8a49809-e6db-410d-ab21-c2e2b4a4a7db service nova] Lock "fd9b1666-8e06-4ed0-9187-05a40e136a1d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2054.041651] env[63279]: DEBUG nova.compute.manager [req-a8bac9aa-e0f6-4e7d-ae2a-35c621082d6e req-d8a49809-e6db-410d-ab21-c2e2b4a4a7db service nova] [instance: fd9b1666-8e06-4ed0-9187-05a40e136a1d] No waiting events found dispatching network-vif-plugged-c7c4391f-94e6-42ba-9742-b3c94c1a79dc {{(pid=63279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 2054.041817] env[63279]: WARNING nova.compute.manager [req-a8bac9aa-e0f6-4e7d-ae2a-35c621082d6e req-d8a49809-e6db-410d-ab21-c2e2b4a4a7db service nova] [instance: fd9b1666-8e06-4ed0-9187-05a40e136a1d] Received unexpected event network-vif-plugged-c7c4391f-94e6-42ba-9742-b3c94c1a79dc for instance with vm_state building and task_state spawning. [ 2054.042073] env[63279]: DEBUG nova.compute.manager [req-a8bac9aa-e0f6-4e7d-ae2a-35c621082d6e req-d8a49809-e6db-410d-ab21-c2e2b4a4a7db service nova] [instance: fd9b1666-8e06-4ed0-9187-05a40e136a1d] Received event network-changed-c7c4391f-94e6-42ba-9742-b3c94c1a79dc {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2054.042187] env[63279]: DEBUG nova.compute.manager [req-a8bac9aa-e0f6-4e7d-ae2a-35c621082d6e req-d8a49809-e6db-410d-ab21-c2e2b4a4a7db service nova] [instance: fd9b1666-8e06-4ed0-9187-05a40e136a1d] Refreshing instance network info cache due to event network-changed-c7c4391f-94e6-42ba-9742-b3c94c1a79dc. 
{{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11655}} [ 2054.043326] env[63279]: DEBUG oslo_concurrency.lockutils [req-a8bac9aa-e0f6-4e7d-ae2a-35c621082d6e req-d8a49809-e6db-410d-ab21-c2e2b4a4a7db service nova] Acquiring lock "refresh_cache-fd9b1666-8e06-4ed0-9187-05a40e136a1d" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2054.079718] env[63279]: DEBUG nova.objects.instance [None req-23acf72a-0ef4-4dcd-a4d1-bf58f42f4af4 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Lazy-loading 'flavor' on Instance uuid 7db0c32d-36a4-4452-bb07-06de0c93ab50 {{(pid=63279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2054.105511] env[63279]: DEBUG nova.network.neutron [req-afbf1c72-4856-4743-85be-469c148e9325 req-3b95187a-6e58-4a6c-b266-2884099b80c0 service nova] [instance: 795560b4-ccdc-4012-8130-042dcb94085f] Updated VIF entry in instance network info cache for port d7d164bc-29f6-4434-8980-357d7e34c723. {{(pid=63279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2054.105871] env[63279]: DEBUG nova.network.neutron [req-afbf1c72-4856-4743-85be-469c148e9325 req-3b95187a-6e58-4a6c-b266-2884099b80c0 service nova] [instance: 795560b4-ccdc-4012-8130-042dcb94085f] Updating instance_info_cache with network_info: [{"id": "d7d164bc-29f6-4434-8980-357d7e34c723", "address": "fa:16:3e:1a:99:ed", "network": {"id": "61fe1b5f-85f8-4556-ada4-11ea7683f1a7", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-858560732-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "eddf7a9795f344c3be977b267944f1e7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2b931c4c-f73c-4fbd-9c9f-0270834cc69e", "external-id": "nsx-vlan-transportzone-177", "segmentation_id": 177, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd7d164bc-29", "ovs_interfaceid": "d7d164bc-29f6-4434-8980-357d7e34c723", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2054.144613] env[63279]: DEBUG oslo_vmware.api [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] Task: {'id': task-2087354, 'name': ReconfigVM_Task, 'duration_secs': 0.238622} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2054.144899] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] [instance: 795560b4-ccdc-4012-8130-042dcb94085f] Reconfigured VM instance instance-0000003c to attach disk [datastore1] 795560b4-ccdc-4012-8130-042dcb94085f/795560b4-ccdc-4012-8130-042dcb94085f.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2054.145584] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-9fb0254b-c2c4-4918-9b54-cba2ddc88d50 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2054.152021] env[63279]: DEBUG oslo_vmware.api [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] Waiting for the task: (returnval){ [ 2054.152021] env[63279]: value = "task-2087355" [ 2054.152021] env[63279]: _type = "Task" [ 2054.152021] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2054.160299] env[63279]: DEBUG oslo_vmware.api [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] Task: {'id': task-2087355, 'name': Rename_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2054.226934] env[63279]: DEBUG oslo_vmware.api [None req-80ef8ca7-1c6d-429c-800c-69ee59053ed2 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]523bf5ae-f5a6-fb80-4542-24ced7ea38ff, 'name': SearchDatastore_Task, 'duration_secs': 0.009713} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2054.227230] env[63279]: DEBUG oslo_concurrency.lockutils [None req-80ef8ca7-1c6d-429c-800c-69ee59053ed2 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2054.227529] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-80ef8ca7-1c6d-429c-800c-69ee59053ed2 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] b981ac83-6c23-4d44-bd28-12da30d746bd/b981ac83-6c23-4d44-bd28-12da30d746bd.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2054.227746] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-50d4cc33-25d3-4843-806d-03fd21695102 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2054.234958] env[63279]: DEBUG oslo_vmware.api [None req-80ef8ca7-1c6d-429c-800c-69ee59053ed2 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Waiting for the task: (returnval){ [ 2054.234958] env[63279]: value = "task-2087356" [ 2054.234958] env[63279]: _type = "Task" [ 2054.234958] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2054.242773] env[63279]: DEBUG oslo_vmware.api [None req-80ef8ca7-1c6d-429c-800c-69ee59053ed2 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Task: {'id': task-2087356, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2054.315584] env[63279]: DEBUG oslo_vmware.api [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52902aa1-3ea5-ba03-618d-4c48c62a5bda, 'name': SearchDatastore_Task, 'duration_secs': 0.009795} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2054.316070] env[63279]: DEBUG oslo_concurrency.lockutils [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2054.316179] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] [instance: cd00cb0e-30e5-4a0c-8612-ea92e5e32edd] Processing image 30887889-e45b-4f67-8b3c-16216e594a90 {{(pid=63279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2054.316436] env[63279]: DEBUG oslo_concurrency.lockutils [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2054.316605] env[63279]: DEBUG oslo_concurrency.lockutils [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2054.316805] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2054.317127] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a81c8d97-573b-4676-badb-664b1e54abf9 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2054.324779] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2054.324924] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2054.325672] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5aeae15b-5a79-42d8-8bce-302145fa5a00 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2054.330637] env[63279]: DEBUG oslo_vmware.api [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] Waiting for the task: (returnval){ [ 2054.330637] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52a255ab-6d57-3f2a-b594-0a7751c85886" [ 2054.330637] env[63279]: _type = "Task" [ 2054.330637] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2054.339890] env[63279]: DEBUG oslo_vmware.api [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52a255ab-6d57-3f2a-b594-0a7751c85886, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2054.404429] env[63279]: DEBUG nova.network.neutron [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] [instance: fd9b1666-8e06-4ed0-9187-05a40e136a1d] Instance cache missing network info. {{(pid=63279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2054.496438] env[63279]: DEBUG nova.compute.manager [None req-203ba856-8750-4748-8a31-13e38e82c1be tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] [instance: 6699de0a-b3f8-4d84-9c9b-d0f6899a606e] Start building block device mappings for instance. 
{{(pid=63279) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 2054.587371] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-0abf34c8-85b0-43dd-ac7c-864a19d7c008 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] [instance: 2f5e22f6-ba70-4848-965b-eb1553115323] Powering off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2054.587659] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f4c1c2a0-f6e6-4df7-963c-0d7257d22a3a {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2054.590497] env[63279]: DEBUG oslo_concurrency.lockutils [None req-23acf72a-0ef4-4dcd-a4d1-bf58f42f4af4 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Lock "7db0c32d-36a4-4452-bb07-06de0c93ab50" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.297s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2054.599926] env[63279]: DEBUG oslo_vmware.api [None req-0abf34c8-85b0-43dd-ac7c-864a19d7c008 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Waiting for the task: (returnval){ [ 2054.599926] env[63279]: value = "task-2087357" [ 2054.599926] env[63279]: _type = "Task" [ 2054.599926] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2054.611721] env[63279]: DEBUG oslo_concurrency.lockutils [req-afbf1c72-4856-4743-85be-469c148e9325 req-3b95187a-6e58-4a6c-b266-2884099b80c0 service nova] Releasing lock "refresh_cache-795560b4-ccdc-4012-8130-042dcb94085f" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2054.612176] env[63279]: DEBUG oslo_vmware.api [None req-0abf34c8-85b0-43dd-ac7c-864a19d7c008 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Task: {'id': task-2087357, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2054.652251] env[63279]: DEBUG nova.network.neutron [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] [instance: fd9b1666-8e06-4ed0-9187-05a40e136a1d] Updating instance_info_cache with network_info: [{"id": "c7c4391f-94e6-42ba-9742-b3c94c1a79dc", "address": "fa:16:3e:16:39:58", "network": {"id": "61fe1b5f-85f8-4556-ada4-11ea7683f1a7", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-858560732-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "eddf7a9795f344c3be977b267944f1e7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2b931c4c-f73c-4fbd-9c9f-0270834cc69e", "external-id": "nsx-vlan-transportzone-177", "segmentation_id": 177, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc7c4391f-94", "ovs_interfaceid": "c7c4391f-94e6-42ba-9742-b3c94c1a79dc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2054.663264] env[63279]: DEBUG oslo_vmware.api [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] Task: {'id': task-2087355, 'name': Rename_Task, 'duration_secs': 0.166129} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2054.665979] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] [instance: 795560b4-ccdc-4012-8130-042dcb94085f] Powering on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2054.667097] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-7b0fb6f2-8bc8-4422-9f9e-61af5bb68c56 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2054.673480] env[63279]: DEBUG oslo_vmware.api [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] Waiting for the task: (returnval){ [ 2054.673480] env[63279]: value = "task-2087358" [ 2054.673480] env[63279]: _type = "Task" [ 2054.673480] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2054.683256] env[63279]: DEBUG oslo_vmware.api [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] Task: {'id': task-2087358, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2054.713155] env[63279]: DEBUG oslo_concurrency.lockutils [None req-81ee3126-1583-4fad-aaad-b59e9f272102 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Acquiring lock "7db0c32d-36a4-4452-bb07-06de0c93ab50" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2054.713495] env[63279]: DEBUG oslo_concurrency.lockutils [None req-81ee3126-1583-4fad-aaad-b59e9f272102 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Lock "7db0c32d-36a4-4452-bb07-06de0c93ab50" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2054.713707] env[63279]: DEBUG oslo_concurrency.lockutils [None req-81ee3126-1583-4fad-aaad-b59e9f272102 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Acquiring lock "7db0c32d-36a4-4452-bb07-06de0c93ab50-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2054.713904] env[63279]: DEBUG oslo_concurrency.lockutils [None req-81ee3126-1583-4fad-aaad-b59e9f272102 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Lock "7db0c32d-36a4-4452-bb07-06de0c93ab50-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2054.714091] env[63279]: DEBUG oslo_concurrency.lockutils [None req-81ee3126-1583-4fad-aaad-b59e9f272102 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Lock "7db0c32d-36a4-4452-bb07-06de0c93ab50-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2054.716307] env[63279]: INFO nova.compute.manager [None req-81ee3126-1583-4fad-aaad-b59e9f272102 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: 7db0c32d-36a4-4452-bb07-06de0c93ab50] Terminating instance [ 2054.746650] env[63279]: DEBUG oslo_vmware.api [None req-80ef8ca7-1c6d-429c-800c-69ee59053ed2 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Task: {'id': task-2087356, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.496506} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2054.746922] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-80ef8ca7-1c6d-429c-800c-69ee59053ed2 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] b981ac83-6c23-4d44-bd28-12da30d746bd/b981ac83-6c23-4d44-bd28-12da30d746bd.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2054.747167] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-80ef8ca7-1c6d-429c-800c-69ee59053ed2 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: b981ac83-6c23-4d44-bd28-12da30d746bd] Extending root virtual disk to 1048576 {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2054.747429] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-45e3df4c-30ad-4565-add0-4d45947dd0d4 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2054.755659] env[63279]: DEBUG oslo_vmware.api [None req-80ef8ca7-1c6d-429c-800c-69ee59053ed2 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Waiting for the task: (returnval){ [ 2054.755659] env[63279]: value = "task-2087359" [ 2054.755659] env[63279]: _type = "Task" [ 2054.755659] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2054.763358] env[63279]: DEBUG oslo_vmware.api [None req-80ef8ca7-1c6d-429c-800c-69ee59053ed2 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Task: {'id': task-2087359, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2054.841148] env[63279]: DEBUG oslo_vmware.api [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52a255ab-6d57-3f2a-b594-0a7751c85886, 'name': SearchDatastore_Task, 'duration_secs': 0.008476} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2054.845067] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6bb1c0b9-0db8-4af8-b7a9-1dc9dfcf3727 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2054.850481] env[63279]: DEBUG oslo_vmware.api [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] Waiting for the task: (returnval){ [ 2054.850481] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52773bc3-5485-18ad-26fb-42bcd7e5db4d" [ 2054.850481] env[63279]: _type = "Task" [ 2054.850481] env[63279]: } to complete. 
{{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2054.860771] env[63279]: DEBUG oslo_vmware.api [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52773bc3-5485-18ad-26fb-42bcd7e5db4d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2054.984753] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3cd6a13a-3fb8-4149-aab5-943d1db24363 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2054.991787] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8c13eb5-767e-4e39-9191-27e78c1c1f51 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2055.025582] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44edd336-5c6a-45b4-ad8a-391dcad93ca5 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2055.033048] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4085efa8-f332-41c0-853c-d2f4bb3ae197 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2055.045928] env[63279]: DEBUG nova.compute.provider_tree [None req-0b9b77cd-0839-40e4-8a61-bde483c6a590 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Updating inventory in ProviderTree for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2055.110633] env[63279]: DEBUG oslo_vmware.api [None req-0abf34c8-85b0-43dd-ac7c-864a19d7c008 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Task: {'id': task-2087357, 'name': PowerOffVM_Task, 'duration_secs': 0.281493} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2055.111493] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-0abf34c8-85b0-43dd-ac7c-864a19d7c008 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] [instance: 2f5e22f6-ba70-4848-965b-eb1553115323] Powered off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2055.112052] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-138fce3f-bd7d-44d3-aa70-17d81660a2cf {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2055.129377] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f92fd15-0fbe-49ea-91f8-24fa8e27f198 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2055.158970] env[63279]: DEBUG oslo_concurrency.lockutils [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] Releasing lock "refresh_cache-fd9b1666-8e06-4ed0-9187-05a40e136a1d" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2055.159274] env[63279]: DEBUG nova.compute.manager [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] [instance: fd9b1666-8e06-4ed0-9187-05a40e136a1d] Instance network_info: |[{"id": "c7c4391f-94e6-42ba-9742-b3c94c1a79dc", "address": "fa:16:3e:16:39:58", "network": {"id": "61fe1b5f-85f8-4556-ada4-11ea7683f1a7", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-858560732-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "eddf7a9795f344c3be977b267944f1e7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2b931c4c-f73c-4fbd-9c9f-0270834cc69e", "external-id": "nsx-vlan-transportzone-177", "segmentation_id": 177, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc7c4391f-94", "ovs_interfaceid": "c7c4391f-94e6-42ba-9742-b3c94c1a79dc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 2055.159590] env[63279]: DEBUG oslo_concurrency.lockutils [req-a8bac9aa-e0f6-4e7d-ae2a-35c621082d6e req-d8a49809-e6db-410d-ab21-c2e2b4a4a7db service nova] Acquired lock "refresh_cache-fd9b1666-8e06-4ed0-9187-05a40e136a1d" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2055.159775] env[63279]: DEBUG nova.network.neutron [req-a8bac9aa-e0f6-4e7d-ae2a-35c621082d6e req-d8a49809-e6db-410d-ab21-c2e2b4a4a7db service nova] [instance: fd9b1666-8e06-4ed0-9187-05a40e136a1d] Refreshing network info cache for port 
c7c4391f-94e6-42ba-9742-b3c94c1a79dc {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2055.161069] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] [instance: fd9b1666-8e06-4ed0-9187-05a40e136a1d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:16:39:58', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '2b931c4c-f73c-4fbd-9c9f-0270834cc69e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c7c4391f-94e6-42ba-9742-b3c94c1a79dc', 'vif_model': 'vmxnet3'}] {{(pid=63279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2055.168676] env[63279]: DEBUG oslo.service.loopingcall [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2055.172064] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fd9b1666-8e06-4ed0-9187-05a40e136a1d] Creating VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2055.172815] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-18775795-32d1-45bd-b449-3b2668793d13 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2055.195122] env[63279]: DEBUG oslo_vmware.api [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] Task: {'id': task-2087358, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2055.196251] env[63279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2055.196251] env[63279]: value = "task-2087360" [ 2055.196251] env[63279]: _type = "Task" [ 2055.196251] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2055.203253] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087360, 'name': CreateVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2055.222471] env[63279]: DEBUG nova.compute.manager [None req-81ee3126-1583-4fad-aaad-b59e9f272102 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: 7db0c32d-36a4-4452-bb07-06de0c93ab50] Start destroying the instance on the hypervisor. 
{{(pid=63279) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2055.222694] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-81ee3126-1583-4fad-aaad-b59e9f272102 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: 7db0c32d-36a4-4452-bb07-06de0c93ab50] Powering off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2055.222924] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c243f82a-2f19-49ba-a8c3-f14e236b0070 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2055.228672] env[63279]: DEBUG oslo_vmware.api [None req-81ee3126-1583-4fad-aaad-b59e9f272102 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Waiting for the task: (returnval){ [ 2055.228672] env[63279]: value = "task-2087361" [ 2055.228672] env[63279]: _type = "Task" [ 2055.228672] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2055.235993] env[63279]: DEBUG oslo_vmware.api [None req-81ee3126-1583-4fad-aaad-b59e9f272102 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Task: {'id': task-2087361, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2055.265785] env[63279]: DEBUG oslo_vmware.api [None req-80ef8ca7-1c6d-429c-800c-69ee59053ed2 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Task: {'id': task-2087359, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.078365} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2055.266046] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-80ef8ca7-1c6d-429c-800c-69ee59053ed2 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: b981ac83-6c23-4d44-bd28-12da30d746bd] Extended root virtual disk {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2055.266783] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95166f04-0639-40b7-9f15-fc13714df60b {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2055.289483] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-80ef8ca7-1c6d-429c-800c-69ee59053ed2 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: b981ac83-6c23-4d44-bd28-12da30d746bd] Reconfiguring VM instance instance-0000003b to attach disk [datastore1] b981ac83-6c23-4d44-bd28-12da30d746bd/b981ac83-6c23-4d44-bd28-12da30d746bd.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2055.291915] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-add99aa4-c05b-4eed-83e6-10de2b0c9d5c {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2055.311736] env[63279]: DEBUG oslo_vmware.api [None req-80ef8ca7-1c6d-429c-800c-69ee59053ed2 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Waiting for the task: (returnval){ [ 2055.311736] env[63279]: value = "task-2087362" [ 2055.311736] env[63279]: _type = "Task" [ 2055.311736] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2055.321613] env[63279]: DEBUG oslo_vmware.api [None req-80ef8ca7-1c6d-429c-800c-69ee59053ed2 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Task: {'id': task-2087362, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2055.360517] env[63279]: DEBUG oslo_vmware.api [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52773bc3-5485-18ad-26fb-42bcd7e5db4d, 'name': SearchDatastore_Task, 'duration_secs': 0.02203} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2055.360797] env[63279]: DEBUG oslo_concurrency.lockutils [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2055.361076] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] cd00cb0e-30e5-4a0c-8612-ea92e5e32edd/cd00cb0e-30e5-4a0c-8612-ea92e5e32edd.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2055.361343] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-76b34b0c-d6c9-4e59-bb1e-05ad3f1d7bb4 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2055.368150] env[63279]: DEBUG oslo_vmware.api [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] Waiting for the task: (returnval){ [ 2055.368150] env[63279]: value = "task-2087363" [ 2055.368150] env[63279]: _type = "Task" [ 2055.368150] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2055.376896] env[63279]: DEBUG oslo_vmware.api [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] Task: {'id': task-2087363, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2055.432658] env[63279]: DEBUG nova.network.neutron [req-a8bac9aa-e0f6-4e7d-ae2a-35c621082d6e req-d8a49809-e6db-410d-ab21-c2e2b4a4a7db service nova] [instance: fd9b1666-8e06-4ed0-9187-05a40e136a1d] Updated VIF entry in instance network info cache for port c7c4391f-94e6-42ba-9742-b3c94c1a79dc. 
{{(pid=63279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2055.433176] env[63279]: DEBUG nova.network.neutron [req-a8bac9aa-e0f6-4e7d-ae2a-35c621082d6e req-d8a49809-e6db-410d-ab21-c2e2b4a4a7db service nova] [instance: fd9b1666-8e06-4ed0-9187-05a40e136a1d] Updating instance_info_cache with network_info: [{"id": "c7c4391f-94e6-42ba-9742-b3c94c1a79dc", "address": "fa:16:3e:16:39:58", "network": {"id": "61fe1b5f-85f8-4556-ada4-11ea7683f1a7", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-858560732-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "eddf7a9795f344c3be977b267944f1e7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2b931c4c-f73c-4fbd-9c9f-0270834cc69e", "external-id": "nsx-vlan-transportzone-177", "segmentation_id": 177, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc7c4391f-94", "ovs_interfaceid": "c7c4391f-94e6-42ba-9742-b3c94c1a79dc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2055.529478] env[63279]: DEBUG nova.compute.manager [None req-203ba856-8750-4748-8a31-13e38e82c1be tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] [instance: 6699de0a-b3f8-4d84-9c9b-d0f6899a606e] Start spawning the instance on the hypervisor. 
{{(pid=63279) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 2055.566874] env[63279]: DEBUG nova.virt.hardware [None req-203ba856-8750-4748-8a31-13e38e82c1be tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-13T17:30:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-13T17:30:01Z,direct_url=,disk_format='vmdk',id=30887889-e45b-4f67-8b3c-16216e594a90,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a935e86eee0a4c38adfe0367d2097a61',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-13T17:30:02Z,virtual_size=,visibility=), allow threads: False {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2055.566874] env[63279]: DEBUG nova.virt.hardware [None req-203ba856-8750-4748-8a31-13e38e82c1be tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] Flavor limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2055.566874] env[63279]: DEBUG nova.virt.hardware [None req-203ba856-8750-4748-8a31-13e38e82c1be tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] Image limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2055.566874] env[63279]: DEBUG nova.virt.hardware [None req-203ba856-8750-4748-8a31-13e38e82c1be tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] Flavor pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2055.567227] env[63279]: DEBUG nova.virt.hardware [None req-203ba856-8750-4748-8a31-13e38e82c1be tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] Image pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2055.567529] env[63279]: DEBUG nova.virt.hardware [None req-203ba856-8750-4748-8a31-13e38e82c1be tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2055.568079] env[63279]: DEBUG nova.virt.hardware [None req-203ba856-8750-4748-8a31-13e38e82c1be tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 2055.568320] env[63279]: DEBUG nova.virt.hardware [None req-203ba856-8750-4748-8a31-13e38e82c1be tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 2055.568588] env[63279]: DEBUG nova.virt.hardware [None req-203ba856-8750-4748-8a31-13e38e82c1be 
tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] Got 1 possible topologies {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2055.569366] env[63279]: DEBUG nova.virt.hardware [None req-203ba856-8750-4748-8a31-13e38e82c1be tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2055.569366] env[63279]: DEBUG nova.virt.hardware [None req-203ba856-8750-4748-8a31-13e38e82c1be tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2055.570083] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6af36bfa-40f9-4fd0-a474-ac8f9c8eeeb8 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2055.578648] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6807e61c-dcf8-453f-b0ec-bcb3cf01617f {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2055.595189] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-203ba856-8750-4748-8a31-13e38e82c1be tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] [instance: 6699de0a-b3f8-4d84-9c9b-d0f6899a606e] Instance VIF info [] {{(pid=63279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2055.600863] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-203ba856-8750-4748-8a31-13e38e82c1be tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] Creating folder: Project (6c67cf51b1ae45e3a394684fad8b46ce). Parent ref: group-v427491. 
{{(pid=63279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 2055.602189] env[63279]: DEBUG nova.scheduler.client.report [None req-0b9b77cd-0839-40e4-8a61-bde483c6a590 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Updated inventory for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 with generation 94 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 2055.602456] env[63279]: DEBUG nova.compute.provider_tree [None req-0b9b77cd-0839-40e4-8a61-bde483c6a590 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Updating resource provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 generation from 94 to 95 during operation: update_inventory {{(pid=63279) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 2055.602645] env[63279]: DEBUG nova.compute.provider_tree [None req-0b9b77cd-0839-40e4-8a61-bde483c6a590 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Updating inventory in ProviderTree for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2055.606179] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e011a1bb-9530-4d34-98ba-a11f86270a01 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2055.616543] env[63279]: INFO nova.virt.vmwareapi.vm_util [None req-203ba856-8750-4748-8a31-13e38e82c1be tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] Created folder: Project (6c67cf51b1ae45e3a394684fad8b46ce) in parent group-v427491. [ 2055.616774] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-203ba856-8750-4748-8a31-13e38e82c1be tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] Creating folder: Instances. Parent ref: group-v427661. {{(pid=63279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 2055.617049] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f9cd7908-65e6-4712-be9a-b9293c728ad5 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2055.626427] env[63279]: INFO nova.virt.vmwareapi.vm_util [None req-203ba856-8750-4748-8a31-13e38e82c1be tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] Created folder: Instances in parent group-v427661. 
[ 2055.626697] env[63279]: DEBUG oslo.service.loopingcall [None req-203ba856-8750-4748-8a31-13e38e82c1be tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2055.626900] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6699de0a-b3f8-4d84-9c9b-d0f6899a606e] Creating VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2055.627444] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3fd33765-285f-44b2-8c43-703bcbd05cfa {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2055.642223] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-0abf34c8-85b0-43dd-ac7c-864a19d7c008 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] [instance: 2f5e22f6-ba70-4848-965b-eb1553115323] Creating Snapshot of the VM instance {{(pid=63279) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 2055.642558] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-e8b17471-efac-40c4-a248-050ad2ac4ee8 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2055.649045] env[63279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2055.649045] env[63279]: value = "task-2087366" [ 2055.649045] env[63279]: _type = "Task" [ 2055.649045] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2055.654110] env[63279]: DEBUG oslo_vmware.api [None req-0abf34c8-85b0-43dd-ac7c-864a19d7c008 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Waiting for the task: (returnval){ [ 2055.654110] env[63279]: value = "task-2087367" [ 2055.654110] env[63279]: _type = "Task" [ 2055.654110] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2055.660856] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087366, 'name': CreateVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2055.666718] env[63279]: DEBUG oslo_vmware.api [None req-0abf34c8-85b0-43dd-ac7c-864a19d7c008 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Task: {'id': task-2087367, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2055.697687] env[63279]: DEBUG oslo_vmware.api [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] Task: {'id': task-2087358, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2055.709285] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087360, 'name': CreateVM_Task} progress is 99%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2055.743959] env[63279]: DEBUG oslo_vmware.api [None req-81ee3126-1583-4fad-aaad-b59e9f272102 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Task: {'id': task-2087361, 'name': PowerOffVM_Task, 'duration_secs': 0.278351} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2055.744381] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-81ee3126-1583-4fad-aaad-b59e9f272102 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: 7db0c32d-36a4-4452-bb07-06de0c93ab50] Powered off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2055.745076] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-81ee3126-1583-4fad-aaad-b59e9f272102 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: 7db0c32d-36a4-4452-bb07-06de0c93ab50] Volume detach. Driver type: vmdk {{(pid=63279) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 2055.745076] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-81ee3126-1583-4fad-aaad-b59e9f272102 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: 7db0c32d-36a4-4452-bb07-06de0c93ab50] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-427654', 'volume_id': '6c280eb9-3e63-43e5-841d-7866ee508507', 'name': 'volume-6c280eb9-3e63-43e5-841d-7866ee508507', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '7db0c32d-36a4-4452-bb07-06de0c93ab50', 'attached_at': '', 'detached_at': '', 'volume_id': '6c280eb9-3e63-43e5-841d-7866ee508507', 'serial': '6c280eb9-3e63-43e5-841d-7866ee508507'} {{(pid=63279) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 2055.745607] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-075ce754-5c60-449e-a61c-048e1ff3ab99 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2055.772584] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f0e6e7f-cc27-4f1f-88f7-fa0465faf765 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2055.781020] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de6691cb-02bf-4e51-9e0c-6b2ff8f717f3 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2055.801682] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6d13b6b-7d92-4f35-88f0-cc68c9f5b2e9 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2055.818378] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-81ee3126-1583-4fad-aaad-b59e9f272102 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] The volume has not been displaced from its original location: [datastore1] 
volume-6c280eb9-3e63-43e5-841d-7866ee508507/volume-6c280eb9-3e63-43e5-841d-7866ee508507.vmdk. No consolidation needed. {{(pid=63279) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 2055.824867] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-81ee3126-1583-4fad-aaad-b59e9f272102 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: 7db0c32d-36a4-4452-bb07-06de0c93ab50] Reconfiguring VM instance instance-0000003a to detach disk 2001 {{(pid=63279) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 2055.827236] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b5767d97-a701-4fef-b496-28d6f269c3ac {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2055.846474] env[63279]: DEBUG oslo_vmware.api [None req-81ee3126-1583-4fad-aaad-b59e9f272102 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Waiting for the task: (returnval){ [ 2055.846474] env[63279]: value = "task-2087368" [ 2055.846474] env[63279]: _type = "Task" [ 2055.846474] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2055.849553] env[63279]: DEBUG oslo_vmware.api [None req-80ef8ca7-1c6d-429c-800c-69ee59053ed2 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Task: {'id': task-2087362, 'name': ReconfigVM_Task, 'duration_secs': 0.289597} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2055.852781] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-80ef8ca7-1c6d-429c-800c-69ee59053ed2 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: b981ac83-6c23-4d44-bd28-12da30d746bd] Reconfigured VM instance instance-0000003b to attach disk [datastore1] b981ac83-6c23-4d44-bd28-12da30d746bd/b981ac83-6c23-4d44-bd28-12da30d746bd.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2055.853445] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-6f15652b-a0c2-4e7a-8b31-39d28e6a0e03 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2055.860322] env[63279]: DEBUG oslo_vmware.api [None req-81ee3126-1583-4fad-aaad-b59e9f272102 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Task: {'id': task-2087368, 'name': ReconfigVM_Task} progress is 10%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2055.864190] env[63279]: DEBUG oslo_vmware.api [None req-80ef8ca7-1c6d-429c-800c-69ee59053ed2 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Waiting for the task: (returnval){ [ 2055.864190] env[63279]: value = "task-2087369" [ 2055.864190] env[63279]: _type = "Task" [ 2055.864190] env[63279]: } to complete. 
{{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2055.876290] env[63279]: DEBUG oslo_vmware.api [None req-80ef8ca7-1c6d-429c-800c-69ee59053ed2 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Task: {'id': task-2087369, 'name': Rename_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2055.883860] env[63279]: DEBUG oslo_vmware.api [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] Task: {'id': task-2087363, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2055.935854] env[63279]: DEBUG oslo_concurrency.lockutils [req-a8bac9aa-e0f6-4e7d-ae2a-35c621082d6e req-d8a49809-e6db-410d-ab21-c2e2b4a4a7db service nova] Releasing lock "refresh_cache-fd9b1666-8e06-4ed0-9187-05a40e136a1d" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2056.109566] env[63279]: DEBUG oslo_concurrency.lockutils [None req-0b9b77cd-0839-40e4-8a61-bde483c6a590 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.630s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2056.110124] env[63279]: DEBUG nova.compute.manager [None req-0b9b77cd-0839-40e4-8a61-bde483c6a590 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] [instance: a15141bc-424d-48ca-a6d5-c859a3639a0b] Start building networks asynchronously for instance. {{(pid=63279) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 2056.113149] env[63279]: DEBUG oslo_concurrency.lockutils [None req-c8d6a01e-b538-4983-b585-0874aad097ba tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 37.397s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2056.163386] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087366, 'name': CreateVM_Task, 'duration_secs': 0.346054} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2056.163950] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6699de0a-b3f8-4d84-9c9b-d0f6899a606e] Created VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2056.164441] env[63279]: DEBUG oslo_concurrency.lockutils [None req-203ba856-8750-4748-8a31-13e38e82c1be tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2056.164613] env[63279]: DEBUG oslo_concurrency.lockutils [None req-203ba856-8750-4748-8a31-13e38e82c1be tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2056.164967] env[63279]: DEBUG oslo_concurrency.lockutils [None req-203ba856-8750-4748-8a31-13e38e82c1be tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2056.165258] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-45af50c4-89c3-4791-b4a8-a2b052069103 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2056.169705] env[63279]: DEBUG oslo_vmware.api [None req-0abf34c8-85b0-43dd-ac7c-864a19d7c008 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Task: {'id': task-2087367, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2056.174324] env[63279]: DEBUG oslo_vmware.api [None req-203ba856-8750-4748-8a31-13e38e82c1be tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] Waiting for the task: (returnval){ [ 2056.174324] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52c835bc-a7b2-039c-351f-10a26c4c129e" [ 2056.174324] env[63279]: _type = "Task" [ 2056.174324] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2056.185157] env[63279]: DEBUG oslo_vmware.api [None req-203ba856-8750-4748-8a31-13e38e82c1be tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52c835bc-a7b2-039c-351f-10a26c4c129e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2056.195671] env[63279]: DEBUG oslo_vmware.api [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] Task: {'id': task-2087358, 'name': PowerOnVM_Task, 'duration_secs': 1.261209} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2056.195935] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] [instance: 795560b4-ccdc-4012-8130-042dcb94085f] Powered on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2056.196155] env[63279]: INFO nova.compute.manager [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] [instance: 795560b4-ccdc-4012-8130-042dcb94085f] Took 8.60 seconds to spawn the instance on the hypervisor. [ 2056.196335] env[63279]: DEBUG nova.compute.manager [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] [instance: 795560b4-ccdc-4012-8130-042dcb94085f] Checking state {{(pid=63279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2056.197097] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a72af168-e97d-4ac0-a6ea-c5229d20dd23 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2056.210287] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087360, 'name': CreateVM_Task, 'duration_secs': 0.580923} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2056.211399] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fd9b1666-8e06-4ed0-9187-05a40e136a1d] Created VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2056.214700] env[63279]: DEBUG oslo_concurrency.lockutils [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2056.359142] env[63279]: DEBUG oslo_vmware.api [None req-81ee3126-1583-4fad-aaad-b59e9f272102 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Task: {'id': task-2087368, 'name': ReconfigVM_Task, 'duration_secs': 0.392978} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2056.359475] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-81ee3126-1583-4fad-aaad-b59e9f272102 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: 7db0c32d-36a4-4452-bb07-06de0c93ab50] Reconfigured VM instance instance-0000003a to detach disk 2001 {{(pid=63279) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 2056.364256] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-71061275-ca67-434d-a469-565dab566e19 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2056.389221] env[63279]: DEBUG oslo_vmware.api [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] Task: {'id': task-2087363, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.54584} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2056.393067] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] cd00cb0e-30e5-4a0c-8612-ea92e5e32edd/cd00cb0e-30e5-4a0c-8612-ea92e5e32edd.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2056.393319] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] [instance: cd00cb0e-30e5-4a0c-8612-ea92e5e32edd] Extending root virtual disk to 1048576 {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2056.393654] env[63279]: DEBUG oslo_vmware.api [None req-81ee3126-1583-4fad-aaad-b59e9f272102 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Waiting for the task: (returnval){ [ 2056.393654] env[63279]: value = "task-2087370" [ 2056.393654] env[63279]: _type = "Task" [ 2056.393654] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2056.393862] env[63279]: DEBUG oslo_vmware.api [None req-80ef8ca7-1c6d-429c-800c-69ee59053ed2 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Task: {'id': task-2087369, 'name': Rename_Task, 'duration_secs': 0.181957} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2056.394079] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-eac79cdd-8c40-47f4-8fb5-3cc162b0dcf1 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2056.396029] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-80ef8ca7-1c6d-429c-800c-69ee59053ed2 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: b981ac83-6c23-4d44-bd28-12da30d746bd] Powering on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2056.399099] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e505673b-fec4-4786-bf0e-bed9309a8883 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2056.405872] env[63279]: DEBUG oslo_vmware.api [None req-81ee3126-1583-4fad-aaad-b59e9f272102 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Task: {'id': task-2087370, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2056.407913] env[63279]: DEBUG oslo_vmware.api [None req-80ef8ca7-1c6d-429c-800c-69ee59053ed2 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Waiting for the task: (returnval){ [ 2056.407913] env[63279]: value = "task-2087371" [ 2056.407913] env[63279]: _type = "Task" [ 2056.407913] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2056.408586] env[63279]: DEBUG oslo_vmware.api [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] Waiting for the task: (returnval){ [ 2056.408586] env[63279]: value = "task-2087372" [ 2056.408586] env[63279]: _type = "Task" [ 2056.408586] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2056.420488] env[63279]: DEBUG oslo_vmware.api [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] Task: {'id': task-2087372, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2056.425032] env[63279]: DEBUG oslo_vmware.api [None req-80ef8ca7-1c6d-429c-800c-69ee59053ed2 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Task: {'id': task-2087371, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2056.616874] env[63279]: DEBUG nova.compute.utils [None req-0b9b77cd-0839-40e4-8a61-bde483c6a590 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Using /dev/sd instead of None {{(pid=63279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2056.618368] env[63279]: DEBUG nova.compute.manager [None req-0b9b77cd-0839-40e4-8a61-bde483c6a590 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] [instance: a15141bc-424d-48ca-a6d5-c859a3639a0b] Allocating IP information in the background. {{(pid=63279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 2056.618533] env[63279]: DEBUG nova.network.neutron [None req-0b9b77cd-0839-40e4-8a61-bde483c6a590 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] [instance: a15141bc-424d-48ca-a6d5-c859a3639a0b] allocate_for_instance() {{(pid=63279) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2056.622721] env[63279]: INFO nova.compute.claims [None req-c8d6a01e-b538-4983-b585-0874aad097ba tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] [instance: f6a5d157-d58c-4c7e-b6e8-f2dc7aaebd9b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2056.665235] env[63279]: DEBUG oslo_vmware.api [None req-0abf34c8-85b0-43dd-ac7c-864a19d7c008 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Task: {'id': task-2087367, 'name': CreateSnapshot_Task, 'duration_secs': 0.97908} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2056.665501] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-0abf34c8-85b0-43dd-ac7c-864a19d7c008 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] [instance: 2f5e22f6-ba70-4848-965b-eb1553115323] Created Snapshot of the VM instance {{(pid=63279) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 2056.666286] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07d4d717-a56f-4072-8113-2407fdf9d476 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2056.685029] env[63279]: DEBUG oslo_vmware.api [None req-203ba856-8750-4748-8a31-13e38e82c1be tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52c835bc-a7b2-039c-351f-10a26c4c129e, 'name': SearchDatastore_Task, 'duration_secs': 0.034911} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2056.685149] env[63279]: DEBUG oslo_concurrency.lockutils [None req-203ba856-8750-4748-8a31-13e38e82c1be tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2056.685298] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-203ba856-8750-4748-8a31-13e38e82c1be tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] [instance: 6699de0a-b3f8-4d84-9c9b-d0f6899a606e] Processing image 30887889-e45b-4f67-8b3c-16216e594a90 {{(pid=63279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2056.685563] env[63279]: DEBUG oslo_concurrency.lockutils [None req-203ba856-8750-4748-8a31-13e38e82c1be tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2056.685718] env[63279]: DEBUG oslo_concurrency.lockutils [None req-203ba856-8750-4748-8a31-13e38e82c1be tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2056.685899] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-203ba856-8750-4748-8a31-13e38e82c1be tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2056.686193] env[63279]: DEBUG oslo_concurrency.lockutils [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2056.686548] env[63279]: DEBUG oslo_concurrency.lockutils [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2056.686802] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-411be457-5fc8-422a-81fe-2a9740bd2304 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2056.688830] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5047d67c-3b81-4add-b183-197bdae17c30 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2056.694136] env[63279]: DEBUG oslo_vmware.api [None 
req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] Waiting for the task: (returnval){ [ 2056.694136] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]524d0d0b-607f-660b-a764-3b30200bd647" [ 2056.694136] env[63279]: _type = "Task" [ 2056.694136] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2056.699245] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-203ba856-8750-4748-8a31-13e38e82c1be tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2056.699486] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-203ba856-8750-4748-8a31-13e38e82c1be tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2056.703308] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-931b1199-3fba-4ce4-a8ba-5f2408716f9a {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2056.705768] env[63279]: DEBUG oslo_vmware.api [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]524d0d0b-607f-660b-a764-3b30200bd647, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2056.707355] env[63279]: DEBUG nova.policy [None req-0b9b77cd-0839-40e4-8a61-bde483c6a590 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3d273c47f9cc4928a396c459dc0bb006', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '131db3d2daa24712b6e11592cf789b33', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63279) authorize /opt/stack/nova/nova/policy.py:192}} [ 2056.711840] env[63279]: DEBUG oslo_vmware.api [None req-203ba856-8750-4748-8a31-13e38e82c1be tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] Waiting for the task: (returnval){ [ 2056.711840] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52374a7a-c5c8-2614-b403-cf7da01eaa72" [ 2056.711840] env[63279]: _type = "Task" [ 2056.711840] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2056.725281] env[63279]: DEBUG oslo_vmware.api [None req-203ba856-8750-4748-8a31-13e38e82c1be tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52374a7a-c5c8-2614-b403-cf7da01eaa72, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2056.725737] env[63279]: INFO nova.compute.manager [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] [instance: 795560b4-ccdc-4012-8130-042dcb94085f] Took 43.78 seconds to build instance. [ 2056.906872] env[63279]: DEBUG oslo_vmware.api [None req-81ee3126-1583-4fad-aaad-b59e9f272102 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Task: {'id': task-2087370, 'name': ReconfigVM_Task, 'duration_secs': 0.190534} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2056.907205] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-81ee3126-1583-4fad-aaad-b59e9f272102 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: 7db0c32d-36a4-4452-bb07-06de0c93ab50] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-427654', 'volume_id': '6c280eb9-3e63-43e5-841d-7866ee508507', 'name': 'volume-6c280eb9-3e63-43e5-841d-7866ee508507', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '7db0c32d-36a4-4452-bb07-06de0c93ab50', 'attached_at': '', 'detached_at': '', 'volume_id': '6c280eb9-3e63-43e5-841d-7866ee508507', 'serial': '6c280eb9-3e63-43e5-841d-7866ee508507'} {{(pid=63279) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 2056.907506] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-81ee3126-1583-4fad-aaad-b59e9f272102 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: 7db0c32d-36a4-4452-bb07-06de0c93ab50] Destroying instance {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2056.908289] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31f173dd-5e3b-42ad-a9fd-40b6baab601b {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2056.927659] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-81ee3126-1583-4fad-aaad-b59e9f272102 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: 7db0c32d-36a4-4452-bb07-06de0c93ab50] Unregistering the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2056.927991] env[63279]: DEBUG oslo_vmware.api [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] Task: {'id': task-2087372, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.11109} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2056.931424] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-32c60f11-b13e-4ad5-80cf-05844810bbfd {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2056.933835] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] [instance: cd00cb0e-30e5-4a0c-8612-ea92e5e32edd] Extended root virtual disk {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2056.934157] env[63279]: DEBUG oslo_vmware.api [None req-80ef8ca7-1c6d-429c-800c-69ee59053ed2 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Task: {'id': task-2087371, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2056.934854] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57d87493-8c27-49fe-8c5d-8a913ef8dacc {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2056.960773] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] [instance: cd00cb0e-30e5-4a0c-8612-ea92e5e32edd] Reconfiguring VM instance instance-0000003d to attach disk [datastore1] cd00cb0e-30e5-4a0c-8612-ea92e5e32edd/cd00cb0e-30e5-4a0c-8612-ea92e5e32edd.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2056.961099] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-df298d92-bbe5-423e-9224-145ce076b11c {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2056.981137] env[63279]: DEBUG oslo_vmware.api [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] Waiting for the task: (returnval){ [ 2056.981137] env[63279]: value = "task-2087374" [ 2056.981137] env[63279]: _type = "Task" [ 2056.981137] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2056.989332] env[63279]: DEBUG oslo_vmware.api [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] Task: {'id': task-2087374, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2057.092732] env[63279]: DEBUG nova.network.neutron [None req-0b9b77cd-0839-40e4-8a61-bde483c6a590 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] [instance: a15141bc-424d-48ca-a6d5-c859a3639a0b] Successfully created port: cd0a8ed6-d1ee-4266-8bde-e866ac2873ce {{(pid=63279) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2057.127739] env[63279]: DEBUG nova.compute.manager [None req-0b9b77cd-0839-40e4-8a61-bde483c6a590 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] [instance: a15141bc-424d-48ca-a6d5-c859a3639a0b] Start building block device mappings for instance. {{(pid=63279) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 2057.132584] env[63279]: INFO nova.compute.resource_tracker [None req-c8d6a01e-b538-4983-b585-0874aad097ba tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] [instance: f6a5d157-d58c-4c7e-b6e8-f2dc7aaebd9b] Updating resource usage from migration 6396462b-1d31-43f6-bcb9-97273bcbac6a [ 2057.187489] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-0abf34c8-85b0-43dd-ac7c-864a19d7c008 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] [instance: 2f5e22f6-ba70-4848-965b-eb1553115323] Creating linked-clone VM from snapshot {{(pid=63279) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 2057.188286] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-98bc1ad0-f4f4-4873-8950-de60be831ae7 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2057.201579] env[63279]: DEBUG oslo_vmware.api [None req-0abf34c8-85b0-43dd-ac7c-864a19d7c008 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Waiting for the task: (returnval){ [ 2057.201579] env[63279]: value = "task-2087375" [ 2057.201579] env[63279]: _type = "Task" [ 2057.201579] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2057.205602] env[63279]: DEBUG oslo_vmware.api [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]524d0d0b-607f-660b-a764-3b30200bd647, 'name': SearchDatastore_Task, 'duration_secs': 0.016772} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2057.211099] env[63279]: DEBUG oslo_concurrency.lockutils [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2057.211430] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] [instance: fd9b1666-8e06-4ed0-9187-05a40e136a1d] Processing image 30887889-e45b-4f67-8b3c-16216e594a90 {{(pid=63279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2057.211565] env[63279]: DEBUG oslo_concurrency.lockutils [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2057.219256] env[63279]: DEBUG oslo_vmware.api [None req-0abf34c8-85b0-43dd-ac7c-864a19d7c008 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Task: {'id': task-2087375, 'name': CloneVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2057.224709] env[63279]: DEBUG oslo_vmware.api [None req-203ba856-8750-4748-8a31-13e38e82c1be tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52374a7a-c5c8-2614-b403-cf7da01eaa72, 'name': SearchDatastore_Task, 'duration_secs': 0.023409} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2057.228016] env[63279]: DEBUG oslo_concurrency.lockutils [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] Lock "795560b4-ccdc-4012-8130-042dcb94085f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 58.633s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2057.228398] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5d061dcf-6e4b-4732-9c7b-fe5694afe8a8 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2057.233897] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-81ee3126-1583-4fad-aaad-b59e9f272102 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: 7db0c32d-36a4-4452-bb07-06de0c93ab50] Unregistered the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2057.234104] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-81ee3126-1583-4fad-aaad-b59e9f272102 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: 7db0c32d-36a4-4452-bb07-06de0c93ab50] Deleting contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2057.234451] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-81ee3126-1583-4fad-aaad-b59e9f272102 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Deleting the datastore file [datastore1] 7db0c32d-36a4-4452-bb07-06de0c93ab50 {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2057.234733] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d6e580e1-934c-4c11-98e6-6c5c12d5c73e {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2057.237522] env[63279]: DEBUG oslo_vmware.api [None req-203ba856-8750-4748-8a31-13e38e82c1be tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] Waiting for the task: (returnval){ [ 2057.237522] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52d9820c-c101-78c8-2ec4-548150d5dec2" [ 2057.237522] env[63279]: _type = "Task" [ 2057.237522] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2057.244953] env[63279]: DEBUG oslo_vmware.api [None req-81ee3126-1583-4fad-aaad-b59e9f272102 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Waiting for the task: (returnval){ [ 2057.244953] env[63279]: value = "task-2087376" [ 2057.244953] env[63279]: _type = "Task" [ 2057.244953] env[63279]: } to complete. 
{{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2057.252751] env[63279]: DEBUG oslo_vmware.api [None req-203ba856-8750-4748-8a31-13e38e82c1be tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52d9820c-c101-78c8-2ec4-548150d5dec2, 'name': SearchDatastore_Task, 'duration_secs': 0.012581} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2057.253110] env[63279]: DEBUG oslo_concurrency.lockutils [None req-203ba856-8750-4748-8a31-13e38e82c1be tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2057.253396] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-203ba856-8750-4748-8a31-13e38e82c1be tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] 6699de0a-b3f8-4d84-9c9b-d0f6899a606e/6699de0a-b3f8-4d84-9c9b-d0f6899a606e.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2057.253669] env[63279]: DEBUG oslo_concurrency.lockutils [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2057.254365] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2057.254365] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-abbe4b22-553e-48a4-bfc6-457bbbfb69bd {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2057.258595] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c26ef4f5-93cf-48d0-aa19-4a11362f654c {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2057.261025] env[63279]: DEBUG oslo_vmware.api [None req-81ee3126-1583-4fad-aaad-b59e9f272102 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Task: {'id': task-2087376, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2057.269354] env[63279]: DEBUG oslo_vmware.api [None req-203ba856-8750-4748-8a31-13e38e82c1be tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] Waiting for the task: (returnval){ [ 2057.269354] env[63279]: value = "task-2087377" [ 2057.269354] env[63279]: _type = "Task" [ 2057.269354] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2057.281169] env[63279]: DEBUG oslo_vmware.api [None req-203ba856-8750-4748-8a31-13e38e82c1be tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] Task: {'id': task-2087377, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2057.282481] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2057.282481] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2057.282865] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7bac6d97-c828-41b8-b908-725606cc76b1 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2057.288843] env[63279]: DEBUG oslo_vmware.api [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] Waiting for the task: (returnval){ [ 2057.288843] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]524d5254-bc0e-7917-ec0a-ffb83beb7476" [ 2057.288843] env[63279]: _type = "Task" [ 2057.288843] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2057.298114] env[63279]: DEBUG oslo_vmware.api [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]524d5254-bc0e-7917-ec0a-ffb83beb7476, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2057.421652] env[63279]: DEBUG oslo_vmware.api [None req-80ef8ca7-1c6d-429c-800c-69ee59053ed2 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Task: {'id': task-2087371, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2057.492033] env[63279]: DEBUG oslo_vmware.api [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] Task: {'id': task-2087374, 'name': ReconfigVM_Task, 'duration_secs': 0.32097} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2057.496562] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] [instance: cd00cb0e-30e5-4a0c-8612-ea92e5e32edd] Reconfigured VM instance instance-0000003d to attach disk [datastore1] cd00cb0e-30e5-4a0c-8612-ea92e5e32edd/cd00cb0e-30e5-4a0c-8612-ea92e5e32edd.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2057.497537] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-4a9d5401-319e-43c4-ac66-fd313e86782b {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2057.506114] env[63279]: DEBUG oslo_vmware.api [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] Waiting for the task: (returnval){ [ 2057.506114] env[63279]: value = "task-2087378" [ 2057.506114] env[63279]: _type = "Task" [ 2057.506114] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2057.525476] env[63279]: DEBUG oslo_vmware.api [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] Task: {'id': task-2087378, 'name': Rename_Task} progress is 6%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2057.678057] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9831f5e-5f83-4c67-b209-e7ddcd9235e8 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2057.688175] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dfc973b5-a1c1-4cdd-accc-5fb72368c810 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2057.724051] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14dc1bc0-757e-4ba3-9a4c-9fddc4cbc8af {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2057.731965] env[63279]: DEBUG oslo_vmware.api [None req-0abf34c8-85b0-43dd-ac7c-864a19d7c008 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Task: {'id': task-2087375, 'name': CloneVM_Task} progress is 94%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2057.735714] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de178a0f-e06d-459c-8972-957c7f0e26f9 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2057.739927] env[63279]: DEBUG nova.compute.manager [None req-531b773c-e5fb-4ac2-9085-d70799185be7 tempest-ServerShowV257Test-1874161788 tempest-ServerShowV257Test-1874161788-project-member] [instance: f7f88f1a-a81a-4208-88d7-6a264e642ab1] Starting instance... {{(pid=63279) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 2057.759587] env[63279]: DEBUG nova.compute.provider_tree [None req-c8d6a01e-b538-4983-b585-0874aad097ba tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Inventory has not changed in ProviderTree for provider: 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2057.766481] env[63279]: DEBUG oslo_vmware.api [None req-81ee3126-1583-4fad-aaad-b59e9f272102 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Task: {'id': task-2087376, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.184017} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2057.766744] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-81ee3126-1583-4fad-aaad-b59e9f272102 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Deleted the datastore file {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2057.766930] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-81ee3126-1583-4fad-aaad-b59e9f272102 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: 7db0c32d-36a4-4452-bb07-06de0c93ab50] Deleted contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2057.767139] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-81ee3126-1583-4fad-aaad-b59e9f272102 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: 7db0c32d-36a4-4452-bb07-06de0c93ab50] Instance destroyed {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2057.767315] env[63279]: INFO nova.compute.manager [None req-81ee3126-1583-4fad-aaad-b59e9f272102 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: 7db0c32d-36a4-4452-bb07-06de0c93ab50] Took 2.54 seconds to destroy the instance on the hypervisor. [ 2057.767567] env[63279]: DEBUG oslo.service.loopingcall [None req-81ee3126-1583-4fad-aaad-b59e9f272102 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2057.767765] env[63279]: DEBUG nova.compute.manager [-] [instance: 7db0c32d-36a4-4452-bb07-06de0c93ab50] Deallocating network for instance {{(pid=63279) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2057.767849] env[63279]: DEBUG nova.network.neutron [-] [instance: 7db0c32d-36a4-4452-bb07-06de0c93ab50] deallocate_for_instance() {{(pid=63279) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2057.779802] env[63279]: DEBUG oslo_vmware.api [None req-203ba856-8750-4748-8a31-13e38e82c1be tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] Task: {'id': task-2087377, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2057.803377] env[63279]: DEBUG oslo_vmware.api [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]524d5254-bc0e-7917-ec0a-ffb83beb7476, 'name': SearchDatastore_Task, 'duration_secs': 0.011331} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2057.804675] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a776157a-1791-4a1e-a17f-92a830cead83 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2057.814270] env[63279]: DEBUG oslo_vmware.api [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] Waiting for the task: (returnval){ [ 2057.814270] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]523357e7-e02f-9dd1-e5ca-9807a70cf8bc" [ 2057.814270] env[63279]: _type = "Task" [ 2057.814270] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2057.826600] env[63279]: DEBUG oslo_vmware.api [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]523357e7-e02f-9dd1-e5ca-9807a70cf8bc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2057.921570] env[63279]: DEBUG oslo_vmware.api [None req-80ef8ca7-1c6d-429c-800c-69ee59053ed2 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Task: {'id': task-2087371, 'name': PowerOnVM_Task, 'duration_secs': 1.134271} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2057.921943] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-80ef8ca7-1c6d-429c-800c-69ee59053ed2 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: b981ac83-6c23-4d44-bd28-12da30d746bd] Powered on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2057.922093] env[63279]: INFO nova.compute.manager [None req-80ef8ca7-1c6d-429c-800c-69ee59053ed2 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: b981ac83-6c23-4d44-bd28-12da30d746bd] Took 14.03 seconds to spawn the instance on the hypervisor. [ 2057.922239] env[63279]: DEBUG nova.compute.manager [None req-80ef8ca7-1c6d-429c-800c-69ee59053ed2 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: b981ac83-6c23-4d44-bd28-12da30d746bd] Checking state {{(pid=63279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2057.922993] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad5b7040-a52c-4f0d-aa1f-d35b22448592 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2058.016521] env[63279]: DEBUG oslo_vmware.api [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] Task: {'id': task-2087378, 'name': Rename_Task} progress is 99%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2058.142273] env[63279]: DEBUG nova.compute.manager [None req-0b9b77cd-0839-40e4-8a61-bde483c6a590 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] [instance: a15141bc-424d-48ca-a6d5-c859a3639a0b] Start spawning the instance on the hypervisor. 
{{(pid=63279) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 2058.183443] env[63279]: DEBUG nova.virt.hardware [None req-0b9b77cd-0839-40e4-8a61-bde483c6a590 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-13T17:30:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-13T17:30:01Z,direct_url=,disk_format='vmdk',id=30887889-e45b-4f67-8b3c-16216e594a90,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a935e86eee0a4c38adfe0367d2097a61',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-13T17:30:02Z,virtual_size=,visibility=), allow threads: False {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2058.183857] env[63279]: DEBUG nova.virt.hardware [None req-0b9b77cd-0839-40e4-8a61-bde483c6a590 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Flavor limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2058.184121] env[63279]: DEBUG nova.virt.hardware [None req-0b9b77cd-0839-40e4-8a61-bde483c6a590 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Image limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2058.184464] env[63279]: DEBUG nova.virt.hardware [None req-0b9b77cd-0839-40e4-8a61-bde483c6a590 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Flavor pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2058.184714] env[63279]: DEBUG nova.virt.hardware [None req-0b9b77cd-0839-40e4-8a61-bde483c6a590 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Image pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2058.185431] env[63279]: DEBUG nova.virt.hardware [None req-0b9b77cd-0839-40e4-8a61-bde483c6a590 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2058.185431] env[63279]: DEBUG nova.virt.hardware [None req-0b9b77cd-0839-40e4-8a61-bde483c6a590 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 2058.185627] env[63279]: DEBUG nova.virt.hardware [None req-0b9b77cd-0839-40e4-8a61-bde483c6a590 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63279) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:475}} [ 2058.186302] env[63279]: DEBUG nova.virt.hardware [None req-0b9b77cd-0839-40e4-8a61-bde483c6a590 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Got 1 possible topologies {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2058.186554] env[63279]: DEBUG nova.virt.hardware [None req-0b9b77cd-0839-40e4-8a61-bde483c6a590 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2058.186794] env[63279]: DEBUG nova.virt.hardware [None req-0b9b77cd-0839-40e4-8a61-bde483c6a590 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2058.187728] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa5b95ef-555e-4a63-900f-f49b61f0db1c {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2058.197450] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45676e5f-ebea-4f5f-b25d-1d20e7c1a2ed {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2058.229269] env[63279]: DEBUG oslo_vmware.api [None req-0abf34c8-85b0-43dd-ac7c-864a19d7c008 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Task: {'id': task-2087375, 'name': CloneVM_Task} progress is 94%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2058.262328] env[63279]: DEBUG oslo_concurrency.lockutils [None req-531b773c-e5fb-4ac2-9085-d70799185be7 tempest-ServerShowV257Test-1874161788 tempest-ServerShowV257Test-1874161788-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2058.263237] env[63279]: DEBUG nova.scheduler.client.report [None req-c8d6a01e-b538-4983-b585-0874aad097ba tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Inventory has not changed for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2058.279860] env[63279]: DEBUG oslo_vmware.api [None req-203ba856-8750-4748-8a31-13e38e82c1be tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] Task: {'id': task-2087377, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.583701} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2058.280770] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-203ba856-8750-4748-8a31-13e38e82c1be tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] 6699de0a-b3f8-4d84-9c9b-d0f6899a606e/6699de0a-b3f8-4d84-9c9b-d0f6899a606e.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2058.281016] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-203ba856-8750-4748-8a31-13e38e82c1be tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] [instance: 6699de0a-b3f8-4d84-9c9b-d0f6899a606e] Extending root virtual disk to 1048576 {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2058.281282] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d0fe094f-c865-494a-9533-01e4d5f04289 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2058.288374] env[63279]: DEBUG oslo_vmware.api [None req-203ba856-8750-4748-8a31-13e38e82c1be tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] Waiting for the task: (returnval){ [ 2058.288374] env[63279]: value = "task-2087379" [ 2058.288374] env[63279]: _type = "Task" [ 2058.288374] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2058.296890] env[63279]: DEBUG oslo_vmware.api [None req-203ba856-8750-4748-8a31-13e38e82c1be tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] Task: {'id': task-2087379, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2058.326582] env[63279]: DEBUG oslo_vmware.api [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]523357e7-e02f-9dd1-e5ca-9807a70cf8bc, 'name': SearchDatastore_Task, 'duration_secs': 0.022023} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2058.326890] env[63279]: DEBUG oslo_concurrency.lockutils [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2058.327162] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] fd9b1666-8e06-4ed0-9187-05a40e136a1d/fd9b1666-8e06-4ed0-9187-05a40e136a1d.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2058.327429] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c9906d55-ee76-4b59-bea4-20a58f4df2e5 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2058.334153] env[63279]: DEBUG oslo_vmware.api [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] Waiting for the task: (returnval){ [ 2058.334153] env[63279]: value = "task-2087380" [ 2058.334153] env[63279]: _type = "Task" [ 2058.334153] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2058.341753] env[63279]: DEBUG nova.compute.manager [req-4be8623e-8eb7-4b7c-b13c-2be60321cb03 req-7ebe65f2-67a2-4567-98fd-1cc72140b44f service nova] [instance: 7db0c32d-36a4-4452-bb07-06de0c93ab50] Received event network-vif-deleted-5518a04b-dc37-4dc2-89d4-059d6e54f634 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2058.341936] env[63279]: INFO nova.compute.manager [req-4be8623e-8eb7-4b7c-b13c-2be60321cb03 req-7ebe65f2-67a2-4567-98fd-1cc72140b44f service nova] [instance: 7db0c32d-36a4-4452-bb07-06de0c93ab50] Neutron deleted interface 5518a04b-dc37-4dc2-89d4-059d6e54f634; detaching it from the instance and deleting it from the info cache [ 2058.342149] env[63279]: DEBUG nova.network.neutron [req-4be8623e-8eb7-4b7c-b13c-2be60321cb03 req-7ebe65f2-67a2-4567-98fd-1cc72140b44f service nova] [instance: 7db0c32d-36a4-4452-bb07-06de0c93ab50] Updating instance_info_cache with network_info: [] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2058.348574] env[63279]: DEBUG oslo_vmware.api [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] Task: {'id': task-2087380, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2058.441567] env[63279]: INFO nova.compute.manager [None req-80ef8ca7-1c6d-429c-800c-69ee59053ed2 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: b981ac83-6c23-4d44-bd28-12da30d746bd] Took 46.13 seconds to build instance. 
[ 2058.517919] env[63279]: DEBUG oslo_vmware.api [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] Task: {'id': task-2087378, 'name': Rename_Task, 'duration_secs': 0.651445} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2058.518216] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] [instance: cd00cb0e-30e5-4a0c-8612-ea92e5e32edd] Powering on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2058.518471] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d3d265fa-ac76-4876-9630-c6d30fe97967 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2058.525037] env[63279]: DEBUG oslo_vmware.api [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] Waiting for the task: (returnval){ [ 2058.525037] env[63279]: value = "task-2087381" [ 2058.525037] env[63279]: _type = "Task" [ 2058.525037] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2058.533566] env[63279]: DEBUG oslo_vmware.api [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] Task: {'id': task-2087381, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2058.735112] env[63279]: DEBUG oslo_vmware.api [None req-0abf34c8-85b0-43dd-ac7c-864a19d7c008 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Task: {'id': task-2087375, 'name': CloneVM_Task} progress is 94%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2058.769522] env[63279]: DEBUG oslo_concurrency.lockutils [None req-c8d6a01e-b538-4983-b585-0874aad097ba tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.656s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2058.769795] env[63279]: INFO nova.compute.manager [None req-c8d6a01e-b538-4983-b585-0874aad097ba tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] [instance: f6a5d157-d58c-4c7e-b6e8-f2dc7aaebd9b] Migrating [ 2058.780024] env[63279]: DEBUG oslo_concurrency.lockutils [None req-0c3a37b9-f6dd-4e50-acdd-abd77fa39720 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 37.698s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2058.780024] env[63279]: DEBUG nova.objects.instance [None req-0c3a37b9-f6dd-4e50-acdd-abd77fa39720 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Lazy-loading 'resources' on Instance uuid 1b2ca21b-feea-4fc1-9ddc-99f144e4241a {{(pid=63279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2058.800286] env[63279]: DEBUG oslo_vmware.api [None req-203ba856-8750-4748-8a31-13e38e82c1be tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] Task: {'id': task-2087379, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.123363} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2058.800439] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-203ba856-8750-4748-8a31-13e38e82c1be tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] [instance: 6699de0a-b3f8-4d84-9c9b-d0f6899a606e] Extended root virtual disk {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2058.801208] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e47e85c2-bc3e-42b2-a8dd-0a1663c28aea {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2058.815501] env[63279]: DEBUG nova.network.neutron [-] [instance: 7db0c32d-36a4-4452-bb07-06de0c93ab50] Updating instance_info_cache with network_info: [] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2058.825462] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-203ba856-8750-4748-8a31-13e38e82c1be tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] [instance: 6699de0a-b3f8-4d84-9c9b-d0f6899a606e] Reconfiguring VM instance instance-0000003f to attach disk [datastore1] 6699de0a-b3f8-4d84-9c9b-d0f6899a606e/6699de0a-b3f8-4d84-9c9b-d0f6899a606e.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2058.826797] env[63279]: INFO nova.compute.manager [-] [instance: 7db0c32d-36a4-4452-bb07-06de0c93ab50] Took 1.06 seconds to deallocate network for instance. 
[ 2058.827244] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-032f2f8d-7f86-41c2-b7e7-7b7c57c71d9c {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2058.847967] env[63279]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-3c38f3b2-c6d1-4b1f-8d12-010cd2724ff4 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2058.854472] env[63279]: DEBUG oslo_vmware.api [None req-203ba856-8750-4748-8a31-13e38e82c1be tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] Waiting for the task: (returnval){ [ 2058.854472] env[63279]: value = "task-2087382" [ 2058.854472] env[63279]: _type = "Task" [ 2058.854472] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2058.857583] env[63279]: DEBUG oslo_vmware.api [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] Task: {'id': task-2087380, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2058.864183] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-108234a7-427e-4116-898c-2b5078a2644b {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2058.886582] env[63279]: DEBUG oslo_vmware.api [None req-203ba856-8750-4748-8a31-13e38e82c1be tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] Task: {'id': task-2087382, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2058.904308] env[63279]: DEBUG nova.compute.manager [req-4be8623e-8eb7-4b7c-b13c-2be60321cb03 req-7ebe65f2-67a2-4567-98fd-1cc72140b44f service nova] [instance: 7db0c32d-36a4-4452-bb07-06de0c93ab50] Detach interface failed, port_id=5518a04b-dc37-4dc2-89d4-059d6e54f634, reason: Instance 7db0c32d-36a4-4452-bb07-06de0c93ab50 could not be found. {{(pid=63279) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11484}} [ 2058.943946] env[63279]: DEBUG oslo_concurrency.lockutils [None req-80ef8ca7-1c6d-429c-800c-69ee59053ed2 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Lock "b981ac83-6c23-4d44-bd28-12da30d746bd" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 61.710s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2059.036278] env[63279]: DEBUG oslo_vmware.api [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] Task: {'id': task-2087381, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2059.132274] env[63279]: DEBUG nova.compute.manager [req-1b67ae0e-d6fc-4074-aadb-f8eced2ef899 req-bb69ab6a-d207-45da-8b46-e6b6d04545e7 service nova] [instance: a15141bc-424d-48ca-a6d5-c859a3639a0b] Received event network-vif-plugged-cd0a8ed6-d1ee-4266-8bde-e866ac2873ce {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2059.132522] env[63279]: DEBUG oslo_concurrency.lockutils [req-1b67ae0e-d6fc-4074-aadb-f8eced2ef899 req-bb69ab6a-d207-45da-8b46-e6b6d04545e7 service nova] Acquiring lock "a15141bc-424d-48ca-a6d5-c859a3639a0b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2059.132740] env[63279]: DEBUG oslo_concurrency.lockutils [req-1b67ae0e-d6fc-4074-aadb-f8eced2ef899 req-bb69ab6a-d207-45da-8b46-e6b6d04545e7 service nova] Lock "a15141bc-424d-48ca-a6d5-c859a3639a0b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2059.132931] env[63279]: DEBUG oslo_concurrency.lockutils [req-1b67ae0e-d6fc-4074-aadb-f8eced2ef899 req-bb69ab6a-d207-45da-8b46-e6b6d04545e7 service nova] Lock "a15141bc-424d-48ca-a6d5-c859a3639a0b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2059.133170] env[63279]: DEBUG nova.compute.manager [req-1b67ae0e-d6fc-4074-aadb-f8eced2ef899 req-bb69ab6a-d207-45da-8b46-e6b6d04545e7 service nova] [instance: a15141bc-424d-48ca-a6d5-c859a3639a0b] No waiting events found dispatching network-vif-plugged-cd0a8ed6-d1ee-4266-8bde-e866ac2873ce {{(pid=63279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 2059.133529] env[63279]: WARNING nova.compute.manager [req-1b67ae0e-d6fc-4074-aadb-f8eced2ef899 req-bb69ab6a-d207-45da-8b46-e6b6d04545e7 service nova] [instance: a15141bc-424d-48ca-a6d5-c859a3639a0b] Received unexpected event network-vif-plugged-cd0a8ed6-d1ee-4266-8bde-e866ac2873ce for instance with vm_state building and task_state spawning. [ 2059.231939] env[63279]: DEBUG oslo_vmware.api [None req-0abf34c8-85b0-43dd-ac7c-864a19d7c008 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Task: {'id': task-2087375, 'name': CloneVM_Task} progress is 95%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2059.258608] env[63279]: DEBUG nova.network.neutron [None req-0b9b77cd-0839-40e4-8a61-bde483c6a590 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] [instance: a15141bc-424d-48ca-a6d5-c859a3639a0b] Successfully updated port: cd0a8ed6-d1ee-4266-8bde-e866ac2873ce {{(pid=63279) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2059.292336] env[63279]: DEBUG oslo_concurrency.lockutils [None req-c8d6a01e-b538-4983-b585-0874aad097ba tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Acquiring lock "refresh_cache-f6a5d157-d58c-4c7e-b6e8-f2dc7aaebd9b" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2059.292738] env[63279]: DEBUG oslo_concurrency.lockutils [None req-c8d6a01e-b538-4983-b585-0874aad097ba tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Acquired lock "refresh_cache-f6a5d157-d58c-4c7e-b6e8-f2dc7aaebd9b" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2059.292738] env[63279]: DEBUG nova.network.neutron [None req-c8d6a01e-b538-4983-b585-0874aad097ba tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] [instance: f6a5d157-d58c-4c7e-b6e8-f2dc7aaebd9b] Building network info cache for instance {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2059.354355] env[63279]: DEBUG oslo_vmware.api [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] Task: {'id': task-2087380, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.544233} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2059.357800] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] fd9b1666-8e06-4ed0-9187-05a40e136a1d/fd9b1666-8e06-4ed0-9187-05a40e136a1d.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2059.357800] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] [instance: fd9b1666-8e06-4ed0-9187-05a40e136a1d] Extending root virtual disk to 1048576 {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2059.358350] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-5bed8271-a21a-400c-80f6-048a7ba28409 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2059.373677] env[63279]: DEBUG oslo_vmware.api [None req-203ba856-8750-4748-8a31-13e38e82c1be tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] Task: {'id': task-2087382, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2059.375077] env[63279]: DEBUG oslo_vmware.api [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] Waiting for the task: (returnval){ [ 2059.375077] env[63279]: value = "task-2087383" [ 2059.375077] env[63279]: _type = "Task" [ 2059.375077] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2059.385198] env[63279]: DEBUG oslo_vmware.api [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] Task: {'id': task-2087383, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2059.413556] env[63279]: INFO nova.compute.manager [None req-81ee3126-1583-4fad-aaad-b59e9f272102 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: 7db0c32d-36a4-4452-bb07-06de0c93ab50] Took 0.57 seconds to detach 1 volumes for instance. [ 2059.447443] env[63279]: DEBUG nova.compute.manager [None req-8600dfea-b562-47c8-be02-b00ae841b764 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] [instance: f2a68d73-49d6-4b38-aff1-c2eb850f2ca6] Starting instance... {{(pid=63279) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 2059.539564] env[63279]: DEBUG oslo_vmware.api [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] Task: {'id': task-2087381, 'name': PowerOnVM_Task, 'duration_secs': 0.755282} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2059.539900] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] [instance: cd00cb0e-30e5-4a0c-8612-ea92e5e32edd] Powered on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2059.540162] env[63279]: INFO nova.compute.manager [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] [instance: cd00cb0e-30e5-4a0c-8612-ea92e5e32edd] Took 9.40 seconds to spawn the instance on the hypervisor. 
[ 2059.540385] env[63279]: DEBUG nova.compute.manager [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] [instance: cd00cb0e-30e5-4a0c-8612-ea92e5e32edd] Checking state {{(pid=63279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2059.541275] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bdc16cf6-5b81-4447-a7cf-fd7191e4aa42 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2059.734314] env[63279]: DEBUG oslo_vmware.api [None req-0abf34c8-85b0-43dd-ac7c-864a19d7c008 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Task: {'id': task-2087375, 'name': CloneVM_Task, 'duration_secs': 2.255245} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2059.734794] env[63279]: INFO nova.virt.vmwareapi.vmops [None req-0abf34c8-85b0-43dd-ac7c-864a19d7c008 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] [instance: 2f5e22f6-ba70-4848-965b-eb1553115323] Created linked-clone VM from snapshot [ 2059.735660] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd9fdbc9-8590-4770-b1f4-43dd5b0d9809 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2059.745839] env[63279]: DEBUG nova.virt.vmwareapi.images [None req-0abf34c8-85b0-43dd-ac7c-864a19d7c008 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] [instance: 2f5e22f6-ba70-4848-965b-eb1553115323] Uploading image 9d99d2f6-606a-4ba2-a64b-746ca6b6b68e {{(pid=63279) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 2059.765796] env[63279]: DEBUG oslo_concurrency.lockutils [None req-0b9b77cd-0839-40e4-8a61-bde483c6a590 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Acquiring lock "refresh_cache-a15141bc-424d-48ca-a6d5-c859a3639a0b" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2059.765796] env[63279]: DEBUG oslo_concurrency.lockutils [None req-0b9b77cd-0839-40e4-8a61-bde483c6a590 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Acquired lock "refresh_cache-a15141bc-424d-48ca-a6d5-c859a3639a0b" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2059.765796] env[63279]: DEBUG nova.network.neutron [None req-0b9b77cd-0839-40e4-8a61-bde483c6a590 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] [instance: a15141bc-424d-48ca-a6d5-c859a3639a0b] Building network info cache for instance {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2059.782905] env[63279]: DEBUG oslo_vmware.rw_handles [None req-0abf34c8-85b0-43dd-ac7c-864a19d7c008 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 2059.782905] env[63279]: value = "vm-427665" [ 2059.782905] env[63279]: _type = "VirtualMachine" [ 2059.782905] env[63279]: }. 
{{(pid=63279) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 2059.784429] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-68af8283-bdfe-43ba-ad17-92d27194097b {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2059.797249] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f11a2b33-eaa1-49d5-bd62-0b8a8f4daeba {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2059.805288] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3ba848e-4b1a-4e5c-ab0b-78c003676cb7 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2059.838511] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dea903c2-2997-4706-8335-7b47049db3eb {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2059.841332] env[63279]: DEBUG oslo_vmware.rw_handles [None req-0abf34c8-85b0-43dd-ac7c-864a19d7c008 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Lease: (returnval){ [ 2059.841332] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52219913-7a37-bd64-ef6e-f2b077cf83aa" [ 2059.841332] env[63279]: _type = "HttpNfcLease" [ 2059.841332] env[63279]: } obtained for exporting VM: (result){ [ 2059.841332] env[63279]: value = "vm-427665" [ 2059.841332] env[63279]: _type = "VirtualMachine" [ 2059.841332] env[63279]: }. {{(pid=63279) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 2059.841591] env[63279]: DEBUG oslo_vmware.api [None req-0abf34c8-85b0-43dd-ac7c-864a19d7c008 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Waiting for the lease: (returnval){ [ 2059.841591] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52219913-7a37-bd64-ef6e-f2b077cf83aa" [ 2059.841591] env[63279]: _type = "HttpNfcLease" [ 2059.841591] env[63279]: } to be ready. {{(pid=63279) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 2059.850829] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4dc37878-a88c-44db-ae85-0eb3a0059235 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2059.855709] env[63279]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 2059.855709] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52219913-7a37-bd64-ef6e-f2b077cf83aa" [ 2059.855709] env[63279]: _type = "HttpNfcLease" [ 2059.855709] env[63279]: } is initializing. 
{{(pid=63279) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 2059.867273] env[63279]: DEBUG nova.compute.provider_tree [None req-0c3a37b9-f6dd-4e50-acdd-abd77fa39720 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Inventory has not changed in ProviderTree for provider: 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2059.876991] env[63279]: DEBUG oslo_vmware.api [None req-203ba856-8750-4748-8a31-13e38e82c1be tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] Task: {'id': task-2087382, 'name': ReconfigVM_Task, 'duration_secs': 0.688424} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2059.880744] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-203ba856-8750-4748-8a31-13e38e82c1be tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] [instance: 6699de0a-b3f8-4d84-9c9b-d0f6899a606e] Reconfigured VM instance instance-0000003f to attach disk [datastore1] 6699de0a-b3f8-4d84-9c9b-d0f6899a606e/6699de0a-b3f8-4d84-9c9b-d0f6899a606e.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2059.881593] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-cf2e0832-b420-404f-8546-8670973a94b2 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2059.889374] env[63279]: DEBUG oslo_vmware.api [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] Task: {'id': task-2087383, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.097005} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2059.890637] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] [instance: fd9b1666-8e06-4ed0-9187-05a40e136a1d] Extended root virtual disk {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2059.890998] env[63279]: DEBUG oslo_vmware.api [None req-203ba856-8750-4748-8a31-13e38e82c1be tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] Waiting for the task: (returnval){ [ 2059.890998] env[63279]: value = "task-2087385" [ 2059.890998] env[63279]: _type = "Task" [ 2059.890998] env[63279]: } to complete. 
{{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2059.891721] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1509f85-15c3-44f3-b617-326c62d45ee3 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2059.921447] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] [instance: fd9b1666-8e06-4ed0-9187-05a40e136a1d] Reconfiguring VM instance instance-0000003e to attach disk [datastore1] fd9b1666-8e06-4ed0-9187-05a40e136a1d/fd9b1666-8e06-4ed0-9187-05a40e136a1d.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2059.921798] env[63279]: DEBUG oslo_vmware.api [None req-203ba856-8750-4748-8a31-13e38e82c1be tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] Task: {'id': task-2087385, 'name': Rename_Task} progress is 6%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2059.923008] env[63279]: DEBUG oslo_concurrency.lockutils [None req-81ee3126-1583-4fad-aaad-b59e9f272102 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2059.923268] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d91ad74d-cbda-4860-8044-2426e98c5de4 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2059.942439] env[63279]: DEBUG oslo_vmware.api [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] Waiting for the task: (returnval){ [ 2059.942439] env[63279]: value = "task-2087386" [ 2059.942439] env[63279]: _type = "Task" [ 2059.942439] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2059.955375] env[63279]: DEBUG oslo_vmware.api [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] Task: {'id': task-2087386, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2059.972621] env[63279]: DEBUG oslo_concurrency.lockutils [None req-8600dfea-b562-47c8-be02-b00ae841b764 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2060.055067] env[63279]: DEBUG nova.network.neutron [None req-c8d6a01e-b538-4983-b585-0874aad097ba tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] [instance: f6a5d157-d58c-4c7e-b6e8-f2dc7aaebd9b] Updating instance_info_cache with network_info: [{"id": "cbbfde33-a0b6-4403-8a1e-d688a0a7147b", "address": "fa:16:3e:47:7e:05", "network": {"id": "1e875730-8e0e-4907-a6d2-776025ed7ab9", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.114", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "a935e86eee0a4c38adfe0367d2097a61", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "32028d02-abaa-4071-bc65-1460f5c772a8", "external-id": "nsx-vlan-transportzone-558", "segmentation_id": 558, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcbbfde33-a0", "ovs_interfaceid": "cbbfde33-a0b6-4403-8a1e-d688a0a7147b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2060.063588] env[63279]: INFO nova.compute.manager [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] [instance: cd00cb0e-30e5-4a0c-8612-ea92e5e32edd] Took 46.00 seconds to build instance. [ 2060.311141] env[63279]: DEBUG nova.network.neutron [None req-0b9b77cd-0839-40e4-8a61-bde483c6a590 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] [instance: a15141bc-424d-48ca-a6d5-c859a3639a0b] Instance cache missing network info. {{(pid=63279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2060.351952] env[63279]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 2060.351952] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52219913-7a37-bd64-ef6e-f2b077cf83aa" [ 2060.351952] env[63279]: _type = "HttpNfcLease" [ 2060.351952] env[63279]: } is ready. {{(pid=63279) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 2060.352262] env[63279]: DEBUG oslo_vmware.rw_handles [None req-0abf34c8-85b0-43dd-ac7c-864a19d7c008 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 2060.352262] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52219913-7a37-bd64-ef6e-f2b077cf83aa" [ 2060.352262] env[63279]: _type = "HttpNfcLease" [ 2060.352262] env[63279]: }. 
{{(pid=63279) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 2060.353011] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0735f56-f243-47f0-8f00-f1b26469fcb8 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2060.360762] env[63279]: DEBUG oslo_vmware.rw_handles [None req-0abf34c8-85b0-43dd-ac7c-864a19d7c008 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/528efa05-1c00-c51a-f9e8-22b5bede4f48/disk-0.vmdk from lease info. {{(pid=63279) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 2060.360907] env[63279]: DEBUG oslo_vmware.rw_handles [None req-0abf34c8-85b0-43dd-ac7c-864a19d7c008 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/528efa05-1c00-c51a-f9e8-22b5bede4f48/disk-0.vmdk for reading. {{(pid=63279) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 2060.421980] env[63279]: DEBUG nova.scheduler.client.report [None req-0c3a37b9-f6dd-4e50-acdd-abd77fa39720 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Inventory has not changed for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2060.438479] env[63279]: DEBUG oslo_vmware.api [None req-203ba856-8750-4748-8a31-13e38e82c1be tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] Task: {'id': task-2087385, 'name': Rename_Task} progress is 99%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2060.454942] env[63279]: DEBUG oslo_vmware.api [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] Task: {'id': task-2087386, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2060.487621] env[63279]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-c355807d-7908-49fd-afa4-610471227425 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2060.548216] env[63279]: DEBUG nova.network.neutron [None req-0b9b77cd-0839-40e4-8a61-bde483c6a590 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] [instance: a15141bc-424d-48ca-a6d5-c859a3639a0b] Updating instance_info_cache with network_info: [{"id": "cd0a8ed6-d1ee-4266-8bde-e866ac2873ce", "address": "fa:16:3e:fe:5c:41", "network": {"id": "4f906777-9da9-42b1-9146-359f04c7c47f", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-923457018-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "131db3d2daa24712b6e11592cf789b33", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "abe48956-848a-4e1f-b1f1-a27baa5390b9", "external-id": "nsx-vlan-transportzone-238", "segmentation_id": 238, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcd0a8ed6-d1", "ovs_interfaceid": "cd0a8ed6-d1ee-4266-8bde-e866ac2873ce", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2060.561459] env[63279]: DEBUG oslo_concurrency.lockutils [None req-c8d6a01e-b538-4983-b585-0874aad097ba tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Releasing lock "refresh_cache-f6a5d157-d58c-4c7e-b6e8-f2dc7aaebd9b" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2060.564896] env[63279]: DEBUG oslo_concurrency.lockutils [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] Lock "cd00cb0e-30e5-4a0c-8612-ea92e5e32edd" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 61.936s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2060.684929] env[63279]: DEBUG nova.compute.manager [None req-9e49e054-a083-4e75-99cd-6d3ad402538a tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: b981ac83-6c23-4d44-bd28-12da30d746bd] Stashing vm_state: active {{(pid=63279) _prep_resize /opt/stack/nova/nova/compute/manager.py:6136}} [ 2060.934399] env[63279]: DEBUG oslo_concurrency.lockutils [None req-0c3a37b9-f6dd-4e50-acdd-abd77fa39720 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.157s {{(pid=63279) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2060.940638] env[63279]: DEBUG oslo_concurrency.lockutils [None req-4f724e1c-d0b9-4778-8593-5bcec5c9cb39 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 39.845s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2060.941197] env[63279]: DEBUG nova.objects.instance [None req-4f724e1c-d0b9-4778-8593-5bcec5c9cb39 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Lazy-loading 'resources' on Instance uuid fb124cfa-24b4-4712-b8cc-c87df5d6231b {{(pid=63279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2060.942569] env[63279]: DEBUG oslo_vmware.api [None req-203ba856-8750-4748-8a31-13e38e82c1be tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] Task: {'id': task-2087385, 'name': Rename_Task, 'duration_secs': 0.561315} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2060.945429] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-203ba856-8750-4748-8a31-13e38e82c1be tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] [instance: 6699de0a-b3f8-4d84-9c9b-d0f6899a606e] Powering on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2060.946106] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-457ebd92-4d8e-4be2-a689-1d1c7256c3e4 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2060.962567] env[63279]: DEBUG oslo_vmware.api [None req-203ba856-8750-4748-8a31-13e38e82c1be tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] Waiting for the task: (returnval){ [ 2060.962567] env[63279]: value = "task-2087387" [ 2060.962567] env[63279]: _type = "Task" [ 2060.962567] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2060.968379] env[63279]: DEBUG oslo_vmware.api [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] Task: {'id': task-2087386, 'name': ReconfigVM_Task, 'duration_secs': 1.017483} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2060.973415] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] [instance: fd9b1666-8e06-4ed0-9187-05a40e136a1d] Reconfigured VM instance instance-0000003e to attach disk [datastore1] fd9b1666-8e06-4ed0-9187-05a40e136a1d/fd9b1666-8e06-4ed0-9187-05a40e136a1d.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2060.974980] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-adc5d799-9fc0-4f58-91d9-867baf9b70ca {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2060.977779] env[63279]: INFO nova.scheduler.client.report [None req-0c3a37b9-f6dd-4e50-acdd-abd77fa39720 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Deleted allocations for instance 1b2ca21b-feea-4fc1-9ddc-99f144e4241a [ 2060.986471] env[63279]: DEBUG oslo_vmware.api [None req-203ba856-8750-4748-8a31-13e38e82c1be tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] Task: {'id': task-2087387, 'name': PowerOnVM_Task} progress is 33%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2060.988305] env[63279]: DEBUG oslo_vmware.api [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] Waiting for the task: (returnval){ [ 2060.988305] env[63279]: value = "task-2087388" [ 2060.988305] env[63279]: _type = "Task" [ 2060.988305] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2060.997777] env[63279]: DEBUG oslo_vmware.api [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] Task: {'id': task-2087388, 'name': Rename_Task} progress is 5%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2061.051028] env[63279]: DEBUG oslo_concurrency.lockutils [None req-0b9b77cd-0839-40e4-8a61-bde483c6a590 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Releasing lock "refresh_cache-a15141bc-424d-48ca-a6d5-c859a3639a0b" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2061.051262] env[63279]: DEBUG nova.compute.manager [None req-0b9b77cd-0839-40e4-8a61-bde483c6a590 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] [instance: a15141bc-424d-48ca-a6d5-c859a3639a0b] Instance network_info: |[{"id": "cd0a8ed6-d1ee-4266-8bde-e866ac2873ce", "address": "fa:16:3e:fe:5c:41", "network": {"id": "4f906777-9da9-42b1-9146-359f04c7c47f", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-923457018-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "131db3d2daa24712b6e11592cf789b33", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "abe48956-848a-4e1f-b1f1-a27baa5390b9", "external-id": "nsx-vlan-transportzone-238", "segmentation_id": 238, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcd0a8ed6-d1", "ovs_interfaceid": "cd0a8ed6-d1ee-4266-8bde-e866ac2873ce", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 2061.051778] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-0b9b77cd-0839-40e4-8a61-bde483c6a590 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] [instance: a15141bc-424d-48ca-a6d5-c859a3639a0b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:fe:5c:41', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'abe48956-848a-4e1f-b1f1-a27baa5390b9', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'cd0a8ed6-d1ee-4266-8bde-e866ac2873ce', 'vif_model': 'vmxnet3'}] {{(pid=63279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2061.063461] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-0b9b77cd-0839-40e4-8a61-bde483c6a590 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Creating folder: Project (131db3d2daa24712b6e11592cf789b33). Parent ref: group-v427491. 
{{(pid=63279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 2061.063461] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-33d53af3-657d-48ee-b4a0-59578080deaa {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2061.069339] env[63279]: DEBUG nova.compute.manager [None req-2fc734b0-6cb8-42f2-afc5-bfa1456ea58d tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: fcdd3076-2b53-4850-9730-2f877e2cabfd] Starting instance... {{(pid=63279) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 2061.082816] env[63279]: INFO nova.virt.vmwareapi.vm_util [None req-0b9b77cd-0839-40e4-8a61-bde483c6a590 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Created folder: Project (131db3d2daa24712b6e11592cf789b33) in parent group-v427491. [ 2061.083631] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-0b9b77cd-0839-40e4-8a61-bde483c6a590 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Creating folder: Instances. Parent ref: group-v427666. {{(pid=63279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 2061.083631] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-23dc5393-26bc-4ef6-8927-5ae9319e98a8 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2061.093750] env[63279]: INFO nova.virt.vmwareapi.vm_util [None req-0b9b77cd-0839-40e4-8a61-bde483c6a590 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Created folder: Instances in parent group-v427666. [ 2061.094127] env[63279]: DEBUG oslo.service.loopingcall [None req-0b9b77cd-0839-40e4-8a61-bde483c6a590 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2061.094487] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a15141bc-424d-48ca-a6d5-c859a3639a0b] Creating VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2061.094893] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c8fcf1b7-8256-45b5-96a7-b422c504a273 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2061.118851] env[63279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2061.118851] env[63279]: value = "task-2087391" [ 2061.118851] env[63279]: _type = "Task" [ 2061.118851] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2061.129086] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087391, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2061.162343] env[63279]: DEBUG nova.compute.manager [req-73aec38d-45aa-4c83-9a53-3faee65e070d req-73887454-a1e5-49b8-927f-20934c79ec4d service nova] [instance: a15141bc-424d-48ca-a6d5-c859a3639a0b] Received event network-changed-cd0a8ed6-d1ee-4266-8bde-e866ac2873ce {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2061.162666] env[63279]: DEBUG nova.compute.manager [req-73aec38d-45aa-4c83-9a53-3faee65e070d req-73887454-a1e5-49b8-927f-20934c79ec4d service nova] [instance: a15141bc-424d-48ca-a6d5-c859a3639a0b] Refreshing instance network info cache due to event network-changed-cd0a8ed6-d1ee-4266-8bde-e866ac2873ce. {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11655}} [ 2061.162992] env[63279]: DEBUG oslo_concurrency.lockutils [req-73aec38d-45aa-4c83-9a53-3faee65e070d req-73887454-a1e5-49b8-927f-20934c79ec4d service nova] Acquiring lock "refresh_cache-a15141bc-424d-48ca-a6d5-c859a3639a0b" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2061.163262] env[63279]: DEBUG oslo_concurrency.lockutils [req-73aec38d-45aa-4c83-9a53-3faee65e070d req-73887454-a1e5-49b8-927f-20934c79ec4d service nova] Acquired lock "refresh_cache-a15141bc-424d-48ca-a6d5-c859a3639a0b" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2061.163505] env[63279]: DEBUG nova.network.neutron [req-73aec38d-45aa-4c83-9a53-3faee65e070d req-73887454-a1e5-49b8-927f-20934c79ec4d service nova] [instance: a15141bc-424d-48ca-a6d5-c859a3639a0b] Refreshing network info cache for port cd0a8ed6-d1ee-4266-8bde-e866ac2873ce {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2061.205481] env[63279]: DEBUG oslo_concurrency.lockutils [None req-9e49e054-a083-4e75-99cd-6d3ad402538a tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2061.483499] env[63279]: DEBUG oslo_vmware.api [None req-203ba856-8750-4748-8a31-13e38e82c1be tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] Task: {'id': task-2087387, 'name': PowerOnVM_Task} progress is 89%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2061.490468] env[63279]: DEBUG oslo_concurrency.lockutils [None req-0c3a37b9-f6dd-4e50-acdd-abd77fa39720 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Lock "1b2ca21b-feea-4fc1-9ddc-99f144e4241a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 47.280s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2061.500893] env[63279]: DEBUG oslo_vmware.api [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] Task: {'id': task-2087388, 'name': Rename_Task, 'duration_secs': 0.174434} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2061.503920] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] [instance: fd9b1666-8e06-4ed0-9187-05a40e136a1d] Powering on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2061.504634] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9f32ef62-678d-4d7f-9693-8c929f89f918 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2061.512929] env[63279]: DEBUG oslo_vmware.api [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] Waiting for the task: (returnval){ [ 2061.512929] env[63279]: value = "task-2087392" [ 2061.512929] env[63279]: _type = "Task" [ 2061.512929] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2061.524650] env[63279]: DEBUG oslo_vmware.api [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] Task: {'id': task-2087392, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2061.596620] env[63279]: DEBUG oslo_concurrency.lockutils [None req-2fc734b0-6cb8-42f2-afc5-bfa1456ea58d tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2061.629288] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087391, 'name': CreateVM_Task} progress is 99%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2061.978256] env[63279]: DEBUG oslo_vmware.api [None req-203ba856-8750-4748-8a31-13e38e82c1be tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] Task: {'id': task-2087387, 'name': PowerOnVM_Task, 'duration_secs': 0.692659} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2061.978574] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-203ba856-8750-4748-8a31-13e38e82c1be tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] [instance: 6699de0a-b3f8-4d84-9c9b-d0f6899a606e] Powered on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2061.979106] env[63279]: INFO nova.compute.manager [None req-203ba856-8750-4748-8a31-13e38e82c1be tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] [instance: 6699de0a-b3f8-4d84-9c9b-d0f6899a606e] Took 6.45 seconds to spawn the instance on the hypervisor. 
[ 2061.979106] env[63279]: DEBUG nova.compute.manager [None req-203ba856-8750-4748-8a31-13e38e82c1be tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] [instance: 6699de0a-b3f8-4d84-9c9b-d0f6899a606e] Checking state {{(pid=63279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2061.980716] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2bb85ea5-7cf7-432d-9ed3-fa6f1e482c75 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2062.001629] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89aef313-b8f6-42a6-9f48-c3c58abb9d3f {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2062.005192] env[63279]: DEBUG nova.network.neutron [req-73aec38d-45aa-4c83-9a53-3faee65e070d req-73887454-a1e5-49b8-927f-20934c79ec4d service nova] [instance: a15141bc-424d-48ca-a6d5-c859a3639a0b] Updated VIF entry in instance network info cache for port cd0a8ed6-d1ee-4266-8bde-e866ac2873ce. {{(pid=63279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2062.005571] env[63279]: DEBUG nova.network.neutron [req-73aec38d-45aa-4c83-9a53-3faee65e070d req-73887454-a1e5-49b8-927f-20934c79ec4d service nova] [instance: a15141bc-424d-48ca-a6d5-c859a3639a0b] Updating instance_info_cache with network_info: [{"id": "cd0a8ed6-d1ee-4266-8bde-e866ac2873ce", "address": "fa:16:3e:fe:5c:41", "network": {"id": "4f906777-9da9-42b1-9146-359f04c7c47f", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-923457018-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "131db3d2daa24712b6e11592cf789b33", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "abe48956-848a-4e1f-b1f1-a27baa5390b9", "external-id": "nsx-vlan-transportzone-238", "segmentation_id": 238, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcd0a8ed6-d1", "ovs_interfaceid": "cd0a8ed6-d1ee-4266-8bde-e866ac2873ce", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2062.012039] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f807610c-0186-43fa-b16e-dde0fd011d12 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2062.061443] env[63279]: DEBUG oslo_vmware.api [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] Task: {'id': task-2087392, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2062.062687] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e49e5576-de76-4f1d-a597-ee502ff8c13e {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2062.073375] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c62b75e-41ae-4596-9b16-59244e2d4aed {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2062.084141] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4da17a5e-4088-4eec-8907-720874e3103c {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2062.095906] env[63279]: DEBUG nova.compute.provider_tree [None req-4f724e1c-d0b9-4778-8593-5bcec5c9cb39 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Inventory has not changed in ProviderTree for provider: 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2062.114664] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-c8d6a01e-b538-4983-b585-0874aad097ba tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] [instance: f6a5d157-d58c-4c7e-b6e8-f2dc7aaebd9b] Updating instance 'f6a5d157-d58c-4c7e-b6e8-f2dc7aaebd9b' progress to 0 {{(pid=63279) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2062.131885] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087391, 'name': CreateVM_Task, 'duration_secs': 0.563703} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2062.132174] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a15141bc-424d-48ca-a6d5-c859a3639a0b] Created VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2062.132952] env[63279]: DEBUG oslo_concurrency.lockutils [None req-0b9b77cd-0839-40e4-8a61-bde483c6a590 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2062.133221] env[63279]: DEBUG oslo_concurrency.lockutils [None req-0b9b77cd-0839-40e4-8a61-bde483c6a590 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2062.133710] env[63279]: DEBUG oslo_concurrency.lockutils [None req-0b9b77cd-0839-40e4-8a61-bde483c6a590 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2062.134370] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-afca1650-dc08-4106-920c-8bf52956f263 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2062.139169] env[63279]: DEBUG oslo_vmware.api [None req-0b9b77cd-0839-40e4-8a61-bde483c6a590 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Waiting for the task: (returnval){ [ 2062.139169] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52fbcc83-d044-dcf0-fa00-2e63a0950182" [ 2062.139169] env[63279]: _type = "Task" [ 2062.139169] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2062.149165] env[63279]: DEBUG oslo_vmware.api [None req-0b9b77cd-0839-40e4-8a61-bde483c6a590 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52fbcc83-d044-dcf0-fa00-2e63a0950182, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2062.503589] env[63279]: INFO nova.compute.manager [None req-203ba856-8750-4748-8a31-13e38e82c1be tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] [instance: 6699de0a-b3f8-4d84-9c9b-d0f6899a606e] Took 47.29 seconds to build instance. 
[ 2062.509240] env[63279]: DEBUG oslo_concurrency.lockutils [req-73aec38d-45aa-4c83-9a53-3faee65e070d req-73887454-a1e5-49b8-927f-20934c79ec4d service nova] Releasing lock "refresh_cache-a15141bc-424d-48ca-a6d5-c859a3639a0b" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2062.527322] env[63279]: DEBUG oslo_vmware.api [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] Task: {'id': task-2087392, 'name': PowerOnVM_Task, 'duration_secs': 0.584907} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2062.527745] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] [instance: fd9b1666-8e06-4ed0-9187-05a40e136a1d] Powered on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2062.528037] env[63279]: INFO nova.compute.manager [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] [instance: fd9b1666-8e06-4ed0-9187-05a40e136a1d] Took 9.74 seconds to spawn the instance on the hypervisor. [ 2062.528272] env[63279]: DEBUG nova.compute.manager [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] [instance: fd9b1666-8e06-4ed0-9187-05a40e136a1d] Checking state {{(pid=63279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2062.529140] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d594f419-3f11-4087-a617-443e5af86fe6 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2062.599516] env[63279]: DEBUG nova.scheduler.client.report [None req-4f724e1c-d0b9-4778-8593-5bcec5c9cb39 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Inventory has not changed for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2062.622617] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-c8d6a01e-b538-4983-b585-0874aad097ba tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] [instance: f6a5d157-d58c-4c7e-b6e8-f2dc7aaebd9b] Powering off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2062.623264] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-dcc2194c-809b-41c4-9484-16685570dd5a {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2062.630891] env[63279]: DEBUG oslo_vmware.api [None req-c8d6a01e-b538-4983-b585-0874aad097ba tempest-MigrationsAdminTest-1378842037 
tempest-MigrationsAdminTest-1378842037-project-member] Waiting for the task: (returnval){ [ 2062.630891] env[63279]: value = "task-2087393" [ 2062.630891] env[63279]: _type = "Task" [ 2062.630891] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2062.640397] env[63279]: DEBUG oslo_vmware.api [None req-c8d6a01e-b538-4983-b585-0874aad097ba tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Task: {'id': task-2087393, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2062.649570] env[63279]: DEBUG oslo_vmware.api [None req-0b9b77cd-0839-40e4-8a61-bde483c6a590 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52fbcc83-d044-dcf0-fa00-2e63a0950182, 'name': SearchDatastore_Task, 'duration_secs': 0.013643} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2062.649889] env[63279]: DEBUG oslo_concurrency.lockutils [None req-0b9b77cd-0839-40e4-8a61-bde483c6a590 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2062.650170] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-0b9b77cd-0839-40e4-8a61-bde483c6a590 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] [instance: a15141bc-424d-48ca-a6d5-c859a3639a0b] Processing image 30887889-e45b-4f67-8b3c-16216e594a90 {{(pid=63279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2062.650419] env[63279]: DEBUG oslo_concurrency.lockutils [None req-0b9b77cd-0839-40e4-8a61-bde483c6a590 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2062.650572] env[63279]: DEBUG oslo_concurrency.lockutils [None req-0b9b77cd-0839-40e4-8a61-bde483c6a590 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2062.650776] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-0b9b77cd-0839-40e4-8a61-bde483c6a590 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2062.651409] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-bb3eb985-656c-4fe6-ba5a-4504d2299f1d {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2062.661174] env[63279]: DEBUG 
nova.virt.vmwareapi.ds_util [None req-0b9b77cd-0839-40e4-8a61-bde483c6a590 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2062.661392] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-0b9b77cd-0839-40e4-8a61-bde483c6a590 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2062.662204] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-53252834-4d55-4e37-b089-55e47ae21882 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2062.671405] env[63279]: DEBUG oslo_vmware.api [None req-0b9b77cd-0839-40e4-8a61-bde483c6a590 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Waiting for the task: (returnval){ [ 2062.671405] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]523d7e1d-5538-c005-6d81-aed016355d37" [ 2062.671405] env[63279]: _type = "Task" [ 2062.671405] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2062.677792] env[63279]: DEBUG oslo_vmware.api [None req-0b9b77cd-0839-40e4-8a61-bde483c6a590 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]523d7e1d-5538-c005-6d81-aed016355d37, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2063.005889] env[63279]: DEBUG oslo_concurrency.lockutils [None req-203ba856-8750-4748-8a31-13e38e82c1be tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] Lock "6699de0a-b3f8-4d84-9c9b-d0f6899a606e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 59.689s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2063.050082] env[63279]: INFO nova.compute.manager [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] [instance: fd9b1666-8e06-4ed0-9187-05a40e136a1d] Took 48.38 seconds to build instance. 
[ 2063.105358] env[63279]: DEBUG oslo_concurrency.lockutils [None req-4f724e1c-d0b9-4778-8593-5bcec5c9cb39 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.165s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2063.107752] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b0743caa-f310-40a7-b364-e4022c9249e0 tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 39.068s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2063.109287] env[63279]: INFO nova.compute.claims [None req-b0743caa-f310-40a7-b364-e4022c9249e0 tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] [instance: df963c29-a1c4-4f28-be95-cafe3af4d2fa] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2063.137345] env[63279]: INFO nova.scheduler.client.report [None req-4f724e1c-d0b9-4778-8593-5bcec5c9cb39 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Deleted allocations for instance fb124cfa-24b4-4712-b8cc-c87df5d6231b [ 2063.145030] env[63279]: DEBUG oslo_vmware.api [None req-c8d6a01e-b538-4983-b585-0874aad097ba tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Task: {'id': task-2087393, 'name': PowerOffVM_Task, 'duration_secs': 0.265536} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2063.145561] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-c8d6a01e-b538-4983-b585-0874aad097ba tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] [instance: f6a5d157-d58c-4c7e-b6e8-f2dc7aaebd9b] Powered off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2063.145763] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-c8d6a01e-b538-4983-b585-0874aad097ba tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] [instance: f6a5d157-d58c-4c7e-b6e8-f2dc7aaebd9b] Updating instance 'f6a5d157-d58c-4c7e-b6e8-f2dc7aaebd9b' progress to 17 {{(pid=63279) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2063.181276] env[63279]: DEBUG oslo_vmware.api [None req-0b9b77cd-0839-40e4-8a61-bde483c6a590 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]523d7e1d-5538-c005-6d81-aed016355d37, 'name': SearchDatastore_Task, 'duration_secs': 0.011989} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2063.183499] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-be633e54-3e6d-4309-b557-04341fde18e0 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2063.188892] env[63279]: DEBUG oslo_vmware.api [None req-0b9b77cd-0839-40e4-8a61-bde483c6a590 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Waiting for the task: (returnval){ [ 2063.188892] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]5292028c-cc67-fa3e-07c5-10f6765a9da5" [ 2063.188892] env[63279]: _type = "Task" [ 2063.188892] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2063.197354] env[63279]: DEBUG oslo_vmware.api [None req-0b9b77cd-0839-40e4-8a61-bde483c6a590 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]5292028c-cc67-fa3e-07c5-10f6765a9da5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2063.509413] env[63279]: DEBUG nova.compute.manager [None req-5e3f5540-3a91-4676-9ed7-83f8c35479b1 tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] [instance: ee1b4746-49ac-425c-8219-4d54cb34abe0] Starting instance... {{(pid=63279) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 2063.555631] env[63279]: DEBUG oslo_concurrency.lockutils [None req-656b4970-6425-49b0-a2f9-e0f94118e12e tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] Lock "fd9b1666-8e06-4ed0-9187-05a40e136a1d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 64.884s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2063.653734] env[63279]: DEBUG nova.virt.hardware [None req-c8d6a01e-b538-4983-b585-0874aad097ba tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-13T17:30:21Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=30887889-e45b-4f67-8b3c-16216e594a90,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2063.654190] env[63279]: DEBUG nova.virt.hardware [None req-c8d6a01e-b538-4983-b585-0874aad097ba tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Flavor limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2063.654412] env[63279]: DEBUG nova.virt.hardware [None req-c8d6a01e-b538-4983-b585-0874aad097ba 
tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Image limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2063.655468] env[63279]: DEBUG nova.virt.hardware [None req-c8d6a01e-b538-4983-b585-0874aad097ba tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Flavor pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2063.655692] env[63279]: DEBUG nova.virt.hardware [None req-c8d6a01e-b538-4983-b585-0874aad097ba tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Image pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2063.655892] env[63279]: DEBUG nova.virt.hardware [None req-c8d6a01e-b538-4983-b585-0874aad097ba tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2063.656286] env[63279]: DEBUG nova.virt.hardware [None req-c8d6a01e-b538-4983-b585-0874aad097ba tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 2063.657027] env[63279]: DEBUG nova.virt.hardware [None req-c8d6a01e-b538-4983-b585-0874aad097ba tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 2063.657502] env[63279]: DEBUG nova.virt.hardware [None req-c8d6a01e-b538-4983-b585-0874aad097ba tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Got 1 possible topologies {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2063.657913] env[63279]: DEBUG nova.virt.hardware [None req-c8d6a01e-b538-4983-b585-0874aad097ba tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2063.658282] env[63279]: DEBUG nova.virt.hardware [None req-c8d6a01e-b538-4983-b585-0874aad097ba tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2063.665308] env[63279]: DEBUG oslo_concurrency.lockutils [None req-4f724e1c-d0b9-4778-8593-5bcec5c9cb39 tempest-MultipleCreateTestJSON-79500308 tempest-MultipleCreateTestJSON-79500308-project-member] Lock "fb124cfa-24b4-4712-b8cc-c87df5d6231b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 48.982s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2063.670119] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-013ab673-8a5a-446b-88ab-088a425864ee {{(pid=63279) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2063.690045] env[63279]: DEBUG oslo_vmware.api [None req-c8d6a01e-b538-4983-b585-0874aad097ba tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Waiting for the task: (returnval){ [ 2063.690045] env[63279]: value = "task-2087394" [ 2063.690045] env[63279]: _type = "Task" [ 2063.690045] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2063.709975] env[63279]: DEBUG oslo_vmware.api [None req-0b9b77cd-0839-40e4-8a61-bde483c6a590 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]5292028c-cc67-fa3e-07c5-10f6765a9da5, 'name': SearchDatastore_Task, 'duration_secs': 0.024266} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2063.714357] env[63279]: DEBUG oslo_concurrency.lockutils [None req-0b9b77cd-0839-40e4-8a61-bde483c6a590 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2063.715148] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-0b9b77cd-0839-40e4-8a61-bde483c6a590 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] a15141bc-424d-48ca-a6d5-c859a3639a0b/a15141bc-424d-48ca-a6d5-c859a3639a0b.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2063.715856] env[63279]: DEBUG oslo_vmware.api [None req-c8d6a01e-b538-4983-b585-0874aad097ba tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Task: {'id': task-2087394, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2063.717797] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-04905f96-616e-4910-be02-2657130441af {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2063.727307] env[63279]: DEBUG oslo_vmware.api [None req-0b9b77cd-0839-40e4-8a61-bde483c6a590 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Waiting for the task: (returnval){ [ 2063.727307] env[63279]: value = "task-2087395" [ 2063.727307] env[63279]: _type = "Task" [ 2063.727307] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2063.737230] env[63279]: DEBUG oslo_vmware.api [None req-0b9b77cd-0839-40e4-8a61-bde483c6a590 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Task: {'id': task-2087395, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2064.036643] env[63279]: DEBUG oslo_concurrency.lockutils [None req-5e3f5540-3a91-4676-9ed7-83f8c35479b1 tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2064.203882] env[63279]: DEBUG oslo_concurrency.lockutils [None req-17dbdfc2-2d7b-4361-bf16-3bfecc8b107b tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] Acquiring lock "795560b4-ccdc-4012-8130-042dcb94085f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2064.203987] env[63279]: DEBUG oslo_concurrency.lockutils [None req-17dbdfc2-2d7b-4361-bf16-3bfecc8b107b tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] Lock "795560b4-ccdc-4012-8130-042dcb94085f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2064.204705] env[63279]: DEBUG oslo_concurrency.lockutils [None req-17dbdfc2-2d7b-4361-bf16-3bfecc8b107b tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] Acquiring lock "795560b4-ccdc-4012-8130-042dcb94085f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2064.204705] env[63279]: DEBUG oslo_concurrency.lockutils [None req-17dbdfc2-2d7b-4361-bf16-3bfecc8b107b tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] Lock "795560b4-ccdc-4012-8130-042dcb94085f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2064.205239] env[63279]: DEBUG oslo_concurrency.lockutils [None req-17dbdfc2-2d7b-4361-bf16-3bfecc8b107b tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] Lock "795560b4-ccdc-4012-8130-042dcb94085f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2064.215061] env[63279]: INFO nova.compute.manager [None req-17dbdfc2-2d7b-4361-bf16-3bfecc8b107b tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] [instance: 795560b4-ccdc-4012-8130-042dcb94085f] Terminating instance [ 2064.225932] env[63279]: DEBUG oslo_vmware.api [None req-c8d6a01e-b538-4983-b585-0874aad097ba tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Task: {'id': task-2087394, 'name': ReconfigVM_Task, 'duration_secs': 0.266106} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2064.227369] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-c8d6a01e-b538-4983-b585-0874aad097ba tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] [instance: f6a5d157-d58c-4c7e-b6e8-f2dc7aaebd9b] Updating instance 'f6a5d157-d58c-4c7e-b6e8-f2dc7aaebd9b' progress to 33 {{(pid=63279) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2064.245533] env[63279]: DEBUG oslo_vmware.api [None req-0b9b77cd-0839-40e4-8a61-bde483c6a590 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Task: {'id': task-2087395, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2064.665072] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2790771-2d88-4e7c-9222-30936c6394c6 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2064.675409] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ec14777-b2e0-470c-b601-4c2d641fd872 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2064.711049] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-232e6a2b-cee8-4f9a-9779-60f91832df87 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2064.720183] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aca49158-d095-444e-8993-f37827356068 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2064.735233] env[63279]: DEBUG nova.compute.manager [None req-17dbdfc2-2d7b-4361-bf16-3bfecc8b107b tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] [instance: 795560b4-ccdc-4012-8130-042dcb94085f] Start destroying the instance on the hypervisor. 
{{(pid=63279) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2064.735466] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-17dbdfc2-2d7b-4361-bf16-3bfecc8b107b tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] [instance: 795560b4-ccdc-4012-8130-042dcb94085f] Destroying instance {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2064.737802] env[63279]: DEBUG nova.virt.hardware [None req-c8d6a01e-b538-4983-b585-0874aad097ba tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-13T17:53:56Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='ca62e09f-97fd-4b30-aed4-3874eb2dace8',id=29,is_public=True,memory_mb=192,name='tempest-test_resize_flavor_-212803514',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=30887889-e45b-4f67-8b3c-16216e594a90,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2064.738083] env[63279]: DEBUG nova.virt.hardware [None req-c8d6a01e-b538-4983-b585-0874aad097ba tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Flavor limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2064.738267] env[63279]: DEBUG nova.virt.hardware [None req-c8d6a01e-b538-4983-b585-0874aad097ba tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Image limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2064.738463] env[63279]: DEBUG nova.virt.hardware [None req-c8d6a01e-b538-4983-b585-0874aad097ba tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Flavor pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2064.738643] env[63279]: DEBUG nova.virt.hardware [None req-c8d6a01e-b538-4983-b585-0874aad097ba tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Image pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2064.738799] env[63279]: DEBUG nova.virt.hardware [None req-c8d6a01e-b538-4983-b585-0874aad097ba tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2064.739038] env[63279]: DEBUG nova.virt.hardware [None req-c8d6a01e-b538-4983-b585-0874aad097ba tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 2064.739296] env[63279]: DEBUG nova.virt.hardware [None req-c8d6a01e-b538-4983-b585-0874aad097ba tempest-MigrationsAdminTest-1378842037 
tempest-MigrationsAdminTest-1378842037-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 2064.739413] env[63279]: DEBUG nova.virt.hardware [None req-c8d6a01e-b538-4983-b585-0874aad097ba tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Got 1 possible topologies {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2064.739608] env[63279]: DEBUG nova.virt.hardware [None req-c8d6a01e-b538-4983-b585-0874aad097ba tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2064.739803] env[63279]: DEBUG nova.virt.hardware [None req-c8d6a01e-b538-4983-b585-0874aad097ba tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2064.745523] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-c8d6a01e-b538-4983-b585-0874aad097ba tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] [instance: f6a5d157-d58c-4c7e-b6e8-f2dc7aaebd9b] Reconfiguring VM instance instance-00000036 to detach disk 2000 {{(pid=63279) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 2064.746543] env[63279]: DEBUG nova.compute.provider_tree [None req-b0743caa-f310-40a7-b364-e4022c9249e0 tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] Inventory has not changed in ProviderTree for provider: 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2064.748399] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac475b6c-41f4-4949-929d-a4f6d2868e12 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2064.754820] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-52bc8d40-ff34-426e-9020-44350107e739 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2064.769935] env[63279]: DEBUG nova.scheduler.client.report [None req-b0743caa-f310-40a7-b364-e4022c9249e0 tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] Inventory has not changed for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2064.784050] env[63279]: DEBUG oslo_vmware.api [None req-0b9b77cd-0839-40e4-8a61-bde483c6a590 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Task: {'id': task-2087395, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.661493} completed 
successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2064.789593] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-0b9b77cd-0839-40e4-8a61-bde483c6a590 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] a15141bc-424d-48ca-a6d5-c859a3639a0b/a15141bc-424d-48ca-a6d5-c859a3639a0b.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2064.789593] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-0b9b77cd-0839-40e4-8a61-bde483c6a590 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] [instance: a15141bc-424d-48ca-a6d5-c859a3639a0b] Extending root virtual disk to 1048576 {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2064.789593] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-17dbdfc2-2d7b-4361-bf16-3bfecc8b107b tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] [instance: 795560b4-ccdc-4012-8130-042dcb94085f] Powering off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2064.789593] env[63279]: DEBUG oslo_vmware.api [None req-c8d6a01e-b538-4983-b585-0874aad097ba tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Waiting for the task: (returnval){ [ 2064.789593] env[63279]: value = "task-2087396" [ 2064.789593] env[63279]: _type = "Task" [ 2064.789593] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2064.789593] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-add8df28-06b9-4474-b30b-d11e56ce1d23 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2064.790558] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d875214b-745d-491a-97a7-2202c937315c {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2064.803815] env[63279]: DEBUG oslo_vmware.api [None req-c8d6a01e-b538-4983-b585-0874aad097ba tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Task: {'id': task-2087396, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2064.807661] env[63279]: DEBUG oslo_vmware.api [None req-0b9b77cd-0839-40e4-8a61-bde483c6a590 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Waiting for the task: (returnval){ [ 2064.807661] env[63279]: value = "task-2087397" [ 2064.807661] env[63279]: _type = "Task" [ 2064.807661] env[63279]: } to complete. 
{{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2064.807661] env[63279]: DEBUG oslo_vmware.api [None req-17dbdfc2-2d7b-4361-bf16-3bfecc8b107b tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] Waiting for the task: (returnval){ [ 2064.807661] env[63279]: value = "task-2087398" [ 2064.807661] env[63279]: _type = "Task" [ 2064.807661] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2064.819749] env[63279]: DEBUG oslo_vmware.api [None req-17dbdfc2-2d7b-4361-bf16-3bfecc8b107b tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] Task: {'id': task-2087398, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2064.823494] env[63279]: DEBUG oslo_vmware.api [None req-0b9b77cd-0839-40e4-8a61-bde483c6a590 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Task: {'id': task-2087397, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2065.279605] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b0743caa-f310-40a7-b364-e4022c9249e0 tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.169s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2065.279605] env[63279]: DEBUG nova.compute.manager [None req-b0743caa-f310-40a7-b364-e4022c9249e0 tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] [instance: df963c29-a1c4-4f28-be95-cafe3af4d2fa] Start building networks asynchronously for instance. {{(pid=63279) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 2065.283154] env[63279]: DEBUG oslo_concurrency.lockutils [None req-fc1fded0-bb06-4bd3-9e42-05bd80dfbf53 tempest-InstanceActionsV221TestJSON-1895476841 tempest-InstanceActionsV221TestJSON-1895476841-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 37.602s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2065.283391] env[63279]: DEBUG nova.objects.instance [None req-fc1fded0-bb06-4bd3-9e42-05bd80dfbf53 tempest-InstanceActionsV221TestJSON-1895476841 tempest-InstanceActionsV221TestJSON-1895476841-project-member] Lazy-loading 'resources' on Instance uuid ff9701ed-d545-44b4-911a-c4d809d0a771 {{(pid=63279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2065.304116] env[63279]: DEBUG oslo_vmware.api [None req-c8d6a01e-b538-4983-b585-0874aad097ba tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Task: {'id': task-2087396, 'name': ReconfigVM_Task, 'duration_secs': 0.331969} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2065.304432] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-c8d6a01e-b538-4983-b585-0874aad097ba tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] [instance: f6a5d157-d58c-4c7e-b6e8-f2dc7aaebd9b] Reconfigured VM instance instance-00000036 to detach disk 2000 {{(pid=63279) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 2065.305248] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9fdf877a-667a-43f6-898a-056c8ec12881 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2065.336528] env[63279]: DEBUG oslo_vmware.api [None req-17dbdfc2-2d7b-4361-bf16-3bfecc8b107b tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] Task: {'id': task-2087398, 'name': PowerOffVM_Task, 'duration_secs': 0.297784} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2065.336804] env[63279]: DEBUG oslo_vmware.api [None req-0b9b77cd-0839-40e4-8a61-bde483c6a590 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Task: {'id': task-2087397, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.13246} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2065.344401] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-c8d6a01e-b538-4983-b585-0874aad097ba tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] [instance: f6a5d157-d58c-4c7e-b6e8-f2dc7aaebd9b] Reconfiguring VM instance instance-00000036 to attach disk [datastore1] f6a5d157-d58c-4c7e-b6e8-f2dc7aaebd9b/f6a5d157-d58c-4c7e-b6e8-f2dc7aaebd9b.vmdk or device None with type thin {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2065.344739] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-17dbdfc2-2d7b-4361-bf16-3bfecc8b107b tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] [instance: 795560b4-ccdc-4012-8130-042dcb94085f] Powered off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2065.344912] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-17dbdfc2-2d7b-4361-bf16-3bfecc8b107b tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] [instance: 795560b4-ccdc-4012-8130-042dcb94085f] Unregistering the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2065.345169] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-0b9b77cd-0839-40e4-8a61-bde483c6a590 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] [instance: a15141bc-424d-48ca-a6d5-c859a3639a0b] Extended root virtual disk {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2065.345738] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-357ce587-d929-4862-85c0-5ce2860603cd {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2065.358750] env[63279]: DEBUG oslo_vmware.service [-] 
Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-718a2cf7-7688-4062-a9fb-0238b47139df {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2065.360826] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13f8638e-0de9-4928-a5f2-4acc5e4cbbc9 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2065.385174] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-0b9b77cd-0839-40e4-8a61-bde483c6a590 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] [instance: a15141bc-424d-48ca-a6d5-c859a3639a0b] Reconfiguring VM instance instance-00000040 to attach disk [datastore1] a15141bc-424d-48ca-a6d5-c859a3639a0b/a15141bc-424d-48ca-a6d5-c859a3639a0b.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2065.387333] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ceb635fd-3ef0-4e57-a116-918fde583635 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2065.404161] env[63279]: DEBUG oslo_vmware.api [None req-c8d6a01e-b538-4983-b585-0874aad097ba tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Waiting for the task: (returnval){ [ 2065.404161] env[63279]: value = "task-2087400" [ 2065.404161] env[63279]: _type = "Task" [ 2065.404161] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2065.409902] env[63279]: DEBUG oslo_vmware.api [None req-0b9b77cd-0839-40e4-8a61-bde483c6a590 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Waiting for the task: (returnval){ [ 2065.409902] env[63279]: value = "task-2087401" [ 2065.409902] env[63279]: _type = "Task" [ 2065.409902] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2065.417447] env[63279]: DEBUG oslo_vmware.api [None req-c8d6a01e-b538-4983-b585-0874aad097ba tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Task: {'id': task-2087400, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2065.423606] env[63279]: DEBUG oslo_vmware.api [None req-0b9b77cd-0839-40e4-8a61-bde483c6a590 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Task: {'id': task-2087401, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2065.609290] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-17dbdfc2-2d7b-4361-bf16-3bfecc8b107b tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] [instance: 795560b4-ccdc-4012-8130-042dcb94085f] Unregistered the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2065.609725] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-17dbdfc2-2d7b-4361-bf16-3bfecc8b107b tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] [instance: 795560b4-ccdc-4012-8130-042dcb94085f] Deleting contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2065.610025] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-17dbdfc2-2d7b-4361-bf16-3bfecc8b107b tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] Deleting the datastore file [datastore1] 795560b4-ccdc-4012-8130-042dcb94085f {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2065.610448] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7e097c04-31a3-4e36-aa44-a7f45f6f9cda {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2065.617724] env[63279]: DEBUG oslo_vmware.api [None req-17dbdfc2-2d7b-4361-bf16-3bfecc8b107b tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] Waiting for the task: (returnval){ [ 2065.617724] env[63279]: value = "task-2087402" [ 2065.617724] env[63279]: _type = "Task" [ 2065.617724] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2065.628241] env[63279]: DEBUG oslo_vmware.api [None req-17dbdfc2-2d7b-4361-bf16-3bfecc8b107b tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] Task: {'id': task-2087402, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2065.789984] env[63279]: DEBUG nova.compute.utils [None req-b0743caa-f310-40a7-b364-e4022c9249e0 tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] Using /dev/sd instead of None {{(pid=63279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2065.791661] env[63279]: DEBUG nova.compute.manager [None req-b0743caa-f310-40a7-b364-e4022c9249e0 tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] [instance: df963c29-a1c4-4f28-be95-cafe3af4d2fa] Not allocating networking since 'none' was specified. {{(pid=63279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 2065.914251] env[63279]: DEBUG oslo_vmware.api [None req-c8d6a01e-b538-4983-b585-0874aad097ba tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Task: {'id': task-2087400, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2065.924465] env[63279]: DEBUG oslo_vmware.api [None req-0b9b77cd-0839-40e4-8a61-bde483c6a590 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Task: {'id': task-2087401, 'name': ReconfigVM_Task, 'duration_secs': 0.507195} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2065.924753] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-0b9b77cd-0839-40e4-8a61-bde483c6a590 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] [instance: a15141bc-424d-48ca-a6d5-c859a3639a0b] Reconfigured VM instance instance-00000040 to attach disk [datastore1] a15141bc-424d-48ca-a6d5-c859a3639a0b/a15141bc-424d-48ca-a6d5-c859a3639a0b.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2065.925407] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-9b7f5899-ed6d-4a44-8ed8-5a172389b421 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2065.932023] env[63279]: DEBUG oslo_vmware.api [None req-0b9b77cd-0839-40e4-8a61-bde483c6a590 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Waiting for the task: (returnval){ [ 2065.932023] env[63279]: value = "task-2087403" [ 2065.932023] env[63279]: _type = "Task" [ 2065.932023] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2065.941376] env[63279]: DEBUG oslo_vmware.api [None req-0b9b77cd-0839-40e4-8a61-bde483c6a590 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Task: {'id': task-2087403, 'name': Rename_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2066.128508] env[63279]: DEBUG oslo_vmware.api [None req-17dbdfc2-2d7b-4361-bf16-3bfecc8b107b tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] Task: {'id': task-2087402, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.419246} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2066.131172] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-17dbdfc2-2d7b-4361-bf16-3bfecc8b107b tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] Deleted the datastore file {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2066.131388] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-17dbdfc2-2d7b-4361-bf16-3bfecc8b107b tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] [instance: 795560b4-ccdc-4012-8130-042dcb94085f] Deleted contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2066.131575] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-17dbdfc2-2d7b-4361-bf16-3bfecc8b107b tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] [instance: 795560b4-ccdc-4012-8130-042dcb94085f] Instance destroyed {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2066.131754] env[63279]: INFO nova.compute.manager [None req-17dbdfc2-2d7b-4361-bf16-3bfecc8b107b tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] [instance: 795560b4-ccdc-4012-8130-042dcb94085f] Took 1.40 seconds to destroy the instance on the hypervisor. [ 2066.131998] env[63279]: DEBUG oslo.service.loopingcall [None req-17dbdfc2-2d7b-4361-bf16-3bfecc8b107b tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2066.132400] env[63279]: DEBUG nova.compute.manager [-] [instance: 795560b4-ccdc-4012-8130-042dcb94085f] Deallocating network for instance {{(pid=63279) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2066.132501] env[63279]: DEBUG nova.network.neutron [-] [instance: 795560b4-ccdc-4012-8130-042dcb94085f] deallocate_for_instance() {{(pid=63279) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2066.265459] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-036b5efa-3d2b-4f0c-8f64-f52bf7f98a2a {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2066.273266] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26399726-1274-4ef9-94aa-ca898640fe7d {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2066.304085] env[63279]: DEBUG nova.compute.manager [None req-b0743caa-f310-40a7-b364-e4022c9249e0 tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] [instance: df963c29-a1c4-4f28-be95-cafe3af4d2fa] Start building block device mappings for instance. 
{{(pid=63279) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 2066.313042] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e90fed39-b470-48f7-9c07-2dc7b783357a {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2066.315664] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e905785-9107-4ded-b511-4a12505f6327 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2066.330015] env[63279]: DEBUG nova.compute.provider_tree [None req-fc1fded0-bb06-4bd3-9e42-05bd80dfbf53 tempest-InstanceActionsV221TestJSON-1895476841 tempest-InstanceActionsV221TestJSON-1895476841-project-member] Inventory has not changed in ProviderTree for provider: 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2066.426901] env[63279]: DEBUG oslo_vmware.api [None req-c8d6a01e-b538-4983-b585-0874aad097ba tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Task: {'id': task-2087400, 'name': ReconfigVM_Task} progress is 99%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2066.442819] env[63279]: DEBUG oslo_vmware.api [None req-0b9b77cd-0839-40e4-8a61-bde483c6a590 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Task: {'id': task-2087403, 'name': Rename_Task, 'duration_secs': 0.280642} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2066.443131] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-0b9b77cd-0839-40e4-8a61-bde483c6a590 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] [instance: a15141bc-424d-48ca-a6d5-c859a3639a0b] Powering on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2066.444191] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b4bb093e-46eb-44cd-9c37-53868f176c0d {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2066.451411] env[63279]: DEBUG oslo_vmware.api [None req-0b9b77cd-0839-40e4-8a61-bde483c6a590 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Waiting for the task: (returnval){ [ 2066.451411] env[63279]: value = "task-2087404" [ 2066.451411] env[63279]: _type = "Task" [ 2066.451411] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2066.459580] env[63279]: DEBUG oslo_vmware.api [None req-0b9b77cd-0839-40e4-8a61-bde483c6a590 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Task: {'id': task-2087404, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2066.540118] env[63279]: DEBUG nova.compute.manager [req-591d7ef3-8443-49c9-ab42-7edb5a981a8d req-38ce60fc-e89b-4bac-a9bd-c0af72868f88 service nova] [instance: 795560b4-ccdc-4012-8130-042dcb94085f] Received event network-vif-deleted-d7d164bc-29f6-4434-8980-357d7e34c723 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2066.540370] env[63279]: INFO nova.compute.manager [req-591d7ef3-8443-49c9-ab42-7edb5a981a8d req-38ce60fc-e89b-4bac-a9bd-c0af72868f88 service nova] [instance: 795560b4-ccdc-4012-8130-042dcb94085f] Neutron deleted interface d7d164bc-29f6-4434-8980-357d7e34c723; detaching it from the instance and deleting it from the info cache [ 2066.540556] env[63279]: DEBUG nova.network.neutron [req-591d7ef3-8443-49c9-ab42-7edb5a981a8d req-38ce60fc-e89b-4bac-a9bd-c0af72868f88 service nova] [instance: 795560b4-ccdc-4012-8130-042dcb94085f] Updating instance_info_cache with network_info: [] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2066.834874] env[63279]: DEBUG nova.scheduler.client.report [None req-fc1fded0-bb06-4bd3-9e42-05bd80dfbf53 tempest-InstanceActionsV221TestJSON-1895476841 tempest-InstanceActionsV221TestJSON-1895476841-project-member] Inventory has not changed for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2066.915101] env[63279]: DEBUG oslo_vmware.api [None req-c8d6a01e-b538-4983-b585-0874aad097ba tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Task: {'id': task-2087400, 'name': ReconfigVM_Task} progress is 99%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2066.961699] env[63279]: DEBUG oslo_vmware.api [None req-0b9b77cd-0839-40e4-8a61-bde483c6a590 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Task: {'id': task-2087404, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2066.962051] env[63279]: DEBUG nova.network.neutron [-] [instance: 795560b4-ccdc-4012-8130-042dcb94085f] Updating instance_info_cache with network_info: [] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2067.045143] env[63279]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-15f4e432-0acc-438a-a453-1c02839865cf {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2067.054862] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a31f8e2e-d69a-4476-9d67-510a4aa8eebd {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2067.099129] env[63279]: DEBUG nova.compute.manager [req-591d7ef3-8443-49c9-ab42-7edb5a981a8d req-38ce60fc-e89b-4bac-a9bd-c0af72868f88 service nova] [instance: 795560b4-ccdc-4012-8130-042dcb94085f] Detach interface failed, port_id=d7d164bc-29f6-4434-8980-357d7e34c723, reason: Instance 795560b4-ccdc-4012-8130-042dcb94085f could not be found. {{(pid=63279) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11484}} [ 2067.316989] env[63279]: DEBUG nova.compute.manager [None req-b0743caa-f310-40a7-b364-e4022c9249e0 tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] [instance: df963c29-a1c4-4f28-be95-cafe3af4d2fa] Start spawning the instance on the hypervisor. {{(pid=63279) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 2067.344118] env[63279]: DEBUG oslo_concurrency.lockutils [None req-fc1fded0-bb06-4bd3-9e42-05bd80dfbf53 tempest-InstanceActionsV221TestJSON-1895476841 tempest-InstanceActionsV221TestJSON-1895476841-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.061s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2067.348768] env[63279]: DEBUG nova.virt.hardware [None req-b0743caa-f310-40a7-b364-e4022c9249e0 tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-13T17:30:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-13T17:30:01Z,direct_url=,disk_format='vmdk',id=30887889-e45b-4f67-8b3c-16216e594a90,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a935e86eee0a4c38adfe0367d2097a61',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-13T17:30:02Z,virtual_size=,visibility=), allow threads: False {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2067.349096] env[63279]: DEBUG nova.virt.hardware [None req-b0743caa-f310-40a7-b364-e4022c9249e0 tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] Flavor limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2067.349581] env[63279]: DEBUG 
nova.virt.hardware [None req-b0743caa-f310-40a7-b364-e4022c9249e0 tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] Image limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2067.349877] env[63279]: DEBUG nova.virt.hardware [None req-b0743caa-f310-40a7-b364-e4022c9249e0 tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] Flavor pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2067.350102] env[63279]: DEBUG nova.virt.hardware [None req-b0743caa-f310-40a7-b364-e4022c9249e0 tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] Image pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2067.350308] env[63279]: DEBUG nova.virt.hardware [None req-b0743caa-f310-40a7-b364-e4022c9249e0 tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2067.350571] env[63279]: DEBUG nova.virt.hardware [None req-b0743caa-f310-40a7-b364-e4022c9249e0 tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 2067.350783] env[63279]: DEBUG nova.virt.hardware [None req-b0743caa-f310-40a7-b364-e4022c9249e0 tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 2067.351019] env[63279]: DEBUG nova.virt.hardware [None req-b0743caa-f310-40a7-b364-e4022c9249e0 tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] Got 1 possible topologies {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2067.351276] env[63279]: DEBUG nova.virt.hardware [None req-b0743caa-f310-40a7-b364-e4022c9249e0 tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2067.351535] env[63279]: DEBUG nova.virt.hardware [None req-b0743caa-f310-40a7-b364-e4022c9249e0 tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2067.351921] env[63279]: DEBUG oslo_concurrency.lockutils [None req-1005df31-c010-45b4-a217-c9e2af6f76aa tempest-ServerShowV254Test-1528218800 tempest-ServerShowV254Test-1528218800-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 39.551s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2067.353447] env[63279]: INFO nova.compute.claims [None req-1005df31-c010-45b4-a217-c9e2af6f76aa tempest-ServerShowV254Test-1528218800 tempest-ServerShowV254Test-1528218800-project-member] 
[instance: 4871421f-0015-4973-bb5f-c9042d411c82] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2067.357561] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-415d2814-e058-4e5a-9832-27de1bf795f5 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2067.367668] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10c65b84-ac6e-40f1-8a06-a0d4afacb769 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2067.372773] env[63279]: INFO nova.scheduler.client.report [None req-fc1fded0-bb06-4bd3-9e42-05bd80dfbf53 tempest-InstanceActionsV221TestJSON-1895476841 tempest-InstanceActionsV221TestJSON-1895476841-project-member] Deleted allocations for instance ff9701ed-d545-44b4-911a-c4d809d0a771 [ 2067.389590] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-b0743caa-f310-40a7-b364-e4022c9249e0 tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] [instance: df963c29-a1c4-4f28-be95-cafe3af4d2fa] Instance VIF info [] {{(pid=63279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2067.396476] env[63279]: DEBUG oslo.service.loopingcall [None req-b0743caa-f310-40a7-b364-e4022c9249e0 tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2067.397365] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: df963c29-a1c4-4f28-be95-cafe3af4d2fa] Creating VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2067.397803] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-263a54df-899f-4038-a959-6c273db53aa1 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2067.425142] env[63279]: DEBUG oslo_vmware.api [None req-c8d6a01e-b538-4983-b585-0874aad097ba tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Task: {'id': task-2087400, 'name': ReconfigVM_Task, 'duration_secs': 1.547369} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2067.426670] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-c8d6a01e-b538-4983-b585-0874aad097ba tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] [instance: f6a5d157-d58c-4c7e-b6e8-f2dc7aaebd9b] Reconfigured VM instance instance-00000036 to attach disk [datastore1] f6a5d157-d58c-4c7e-b6e8-f2dc7aaebd9b/f6a5d157-d58c-4c7e-b6e8-f2dc7aaebd9b.vmdk or device None with type thin {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2067.426956] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-c8d6a01e-b538-4983-b585-0874aad097ba tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] [instance: f6a5d157-d58c-4c7e-b6e8-f2dc7aaebd9b] Updating instance 'f6a5d157-d58c-4c7e-b6e8-f2dc7aaebd9b' progress to 50 {{(pid=63279) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2067.430849] env[63279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2067.430849] env[63279]: value = "task-2087405" [ 2067.430849] env[63279]: _type = "Task" [ 2067.430849] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2067.441203] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087405, 'name': CreateVM_Task} progress is 6%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2067.467354] env[63279]: INFO nova.compute.manager [-] [instance: 795560b4-ccdc-4012-8130-042dcb94085f] Took 1.33 seconds to deallocate network for instance. [ 2067.467519] env[63279]: DEBUG oslo_vmware.api [None req-0b9b77cd-0839-40e4-8a61-bde483c6a590 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Task: {'id': task-2087404, 'name': PowerOnVM_Task} progress is 100%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2067.901189] env[63279]: DEBUG oslo_concurrency.lockutils [None req-fc1fded0-bb06-4bd3-9e42-05bd80dfbf53 tempest-InstanceActionsV221TestJSON-1895476841 tempest-InstanceActionsV221TestJSON-1895476841-project-member] Lock "ff9701ed-d545-44b4-911a-c4d809d0a771" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 44.319s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2067.935020] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4fd56b4b-d093-4995-9e2c-f7dcd3603b77 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2067.947166] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087405, 'name': CreateVM_Task, 'duration_secs': 0.403916} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2067.960804] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: df963c29-a1c4-4f28-be95-cafe3af4d2fa] Created VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2067.961910] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b0743caa-f310-40a7-b364-e4022c9249e0 tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2067.962121] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b0743caa-f310-40a7-b364-e4022c9249e0 tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2067.962474] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b0743caa-f310-40a7-b364-e4022c9249e0 tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2067.966150] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d4811f8-917a-4305-b97c-5714c21f882b {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2067.970338] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fda8d8e5-034d-4945-ad6a-e83193e73729 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2067.974457] env[63279]: DEBUG oslo_concurrency.lockutils [None req-17dbdfc2-2d7b-4361-bf16-3bfecc8b107b tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2067.992367] env[63279]: DEBUG oslo_vmware.api [None req-b0743caa-f310-40a7-b364-e4022c9249e0 tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] Waiting for the task: (returnval){ [ 2067.992367] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52b4cb28-8828-2368-8cfe-7dff1d9c1abc" [ 2067.992367] env[63279]: _type = "Task" [ 2067.992367] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2067.992602] env[63279]: DEBUG oslo_vmware.api [None req-0b9b77cd-0839-40e4-8a61-bde483c6a590 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Task: {'id': task-2087404, 'name': PowerOnVM_Task, 'duration_secs': 1.038613} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2067.993414] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-c8d6a01e-b538-4983-b585-0874aad097ba tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] [instance: f6a5d157-d58c-4c7e-b6e8-f2dc7aaebd9b] Updating instance 'f6a5d157-d58c-4c7e-b6e8-f2dc7aaebd9b' progress to 67 {{(pid=63279) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2067.996490] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-0b9b77cd-0839-40e4-8a61-bde483c6a590 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] [instance: a15141bc-424d-48ca-a6d5-c859a3639a0b] Powered on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2067.996724] env[63279]: INFO nova.compute.manager [None req-0b9b77cd-0839-40e4-8a61-bde483c6a590 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] [instance: a15141bc-424d-48ca-a6d5-c859a3639a0b] Took 9.85 seconds to spawn the instance on the hypervisor. [ 2067.996907] env[63279]: DEBUG nova.compute.manager [None req-0b9b77cd-0839-40e4-8a61-bde483c6a590 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] [instance: a15141bc-424d-48ca-a6d5-c859a3639a0b] Checking state {{(pid=63279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2068.001703] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e25fad4-65c9-45f2-af6f-692604570175 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2068.010826] env[63279]: DEBUG oslo_vmware.api [None req-b0743caa-f310-40a7-b364-e4022c9249e0 tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52b4cb28-8828-2368-8cfe-7dff1d9c1abc, 'name': SearchDatastore_Task, 'duration_secs': 0.013174} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2068.012625] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b0743caa-f310-40a7-b364-e4022c9249e0 tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2068.012835] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-b0743caa-f310-40a7-b364-e4022c9249e0 tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] [instance: df963c29-a1c4-4f28-be95-cafe3af4d2fa] Processing image 30887889-e45b-4f67-8b3c-16216e594a90 {{(pid=63279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2068.013100] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b0743caa-f310-40a7-b364-e4022c9249e0 tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2068.013285] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b0743caa-f310-40a7-b364-e4022c9249e0 tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2068.013472] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-b0743caa-f310-40a7-b364-e4022c9249e0 tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2068.017128] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b3aeb770-b451-4472-b62e-6b9c6e0879ff {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2068.026125] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-b0743caa-f310-40a7-b364-e4022c9249e0 tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2068.026348] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-b0743caa-f310-40a7-b364-e4022c9249e0 tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2068.027115] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ed2e9402-517c-4869-b66e-af207b8892ae {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2068.033743] env[63279]: DEBUG oslo_vmware.api [None req-b0743caa-f310-40a7-b364-e4022c9249e0 tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] Waiting for the task: (returnval){ [ 2068.033743] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]5267a556-2316-e683-1996-d2d692c856ca" [ 2068.033743] env[63279]: _type = "Task" [ 2068.033743] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2068.043533] env[63279]: DEBUG oslo_vmware.api [None req-b0743caa-f310-40a7-b364-e4022c9249e0 tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]5267a556-2316-e683-1996-d2d692c856ca, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2068.526248] env[63279]: INFO nova.compute.manager [None req-0b9b77cd-0839-40e4-8a61-bde483c6a590 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] [instance: a15141bc-424d-48ca-a6d5-c859a3639a0b] Took 51.31 seconds to build instance. [ 2068.546311] env[63279]: DEBUG oslo_vmware.api [None req-b0743caa-f310-40a7-b364-e4022c9249e0 tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]5267a556-2316-e683-1996-d2d692c856ca, 'name': SearchDatastore_Task, 'duration_secs': 0.012909} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2068.550825] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-08ff703f-c66c-474c-b2d9-f1be6fa76ec8 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2068.556113] env[63279]: DEBUG oslo_vmware.api [None req-b0743caa-f310-40a7-b364-e4022c9249e0 tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] Waiting for the task: (returnval){ [ 2068.556113] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]528ca577-2b1f-5a3a-41b6-e20f920c9cf8" [ 2068.556113] env[63279]: _type = "Task" [ 2068.556113] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2068.565768] env[63279]: DEBUG oslo_vmware.api [None req-b0743caa-f310-40a7-b364-e4022c9249e0 tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]528ca577-2b1f-5a3a-41b6-e20f920c9cf8, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2068.576845] env[63279]: DEBUG nova.network.neutron [None req-c8d6a01e-b538-4983-b585-0874aad097ba tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] [instance: f6a5d157-d58c-4c7e-b6e8-f2dc7aaebd9b] Port cbbfde33-a0b6-4403-8a1e-d688a0a7147b binding to destination host cpu-1 is already ACTIVE {{(pid=63279) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 2068.829833] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e371aa1-64d8-46a3-b49a-2806161d2f4a {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2068.839608] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbd5cb32-902d-47dc-8157-a3cf65f7ce62 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2068.870267] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f497c5a-76cd-4193-bb83-01b9b4abad6c {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2068.879030] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e53414c-53b1-4e69-bcc3-f3563ce96289 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2068.891779] env[63279]: DEBUG nova.compute.provider_tree [None req-1005df31-c010-45b4-a217-c9e2af6f76aa tempest-ServerShowV254Test-1528218800 tempest-ServerShowV254Test-1528218800-project-member] Inventory has not changed in ProviderTree for provider: 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2069.028745] env[63279]: DEBUG oslo_concurrency.lockutils [None req-0b9b77cd-0839-40e4-8a61-bde483c6a590 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Lock "a15141bc-424d-48ca-a6d5-c859a3639a0b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 64.752s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2069.066535] env[63279]: DEBUG oslo_vmware.api [None req-b0743caa-f310-40a7-b364-e4022c9249e0 tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]528ca577-2b1f-5a3a-41b6-e20f920c9cf8, 'name': SearchDatastore_Task, 'duration_secs': 0.026944} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2069.066829] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b0743caa-f310-40a7-b364-e4022c9249e0 tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2069.067107] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-b0743caa-f310-40a7-b364-e4022c9249e0 tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] df963c29-a1c4-4f28-be95-cafe3af4d2fa/df963c29-a1c4-4f28-be95-cafe3af4d2fa.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2069.067391] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ffd7de44-540b-4483-9792-759fe61843fb {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2069.074810] env[63279]: DEBUG oslo_vmware.api [None req-b0743caa-f310-40a7-b364-e4022c9249e0 tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] Waiting for the task: (returnval){ [ 2069.074810] env[63279]: value = "task-2087406" [ 2069.074810] env[63279]: _type = "Task" [ 2069.074810] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2069.087230] env[63279]: DEBUG oslo_vmware.api [None req-b0743caa-f310-40a7-b364-e4022c9249e0 tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] Task: {'id': task-2087406, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2069.395165] env[63279]: DEBUG nova.scheduler.client.report [None req-1005df31-c010-45b4-a217-c9e2af6f76aa tempest-ServerShowV254Test-1528218800 tempest-ServerShowV254Test-1528218800-project-member] Inventory has not changed for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2069.439874] env[63279]: DEBUG nova.compute.manager [req-8edf1af8-3ecf-4f3a-b305-264512f712ec req-a7e13a8c-22c9-4909-89da-dfcfcbbf46d6 service nova] [instance: a15141bc-424d-48ca-a6d5-c859a3639a0b] Received event network-changed-cd0a8ed6-d1ee-4266-8bde-e866ac2873ce {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2069.440126] env[63279]: DEBUG nova.compute.manager [req-8edf1af8-3ecf-4f3a-b305-264512f712ec req-a7e13a8c-22c9-4909-89da-dfcfcbbf46d6 service nova] [instance: a15141bc-424d-48ca-a6d5-c859a3639a0b] Refreshing instance network info cache due to event network-changed-cd0a8ed6-d1ee-4266-8bde-e866ac2873ce. 
{{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11655}} [ 2069.440427] env[63279]: DEBUG oslo_concurrency.lockutils [req-8edf1af8-3ecf-4f3a-b305-264512f712ec req-a7e13a8c-22c9-4909-89da-dfcfcbbf46d6 service nova] Acquiring lock "refresh_cache-a15141bc-424d-48ca-a6d5-c859a3639a0b" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2069.440662] env[63279]: DEBUG oslo_concurrency.lockutils [req-8edf1af8-3ecf-4f3a-b305-264512f712ec req-a7e13a8c-22c9-4909-89da-dfcfcbbf46d6 service nova] Acquired lock "refresh_cache-a15141bc-424d-48ca-a6d5-c859a3639a0b" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2069.440865] env[63279]: DEBUG nova.network.neutron [req-8edf1af8-3ecf-4f3a-b305-264512f712ec req-a7e13a8c-22c9-4909-89da-dfcfcbbf46d6 service nova] [instance: a15141bc-424d-48ca-a6d5-c859a3639a0b] Refreshing network info cache for port cd0a8ed6-d1ee-4266-8bde-e866ac2873ce {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2069.588318] env[63279]: DEBUG oslo_vmware.api [None req-b0743caa-f310-40a7-b364-e4022c9249e0 tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] Task: {'id': task-2087406, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2069.616961] env[63279]: DEBUG oslo_concurrency.lockutils [None req-c8d6a01e-b538-4983-b585-0874aad097ba tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Acquiring lock "f6a5d157-d58c-4c7e-b6e8-f2dc7aaebd9b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2069.617660] env[63279]: DEBUG oslo_concurrency.lockutils [None req-c8d6a01e-b538-4983-b585-0874aad097ba tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Lock "f6a5d157-d58c-4c7e-b6e8-f2dc7aaebd9b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.001s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2069.618126] env[63279]: DEBUG oslo_concurrency.lockutils [None req-c8d6a01e-b538-4983-b585-0874aad097ba tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Lock "f6a5d157-d58c-4c7e-b6e8-f2dc7aaebd9b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.001s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2069.901327] env[63279]: DEBUG oslo_concurrency.lockutils [None req-1005df31-c010-45b4-a217-c9e2af6f76aa tempest-ServerShowV254Test-1528218800 tempest-ServerShowV254Test-1528218800-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.549s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2069.902246] env[63279]: DEBUG nova.compute.manager [None req-1005df31-c010-45b4-a217-c9e2af6f76aa tempest-ServerShowV254Test-1528218800 tempest-ServerShowV254Test-1528218800-project-member] [instance: 4871421f-0015-4973-bb5f-c9042d411c82] Start building networks asynchronously for 
instance. {{(pid=63279) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 2069.904679] env[63279]: DEBUG oslo_concurrency.lockutils [None req-5ddb667c-4266-446f-abce-7527abe6be67 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 41.780s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2069.905393] env[63279]: DEBUG nova.objects.instance [None req-5ddb667c-4266-446f-abce-7527abe6be67 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] [instance: 6388f912-ae70-4e8f-b8e4-ceb02e0f8a51] Trying to apply a migration context that does not seem to be set for this instance {{(pid=63279) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 2070.091022] env[63279]: DEBUG oslo_vmware.api [None req-b0743caa-f310-40a7-b364-e4022c9249e0 tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] Task: {'id': task-2087406, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.595751} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2070.091022] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-b0743caa-f310-40a7-b364-e4022c9249e0 tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] df963c29-a1c4-4f28-be95-cafe3af4d2fa/df963c29-a1c4-4f28-be95-cafe3af4d2fa.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2070.091022] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-b0743caa-f310-40a7-b364-e4022c9249e0 tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] [instance: df963c29-a1c4-4f28-be95-cafe3af4d2fa] Extending root virtual disk to 1048576 {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2070.091022] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a285c895-04f3-4313-ba16-6ad8cb83a611 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2070.101023] env[63279]: DEBUG oslo_vmware.api [None req-b0743caa-f310-40a7-b364-e4022c9249e0 tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] Waiting for the task: (returnval){ [ 2070.101023] env[63279]: value = "task-2087407" [ 2070.101023] env[63279]: _type = "Task" [ 2070.101023] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2070.116835] env[63279]: DEBUG oslo_vmware.api [None req-b0743caa-f310-40a7-b364-e4022c9249e0 tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] Task: {'id': task-2087407, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2070.320428] env[63279]: DEBUG nova.network.neutron [req-8edf1af8-3ecf-4f3a-b305-264512f712ec req-a7e13a8c-22c9-4909-89da-dfcfcbbf46d6 service nova] [instance: a15141bc-424d-48ca-a6d5-c859a3639a0b] Updated VIF entry in instance network info cache for port cd0a8ed6-d1ee-4266-8bde-e866ac2873ce. {{(pid=63279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2070.321287] env[63279]: DEBUG nova.network.neutron [req-8edf1af8-3ecf-4f3a-b305-264512f712ec req-a7e13a8c-22c9-4909-89da-dfcfcbbf46d6 service nova] [instance: a15141bc-424d-48ca-a6d5-c859a3639a0b] Updating instance_info_cache with network_info: [{"id": "cd0a8ed6-d1ee-4266-8bde-e866ac2873ce", "address": "fa:16:3e:fe:5c:41", "network": {"id": "4f906777-9da9-42b1-9146-359f04c7c47f", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-923457018-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.193", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "131db3d2daa24712b6e11592cf789b33", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "abe48956-848a-4e1f-b1f1-a27baa5390b9", "external-id": "nsx-vlan-transportzone-238", "segmentation_id": 238, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcd0a8ed6-d1", "ovs_interfaceid": "cd0a8ed6-d1ee-4266-8bde-e866ac2873ce", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2070.410051] env[63279]: DEBUG nova.compute.utils [None req-1005df31-c010-45b4-a217-c9e2af6f76aa tempest-ServerShowV254Test-1528218800 tempest-ServerShowV254Test-1528218800-project-member] Using /dev/sd instead of None {{(pid=63279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2070.423529] env[63279]: DEBUG nova.compute.manager [None req-1005df31-c010-45b4-a217-c9e2af6f76aa tempest-ServerShowV254Test-1528218800 tempest-ServerShowV254Test-1528218800-project-member] [instance: 4871421f-0015-4973-bb5f-c9042d411c82] Not allocating networking since 'none' was specified. {{(pid=63279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 2070.588994] env[63279]: DEBUG oslo_vmware.rw_handles [None req-0abf34c8-85b0-43dd-ac7c-864a19d7c008 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/528efa05-1c00-c51a-f9e8-22b5bede4f48/disk-0.vmdk. 
{{(pid=63279) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 2070.590208] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2dee55a-83b3-4357-be91-6ce45e71ce5c {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2070.597386] env[63279]: DEBUG oslo_vmware.rw_handles [None req-0abf34c8-85b0-43dd-ac7c-864a19d7c008 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/528efa05-1c00-c51a-f9e8-22b5bede4f48/disk-0.vmdk is in state: ready. {{(pid=63279) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 2070.597561] env[63279]: ERROR oslo_vmware.rw_handles [None req-0abf34c8-85b0-43dd-ac7c-864a19d7c008 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/528efa05-1c00-c51a-f9e8-22b5bede4f48/disk-0.vmdk due to incomplete transfer. [ 2070.597811] env[63279]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-aa40d465-df30-4528-aefd-f1d204a54f6f {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2070.615446] env[63279]: DEBUG oslo_vmware.api [None req-b0743caa-f310-40a7-b364-e4022c9249e0 tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] Task: {'id': task-2087407, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.144008} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2070.616746] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-b0743caa-f310-40a7-b364-e4022c9249e0 tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] [instance: df963c29-a1c4-4f28-be95-cafe3af4d2fa] Extended root virtual disk {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2070.617084] env[63279]: DEBUG oslo_vmware.rw_handles [None req-0abf34c8-85b0-43dd-ac7c-864a19d7c008 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/528efa05-1c00-c51a-f9e8-22b5bede4f48/disk-0.vmdk. 
{{(pid=63279) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 2070.617270] env[63279]: DEBUG nova.virt.vmwareapi.images [None req-0abf34c8-85b0-43dd-ac7c-864a19d7c008 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] [instance: 2f5e22f6-ba70-4848-965b-eb1553115323] Uploaded image 9d99d2f6-606a-4ba2-a64b-746ca6b6b68e to the Glance image server {{(pid=63279) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 2070.619492] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-0abf34c8-85b0-43dd-ac7c-864a19d7c008 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] [instance: 2f5e22f6-ba70-4848-965b-eb1553115323] Destroying the VM {{(pid=63279) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 2070.620312] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-009358c8-d30e-4b29-8d21-63763483ac28 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2070.629125] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-4674d29c-dfd2-4d95-88b8-53446e2b2363 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2070.658996] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-b0743caa-f310-40a7-b364-e4022c9249e0 tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] [instance: df963c29-a1c4-4f28-be95-cafe3af4d2fa] Reconfiguring VM instance instance-00000041 to attach disk [datastore1] df963c29-a1c4-4f28-be95-cafe3af4d2fa/df963c29-a1c4-4f28-be95-cafe3af4d2fa.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2070.661083] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-648d946c-40e0-4086-b19c-6e4700f499a9 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2070.675914] env[63279]: DEBUG oslo_vmware.api [None req-0abf34c8-85b0-43dd-ac7c-864a19d7c008 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Waiting for the task: (returnval){ [ 2070.675914] env[63279]: value = "task-2087408" [ 2070.675914] env[63279]: _type = "Task" [ 2070.675914] env[63279]: } to complete. 
{{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2070.681608] env[63279]: DEBUG oslo_concurrency.lockutils [None req-c8d6a01e-b538-4983-b585-0874aad097ba tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Acquiring lock "refresh_cache-f6a5d157-d58c-4c7e-b6e8-f2dc7aaebd9b" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2070.681803] env[63279]: DEBUG oslo_concurrency.lockutils [None req-c8d6a01e-b538-4983-b585-0874aad097ba tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Acquired lock "refresh_cache-f6a5d157-d58c-4c7e-b6e8-f2dc7aaebd9b" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2070.681989] env[63279]: DEBUG nova.network.neutron [None req-c8d6a01e-b538-4983-b585-0874aad097ba tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] [instance: f6a5d157-d58c-4c7e-b6e8-f2dc7aaebd9b] Building network info cache for instance {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2070.684705] env[63279]: DEBUG oslo_vmware.api [None req-b0743caa-f310-40a7-b364-e4022c9249e0 tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] Waiting for the task: (returnval){ [ 2070.684705] env[63279]: value = "task-2087409" [ 2070.684705] env[63279]: _type = "Task" [ 2070.684705] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2070.692138] env[63279]: DEBUG oslo_vmware.api [None req-0abf34c8-85b0-43dd-ac7c-864a19d7c008 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Task: {'id': task-2087408, 'name': Destroy_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2070.697441] env[63279]: DEBUG oslo_vmware.api [None req-b0743caa-f310-40a7-b364-e4022c9249e0 tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] Task: {'id': task-2087409, 'name': ReconfigVM_Task} progress is 10%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2070.824652] env[63279]: DEBUG oslo_concurrency.lockutils [req-8edf1af8-3ecf-4f3a-b305-264512f712ec req-a7e13a8c-22c9-4909-89da-dfcfcbbf46d6 service nova] Releasing lock "refresh_cache-a15141bc-424d-48ca-a6d5-c859a3639a0b" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2070.920706] env[63279]: DEBUG nova.compute.manager [None req-1005df31-c010-45b4-a217-c9e2af6f76aa tempest-ServerShowV254Test-1528218800 tempest-ServerShowV254Test-1528218800-project-member] [instance: 4871421f-0015-4973-bb5f-c9042d411c82] Start building block device mappings for instance. 
{{(pid=63279) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 2070.925197] env[63279]: DEBUG oslo_concurrency.lockutils [None req-5ddb667c-4266-446f-abce-7527abe6be67 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.020s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2070.926260] env[63279]: DEBUG oslo_concurrency.lockutils [None req-d6ed46b7-baf3-4c58-92b0-8ca9f050dbbc tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 42.358s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2070.926499] env[63279]: DEBUG nova.objects.instance [None req-d6ed46b7-baf3-4c58-92b0-8ca9f050dbbc tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Lazy-loading 'resources' on Instance uuid 8ccb4293-927a-45ba-82e9-9f1b4d5985cc {{(pid=63279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2071.188387] env[63279]: DEBUG oslo_vmware.api [None req-0abf34c8-85b0-43dd-ac7c-864a19d7c008 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Task: {'id': task-2087408, 'name': Destroy_Task} progress is 33%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2071.196940] env[63279]: DEBUG oslo_vmware.api [None req-b0743caa-f310-40a7-b364-e4022c9249e0 tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] Task: {'id': task-2087409, 'name': ReconfigVM_Task, 'duration_secs': 0.446544} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2071.197261] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-b0743caa-f310-40a7-b364-e4022c9249e0 tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] [instance: df963c29-a1c4-4f28-be95-cafe3af4d2fa] Reconfigured VM instance instance-00000041 to attach disk [datastore1] df963c29-a1c4-4f28-be95-cafe3af4d2fa/df963c29-a1c4-4f28-be95-cafe3af4d2fa.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2071.197873] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-67feb69d-25b0-4efb-afcc-4929c5e6a596 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2071.205275] env[63279]: DEBUG oslo_vmware.api [None req-b0743caa-f310-40a7-b364-e4022c9249e0 tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] Waiting for the task: (returnval){ [ 2071.205275] env[63279]: value = "task-2087410" [ 2071.205275] env[63279]: _type = "Task" [ 2071.205275] env[63279]: } to complete. 
{{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2071.217841] env[63279]: DEBUG oslo_vmware.api [None req-b0743caa-f310-40a7-b364-e4022c9249e0 tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] Task: {'id': task-2087410, 'name': Rename_Task} progress is 5%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2071.529592] env[63279]: DEBUG nova.network.neutron [None req-c8d6a01e-b538-4983-b585-0874aad097ba tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] [instance: f6a5d157-d58c-4c7e-b6e8-f2dc7aaebd9b] Updating instance_info_cache with network_info: [{"id": "cbbfde33-a0b6-4403-8a1e-d688a0a7147b", "address": "fa:16:3e:47:7e:05", "network": {"id": "1e875730-8e0e-4907-a6d2-776025ed7ab9", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.114", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "a935e86eee0a4c38adfe0367d2097a61", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "32028d02-abaa-4071-bc65-1460f5c772a8", "external-id": "nsx-vlan-transportzone-558", "segmentation_id": 558, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcbbfde33-a0", "ovs_interfaceid": "cbbfde33-a0b6-4403-8a1e-d688a0a7147b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2071.688408] env[63279]: DEBUG oslo_vmware.api [None req-0abf34c8-85b0-43dd-ac7c-864a19d7c008 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Task: {'id': task-2087408, 'name': Destroy_Task, 'duration_secs': 0.578192} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2071.688408] env[63279]: INFO nova.virt.vmwareapi.vm_util [None req-0abf34c8-85b0-43dd-ac7c-864a19d7c008 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] [instance: 2f5e22f6-ba70-4848-965b-eb1553115323] Destroyed the VM [ 2071.688408] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-0abf34c8-85b0-43dd-ac7c-864a19d7c008 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] [instance: 2f5e22f6-ba70-4848-965b-eb1553115323] Deleting Snapshot of the VM instance {{(pid=63279) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 2071.688408] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-ae99b2af-7a88-4aa6-acc0-8220f8e6f4f4 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2071.694375] env[63279]: DEBUG oslo_vmware.api [None req-0abf34c8-85b0-43dd-ac7c-864a19d7c008 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Waiting for the task: (returnval){ [ 2071.694375] env[63279]: value = "task-2087411" [ 2071.694375] env[63279]: _type = "Task" [ 2071.694375] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2071.704721] env[63279]: DEBUG oslo_vmware.api [None req-0abf34c8-85b0-43dd-ac7c-864a19d7c008 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Task: {'id': task-2087411, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2071.725432] env[63279]: DEBUG oslo_vmware.api [None req-b0743caa-f310-40a7-b364-e4022c9249e0 tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] Task: {'id': task-2087410, 'name': Rename_Task, 'duration_secs': 0.141808} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2071.725760] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-b0743caa-f310-40a7-b364-e4022c9249e0 tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] [instance: df963c29-a1c4-4f28-be95-cafe3af4d2fa] Powering on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2071.726134] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-cb6cd68f-8e56-4cc9-bfff-5e3f0c86a9bb {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2071.737750] env[63279]: DEBUG oslo_vmware.api [None req-b0743caa-f310-40a7-b364-e4022c9249e0 tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] Waiting for the task: (returnval){ [ 2071.737750] env[63279]: value = "task-2087412" [ 2071.737750] env[63279]: _type = "Task" [ 2071.737750] env[63279]: } to complete. 
{{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2071.746667] env[63279]: DEBUG oslo_vmware.api [None req-b0743caa-f310-40a7-b364-e4022c9249e0 tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] Task: {'id': task-2087412, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2071.893895] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3eb1661-35cb-4050-b532-9abcf4d3fada {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2071.901783] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc22ebdb-ad8d-4055-9b4c-8d0cef84891f {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2071.934513] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a3eb9bd-8153-424f-9831-397bd2842adc {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2071.938192] env[63279]: DEBUG nova.compute.manager [None req-1005df31-c010-45b4-a217-c9e2af6f76aa tempest-ServerShowV254Test-1528218800 tempest-ServerShowV254Test-1528218800-project-member] [instance: 4871421f-0015-4973-bb5f-c9042d411c82] Start spawning the instance on the hypervisor. {{(pid=63279) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 2071.947513] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1a03ba7-abb2-445b-938d-769241e8f682 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2071.964216] env[63279]: DEBUG nova.compute.provider_tree [None req-d6ed46b7-baf3-4c58-92b0-8ca9f050dbbc tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Inventory has not changed in ProviderTree for provider: 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2071.974055] env[63279]: DEBUG nova.virt.hardware [None req-1005df31-c010-45b4-a217-c9e2af6f76aa tempest-ServerShowV254Test-1528218800 tempest-ServerShowV254Test-1528218800-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-13T17:30:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-13T17:30:01Z,direct_url=,disk_format='vmdk',id=30887889-e45b-4f67-8b3c-16216e594a90,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a935e86eee0a4c38adfe0367d2097a61',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-13T17:30:02Z,virtual_size=,visibility=), allow threads: False {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2071.974055] env[63279]: DEBUG nova.virt.hardware [None req-1005df31-c010-45b4-a217-c9e2af6f76aa tempest-ServerShowV254Test-1528218800 
tempest-ServerShowV254Test-1528218800-project-member] Flavor limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2071.974055] env[63279]: DEBUG nova.virt.hardware [None req-1005df31-c010-45b4-a217-c9e2af6f76aa tempest-ServerShowV254Test-1528218800 tempest-ServerShowV254Test-1528218800-project-member] Image limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2071.974347] env[63279]: DEBUG nova.virt.hardware [None req-1005df31-c010-45b4-a217-c9e2af6f76aa tempest-ServerShowV254Test-1528218800 tempest-ServerShowV254Test-1528218800-project-member] Flavor pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2071.974820] env[63279]: DEBUG nova.virt.hardware [None req-1005df31-c010-45b4-a217-c9e2af6f76aa tempest-ServerShowV254Test-1528218800 tempest-ServerShowV254Test-1528218800-project-member] Image pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2071.974820] env[63279]: DEBUG nova.virt.hardware [None req-1005df31-c010-45b4-a217-c9e2af6f76aa tempest-ServerShowV254Test-1528218800 tempest-ServerShowV254Test-1528218800-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2071.974933] env[63279]: DEBUG nova.virt.hardware [None req-1005df31-c010-45b4-a217-c9e2af6f76aa tempest-ServerShowV254Test-1528218800 tempest-ServerShowV254Test-1528218800-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 2071.975114] env[63279]: DEBUG nova.virt.hardware [None req-1005df31-c010-45b4-a217-c9e2af6f76aa tempest-ServerShowV254Test-1528218800 tempest-ServerShowV254Test-1528218800-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 2071.975523] env[63279]: DEBUG nova.virt.hardware [None req-1005df31-c010-45b4-a217-c9e2af6f76aa tempest-ServerShowV254Test-1528218800 tempest-ServerShowV254Test-1528218800-project-member] Got 1 possible topologies {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2071.975523] env[63279]: DEBUG nova.virt.hardware [None req-1005df31-c010-45b4-a217-c9e2af6f76aa tempest-ServerShowV254Test-1528218800 tempest-ServerShowV254Test-1528218800-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2071.975847] env[63279]: DEBUG nova.virt.hardware [None req-1005df31-c010-45b4-a217-c9e2af6f76aa tempest-ServerShowV254Test-1528218800 tempest-ServerShowV254Test-1528218800-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2071.976717] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb48acda-131a-4773-bd6f-2d02b15f3b92 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2071.986129] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-90725383-8762-49cd-a46c-98e00533d1e7 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2072.003696] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-1005df31-c010-45b4-a217-c9e2af6f76aa tempest-ServerShowV254Test-1528218800 tempest-ServerShowV254Test-1528218800-project-member] [instance: 4871421f-0015-4973-bb5f-c9042d411c82] Instance VIF info [] {{(pid=63279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2072.012171] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-1005df31-c010-45b4-a217-c9e2af6f76aa tempest-ServerShowV254Test-1528218800 tempest-ServerShowV254Test-1528218800-project-member] Creating folder: Project (fc6cd67350fa4186afda8409fb2a36ed). Parent ref: group-v427491. {{(pid=63279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 2072.012527] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d272b542-0e61-41cb-afce-a6e2b7c136c9 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2072.026496] env[63279]: INFO nova.virt.vmwareapi.vm_util [None req-1005df31-c010-45b4-a217-c9e2af6f76aa tempest-ServerShowV254Test-1528218800 tempest-ServerShowV254Test-1528218800-project-member] Created folder: Project (fc6cd67350fa4186afda8409fb2a36ed) in parent group-v427491. [ 2072.027527] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-1005df31-c010-45b4-a217-c9e2af6f76aa tempest-ServerShowV254Test-1528218800 tempest-ServerShowV254Test-1528218800-project-member] Creating folder: Instances. Parent ref: group-v427670. {{(pid=63279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 2072.027527] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5f9c7331-5eb7-412c-8320-4c990df95d2f {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2072.034534] env[63279]: DEBUG oslo_concurrency.lockutils [None req-c8d6a01e-b538-4983-b585-0874aad097ba tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Releasing lock "refresh_cache-f6a5d157-d58c-4c7e-b6e8-f2dc7aaebd9b" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2072.040333] env[63279]: INFO nova.virt.vmwareapi.vm_util [None req-1005df31-c010-45b4-a217-c9e2af6f76aa tempest-ServerShowV254Test-1528218800 tempest-ServerShowV254Test-1528218800-project-member] Created folder: Instances in parent group-v427670. [ 2072.040436] env[63279]: DEBUG oslo.service.loopingcall [None req-1005df31-c010-45b4-a217-c9e2af6f76aa tempest-ServerShowV254Test-1528218800 tempest-ServerShowV254Test-1528218800-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2072.040619] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4871421f-0015-4973-bb5f-c9042d411c82] Creating VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2072.040855] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-95790010-1e9f-403d-9210-b1ebf2385abe {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2072.058042] env[63279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2072.058042] env[63279]: value = "task-2087415" [ 2072.058042] env[63279]: _type = "Task" [ 2072.058042] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2072.065959] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087415, 'name': CreateVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2072.204330] env[63279]: DEBUG oslo_vmware.api [None req-0abf34c8-85b0-43dd-ac7c-864a19d7c008 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Task: {'id': task-2087411, 'name': RemoveSnapshot_Task} progress is 100%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2072.247679] env[63279]: DEBUG oslo_vmware.api [None req-b0743caa-f310-40a7-b364-e4022c9249e0 tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] Task: {'id': task-2087412, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2072.468793] env[63279]: DEBUG nova.scheduler.client.report [None req-d6ed46b7-baf3-4c58-92b0-8ca9f050dbbc tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Inventory has not changed for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2072.574892] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087415, 'name': CreateVM_Task, 'duration_secs': 0.318105} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2072.577932] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4871421f-0015-4973-bb5f-c9042d411c82] Created VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2072.577932] env[63279]: DEBUG oslo_concurrency.lockutils [None req-1005df31-c010-45b4-a217-c9e2af6f76aa tempest-ServerShowV254Test-1528218800 tempest-ServerShowV254Test-1528218800-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2072.577932] env[63279]: DEBUG oslo_concurrency.lockutils [None req-1005df31-c010-45b4-a217-c9e2af6f76aa tempest-ServerShowV254Test-1528218800 tempest-ServerShowV254Test-1528218800-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2072.577932] env[63279]: DEBUG oslo_concurrency.lockutils [None req-1005df31-c010-45b4-a217-c9e2af6f76aa tempest-ServerShowV254Test-1528218800 tempest-ServerShowV254Test-1528218800-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2072.577932] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cd0e757c-0f19-4efa-afc8-e9e60c178e3d {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2072.580214] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e53bd861-4927-4ee9-a13b-3ad41445dfd5 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2072.599196] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5945d09-e3ce-444e-97ec-e1bd8427bc96 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2072.602069] env[63279]: DEBUG oslo_vmware.api [None req-1005df31-c010-45b4-a217-c9e2af6f76aa tempest-ServerShowV254Test-1528218800 tempest-ServerShowV254Test-1528218800-project-member] Waiting for the task: (returnval){ [ 2072.602069] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52a7f1ab-ce78-c442-7267-c28d84b74361" [ 2072.602069] env[63279]: _type = "Task" [ 2072.602069] env[63279]: } to complete. 
{{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2072.607967] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-c8d6a01e-b538-4983-b585-0874aad097ba tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] [instance: f6a5d157-d58c-4c7e-b6e8-f2dc7aaebd9b] Updating instance 'f6a5d157-d58c-4c7e-b6e8-f2dc7aaebd9b' progress to 83 {{(pid=63279) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2072.616720] env[63279]: DEBUG oslo_vmware.api [None req-1005df31-c010-45b4-a217-c9e2af6f76aa tempest-ServerShowV254Test-1528218800 tempest-ServerShowV254Test-1528218800-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52a7f1ab-ce78-c442-7267-c28d84b74361, 'name': SearchDatastore_Task, 'duration_secs': 0.023638} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2072.617151] env[63279]: DEBUG oslo_concurrency.lockutils [None req-1005df31-c010-45b4-a217-c9e2af6f76aa tempest-ServerShowV254Test-1528218800 tempest-ServerShowV254Test-1528218800-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2072.617504] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-1005df31-c010-45b4-a217-c9e2af6f76aa tempest-ServerShowV254Test-1528218800 tempest-ServerShowV254Test-1528218800-project-member] [instance: 4871421f-0015-4973-bb5f-c9042d411c82] Processing image 30887889-e45b-4f67-8b3c-16216e594a90 {{(pid=63279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2072.617858] env[63279]: DEBUG oslo_concurrency.lockutils [None req-1005df31-c010-45b4-a217-c9e2af6f76aa tempest-ServerShowV254Test-1528218800 tempest-ServerShowV254Test-1528218800-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2072.618148] env[63279]: DEBUG oslo_concurrency.lockutils [None req-1005df31-c010-45b4-a217-c9e2af6f76aa tempest-ServerShowV254Test-1528218800 tempest-ServerShowV254Test-1528218800-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2072.618451] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-1005df31-c010-45b4-a217-c9e2af6f76aa tempest-ServerShowV254Test-1528218800 tempest-ServerShowV254Test-1528218800-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2072.618787] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-68b9ff66-a7e1-4fca-978c-bb37bef66bfd {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2072.630979] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-1005df31-c010-45b4-a217-c9e2af6f76aa tempest-ServerShowV254Test-1528218800 tempest-ServerShowV254Test-1528218800-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2072.631296] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-1005df31-c010-45b4-a217-c9e2af6f76aa tempest-ServerShowV254Test-1528218800 tempest-ServerShowV254Test-1528218800-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2072.632090] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6f35e94c-8cf7-41b7-ae80-a8537eae583e {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2072.636967] env[63279]: DEBUG oslo_vmware.api [None req-1005df31-c010-45b4-a217-c9e2af6f76aa tempest-ServerShowV254Test-1528218800 tempest-ServerShowV254Test-1528218800-project-member] Waiting for the task: (returnval){ [ 2072.636967] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]5267950a-b3cc-12f5-54a7-678d4b4482a6" [ 2072.636967] env[63279]: _type = "Task" [ 2072.636967] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2072.644219] env[63279]: DEBUG oslo_vmware.api [None req-1005df31-c010-45b4-a217-c9e2af6f76aa tempest-ServerShowV254Test-1528218800 tempest-ServerShowV254Test-1528218800-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]5267950a-b3cc-12f5-54a7-678d4b4482a6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2072.704024] env[63279]: DEBUG oslo_vmware.api [None req-0abf34c8-85b0-43dd-ac7c-864a19d7c008 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Task: {'id': task-2087411, 'name': RemoveSnapshot_Task, 'duration_secs': 0.566578} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2072.704937] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-0abf34c8-85b0-43dd-ac7c-864a19d7c008 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] [instance: 2f5e22f6-ba70-4848-965b-eb1553115323] Deleted Snapshot of the VM instance {{(pid=63279) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 2072.704937] env[63279]: DEBUG nova.compute.manager [None req-0abf34c8-85b0-43dd-ac7c-864a19d7c008 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] [instance: 2f5e22f6-ba70-4848-965b-eb1553115323] Checking state {{(pid=63279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2072.706078] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56954c03-fcfd-4fff-ab95-a245703c005e {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2072.748052] env[63279]: DEBUG oslo_vmware.api [None req-b0743caa-f310-40a7-b364-e4022c9249e0 tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] Task: {'id': task-2087412, 'name': PowerOnVM_Task, 'duration_secs': 0.581816} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2072.748696] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-b0743caa-f310-40a7-b364-e4022c9249e0 tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] [instance: df963c29-a1c4-4f28-be95-cafe3af4d2fa] Powered on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2072.748696] env[63279]: INFO nova.compute.manager [None req-b0743caa-f310-40a7-b364-e4022c9249e0 tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] [instance: df963c29-a1c4-4f28-be95-cafe3af4d2fa] Took 5.43 seconds to spawn the instance on the hypervisor. [ 2072.748828] env[63279]: DEBUG nova.compute.manager [None req-b0743caa-f310-40a7-b364-e4022c9249e0 tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] [instance: df963c29-a1c4-4f28-be95-cafe3af4d2fa] Checking state {{(pid=63279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2072.749650] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-553d56cf-54e6-4105-b09e-a51b33c5dcc3 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2072.974078] env[63279]: DEBUG oslo_concurrency.lockutils [None req-d6ed46b7-baf3-4c58-92b0-8ca9f050dbbc tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.048s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2072.976608] env[63279]: DEBUG oslo_concurrency.lockutils [None req-22a06ea0-118b-4a5a-86ce-f8246a2ea7e2 tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 41.504s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2072.976844] env[63279]: DEBUG nova.objects.instance [None req-22a06ea0-118b-4a5a-86ce-f8246a2ea7e2 tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] Lazy-loading 'resources' on Instance uuid 357f08c9-4de9-4b84-8384-6bf130872f40 {{(pid=63279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2073.003821] env[63279]: INFO nova.scheduler.client.report [None req-d6ed46b7-baf3-4c58-92b0-8ca9f050dbbc tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Deleted allocations for instance 8ccb4293-927a-45ba-82e9-9f1b4d5985cc [ 2073.114856] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-c8d6a01e-b538-4983-b585-0874aad097ba tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] [instance: f6a5d157-d58c-4c7e-b6e8-f2dc7aaebd9b] Powering on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2073.115180] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-20d58561-43b2-4097-b503-f3da1ca78d36 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2073.123533] 
env[63279]: DEBUG oslo_vmware.api [None req-c8d6a01e-b538-4983-b585-0874aad097ba tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Waiting for the task: (returnval){ [ 2073.123533] env[63279]: value = "task-2087416" [ 2073.123533] env[63279]: _type = "Task" [ 2073.123533] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2073.133180] env[63279]: DEBUG oslo_vmware.api [None req-c8d6a01e-b538-4983-b585-0874aad097ba tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Task: {'id': task-2087416, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2073.151389] env[63279]: DEBUG oslo_vmware.api [None req-1005df31-c010-45b4-a217-c9e2af6f76aa tempest-ServerShowV254Test-1528218800 tempest-ServerShowV254Test-1528218800-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]5267950a-b3cc-12f5-54a7-678d4b4482a6, 'name': SearchDatastore_Task, 'duration_secs': 0.017787} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2073.152811] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-02b73e64-904f-4079-8237-3f87eb5f1b6f {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2073.160694] env[63279]: DEBUG oslo_vmware.api [None req-1005df31-c010-45b4-a217-c9e2af6f76aa tempest-ServerShowV254Test-1528218800 tempest-ServerShowV254Test-1528218800-project-member] Waiting for the task: (returnval){ [ 2073.160694] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52d722de-e0c0-89c4-fd07-d08106419a81" [ 2073.160694] env[63279]: _type = "Task" [ 2073.160694] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2073.170549] env[63279]: DEBUG oslo_vmware.api [None req-1005df31-c010-45b4-a217-c9e2af6f76aa tempest-ServerShowV254Test-1528218800 tempest-ServerShowV254Test-1528218800-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52d722de-e0c0-89c4-fd07-d08106419a81, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2073.218968] env[63279]: INFO nova.compute.manager [None req-0abf34c8-85b0-43dd-ac7c-864a19d7c008 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] [instance: 2f5e22f6-ba70-4848-965b-eb1553115323] Shelve offloading [ 2073.276397] env[63279]: INFO nova.compute.manager [None req-b0743caa-f310-40a7-b364-e4022c9249e0 tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] [instance: df963c29-a1c4-4f28-be95-cafe3af4d2fa] Took 49.26 seconds to build instance. 
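The Rename_Task, PowerOnVM_Task, RemoveSnapshot_Task and CreateVM_Task cycles above all follow the same oslo.vmware pattern: a SOAP call returns a task reference immediately, and the session then polls it, producing the "Task: {...} progress is N%" lines until the task completes. A minimal sketch of that pattern, assuming an already-initialised oslo_vmware.api.VMwareAPISession named `session` and a hypothetical managed-object reference `vm_ref`; the helper name power_on_and_wait is illustrative, not part of Nova:

def power_on_and_wait(session, vm_ref):
    # invoke_api() issues the SOAP request (VirtualMachine.PowerOnVM_Task)
    # and returns a task reference without waiting for the power-on itself.
    task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
    # wait_for_task() polls the task at the session's task_poll_interval,
    # logging progress until the task reaches 'success' or raising on error.
    return session.wait_for_task(task)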
[ 2073.517968] env[63279]: DEBUG oslo_concurrency.lockutils [None req-d6ed46b7-baf3-4c58-92b0-8ca9f050dbbc tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Lock "8ccb4293-927a-45ba-82e9-9f1b4d5985cc" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 48.834s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2073.636131] env[63279]: DEBUG oslo_vmware.api [None req-c8d6a01e-b538-4983-b585-0874aad097ba tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Task: {'id': task-2087416, 'name': PowerOnVM_Task} progress is 100%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2073.671909] env[63279]: DEBUG oslo_vmware.api [None req-1005df31-c010-45b4-a217-c9e2af6f76aa tempest-ServerShowV254Test-1528218800 tempest-ServerShowV254Test-1528218800-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52d722de-e0c0-89c4-fd07-d08106419a81, 'name': SearchDatastore_Task, 'duration_secs': 0.011988} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2073.674701] env[63279]: DEBUG oslo_concurrency.lockutils [None req-1005df31-c010-45b4-a217-c9e2af6f76aa tempest-ServerShowV254Test-1528218800 tempest-ServerShowV254Test-1528218800-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2073.675040] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-1005df31-c010-45b4-a217-c9e2af6f76aa tempest-ServerShowV254Test-1528218800 tempest-ServerShowV254Test-1528218800-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] 4871421f-0015-4973-bb5f-c9042d411c82/4871421f-0015-4973-bb5f-c9042d411c82.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2073.675498] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0b810605-356b-4ea2-b053-64c71f530d09 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2073.682543] env[63279]: DEBUG oslo_vmware.api [None req-1005df31-c010-45b4-a217-c9e2af6f76aa tempest-ServerShowV254Test-1528218800 tempest-ServerShowV254Test-1528218800-project-member] Waiting for the task: (returnval){ [ 2073.682543] env[63279]: value = "task-2087417" [ 2073.682543] env[63279]: _type = "Task" [ 2073.682543] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2073.695068] env[63279]: DEBUG oslo_vmware.api [None req-1005df31-c010-45b4-a217-c9e2af6f76aa tempest-ServerShowV254Test-1528218800 tempest-ServerShowV254Test-1528218800-project-member] Task: {'id': task-2087417, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2073.723780] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-0abf34c8-85b0-43dd-ac7c-864a19d7c008 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] [instance: 2f5e22f6-ba70-4848-965b-eb1553115323] Powering off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2073.723780] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-eebe93e4-5d0a-4192-ba63-0e94bc02f268 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2073.730021] env[63279]: DEBUG oslo_vmware.api [None req-0abf34c8-85b0-43dd-ac7c-864a19d7c008 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Waiting for the task: (returnval){ [ 2073.730021] env[63279]: value = "task-2087418" [ 2073.730021] env[63279]: _type = "Task" [ 2073.730021] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2073.741403] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-0abf34c8-85b0-43dd-ac7c-864a19d7c008 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] [instance: 2f5e22f6-ba70-4848-965b-eb1553115323] VM already powered off {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 2073.741614] env[63279]: DEBUG nova.compute.manager [None req-0abf34c8-85b0-43dd-ac7c-864a19d7c008 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] [instance: 2f5e22f6-ba70-4848-965b-eb1553115323] Checking state {{(pid=63279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2073.742506] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2a4b95e-e102-4d5a-ab73-af35bb141faa {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2073.750806] env[63279]: DEBUG oslo_concurrency.lockutils [None req-0abf34c8-85b0-43dd-ac7c-864a19d7c008 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Acquiring lock "refresh_cache-2f5e22f6-ba70-4848-965b-eb1553115323" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2073.750951] env[63279]: DEBUG oslo_concurrency.lockutils [None req-0abf34c8-85b0-43dd-ac7c-864a19d7c008 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Acquired lock "refresh_cache-2f5e22f6-ba70-4848-965b-eb1553115323" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2073.751209] env[63279]: DEBUG nova.network.neutron [None req-0abf34c8-85b0-43dd-ac7c-864a19d7c008 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] [instance: 2f5e22f6-ba70-4848-965b-eb1553115323] Building network info cache for instance {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2073.782845] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b0743caa-f310-40a7-b364-e4022c9249e0 tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] Lock 
"df963c29-a1c4-4f28-be95-cafe3af4d2fa" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 68.079s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2073.992453] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9341a46f-e9cc-4b3e-a8da-c0460e7a4deb {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2074.003277] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f36fff9-2d82-4cb4-9d9e-44b4da51ec5a {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2074.039403] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b06387b-e046-40d8-a89c-5aba3369a5b9 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2074.050591] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-afa907c9-f553-4650-84ad-e99239a46fdf {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2074.068831] env[63279]: DEBUG nova.compute.provider_tree [None req-22a06ea0-118b-4a5a-86ce-f8246a2ea7e2 tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] Inventory has not changed in ProviderTree for provider: 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2074.143219] env[63279]: DEBUG oslo_vmware.api [None req-c8d6a01e-b538-4983-b585-0874aad097ba tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Task: {'id': task-2087416, 'name': PowerOnVM_Task, 'duration_secs': 0.538877} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2074.143219] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-c8d6a01e-b538-4983-b585-0874aad097ba tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] [instance: f6a5d157-d58c-4c7e-b6e8-f2dc7aaebd9b] Powered on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2074.143388] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-c8d6a01e-b538-4983-b585-0874aad097ba tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] [instance: f6a5d157-d58c-4c7e-b6e8-f2dc7aaebd9b] Updating instance 'f6a5d157-d58c-4c7e-b6e8-f2dc7aaebd9b' progress to 100 {{(pid=63279) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2074.192761] env[63279]: DEBUG oslo_vmware.api [None req-1005df31-c010-45b4-a217-c9e2af6f76aa tempest-ServerShowV254Test-1528218800 tempest-ServerShowV254Test-1528218800-project-member] Task: {'id': task-2087417, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2074.394501] env[63279]: INFO nova.compute.manager [None req-c861fd4b-0567-45ed-bffa-9cb9322793d2 tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] [instance: df963c29-a1c4-4f28-be95-cafe3af4d2fa] Rebuilding instance [ 2074.454872] env[63279]: DEBUG nova.compute.manager [None req-c861fd4b-0567-45ed-bffa-9cb9322793d2 tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] [instance: df963c29-a1c4-4f28-be95-cafe3af4d2fa] Checking state {{(pid=63279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2074.455915] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd3e933f-e3ed-43c9-835e-025a11b6ec9b {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2074.574233] env[63279]: DEBUG nova.scheduler.client.report [None req-22a06ea0-118b-4a5a-86ce-f8246a2ea7e2 tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] Inventory has not changed for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2074.610546] env[63279]: DEBUG oslo_concurrency.lockutils [None req-01b37c68-092e-4554-92a7-7422378b5349 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Acquiring lock "668436f9-94e9-48c2-a3d4-3df7bbcf5775" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2074.610546] env[63279]: DEBUG oslo_concurrency.lockutils [None req-01b37c68-092e-4554-92a7-7422378b5349 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Lock "668436f9-94e9-48c2-a3d4-3df7bbcf5775" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2074.613111] env[63279]: DEBUG nova.network.neutron [None req-0abf34c8-85b0-43dd-ac7c-864a19d7c008 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] [instance: 2f5e22f6-ba70-4848-965b-eb1553115323] Updating instance_info_cache with network_info: [{"id": "8383ed07-21e7-43e0-82a2-0afc179ca995", "address": "fa:16:3e:b6:12:ab", "network": {"id": "a7b83e75-3b16-41db-9395-90dead128e80", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-756195345-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": 
"192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0fd8bdc7d8664db698236f56d82adcf0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "53955f0e-c162-4cef-8bd5-335b369c36b6", "external-id": "nsx-vlan-transportzone-623", "segmentation_id": 623, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8383ed07-21", "ovs_interfaceid": "8383ed07-21e7-43e0-82a2-0afc179ca995", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2074.695524] env[63279]: DEBUG oslo_vmware.api [None req-1005df31-c010-45b4-a217-c9e2af6f76aa tempest-ServerShowV254Test-1528218800 tempest-ServerShowV254Test-1528218800-project-member] Task: {'id': task-2087417, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.947134} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2074.698537] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-1005df31-c010-45b4-a217-c9e2af6f76aa tempest-ServerShowV254Test-1528218800 tempest-ServerShowV254Test-1528218800-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] 4871421f-0015-4973-bb5f-c9042d411c82/4871421f-0015-4973-bb5f-c9042d411c82.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2074.698537] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-1005df31-c010-45b4-a217-c9e2af6f76aa tempest-ServerShowV254Test-1528218800 tempest-ServerShowV254Test-1528218800-project-member] [instance: 4871421f-0015-4973-bb5f-c9042d411c82] Extending root virtual disk to 1048576 {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2074.698537] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d0ee7629-81cb-4568-b597-a4222a617e93 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2074.707158] env[63279]: DEBUG oslo_vmware.api [None req-1005df31-c010-45b4-a217-c9e2af6f76aa tempest-ServerShowV254Test-1528218800 tempest-ServerShowV254Test-1528218800-project-member] Waiting for the task: (returnval){ [ 2074.707158] env[63279]: value = "task-2087419" [ 2074.707158] env[63279]: _type = "Task" [ 2074.707158] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2074.716543] env[63279]: DEBUG oslo_vmware.api [None req-1005df31-c010-45b4-a217-c9e2af6f76aa tempest-ServerShowV254Test-1528218800 tempest-ServerShowV254Test-1528218800-project-member] Task: {'id': task-2087419, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2075.080181] env[63279]: DEBUG oslo_concurrency.lockutils [None req-22a06ea0-118b-4a5a-86ce-f8246a2ea7e2 tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.103s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2075.083425] env[63279]: DEBUG oslo_concurrency.lockutils [None req-09490af0-3c5b-4db1-a4ea-0615d5cbf91e tempest-ServerAddressesTestJSON-1242830333 tempest-ServerAddressesTestJSON-1242830333-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 39.280s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2075.085156] env[63279]: INFO nova.compute.claims [None req-09490af0-3c5b-4db1-a4ea-0615d5cbf91e tempest-ServerAddressesTestJSON-1242830333 tempest-ServerAddressesTestJSON-1242830333-project-member] [instance: 246f0945-7290-4cb7-a982-b17cb1573002] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2075.104192] env[63279]: INFO nova.scheduler.client.report [None req-22a06ea0-118b-4a5a-86ce-f8246a2ea7e2 tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] Deleted allocations for instance 357f08c9-4de9-4b84-8384-6bf130872f40 [ 2075.113625] env[63279]: DEBUG nova.compute.manager [None req-01b37c68-092e-4554-92a7-7422378b5349 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] [instance: 668436f9-94e9-48c2-a3d4-3df7bbcf5775] Starting instance... {{(pid=63279) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 2075.116720] env[63279]: DEBUG oslo_concurrency.lockutils [None req-0abf34c8-85b0-43dd-ac7c-864a19d7c008 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Releasing lock "refresh_cache-2f5e22f6-ba70-4848-965b-eb1553115323" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2075.216683] env[63279]: DEBUG oslo_vmware.api [None req-1005df31-c010-45b4-a217-c9e2af6f76aa tempest-ServerShowV254Test-1528218800 tempest-ServerShowV254Test-1528218800-project-member] Task: {'id': task-2087419, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.071028} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2075.216995] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-1005df31-c010-45b4-a217-c9e2af6f76aa tempest-ServerShowV254Test-1528218800 tempest-ServerShowV254Test-1528218800-project-member] [instance: 4871421f-0015-4973-bb5f-c9042d411c82] Extended root virtual disk {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2075.218352] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7422f78b-c359-453b-8f73-acdd88dcfc70 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2075.238912] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-1005df31-c010-45b4-a217-c9e2af6f76aa tempest-ServerShowV254Test-1528218800 tempest-ServerShowV254Test-1528218800-project-member] [instance: 4871421f-0015-4973-bb5f-c9042d411c82] Reconfiguring VM instance instance-00000042 to attach disk [datastore1] 4871421f-0015-4973-bb5f-c9042d411c82/4871421f-0015-4973-bb5f-c9042d411c82.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2075.241705] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5cec4bd5-8c76-4495-b0de-0fbb47d14996 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2075.262404] env[63279]: DEBUG oslo_vmware.api [None req-1005df31-c010-45b4-a217-c9e2af6f76aa tempest-ServerShowV254Test-1528218800 tempest-ServerShowV254Test-1528218800-project-member] Waiting for the task: (returnval){ [ 2075.262404] env[63279]: value = "task-2087420" [ 2075.262404] env[63279]: _type = "Task" [ 2075.262404] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2075.276929] env[63279]: DEBUG oslo_vmware.api [None req-1005df31-c010-45b4-a217-c9e2af6f76aa tempest-ServerShowV254Test-1528218800 tempest-ServerShowV254Test-1528218800-project-member] Task: {'id': task-2087420, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2075.419507] env[63279]: DEBUG oslo_concurrency.lockutils [None req-934ceb37-5239-47c1-8678-9d43df040bef tempest-ServerMetadataTestJSON-1974700823 tempest-ServerMetadataTestJSON-1974700823-project-member] Acquiring lock "a0697601-46ae-48ce-a3e1-3c4b81fc1f95" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2075.420398] env[63279]: DEBUG oslo_concurrency.lockutils [None req-934ceb37-5239-47c1-8678-9d43df040bef tempest-ServerMetadataTestJSON-1974700823 tempest-ServerMetadataTestJSON-1974700823-project-member] Lock "a0697601-46ae-48ce-a3e1-3c4b81fc1f95" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2075.471750] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-c861fd4b-0567-45ed-bffa-9cb9322793d2 tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] [instance: df963c29-a1c4-4f28-be95-cafe3af4d2fa] Powering off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2075.471750] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0c854ce1-8216-42db-9d48-bc63cd800c2c {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2075.480072] env[63279]: DEBUG oslo_vmware.api [None req-c861fd4b-0567-45ed-bffa-9cb9322793d2 tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] Waiting for the task: (returnval){ [ 2075.480072] env[63279]: value = "task-2087421" [ 2075.480072] env[63279]: _type = "Task" [ 2075.480072] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2075.491323] env[63279]: DEBUG oslo_vmware.api [None req-c861fd4b-0567-45ed-bffa-9cb9322793d2 tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] Task: {'id': task-2087421, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2075.514603] env[63279]: DEBUG nova.compute.manager [req-e53addeb-7a5a-4b2a-81c5-d5e7f2650d0a req-69fc70d6-a157-4e51-9213-f442153826f9 service nova] [instance: 2f5e22f6-ba70-4848-965b-eb1553115323] Received event network-vif-unplugged-8383ed07-21e7-43e0-82a2-0afc179ca995 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2075.514835] env[63279]: DEBUG oslo_concurrency.lockutils [req-e53addeb-7a5a-4b2a-81c5-d5e7f2650d0a req-69fc70d6-a157-4e51-9213-f442153826f9 service nova] Acquiring lock "2f5e22f6-ba70-4848-965b-eb1553115323-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2075.515613] env[63279]: DEBUG oslo_concurrency.lockutils [req-e53addeb-7a5a-4b2a-81c5-d5e7f2650d0a req-69fc70d6-a157-4e51-9213-f442153826f9 service nova] Lock "2f5e22f6-ba70-4848-965b-eb1553115323-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2075.519076] env[63279]: DEBUG oslo_concurrency.lockutils [req-e53addeb-7a5a-4b2a-81c5-d5e7f2650d0a req-69fc70d6-a157-4e51-9213-f442153826f9 service nova] Lock "2f5e22f6-ba70-4848-965b-eb1553115323-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.001s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2075.519076] env[63279]: DEBUG nova.compute.manager [req-e53addeb-7a5a-4b2a-81c5-d5e7f2650d0a req-69fc70d6-a157-4e51-9213-f442153826f9 service nova] [instance: 2f5e22f6-ba70-4848-965b-eb1553115323] No waiting events found dispatching network-vif-unplugged-8383ed07-21e7-43e0-82a2-0afc179ca995 {{(pid=63279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 2075.519076] env[63279]: WARNING nova.compute.manager [req-e53addeb-7a5a-4b2a-81c5-d5e7f2650d0a req-69fc70d6-a157-4e51-9213-f442153826f9 service nova] [instance: 2f5e22f6-ba70-4848-965b-eb1553115323] Received unexpected event network-vif-unplugged-8383ed07-21e7-43e0-82a2-0afc179ca995 for instance with vm_state shelved and task_state shelving_offloading. 
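The lock instrumentation in this stretch of the log ("Acquiring lock ...", "Lock ... acquired by ... :: waited N.NNNs", '"released" ... :: held N.NNNs') is emitted by oslo_concurrency.lockutils rather than by Nova itself. A minimal sketch of the two call styles that produce those lines, assuming illustrative function bodies and lock-name formatting:

from oslo_concurrency import lockutils

# Decorator form: logs 'acquired by "<function>" :: waited Ns' on entry and
# '"released" by "<function>" :: held Ns' when the decorated call returns.
@lockutils.synchronized('compute_resources')
def update_usage():
    pass

# Context-manager form: logs 'Acquiring lock ...', 'Acquired lock ...' and
# 'Releasing lock ...' around the protected block.
def refresh_network_cache(instance_uuid):
    with lockutils.lock('refresh_cache-%s' % instance_uuid):
        pass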
[ 2075.541337] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-0abf34c8-85b0-43dd-ac7c-864a19d7c008 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] [instance: 2f5e22f6-ba70-4848-965b-eb1553115323] Destroying instance {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2075.542498] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eeb35ea9-d297-4d88-aadf-a2264fdbc491 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2075.550951] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-0abf34c8-85b0-43dd-ac7c-864a19d7c008 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] [instance: 2f5e22f6-ba70-4848-965b-eb1553115323] Unregistering the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2075.551277] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f0ab9c13-8a94-4b69-bf8b-4d1fc69f64d6 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2075.611853] env[63279]: DEBUG oslo_concurrency.lockutils [None req-22a06ea0-118b-4a5a-86ce-f8246a2ea7e2 tempest-FloatingIPsAssociationTestJSON-1108307968 tempest-FloatingIPsAssociationTestJSON-1108307968-project-member] Lock "357f08c9-4de9-4b84-8384-6bf130872f40" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 47.652s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2075.643354] env[63279]: DEBUG oslo_concurrency.lockutils [None req-01b37c68-092e-4554-92a7-7422378b5349 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2075.686163] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-0abf34c8-85b0-43dd-ac7c-864a19d7c008 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] [instance: 2f5e22f6-ba70-4848-965b-eb1553115323] Unregistered the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2075.686163] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-0abf34c8-85b0-43dd-ac7c-864a19d7c008 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] [instance: 2f5e22f6-ba70-4848-965b-eb1553115323] Deleting contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2075.686163] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-0abf34c8-85b0-43dd-ac7c-864a19d7c008 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Deleting the datastore file [datastore1] 2f5e22f6-ba70-4848-965b-eb1553115323 {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2075.686619] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b3cad0e6-dba5-40da-92fc-05e1bf6f13c8 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} 
[ 2075.695145] env[63279]: DEBUG oslo_vmware.api [None req-0abf34c8-85b0-43dd-ac7c-864a19d7c008 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Waiting for the task: (returnval){ [ 2075.695145] env[63279]: value = "task-2087423" [ 2075.695145] env[63279]: _type = "Task" [ 2075.695145] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2075.705519] env[63279]: DEBUG oslo_vmware.api [None req-0abf34c8-85b0-43dd-ac7c-864a19d7c008 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Task: {'id': task-2087423, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2075.773060] env[63279]: DEBUG oslo_vmware.api [None req-1005df31-c010-45b4-a217-c9e2af6f76aa tempest-ServerShowV254Test-1528218800 tempest-ServerShowV254Test-1528218800-project-member] Task: {'id': task-2087420, 'name': ReconfigVM_Task} progress is 99%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2075.922598] env[63279]: DEBUG nova.compute.manager [None req-934ceb37-5239-47c1-8678-9d43df040bef tempest-ServerMetadataTestJSON-1974700823 tempest-ServerMetadataTestJSON-1974700823-project-member] [instance: a0697601-46ae-48ce-a3e1-3c4b81fc1f95] Starting instance... {{(pid=63279) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 2075.991885] env[63279]: DEBUG oslo_vmware.api [None req-c861fd4b-0567-45ed-bffa-9cb9322793d2 tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] Task: {'id': task-2087421, 'name': PowerOffVM_Task, 'duration_secs': 0.299873} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2075.993070] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-c861fd4b-0567-45ed-bffa-9cb9322793d2 tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] [instance: df963c29-a1c4-4f28-be95-cafe3af4d2fa] Powered off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2075.993358] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-c861fd4b-0567-45ed-bffa-9cb9322793d2 tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] [instance: df963c29-a1c4-4f28-be95-cafe3af4d2fa] Destroying instance {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2075.994142] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29f3f140-0732-4f09-adc1-c488178ec842 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2076.001313] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-c861fd4b-0567-45ed-bffa-9cb9322793d2 tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] [instance: df963c29-a1c4-4f28-be95-cafe3af4d2fa] Unregistering the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2076.001690] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6ea2e8ce-d198-42d1-b9ee-e64928def37a {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2076.026009] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-c861fd4b-0567-45ed-bffa-9cb9322793d2 tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] [instance: df963c29-a1c4-4f28-be95-cafe3af4d2fa] Unregistered the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2076.026250] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-c861fd4b-0567-45ed-bffa-9cb9322793d2 tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] [instance: df963c29-a1c4-4f28-be95-cafe3af4d2fa] Deleting contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2076.026452] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-c861fd4b-0567-45ed-bffa-9cb9322793d2 tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] Deleting the datastore file [datastore1] df963c29-a1c4-4f28-be95-cafe3af4d2fa {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2076.026718] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-37ab8b8a-de28-4903-bcd1-335898d09224 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2076.034895] env[63279]: DEBUG oslo_vmware.api [None req-c861fd4b-0567-45ed-bffa-9cb9322793d2 tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] Waiting for the task: (returnval){ [ 2076.034895] env[63279]: value = "task-2087425" [ 2076.034895] env[63279]: _type = "Task" [ 2076.034895] env[63279]: } to complete. 
{{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2076.048622] env[63279]: DEBUG oslo_vmware.api [None req-c861fd4b-0567-45ed-bffa-9cb9322793d2 tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] Task: {'id': task-2087425, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2076.215973] env[63279]: DEBUG oslo_vmware.api [None req-0abf34c8-85b0-43dd-ac7c-864a19d7c008 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Task: {'id': task-2087423, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.183788} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2076.215973] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-0abf34c8-85b0-43dd-ac7c-864a19d7c008 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Deleted the datastore file {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2076.217026] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-0abf34c8-85b0-43dd-ac7c-864a19d7c008 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] [instance: 2f5e22f6-ba70-4848-965b-eb1553115323] Deleted contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2076.217283] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-0abf34c8-85b0-43dd-ac7c-864a19d7c008 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] [instance: 2f5e22f6-ba70-4848-965b-eb1553115323] Instance destroyed {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2076.243243] env[63279]: INFO nova.scheduler.client.report [None req-0abf34c8-85b0-43dd-ac7c-864a19d7c008 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Deleted allocations for instance 2f5e22f6-ba70-4848-965b-eb1553115323 [ 2076.279305] env[63279]: DEBUG oslo_vmware.api [None req-1005df31-c010-45b4-a217-c9e2af6f76aa tempest-ServerShowV254Test-1528218800 tempest-ServerShowV254Test-1528218800-project-member] Task: {'id': task-2087420, 'name': ReconfigVM_Task, 'duration_secs': 0.559071} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2076.279305] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-1005df31-c010-45b4-a217-c9e2af6f76aa tempest-ServerShowV254Test-1528218800 tempest-ServerShowV254Test-1528218800-project-member] [instance: 4871421f-0015-4973-bb5f-c9042d411c82] Reconfigured VM instance instance-00000042 to attach disk [datastore1] 4871421f-0015-4973-bb5f-c9042d411c82/4871421f-0015-4973-bb5f-c9042d411c82.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2076.279305] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-7bd4e18f-c93a-4567-854e-ddadf8694209 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2076.292756] env[63279]: DEBUG oslo_vmware.api [None req-1005df31-c010-45b4-a217-c9e2af6f76aa tempest-ServerShowV254Test-1528218800 tempest-ServerShowV254Test-1528218800-project-member] Waiting for the task: (returnval){ [ 2076.292756] env[63279]: value = "task-2087426" [ 2076.292756] env[63279]: _type = "Task" [ 2076.292756] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2076.303877] env[63279]: DEBUG oslo_vmware.api [None req-1005df31-c010-45b4-a217-c9e2af6f76aa tempest-ServerShowV254Test-1528218800 tempest-ServerShowV254Test-1528218800-project-member] Task: {'id': task-2087426, 'name': Rename_Task} progress is 5%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2076.447407] env[63279]: DEBUG oslo_concurrency.lockutils [None req-934ceb37-5239-47c1-8678-9d43df040bef tempest-ServerMetadataTestJSON-1974700823 tempest-ServerMetadataTestJSON-1974700823-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2076.546284] env[63279]: DEBUG oslo_vmware.api [None req-c861fd4b-0567-45ed-bffa-9cb9322793d2 tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] Task: {'id': task-2087425, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.096597} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2076.548599] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-c861fd4b-0567-45ed-bffa-9cb9322793d2 tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] Deleted the datastore file {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2076.548801] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-c861fd4b-0567-45ed-bffa-9cb9322793d2 tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] [instance: df963c29-a1c4-4f28-be95-cafe3af4d2fa] Deleted contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2076.548982] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-c861fd4b-0567-45ed-bffa-9cb9322793d2 tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] [instance: df963c29-a1c4-4f28-be95-cafe3af4d2fa] Instance destroyed {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2076.592985] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b72f1212-b2e8-4353-ac8b-5230a3dd0b62 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2076.605148] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef97723d-9c6c-427b-81f6-bc5f7f5ae88d {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2076.638477] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2de0719f-6cca-4f59-86a2-63592a9cee2d {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2076.645625] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e75f19c5-7d62-4474-b4d0-3827cb22f78f {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2076.649966] env[63279]: DEBUG nova.network.neutron [None req-d227dc54-bb84-4c5b-a990-c7df7fd31923 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] [instance: f6a5d157-d58c-4c7e-b6e8-f2dc7aaebd9b] Port cbbfde33-a0b6-4403-8a1e-d688a0a7147b binding to destination host cpu-1 is already ACTIVE {{(pid=63279) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 2076.650246] env[63279]: DEBUG oslo_concurrency.lockutils [None req-d227dc54-bb84-4c5b-a990-c7df7fd31923 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Acquiring lock "refresh_cache-f6a5d157-d58c-4c7e-b6e8-f2dc7aaebd9b" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2076.650411] env[63279]: DEBUG oslo_concurrency.lockutils [None req-d227dc54-bb84-4c5b-a990-c7df7fd31923 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Acquired lock "refresh_cache-f6a5d157-d58c-4c7e-b6e8-f2dc7aaebd9b" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2076.650578] env[63279]: DEBUG nova.network.neutron [None req-d227dc54-bb84-4c5b-a990-c7df7fd31923 tempest-MigrationsAdminTest-1378842037 
tempest-MigrationsAdminTest-1378842037-project-member] [instance: f6a5d157-d58c-4c7e-b6e8-f2dc7aaebd9b] Building network info cache for instance {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2076.662447] env[63279]: DEBUG nova.compute.provider_tree [None req-09490af0-3c5b-4db1-a4ea-0615d5cbf91e tempest-ServerAddressesTestJSON-1242830333 tempest-ServerAddressesTestJSON-1242830333-project-member] Inventory has not changed in ProviderTree for provider: 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2076.750544] env[63279]: DEBUG oslo_concurrency.lockutils [None req-0abf34c8-85b0-43dd-ac7c-864a19d7c008 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2076.805153] env[63279]: DEBUG oslo_vmware.api [None req-1005df31-c010-45b4-a217-c9e2af6f76aa tempest-ServerShowV254Test-1528218800 tempest-ServerShowV254Test-1528218800-project-member] Task: {'id': task-2087426, 'name': Rename_Task, 'duration_secs': 0.144156} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2076.805153] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-1005df31-c010-45b4-a217-c9e2af6f76aa tempest-ServerShowV254Test-1528218800 tempest-ServerShowV254Test-1528218800-project-member] [instance: 4871421f-0015-4973-bb5f-c9042d411c82] Powering on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2076.805267] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-780244a8-bc65-4fc0-a59c-bb593dfef284 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2076.817364] env[63279]: DEBUG oslo_vmware.api [None req-1005df31-c010-45b4-a217-c9e2af6f76aa tempest-ServerShowV254Test-1528218800 tempest-ServerShowV254Test-1528218800-project-member] Waiting for the task: (returnval){ [ 2076.817364] env[63279]: value = "task-2087427" [ 2076.817364] env[63279]: _type = "Task" [ 2076.817364] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2076.830430] env[63279]: DEBUG oslo_vmware.api [None req-1005df31-c010-45b4-a217-c9e2af6f76aa tempest-ServerShowV254Test-1528218800 tempest-ServerShowV254Test-1528218800-project-member] Task: {'id': task-2087427, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2077.165628] env[63279]: DEBUG nova.scheduler.client.report [None req-09490af0-3c5b-4db1-a4ea-0615d5cbf91e tempest-ServerAddressesTestJSON-1242830333 tempest-ServerAddressesTestJSON-1242830333-project-member] Inventory has not changed for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2077.336325] env[63279]: DEBUG oslo_vmware.api [None req-1005df31-c010-45b4-a217-c9e2af6f76aa tempest-ServerShowV254Test-1528218800 tempest-ServerShowV254Test-1528218800-project-member] Task: {'id': task-2087427, 'name': PowerOnVM_Task, 'duration_secs': 0.407865} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2077.337137] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-1005df31-c010-45b4-a217-c9e2af6f76aa tempest-ServerShowV254Test-1528218800 tempest-ServerShowV254Test-1528218800-project-member] [instance: 4871421f-0015-4973-bb5f-c9042d411c82] Powered on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2077.337137] env[63279]: INFO nova.compute.manager [None req-1005df31-c010-45b4-a217-c9e2af6f76aa tempest-ServerShowV254Test-1528218800 tempest-ServerShowV254Test-1528218800-project-member] [instance: 4871421f-0015-4973-bb5f-c9042d411c82] Took 5.40 seconds to spawn the instance on the hypervisor. 
[ 2077.337137] env[63279]: DEBUG nova.compute.manager [None req-1005df31-c010-45b4-a217-c9e2af6f76aa tempest-ServerShowV254Test-1528218800 tempest-ServerShowV254Test-1528218800-project-member] [instance: 4871421f-0015-4973-bb5f-c9042d411c82] Checking state {{(pid=63279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2077.337918] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4a81f4c-6c63-47be-ac8a-cec8022aeee6 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2077.511748] env[63279]: DEBUG nova.network.neutron [None req-d227dc54-bb84-4c5b-a990-c7df7fd31923 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] [instance: f6a5d157-d58c-4c7e-b6e8-f2dc7aaebd9b] Updating instance_info_cache with network_info: [{"id": "cbbfde33-a0b6-4403-8a1e-d688a0a7147b", "address": "fa:16:3e:47:7e:05", "network": {"id": "1e875730-8e0e-4907-a6d2-776025ed7ab9", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.114", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "a935e86eee0a4c38adfe0367d2097a61", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "32028d02-abaa-4071-bc65-1460f5c772a8", "external-id": "nsx-vlan-transportzone-558", "segmentation_id": 558, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcbbfde33-a0", "ovs_interfaceid": "cbbfde33-a0b6-4403-8a1e-d688a0a7147b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2077.542586] env[63279]: DEBUG nova.compute.manager [req-b5709274-9685-4065-82ca-b9606e900a9c req-8344359b-c3a7-4a62-8f1e-9b9b4cfa9723 service nova] [instance: 2f5e22f6-ba70-4848-965b-eb1553115323] Received event network-changed-8383ed07-21e7-43e0-82a2-0afc179ca995 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2077.542749] env[63279]: DEBUG nova.compute.manager [req-b5709274-9685-4065-82ca-b9606e900a9c req-8344359b-c3a7-4a62-8f1e-9b9b4cfa9723 service nova] [instance: 2f5e22f6-ba70-4848-965b-eb1553115323] Refreshing instance network info cache due to event network-changed-8383ed07-21e7-43e0-82a2-0afc179ca995. 
{{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11655}} [ 2077.542984] env[63279]: DEBUG oslo_concurrency.lockutils [req-b5709274-9685-4065-82ca-b9606e900a9c req-8344359b-c3a7-4a62-8f1e-9b9b4cfa9723 service nova] Acquiring lock "refresh_cache-2f5e22f6-ba70-4848-965b-eb1553115323" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2077.543154] env[63279]: DEBUG oslo_concurrency.lockutils [req-b5709274-9685-4065-82ca-b9606e900a9c req-8344359b-c3a7-4a62-8f1e-9b9b4cfa9723 service nova] Acquired lock "refresh_cache-2f5e22f6-ba70-4848-965b-eb1553115323" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2077.543321] env[63279]: DEBUG nova.network.neutron [req-b5709274-9685-4065-82ca-b9606e900a9c req-8344359b-c3a7-4a62-8f1e-9b9b4cfa9723 service nova] [instance: 2f5e22f6-ba70-4848-965b-eb1553115323] Refreshing network info cache for port 8383ed07-21e7-43e0-82a2-0afc179ca995 {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2077.586098] env[63279]: DEBUG nova.virt.hardware [None req-c861fd4b-0567-45ed-bffa-9cb9322793d2 tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-13T17:30:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-13T17:30:01Z,direct_url=,disk_format='vmdk',id=30887889-e45b-4f67-8b3c-16216e594a90,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a935e86eee0a4c38adfe0367d2097a61',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-13T17:30:02Z,virtual_size=,visibility=), allow threads: False {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2077.586373] env[63279]: DEBUG nova.virt.hardware [None req-c861fd4b-0567-45ed-bffa-9cb9322793d2 tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] Flavor limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2077.586549] env[63279]: DEBUG nova.virt.hardware [None req-c861fd4b-0567-45ed-bffa-9cb9322793d2 tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] Image limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2077.586741] env[63279]: DEBUG nova.virt.hardware [None req-c861fd4b-0567-45ed-bffa-9cb9322793d2 tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] Flavor pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2077.586895] env[63279]: DEBUG nova.virt.hardware [None req-c861fd4b-0567-45ed-bffa-9cb9322793d2 tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] Image pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2077.587069] env[63279]: DEBUG nova.virt.hardware [None req-c861fd4b-0567-45ed-bffa-9cb9322793d2 tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] Chose 
sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2077.587310] env[63279]: DEBUG nova.virt.hardware [None req-c861fd4b-0567-45ed-bffa-9cb9322793d2 tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 2077.587509] env[63279]: DEBUG nova.virt.hardware [None req-c861fd4b-0567-45ed-bffa-9cb9322793d2 tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 2077.587655] env[63279]: DEBUG nova.virt.hardware [None req-c861fd4b-0567-45ed-bffa-9cb9322793d2 tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] Got 1 possible topologies {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2077.587824] env[63279]: DEBUG nova.virt.hardware [None req-c861fd4b-0567-45ed-bffa-9cb9322793d2 tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2077.588154] env[63279]: DEBUG nova.virt.hardware [None req-c861fd4b-0567-45ed-bffa-9cb9322793d2 tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2077.589354] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3359d427-4600-4dcd-abb3-48458c59f8cb {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2077.597739] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2cea29a9-87cf-46a3-b0ed-db633a0c0f2e {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2077.612729] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-c861fd4b-0567-45ed-bffa-9cb9322793d2 tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] [instance: df963c29-a1c4-4f28-be95-cafe3af4d2fa] Instance VIF info [] {{(pid=63279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2077.618434] env[63279]: DEBUG oslo.service.loopingcall [None req-c861fd4b-0567-45ed-bffa-9cb9322793d2 tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2077.618676] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: df963c29-a1c4-4f28-be95-cafe3af4d2fa] Creating VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2077.618882] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8fd7de32-0ef7-4954-b6a8-fc4843a66557 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2077.635104] env[63279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2077.635104] env[63279]: value = "task-2087428" [ 2077.635104] env[63279]: _type = "Task" [ 2077.635104] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2077.642605] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087428, 'name': CreateVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2077.670784] env[63279]: DEBUG oslo_concurrency.lockutils [None req-09490af0-3c5b-4db1-a4ea-0615d5cbf91e tempest-ServerAddressesTestJSON-1242830333 tempest-ServerAddressesTestJSON-1242830333-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.588s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2077.671873] env[63279]: DEBUG nova.compute.manager [None req-09490af0-3c5b-4db1-a4ea-0615d5cbf91e tempest-ServerAddressesTestJSON-1242830333 tempest-ServerAddressesTestJSON-1242830333-project-member] [instance: 246f0945-7290-4cb7-a982-b17cb1573002] Start building networks asynchronously for instance. {{(pid=63279) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 2077.676542] env[63279]: DEBUG oslo_concurrency.lockutils [None req-1dd5613f-21e4-4e2c-ac2b-68bf8f869556 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 39.426s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2077.676848] env[63279]: DEBUG nova.objects.instance [None req-1dd5613f-21e4-4e2c-ac2b-68bf8f869556 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Lazy-loading 'resources' on Instance uuid 64e92bfc-c0d0-4918-9ba2-45ffedbf7e39 {{(pid=63279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2077.858420] env[63279]: INFO nova.compute.manager [None req-1005df31-c010-45b4-a217-c9e2af6f76aa tempest-ServerShowV254Test-1528218800 tempest-ServerShowV254Test-1528218800-project-member] [instance: 4871421f-0015-4973-bb5f-c9042d411c82] Took 50.08 seconds to build instance. [ 2078.014695] env[63279]: DEBUG oslo_concurrency.lockutils [None req-d227dc54-bb84-4c5b-a990-c7df7fd31923 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Releasing lock "refresh_cache-f6a5d157-d58c-4c7e-b6e8-f2dc7aaebd9b" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2078.147604] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087428, 'name': CreateVM_Task} progress is 25%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2078.163745] env[63279]: DEBUG oslo_concurrency.lockutils [None req-2641ecb3-368c-4954-9833-7470f0184c80 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Acquiring lock "2f5e22f6-ba70-4848-965b-eb1553115323" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2078.178484] env[63279]: DEBUG nova.compute.utils [None req-09490af0-3c5b-4db1-a4ea-0615d5cbf91e tempest-ServerAddressesTestJSON-1242830333 tempest-ServerAddressesTestJSON-1242830333-project-member] Using /dev/sd instead of None {{(pid=63279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2078.192441] env[63279]: DEBUG nova.compute.manager [None req-09490af0-3c5b-4db1-a4ea-0615d5cbf91e tempest-ServerAddressesTestJSON-1242830333 tempest-ServerAddressesTestJSON-1242830333-project-member] [instance: 246f0945-7290-4cb7-a982-b17cb1573002] Allocating IP information in the background. {{(pid=63279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 2078.192441] env[63279]: DEBUG nova.network.neutron [None req-09490af0-3c5b-4db1-a4ea-0615d5cbf91e tempest-ServerAddressesTestJSON-1242830333 tempest-ServerAddressesTestJSON-1242830333-project-member] [instance: 246f0945-7290-4cb7-a982-b17cb1573002] allocate_for_instance() {{(pid=63279) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2078.250930] env[63279]: DEBUG nova.policy [None req-09490af0-3c5b-4db1-a4ea-0615d5cbf91e tempest-ServerAddressesTestJSON-1242830333 tempest-ServerAddressesTestJSON-1242830333-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c85a9e3055c54a0a824ad8b31916c2c2', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f4be111085794ed1859530a2411be01c', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63279) authorize /opt/stack/nova/nova/policy.py:192}} [ 2078.360272] env[63279]: DEBUG oslo_concurrency.lockutils [None req-1005df31-c010-45b4-a217-c9e2af6f76aa tempest-ServerShowV254Test-1528218800 tempest-ServerShowV254Test-1528218800-project-member] Lock "4871421f-0015-4973-bb5f-c9042d411c82" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 55.871s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2078.425127] env[63279]: DEBUG nova.network.neutron [req-b5709274-9685-4065-82ca-b9606e900a9c req-8344359b-c3a7-4a62-8f1e-9b9b4cfa9723 service nova] [instance: 2f5e22f6-ba70-4848-965b-eb1553115323] Updated VIF entry in instance network info cache for port 8383ed07-21e7-43e0-82a2-0afc179ca995. 
{{(pid=63279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2078.425476] env[63279]: DEBUG nova.network.neutron [req-b5709274-9685-4065-82ca-b9606e900a9c req-8344359b-c3a7-4a62-8f1e-9b9b4cfa9723 service nova] [instance: 2f5e22f6-ba70-4848-965b-eb1553115323] Updating instance_info_cache with network_info: [{"id": "8383ed07-21e7-43e0-82a2-0afc179ca995", "address": "fa:16:3e:b6:12:ab", "network": {"id": "a7b83e75-3b16-41db-9395-90dead128e80", "bridge": null, "label": "tempest-ServersNegativeTestJSON-756195345-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0fd8bdc7d8664db698236f56d82adcf0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tap8383ed07-21", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2078.518301] env[63279]: DEBUG nova.compute.manager [None req-d227dc54-bb84-4c5b-a990-c7df7fd31923 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] [instance: f6a5d157-d58c-4c7e-b6e8-f2dc7aaebd9b] Hypervisor driver does not support instance shared storage check, assuming it's not on shared storage {{(pid=63279) _is_instance_storage_shared /opt/stack/nova/nova/compute/manager.py:901}} [ 2078.518521] env[63279]: DEBUG oslo_concurrency.lockutils [None req-d227dc54-bb84-4c5b-a990-c7df7fd31923 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2078.651275] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087428, 'name': CreateVM_Task, 'duration_secs': 0.651294} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2078.654043] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: df963c29-a1c4-4f28-be95-cafe3af4d2fa] Created VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2078.654894] env[63279]: DEBUG oslo_concurrency.lockutils [None req-c861fd4b-0567-45ed-bffa-9cb9322793d2 tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2078.655389] env[63279]: DEBUG oslo_concurrency.lockutils [None req-c861fd4b-0567-45ed-bffa-9cb9322793d2 tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2078.656122] env[63279]: DEBUG oslo_concurrency.lockutils [None req-c861fd4b-0567-45ed-bffa-9cb9322793d2 tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2078.656122] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dedbcde6-d5f0-49a9-83ac-fd2eb3f2d2dc {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2078.661096] env[63279]: DEBUG oslo_vmware.api [None req-c861fd4b-0567-45ed-bffa-9cb9322793d2 tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] Waiting for the task: (returnval){ [ 2078.661096] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]5210bf6a-2ed0-4454-5be5-2ce831cc9a18" [ 2078.661096] env[63279]: _type = "Task" [ 2078.661096] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2078.670651] env[63279]: DEBUG oslo_vmware.api [None req-c861fd4b-0567-45ed-bffa-9cb9322793d2 tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]5210bf6a-2ed0-4454-5be5-2ce831cc9a18, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2078.697798] env[63279]: DEBUG nova.compute.manager [None req-09490af0-3c5b-4db1-a4ea-0615d5cbf91e tempest-ServerAddressesTestJSON-1242830333 tempest-ServerAddressesTestJSON-1242830333-project-member] [instance: 246f0945-7290-4cb7-a982-b17cb1573002] Start building block device mappings for instance. 
{{(pid=63279) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 2078.715619] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7aee8237-2e7a-4c2d-871e-f10681b51b9d {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2078.723785] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dbccb5da-2342-4f36-9e80-659c2115b0a6 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2078.758377] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-423c45fe-44e1-4290-94f1-e276e1783e0b {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2078.766543] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7fa8d03-bc92-4885-a3b0-a46a1dac3c3e {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2078.780281] env[63279]: DEBUG nova.compute.provider_tree [None req-1dd5613f-21e4-4e2c-ac2b-68bf8f869556 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Inventory has not changed in ProviderTree for provider: 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2078.782040] env[63279]: DEBUG nova.network.neutron [None req-09490af0-3c5b-4db1-a4ea-0615d5cbf91e tempest-ServerAddressesTestJSON-1242830333 tempest-ServerAddressesTestJSON-1242830333-project-member] [instance: 246f0945-7290-4cb7-a982-b17cb1573002] Successfully created port: 55fb7ecc-3067-4f87-af91-9742fa7d90b1 {{(pid=63279) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2078.871645] env[63279]: INFO nova.compute.manager [None req-55b3f25a-ae80-4a17-b3bc-af0b2acf3c86 tempest-ServerShowV254Test-1528218800 tempest-ServerShowV254Test-1528218800-project-member] [instance: 4871421f-0015-4973-bb5f-c9042d411c82] Rebuilding instance [ 2078.918619] env[63279]: DEBUG nova.compute.manager [None req-55b3f25a-ae80-4a17-b3bc-af0b2acf3c86 tempest-ServerShowV254Test-1528218800 tempest-ServerShowV254Test-1528218800-project-member] [instance: 4871421f-0015-4973-bb5f-c9042d411c82] Checking state {{(pid=63279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2078.919525] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c5cf81e-db66-4be4-ac55-64820a2a5838 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2078.927846] env[63279]: DEBUG oslo_concurrency.lockutils [req-b5709274-9685-4065-82ca-b9606e900a9c req-8344359b-c3a7-4a62-8f1e-9b9b4cfa9723 service nova] Releasing lock "refresh_cache-2f5e22f6-ba70-4848-965b-eb1553115323" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2079.171393] env[63279]: DEBUG oslo_vmware.api [None req-c861fd4b-0567-45ed-bffa-9cb9322793d2 tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]5210bf6a-2ed0-4454-5be5-2ce831cc9a18, 'name': SearchDatastore_Task, 'duration_secs': 0.022542} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2079.171701] env[63279]: DEBUG oslo_concurrency.lockutils [None req-c861fd4b-0567-45ed-bffa-9cb9322793d2 tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2079.171937] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-c861fd4b-0567-45ed-bffa-9cb9322793d2 tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] [instance: df963c29-a1c4-4f28-be95-cafe3af4d2fa] Processing image 30887889-e45b-4f67-8b3c-16216e594a90 {{(pid=63279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2079.172209] env[63279]: DEBUG oslo_concurrency.lockutils [None req-c861fd4b-0567-45ed-bffa-9cb9322793d2 tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2079.172373] env[63279]: DEBUG oslo_concurrency.lockutils [None req-c861fd4b-0567-45ed-bffa-9cb9322793d2 tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2079.172586] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-c861fd4b-0567-45ed-bffa-9cb9322793d2 tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2079.172850] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f978bace-3d31-4e4e-95b7-b04785146567 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2079.182577] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-c861fd4b-0567-45ed-bffa-9cb9322793d2 tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2079.182756] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-c861fd4b-0567-45ed-bffa-9cb9322793d2 tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2079.183458] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1c7f27d0-1167-409d-b3f5-bb3a36feb2d6 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2079.189329] env[63279]: DEBUG oslo_vmware.api [None req-c861fd4b-0567-45ed-bffa-9cb9322793d2 tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] Waiting for the task: (returnval){ [ 2079.189329] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52fd2c14-6804-316e-2a0b-3ccae996da9e" [ 2079.189329] env[63279]: _type = "Task" [ 2079.189329] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2079.197509] env[63279]: DEBUG oslo_vmware.api [None req-c861fd4b-0567-45ed-bffa-9cb9322793d2 tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52fd2c14-6804-316e-2a0b-3ccae996da9e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2079.285608] env[63279]: DEBUG nova.scheduler.client.report [None req-1dd5613f-21e4-4e2c-ac2b-68bf8f869556 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Inventory has not changed for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2079.701530] env[63279]: DEBUG oslo_vmware.api [None req-c861fd4b-0567-45ed-bffa-9cb9322793d2 tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52fd2c14-6804-316e-2a0b-3ccae996da9e, 'name': SearchDatastore_Task, 'duration_secs': 0.036084} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2079.702394] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3622d1e9-a622-408b-b636-4700a5c335ec {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2079.709619] env[63279]: DEBUG nova.compute.manager [None req-09490af0-3c5b-4db1-a4ea-0615d5cbf91e tempest-ServerAddressesTestJSON-1242830333 tempest-ServerAddressesTestJSON-1242830333-project-member] [instance: 246f0945-7290-4cb7-a982-b17cb1573002] Start spawning the instance on the hypervisor. 
{{(pid=63279) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 2079.711648] env[63279]: DEBUG oslo_vmware.api [None req-c861fd4b-0567-45ed-bffa-9cb9322793d2 tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] Waiting for the task: (returnval){ [ 2079.711648] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]5230654e-33cb-7b82-e273-ea9fc66e7a3e" [ 2079.711648] env[63279]: _type = "Task" [ 2079.711648] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2079.719936] env[63279]: DEBUG oslo_vmware.api [None req-c861fd4b-0567-45ed-bffa-9cb9322793d2 tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]5230654e-33cb-7b82-e273-ea9fc66e7a3e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2079.742281] env[63279]: DEBUG nova.virt.hardware [None req-09490af0-3c5b-4db1-a4ea-0615d5cbf91e tempest-ServerAddressesTestJSON-1242830333 tempest-ServerAddressesTestJSON-1242830333-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-13T17:30:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-13T17:30:01Z,direct_url=,disk_format='vmdk',id=30887889-e45b-4f67-8b3c-16216e594a90,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a935e86eee0a4c38adfe0367d2097a61',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-13T17:30:02Z,virtual_size=,visibility=), allow threads: False {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2079.742281] env[63279]: DEBUG nova.virt.hardware [None req-09490af0-3c5b-4db1-a4ea-0615d5cbf91e tempest-ServerAddressesTestJSON-1242830333 tempest-ServerAddressesTestJSON-1242830333-project-member] Flavor limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2079.742281] env[63279]: DEBUG nova.virt.hardware [None req-09490af0-3c5b-4db1-a4ea-0615d5cbf91e tempest-ServerAddressesTestJSON-1242830333 tempest-ServerAddressesTestJSON-1242830333-project-member] Image limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2079.742440] env[63279]: DEBUG nova.virt.hardware [None req-09490af0-3c5b-4db1-a4ea-0615d5cbf91e tempest-ServerAddressesTestJSON-1242830333 tempest-ServerAddressesTestJSON-1242830333-project-member] Flavor pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2079.742440] env[63279]: DEBUG nova.virt.hardware [None req-09490af0-3c5b-4db1-a4ea-0615d5cbf91e tempest-ServerAddressesTestJSON-1242830333 tempest-ServerAddressesTestJSON-1242830333-project-member] Image pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2079.742440] env[63279]: DEBUG nova.virt.hardware [None req-09490af0-3c5b-4db1-a4ea-0615d5cbf91e tempest-ServerAddressesTestJSON-1242830333 tempest-ServerAddressesTestJSON-1242830333-project-member] 
Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2079.742578] env[63279]: DEBUG nova.virt.hardware [None req-09490af0-3c5b-4db1-a4ea-0615d5cbf91e tempest-ServerAddressesTestJSON-1242830333 tempest-ServerAddressesTestJSON-1242830333-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 2079.742711] env[63279]: DEBUG nova.virt.hardware [None req-09490af0-3c5b-4db1-a4ea-0615d5cbf91e tempest-ServerAddressesTestJSON-1242830333 tempest-ServerAddressesTestJSON-1242830333-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 2079.742877] env[63279]: DEBUG nova.virt.hardware [None req-09490af0-3c5b-4db1-a4ea-0615d5cbf91e tempest-ServerAddressesTestJSON-1242830333 tempest-ServerAddressesTestJSON-1242830333-project-member] Got 1 possible topologies {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2079.743053] env[63279]: DEBUG nova.virt.hardware [None req-09490af0-3c5b-4db1-a4ea-0615d5cbf91e tempest-ServerAddressesTestJSON-1242830333 tempest-ServerAddressesTestJSON-1242830333-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2079.743240] env[63279]: DEBUG nova.virt.hardware [None req-09490af0-3c5b-4db1-a4ea-0615d5cbf91e tempest-ServerAddressesTestJSON-1242830333 tempest-ServerAddressesTestJSON-1242830333-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2079.744117] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-097044ba-4071-4175-a964-1921bf3e5f89 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2079.752127] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d87f460-1526-445e-8350-f6cc6c3132b0 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2079.791580] env[63279]: DEBUG oslo_concurrency.lockutils [None req-1dd5613f-21e4-4e2c-ac2b-68bf8f869556 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.115s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2079.793897] env[63279]: DEBUG oslo_concurrency.lockutils [None req-9f005d43-c531-46f0-8304-6f1be0a2b60b tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 39.577s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2079.794137] env[63279]: DEBUG nova.objects.instance [None req-9f005d43-c531-46f0-8304-6f1be0a2b60b tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] 
Lazy-loading 'resources' on Instance uuid 5bb445d3-1b12-4a1b-ad2a-cbc929b13aee {{(pid=63279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2079.823788] env[63279]: INFO nova.scheduler.client.report [None req-1dd5613f-21e4-4e2c-ac2b-68bf8f869556 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Deleted allocations for instance 64e92bfc-c0d0-4918-9ba2-45ffedbf7e39 [ 2079.939597] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-55b3f25a-ae80-4a17-b3bc-af0b2acf3c86 tempest-ServerShowV254Test-1528218800 tempest-ServerShowV254Test-1528218800-project-member] [instance: 4871421f-0015-4973-bb5f-c9042d411c82] Powering off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2079.939932] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-872ba439-c62d-43bf-84aa-4cb5ff5f9b9e {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2079.947896] env[63279]: DEBUG oslo_vmware.api [None req-55b3f25a-ae80-4a17-b3bc-af0b2acf3c86 tempest-ServerShowV254Test-1528218800 tempest-ServerShowV254Test-1528218800-project-member] Waiting for the task: (returnval){ [ 2079.947896] env[63279]: value = "task-2087429" [ 2079.947896] env[63279]: _type = "Task" [ 2079.947896] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2079.958755] env[63279]: DEBUG oslo_vmware.api [None req-55b3f25a-ae80-4a17-b3bc-af0b2acf3c86 tempest-ServerShowV254Test-1528218800 tempest-ServerShowV254Test-1528218800-project-member] Task: {'id': task-2087429, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2080.223640] env[63279]: DEBUG oslo_vmware.api [None req-c861fd4b-0567-45ed-bffa-9cb9322793d2 tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]5230654e-33cb-7b82-e273-ea9fc66e7a3e, 'name': SearchDatastore_Task, 'duration_secs': 0.012001} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2080.223917] env[63279]: DEBUG oslo_concurrency.lockutils [None req-c861fd4b-0567-45ed-bffa-9cb9322793d2 tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2080.224205] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-c861fd4b-0567-45ed-bffa-9cb9322793d2 tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] df963c29-a1c4-4f28-be95-cafe3af4d2fa/df963c29-a1c4-4f28-be95-cafe3af4d2fa.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2080.224481] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f840f3d4-b34c-431c-a538-567f42a733ad {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2080.230847] env[63279]: DEBUG oslo_vmware.api [None req-c861fd4b-0567-45ed-bffa-9cb9322793d2 tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] Waiting for the task: (returnval){ [ 2080.230847] env[63279]: value = "task-2087430" [ 2080.230847] env[63279]: _type = "Task" [ 2080.230847] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2080.238416] env[63279]: DEBUG oslo_vmware.api [None req-c861fd4b-0567-45ed-bffa-9cb9322793d2 tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] Task: {'id': task-2087430, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2080.330907] env[63279]: DEBUG oslo_concurrency.lockutils [None req-1dd5613f-21e4-4e2c-ac2b-68bf8f869556 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Lock "64e92bfc-c0d0-4918-9ba2-45ffedbf7e39" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 46.655s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2080.463904] env[63279]: DEBUG oslo_vmware.api [None req-55b3f25a-ae80-4a17-b3bc-af0b2acf3c86 tempest-ServerShowV254Test-1528218800 tempest-ServerShowV254Test-1528218800-project-member] Task: {'id': task-2087429, 'name': PowerOffVM_Task, 'duration_secs': 0.14074} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2080.464386] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-55b3f25a-ae80-4a17-b3bc-af0b2acf3c86 tempest-ServerShowV254Test-1528218800 tempest-ServerShowV254Test-1528218800-project-member] [instance: 4871421f-0015-4973-bb5f-c9042d411c82] Powered off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2080.464813] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-55b3f25a-ae80-4a17-b3bc-af0b2acf3c86 tempest-ServerShowV254Test-1528218800 tempest-ServerShowV254Test-1528218800-project-member] [instance: 4871421f-0015-4973-bb5f-c9042d411c82] Destroying instance {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2080.466262] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4cc54faf-09b3-41b7-b248-75d00c02898e {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2080.480377] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-55b3f25a-ae80-4a17-b3bc-af0b2acf3c86 tempest-ServerShowV254Test-1528218800 tempest-ServerShowV254Test-1528218800-project-member] [instance: 4871421f-0015-4973-bb5f-c9042d411c82] Unregistering the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2080.480758] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0b796794-f3dc-473e-9321-da9b1774d983 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2080.487963] env[63279]: DEBUG nova.compute.manager [req-81bfe97a-1e81-4785-af66-b784368e260f req-f8031ac5-ec6a-4e6b-a022-9ad8a79c8b9d service nova] [instance: 246f0945-7290-4cb7-a982-b17cb1573002] Received event network-vif-plugged-55fb7ecc-3067-4f87-af91-9742fa7d90b1 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2080.488328] env[63279]: DEBUG oslo_concurrency.lockutils [req-81bfe97a-1e81-4785-af66-b784368e260f req-f8031ac5-ec6a-4e6b-a022-9ad8a79c8b9d service nova] Acquiring lock "246f0945-7290-4cb7-a982-b17cb1573002-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2080.488694] env[63279]: DEBUG oslo_concurrency.lockutils [req-81bfe97a-1e81-4785-af66-b784368e260f req-f8031ac5-ec6a-4e6b-a022-9ad8a79c8b9d service nova] Lock "246f0945-7290-4cb7-a982-b17cb1573002-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2080.488972] env[63279]: DEBUG oslo_concurrency.lockutils [req-81bfe97a-1e81-4785-af66-b784368e260f req-f8031ac5-ec6a-4e6b-a022-9ad8a79c8b9d service nova] Lock "246f0945-7290-4cb7-a982-b17cb1573002-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2080.489292] env[63279]: DEBUG nova.compute.manager [req-81bfe97a-1e81-4785-af66-b784368e260f req-f8031ac5-ec6a-4e6b-a022-9ad8a79c8b9d service nova] [instance: 246f0945-7290-4cb7-a982-b17cb1573002] No waiting events found dispatching network-vif-plugged-55fb7ecc-3067-4f87-af91-9742fa7d90b1 
{{(pid=63279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 2080.489712] env[63279]: WARNING nova.compute.manager [req-81bfe97a-1e81-4785-af66-b784368e260f req-f8031ac5-ec6a-4e6b-a022-9ad8a79c8b9d service nova] [instance: 246f0945-7290-4cb7-a982-b17cb1573002] Received unexpected event network-vif-plugged-55fb7ecc-3067-4f87-af91-9742fa7d90b1 for instance with vm_state building and task_state spawning. [ 2080.524178] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-55b3f25a-ae80-4a17-b3bc-af0b2acf3c86 tempest-ServerShowV254Test-1528218800 tempest-ServerShowV254Test-1528218800-project-member] [instance: 4871421f-0015-4973-bb5f-c9042d411c82] Unregistered the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2080.524317] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-55b3f25a-ae80-4a17-b3bc-af0b2acf3c86 tempest-ServerShowV254Test-1528218800 tempest-ServerShowV254Test-1528218800-project-member] [instance: 4871421f-0015-4973-bb5f-c9042d411c82] Deleting contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2080.524490] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-55b3f25a-ae80-4a17-b3bc-af0b2acf3c86 tempest-ServerShowV254Test-1528218800 tempest-ServerShowV254Test-1528218800-project-member] Deleting the datastore file [datastore1] 4871421f-0015-4973-bb5f-c9042d411c82 {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2080.524869] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-72070b2d-74db-47a1-b4c5-177b91ecc95e {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2080.533271] env[63279]: DEBUG oslo_vmware.api [None req-55b3f25a-ae80-4a17-b3bc-af0b2acf3c86 tempest-ServerShowV254Test-1528218800 tempest-ServerShowV254Test-1528218800-project-member] Waiting for the task: (returnval){ [ 2080.533271] env[63279]: value = "task-2087432" [ 2080.533271] env[63279]: _type = "Task" [ 2080.533271] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2080.553923] env[63279]: DEBUG oslo_vmware.api [None req-55b3f25a-ae80-4a17-b3bc-af0b2acf3c86 tempest-ServerShowV254Test-1528218800 tempest-ServerShowV254Test-1528218800-project-member] Task: {'id': task-2087432, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2080.750268] env[63279]: DEBUG oslo_vmware.api [None req-c861fd4b-0567-45ed-bffa-9cb9322793d2 tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] Task: {'id': task-2087430, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2080.846722] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-169c3527-6b04-4fcf-94cb-d77ab4f33d77 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2080.856799] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c328187-3e25-4a6e-b663-495ecdf871f8 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2080.891611] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58b9302a-e660-4368-a460-296d84b66800 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2080.899608] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6fe3b3b6-299c-47b2-b938-8e1b31751a4e {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2080.904338] env[63279]: DEBUG nova.network.neutron [None req-09490af0-3c5b-4db1-a4ea-0615d5cbf91e tempest-ServerAddressesTestJSON-1242830333 tempest-ServerAddressesTestJSON-1242830333-project-member] [instance: 246f0945-7290-4cb7-a982-b17cb1573002] Successfully updated port: 55fb7ecc-3067-4f87-af91-9742fa7d90b1 {{(pid=63279) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2080.916469] env[63279]: DEBUG nova.compute.provider_tree [None req-9f005d43-c531-46f0-8304-6f1be0a2b60b tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] Inventory has not changed in ProviderTree for provider: 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2081.045353] env[63279]: DEBUG oslo_vmware.api [None req-55b3f25a-ae80-4a17-b3bc-af0b2acf3c86 tempest-ServerShowV254Test-1528218800 tempest-ServerShowV254Test-1528218800-project-member] Task: {'id': task-2087432, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.389491} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2081.045458] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-55b3f25a-ae80-4a17-b3bc-af0b2acf3c86 tempest-ServerShowV254Test-1528218800 tempest-ServerShowV254Test-1528218800-project-member] Deleted the datastore file {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2081.045631] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-55b3f25a-ae80-4a17-b3bc-af0b2acf3c86 tempest-ServerShowV254Test-1528218800 tempest-ServerShowV254Test-1528218800-project-member] [instance: 4871421f-0015-4973-bb5f-c9042d411c82] Deleted contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2081.046097] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-55b3f25a-ae80-4a17-b3bc-af0b2acf3c86 tempest-ServerShowV254Test-1528218800 tempest-ServerShowV254Test-1528218800-project-member] [instance: 4871421f-0015-4973-bb5f-c9042d411c82] Instance destroyed {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2081.245281] env[63279]: DEBUG oslo_vmware.api [None req-c861fd4b-0567-45ed-bffa-9cb9322793d2 tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] Task: {'id': task-2087430, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.608473} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2081.245574] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-c861fd4b-0567-45ed-bffa-9cb9322793d2 tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] df963c29-a1c4-4f28-be95-cafe3af4d2fa/df963c29-a1c4-4f28-be95-cafe3af4d2fa.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2081.245792] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-c861fd4b-0567-45ed-bffa-9cb9322793d2 tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] [instance: df963c29-a1c4-4f28-be95-cafe3af4d2fa] Extending root virtual disk to 1048576 {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2081.246066] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a25ae7e5-de22-48a6-9c9a-72cd2066e11c {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2081.253559] env[63279]: DEBUG oslo_vmware.api [None req-c861fd4b-0567-45ed-bffa-9cb9322793d2 tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] Waiting for the task: (returnval){ [ 2081.253559] env[63279]: value = "task-2087433" [ 2081.253559] env[63279]: _type = "Task" [ 2081.253559] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2081.262080] env[63279]: DEBUG oslo_vmware.api [None req-c861fd4b-0567-45ed-bffa-9cb9322793d2 tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] Task: {'id': task-2087433, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2081.409326] env[63279]: DEBUG oslo_concurrency.lockutils [None req-09490af0-3c5b-4db1-a4ea-0615d5cbf91e tempest-ServerAddressesTestJSON-1242830333 tempest-ServerAddressesTestJSON-1242830333-project-member] Acquiring lock "refresh_cache-246f0945-7290-4cb7-a982-b17cb1573002" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2081.409326] env[63279]: DEBUG oslo_concurrency.lockutils [None req-09490af0-3c5b-4db1-a4ea-0615d5cbf91e tempest-ServerAddressesTestJSON-1242830333 tempest-ServerAddressesTestJSON-1242830333-project-member] Acquired lock "refresh_cache-246f0945-7290-4cb7-a982-b17cb1573002" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2081.409326] env[63279]: DEBUG nova.network.neutron [None req-09490af0-3c5b-4db1-a4ea-0615d5cbf91e tempest-ServerAddressesTestJSON-1242830333 tempest-ServerAddressesTestJSON-1242830333-project-member] [instance: 246f0945-7290-4cb7-a982-b17cb1573002] Building network info cache for instance {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2081.420182] env[63279]: DEBUG nova.scheduler.client.report [None req-9f005d43-c531-46f0-8304-6f1be0a2b60b tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] Inventory has not changed for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2081.764650] env[63279]: DEBUG oslo_vmware.api [None req-c861fd4b-0567-45ed-bffa-9cb9322793d2 tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] Task: {'id': task-2087433, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.105666} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2081.764929] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-c861fd4b-0567-45ed-bffa-9cb9322793d2 tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] [instance: df963c29-a1c4-4f28-be95-cafe3af4d2fa] Extended root virtual disk {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2081.766496] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-864d477d-537f-412b-af01-67a0c95fdb30 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2081.791061] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-c861fd4b-0567-45ed-bffa-9cb9322793d2 tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] [instance: df963c29-a1c4-4f28-be95-cafe3af4d2fa] Reconfiguring VM instance instance-00000041 to attach disk [datastore1] df963c29-a1c4-4f28-be95-cafe3af4d2fa/df963c29-a1c4-4f28-be95-cafe3af4d2fa.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2081.791061] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0fff330b-62e1-4c4f-8521-77b6f11b0803 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2081.816070] env[63279]: DEBUG oslo_vmware.api [None req-c861fd4b-0567-45ed-bffa-9cb9322793d2 tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] Waiting for the task: (returnval){ [ 2081.816070] env[63279]: value = "task-2087434" [ 2081.816070] env[63279]: _type = "Task" [ 2081.816070] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2081.825099] env[63279]: DEBUG oslo_vmware.api [None req-c861fd4b-0567-45ed-bffa-9cb9322793d2 tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] Task: {'id': task-2087434, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2081.842070] env[63279]: DEBUG oslo_concurrency.lockutils [None req-d83e173e-3a00-46ae-8eed-94284cfc23f8 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Acquiring lock "4ce17bdb-4bed-4e06-af13-e4097b55e17d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2081.842324] env[63279]: DEBUG oslo_concurrency.lockutils [None req-d83e173e-3a00-46ae-8eed-94284cfc23f8 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Lock "4ce17bdb-4bed-4e06-af13-e4097b55e17d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2081.926394] env[63279]: DEBUG oslo_concurrency.lockutils [None req-9f005d43-c531-46f0-8304-6f1be0a2b60b tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.132s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2081.929127] env[63279]: DEBUG oslo_concurrency.lockutils [None req-147d4716-cd46-48dd-a8ea-f7700d3313fb tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 40.437s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2081.929127] env[63279]: DEBUG nova.objects.instance [None req-147d4716-cd46-48dd-a8ea-f7700d3313fb tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] [instance: 6388f912-ae70-4e8f-b8e4-ceb02e0f8a51] Trying to apply a migration context that does not seem to be set for this instance {{(pid=63279) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 2081.942850] env[63279]: DEBUG nova.network.neutron [None req-09490af0-3c5b-4db1-a4ea-0615d5cbf91e tempest-ServerAddressesTestJSON-1242830333 tempest-ServerAddressesTestJSON-1242830333-project-member] [instance: 246f0945-7290-4cb7-a982-b17cb1573002] Instance cache missing network info. 
{{(pid=63279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2081.949059] env[63279]: INFO nova.scheduler.client.report [None req-9f005d43-c531-46f0-8304-6f1be0a2b60b tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] Deleted allocations for instance 5bb445d3-1b12-4a1b-ad2a-cbc929b13aee [ 2082.083727] env[63279]: DEBUG nova.virt.hardware [None req-55b3f25a-ae80-4a17-b3bc-af0b2acf3c86 tempest-ServerShowV254Test-1528218800 tempest-ServerShowV254Test-1528218800-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-13T17:30:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-13T17:30:01Z,direct_url=,disk_format='vmdk',id=30887889-e45b-4f67-8b3c-16216e594a90,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a935e86eee0a4c38adfe0367d2097a61',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-13T17:30:02Z,virtual_size=,visibility=), allow threads: False {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2082.083978] env[63279]: DEBUG nova.virt.hardware [None req-55b3f25a-ae80-4a17-b3bc-af0b2acf3c86 tempest-ServerShowV254Test-1528218800 tempest-ServerShowV254Test-1528218800-project-member] Flavor limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2082.084155] env[63279]: DEBUG nova.virt.hardware [None req-55b3f25a-ae80-4a17-b3bc-af0b2acf3c86 tempest-ServerShowV254Test-1528218800 tempest-ServerShowV254Test-1528218800-project-member] Image limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2082.084345] env[63279]: DEBUG nova.virt.hardware [None req-55b3f25a-ae80-4a17-b3bc-af0b2acf3c86 tempest-ServerShowV254Test-1528218800 tempest-ServerShowV254Test-1528218800-project-member] Flavor pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2082.084498] env[63279]: DEBUG nova.virt.hardware [None req-55b3f25a-ae80-4a17-b3bc-af0b2acf3c86 tempest-ServerShowV254Test-1528218800 tempest-ServerShowV254Test-1528218800-project-member] Image pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2082.084648] env[63279]: DEBUG nova.virt.hardware [None req-55b3f25a-ae80-4a17-b3bc-af0b2acf3c86 tempest-ServerShowV254Test-1528218800 tempest-ServerShowV254Test-1528218800-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2082.084858] env[63279]: DEBUG nova.virt.hardware [None req-55b3f25a-ae80-4a17-b3bc-af0b2acf3c86 tempest-ServerShowV254Test-1528218800 tempest-ServerShowV254Test-1528218800-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 2082.085032] env[63279]: DEBUG nova.virt.hardware [None req-55b3f25a-ae80-4a17-b3bc-af0b2acf3c86 tempest-ServerShowV254Test-1528218800 
tempest-ServerShowV254Test-1528218800-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 2082.085210] env[63279]: DEBUG nova.virt.hardware [None req-55b3f25a-ae80-4a17-b3bc-af0b2acf3c86 tempest-ServerShowV254Test-1528218800 tempest-ServerShowV254Test-1528218800-project-member] Got 1 possible topologies {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2082.085381] env[63279]: DEBUG nova.virt.hardware [None req-55b3f25a-ae80-4a17-b3bc-af0b2acf3c86 tempest-ServerShowV254Test-1528218800 tempest-ServerShowV254Test-1528218800-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2082.085557] env[63279]: DEBUG nova.virt.hardware [None req-55b3f25a-ae80-4a17-b3bc-af0b2acf3c86 tempest-ServerShowV254Test-1528218800 tempest-ServerShowV254Test-1528218800-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2082.086560] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02f85bfd-ade0-419c-98f2-4871d462dc38 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2082.095599] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0aa4e0e5-3687-4f45-aba3-d4df32c84053 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2082.114662] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-55b3f25a-ae80-4a17-b3bc-af0b2acf3c86 tempest-ServerShowV254Test-1528218800 tempest-ServerShowV254Test-1528218800-project-member] [instance: 4871421f-0015-4973-bb5f-c9042d411c82] Instance VIF info [] {{(pid=63279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2082.121898] env[63279]: DEBUG oslo.service.loopingcall [None req-55b3f25a-ae80-4a17-b3bc-af0b2acf3c86 tempest-ServerShowV254Test-1528218800 tempest-ServerShowV254Test-1528218800-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2082.123025] env[63279]: DEBUG nova.network.neutron [None req-09490af0-3c5b-4db1-a4ea-0615d5cbf91e tempest-ServerAddressesTestJSON-1242830333 tempest-ServerAddressesTestJSON-1242830333-project-member] [instance: 246f0945-7290-4cb7-a982-b17cb1573002] Updating instance_info_cache with network_info: [{"id": "55fb7ecc-3067-4f87-af91-9742fa7d90b1", "address": "fa:16:3e:c9:0a:93", "network": {"id": "583be367-206f-41d9-a0c2-e8f63f032a3a", "bridge": "br-int", "label": "tempest-ServerAddressesTestJSON-1847983531-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f4be111085794ed1859530a2411be01c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "89463199-7ddf-4ee7-b485-1629a75b4b8f", "external-id": "nsx-vlan-transportzone-302", "segmentation_id": 302, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap55fb7ecc-30", "ovs_interfaceid": "55fb7ecc-3067-4f87-af91-9742fa7d90b1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2082.124418] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4871421f-0015-4973-bb5f-c9042d411c82] Creating VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2082.124614] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6aa78a51-d88c-4fb9-aa94-8be9ef6c5664 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2082.143328] env[63279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2082.143328] env[63279]: value = "task-2087435" [ 2082.143328] env[63279]: _type = "Task" [ 2082.143328] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2082.151614] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087435, 'name': CreateVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2082.327978] env[63279]: DEBUG oslo_vmware.api [None req-c861fd4b-0567-45ed-bffa-9cb9322793d2 tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] Task: {'id': task-2087434, 'name': ReconfigVM_Task, 'duration_secs': 0.301464} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2082.331799] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-c861fd4b-0567-45ed-bffa-9cb9322793d2 tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] [instance: df963c29-a1c4-4f28-be95-cafe3af4d2fa] Reconfigured VM instance instance-00000041 to attach disk [datastore1] df963c29-a1c4-4f28-be95-cafe3af4d2fa/df963c29-a1c4-4f28-be95-cafe3af4d2fa.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2082.331799] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-0ac0348c-8575-4d9b-8cfe-388d5e152f54 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2082.342268] env[63279]: DEBUG oslo_vmware.api [None req-c861fd4b-0567-45ed-bffa-9cb9322793d2 tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] Waiting for the task: (returnval){ [ 2082.342268] env[63279]: value = "task-2087436" [ 2082.342268] env[63279]: _type = "Task" [ 2082.342268] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2082.344091] env[63279]: DEBUG nova.compute.manager [None req-d83e173e-3a00-46ae-8eed-94284cfc23f8 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] [instance: 4ce17bdb-4bed-4e06-af13-e4097b55e17d] Starting instance... {{(pid=63279) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 2082.354849] env[63279]: DEBUG oslo_vmware.api [None req-c861fd4b-0567-45ed-bffa-9cb9322793d2 tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] Task: {'id': task-2087436, 'name': Rename_Task} progress is 10%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2082.448052] env[63279]: DEBUG oslo_concurrency.lockutils [None req-703b4c29-4773-4b34-915e-6bee4bb47aad tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Acquiring lock "a6758131-030e-4b33-a2c9-8864055a5bec" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2082.448052] env[63279]: DEBUG oslo_concurrency.lockutils [None req-703b4c29-4773-4b34-915e-6bee4bb47aad tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Lock "a6758131-030e-4b33-a2c9-8864055a5bec" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2082.460356] env[63279]: DEBUG oslo_concurrency.lockutils [None req-9f005d43-c531-46f0-8304-6f1be0a2b60b tempest-ListImageFiltersTestJSON-386698447 tempest-ListImageFiltersTestJSON-386698447-project-member] Lock "5bb445d3-1b12-4a1b-ad2a-cbc929b13aee" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 46.281s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2082.625517] env[63279]: DEBUG oslo_concurrency.lockutils [None req-09490af0-3c5b-4db1-a4ea-0615d5cbf91e tempest-ServerAddressesTestJSON-1242830333 tempest-ServerAddressesTestJSON-1242830333-project-member] Releasing lock "refresh_cache-246f0945-7290-4cb7-a982-b17cb1573002" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2082.625864] env[63279]: DEBUG nova.compute.manager [None req-09490af0-3c5b-4db1-a4ea-0615d5cbf91e tempest-ServerAddressesTestJSON-1242830333 tempest-ServerAddressesTestJSON-1242830333-project-member] [instance: 246f0945-7290-4cb7-a982-b17cb1573002] Instance network_info: |[{"id": "55fb7ecc-3067-4f87-af91-9742fa7d90b1", "address": "fa:16:3e:c9:0a:93", "network": {"id": "583be367-206f-41d9-a0c2-e8f63f032a3a", "bridge": "br-int", "label": "tempest-ServerAddressesTestJSON-1847983531-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f4be111085794ed1859530a2411be01c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "89463199-7ddf-4ee7-b485-1629a75b4b8f", "external-id": "nsx-vlan-transportzone-302", "segmentation_id": 302, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap55fb7ecc-30", "ovs_interfaceid": "55fb7ecc-3067-4f87-af91-9742fa7d90b1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 2082.626317] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None 
req-09490af0-3c5b-4db1-a4ea-0615d5cbf91e tempest-ServerAddressesTestJSON-1242830333 tempest-ServerAddressesTestJSON-1242830333-project-member] [instance: 246f0945-7290-4cb7-a982-b17cb1573002] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c9:0a:93', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '89463199-7ddf-4ee7-b485-1629a75b4b8f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '55fb7ecc-3067-4f87-af91-9742fa7d90b1', 'vif_model': 'vmxnet3'}] {{(pid=63279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2082.634121] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-09490af0-3c5b-4db1-a4ea-0615d5cbf91e tempest-ServerAddressesTestJSON-1242830333 tempest-ServerAddressesTestJSON-1242830333-project-member] Creating folder: Project (f4be111085794ed1859530a2411be01c). Parent ref: group-v427491. {{(pid=63279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 2082.634435] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-87adb570-37ce-4ca9-89da-da61f951f38a {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2082.641629] env[63279]: DEBUG nova.compute.manager [req-a24767ea-257c-4255-8d24-4226c826f396 req-f2940349-6480-4ae4-997e-0ca342ba92aa service nova] [instance: 246f0945-7290-4cb7-a982-b17cb1573002] Received event network-changed-55fb7ecc-3067-4f87-af91-9742fa7d90b1 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2082.641790] env[63279]: DEBUG nova.compute.manager [req-a24767ea-257c-4255-8d24-4226c826f396 req-f2940349-6480-4ae4-997e-0ca342ba92aa service nova] [instance: 246f0945-7290-4cb7-a982-b17cb1573002] Refreshing instance network info cache due to event network-changed-55fb7ecc-3067-4f87-af91-9742fa7d90b1. {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11655}} [ 2082.642025] env[63279]: DEBUG oslo_concurrency.lockutils [req-a24767ea-257c-4255-8d24-4226c826f396 req-f2940349-6480-4ae4-997e-0ca342ba92aa service nova] Acquiring lock "refresh_cache-246f0945-7290-4cb7-a982-b17cb1573002" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2082.642167] env[63279]: DEBUG oslo_concurrency.lockutils [req-a24767ea-257c-4255-8d24-4226c826f396 req-f2940349-6480-4ae4-997e-0ca342ba92aa service nova] Acquired lock "refresh_cache-246f0945-7290-4cb7-a982-b17cb1573002" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2082.642329] env[63279]: DEBUG nova.network.neutron [req-a24767ea-257c-4255-8d24-4226c826f396 req-f2940349-6480-4ae4-997e-0ca342ba92aa service nova] [instance: 246f0945-7290-4cb7-a982-b17cb1573002] Refreshing network info cache for port 55fb7ecc-3067-4f87-af91-9742fa7d90b1 {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2082.648758] env[63279]: INFO nova.virt.vmwareapi.vm_util [None req-09490af0-3c5b-4db1-a4ea-0615d5cbf91e tempest-ServerAddressesTestJSON-1242830333 tempest-ServerAddressesTestJSON-1242830333-project-member] Created folder: Project (f4be111085794ed1859530a2411be01c) in parent group-v427491. [ 2082.648955] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-09490af0-3c5b-4db1-a4ea-0615d5cbf91e tempest-ServerAddressesTestJSON-1242830333 tempest-ServerAddressesTestJSON-1242830333-project-member] Creating folder: Instances. Parent ref: group-v427675. 
{{(pid=63279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 2082.649710] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4caac505-3390-40ea-966b-c9be2dba4d70 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2082.657142] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087435, 'name': CreateVM_Task, 'duration_secs': 0.289136} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2082.657331] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4871421f-0015-4973-bb5f-c9042d411c82] Created VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2082.657727] env[63279]: DEBUG oslo_concurrency.lockutils [None req-55b3f25a-ae80-4a17-b3bc-af0b2acf3c86 tempest-ServerShowV254Test-1528218800 tempest-ServerShowV254Test-1528218800-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2082.657890] env[63279]: DEBUG oslo_concurrency.lockutils [None req-55b3f25a-ae80-4a17-b3bc-af0b2acf3c86 tempest-ServerShowV254Test-1528218800 tempest-ServerShowV254Test-1528218800-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2082.658218] env[63279]: DEBUG oslo_concurrency.lockutils [None req-55b3f25a-ae80-4a17-b3bc-af0b2acf3c86 tempest-ServerShowV254Test-1528218800 tempest-ServerShowV254Test-1528218800-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2082.659572] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a4578ae1-2060-439f-a05c-7bcb2fba22f6 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2082.661039] env[63279]: INFO nova.virt.vmwareapi.vm_util [None req-09490af0-3c5b-4db1-a4ea-0615d5cbf91e tempest-ServerAddressesTestJSON-1242830333 tempest-ServerAddressesTestJSON-1242830333-project-member] Created folder: Instances in parent group-v427675. [ 2082.661271] env[63279]: DEBUG oslo.service.loopingcall [None req-09490af0-3c5b-4db1-a4ea-0615d5cbf91e tempest-ServerAddressesTestJSON-1242830333 tempest-ServerAddressesTestJSON-1242830333-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2082.661693] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 246f0945-7290-4cb7-a982-b17cb1573002] Creating VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2082.662211] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d5542ebb-8cc5-490e-973c-6e051cee278f {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2082.679161] env[63279]: DEBUG oslo_vmware.api [None req-55b3f25a-ae80-4a17-b3bc-af0b2acf3c86 tempest-ServerShowV254Test-1528218800 tempest-ServerShowV254Test-1528218800-project-member] Waiting for the task: (returnval){ [ 2082.679161] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52712247-e848-615b-f9be-b8ed9edacea6" [ 2082.679161] env[63279]: _type = "Task" [ 2082.679161] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2082.684315] env[63279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2082.684315] env[63279]: value = "task-2087439" [ 2082.684315] env[63279]: _type = "Task" [ 2082.684315] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2082.692427] env[63279]: DEBUG oslo_vmware.api [None req-55b3f25a-ae80-4a17-b3bc-af0b2acf3c86 tempest-ServerShowV254Test-1528218800 tempest-ServerShowV254Test-1528218800-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52712247-e848-615b-f9be-b8ed9edacea6, 'name': SearchDatastore_Task, 'duration_secs': 0.015717} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2082.692427] env[63279]: DEBUG oslo_concurrency.lockutils [None req-55b3f25a-ae80-4a17-b3bc-af0b2acf3c86 tempest-ServerShowV254Test-1528218800 tempest-ServerShowV254Test-1528218800-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2082.692427] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-55b3f25a-ae80-4a17-b3bc-af0b2acf3c86 tempest-ServerShowV254Test-1528218800 tempest-ServerShowV254Test-1528218800-project-member] [instance: 4871421f-0015-4973-bb5f-c9042d411c82] Processing image 30887889-e45b-4f67-8b3c-16216e594a90 {{(pid=63279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2082.692427] env[63279]: DEBUG oslo_concurrency.lockutils [None req-55b3f25a-ae80-4a17-b3bc-af0b2acf3c86 tempest-ServerShowV254Test-1528218800 tempest-ServerShowV254Test-1528218800-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2082.692585] env[63279]: DEBUG oslo_concurrency.lockutils [None req-55b3f25a-ae80-4a17-b3bc-af0b2acf3c86 tempest-ServerShowV254Test-1528218800 tempest-ServerShowV254Test-1528218800-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2082.692585] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-55b3f25a-ae80-4a17-b3bc-af0b2acf3c86 tempest-ServerShowV254Test-1528218800 tempest-ServerShowV254Test-1528218800-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2082.692652] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5326c0b6-069c-41b7-b3c2-ac9b3ea8a47d {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2082.697850] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087439, 'name': CreateVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2082.704870] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-55b3f25a-ae80-4a17-b3bc-af0b2acf3c86 tempest-ServerShowV254Test-1528218800 tempest-ServerShowV254Test-1528218800-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2082.705079] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-55b3f25a-ae80-4a17-b3bc-af0b2acf3c86 tempest-ServerShowV254Test-1528218800 tempest-ServerShowV254Test-1528218800-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2082.705806] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-25684b48-d0b6-4dc9-b509-68dee3bfaee4 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2082.710949] env[63279]: DEBUG oslo_vmware.api [None req-55b3f25a-ae80-4a17-b3bc-af0b2acf3c86 tempest-ServerShowV254Test-1528218800 tempest-ServerShowV254Test-1528218800-project-member] Waiting for the task: (returnval){ [ 2082.710949] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]529f63be-aa85-e89d-2e0d-66a7f6612ac8" [ 2082.710949] env[63279]: _type = "Task" [ 2082.710949] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2082.721386] env[63279]: DEBUG oslo_vmware.api [None req-55b3f25a-ae80-4a17-b3bc-af0b2acf3c86 tempest-ServerShowV254Test-1528218800 tempest-ServerShowV254Test-1528218800-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]529f63be-aa85-e89d-2e0d-66a7f6612ac8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2082.851896] env[63279]: DEBUG oslo_vmware.api [None req-c861fd4b-0567-45ed-bffa-9cb9322793d2 tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] Task: {'id': task-2087436, 'name': Rename_Task, 'duration_secs': 0.138852} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2082.852560] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-c861fd4b-0567-45ed-bffa-9cb9322793d2 tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] [instance: df963c29-a1c4-4f28-be95-cafe3af4d2fa] Powering on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2082.854975] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-43b3ceb1-8d56-45be-8613-f0a6d2a38862 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2082.861759] env[63279]: DEBUG oslo_vmware.api [None req-c861fd4b-0567-45ed-bffa-9cb9322793d2 tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] Waiting for the task: (returnval){ [ 2082.861759] env[63279]: value = "task-2087440" [ 2082.861759] env[63279]: _type = "Task" [ 2082.861759] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2082.870504] env[63279]: DEBUG oslo_vmware.api [None req-c861fd4b-0567-45ed-bffa-9cb9322793d2 tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] Task: {'id': task-2087440, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2082.871571] env[63279]: DEBUG oslo_concurrency.lockutils [None req-d83e173e-3a00-46ae-8eed-94284cfc23f8 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2082.942629] env[63279]: DEBUG oslo_concurrency.lockutils [None req-147d4716-cd46-48dd-a8ea-f7700d3313fb tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.014s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2082.944549] env[63279]: DEBUG oslo_concurrency.lockutils [None req-106d6061-d6a3-483e-96fd-6e6c9f393126 tempest-ServersTestJSON-1697234190 tempest-ServersTestJSON-1697234190-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 37.460s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2082.946270] env[63279]: INFO nova.compute.claims [None req-106d6061-d6a3-483e-96fd-6e6c9f393126 tempest-ServersTestJSON-1697234190 tempest-ServersTestJSON-1697234190-project-member] [instance: c8b42e3b-b841-4b79-a4f3-ef62577d4902] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2082.949647] env[63279]: DEBUG nova.compute.manager [None req-703b4c29-4773-4b34-915e-6bee4bb47aad tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] [instance: a6758131-030e-4b33-a2c9-8864055a5bec] Starting instance... {{(pid=63279) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 2083.196391] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087439, 'name': CreateVM_Task, 'duration_secs': 0.376637} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2083.196578] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 246f0945-7290-4cb7-a982-b17cb1573002] Created VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2083.197298] env[63279]: DEBUG oslo_concurrency.lockutils [None req-09490af0-3c5b-4db1-a4ea-0615d5cbf91e tempest-ServerAddressesTestJSON-1242830333 tempest-ServerAddressesTestJSON-1242830333-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2083.197466] env[63279]: DEBUG oslo_concurrency.lockutils [None req-09490af0-3c5b-4db1-a4ea-0615d5cbf91e tempest-ServerAddressesTestJSON-1242830333 tempest-ServerAddressesTestJSON-1242830333-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2083.197782] env[63279]: DEBUG oslo_concurrency.lockutils [None req-09490af0-3c5b-4db1-a4ea-0615d5cbf91e tempest-ServerAddressesTestJSON-1242830333 tempest-ServerAddressesTestJSON-1242830333-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2083.198040] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2c5bd639-a0ac-44a9-ab02-140bb5bcc864 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2083.203023] env[63279]: DEBUG oslo_vmware.api [None req-09490af0-3c5b-4db1-a4ea-0615d5cbf91e tempest-ServerAddressesTestJSON-1242830333 tempest-ServerAddressesTestJSON-1242830333-project-member] Waiting for the task: (returnval){ [ 2083.203023] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]525cd340-a182-8019-975d-b78cc4292fb2" [ 2083.203023] env[63279]: _type = "Task" [ 2083.203023] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2083.214057] env[63279]: DEBUG oslo_vmware.api [None req-09490af0-3c5b-4db1-a4ea-0615d5cbf91e tempest-ServerAddressesTestJSON-1242830333 tempest-ServerAddressesTestJSON-1242830333-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]525cd340-a182-8019-975d-b78cc4292fb2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2083.221855] env[63279]: DEBUG oslo_vmware.api [None req-55b3f25a-ae80-4a17-b3bc-af0b2acf3c86 tempest-ServerShowV254Test-1528218800 tempest-ServerShowV254Test-1528218800-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]529f63be-aa85-e89d-2e0d-66a7f6612ac8, 'name': SearchDatastore_Task, 'duration_secs': 0.012291} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2083.223133] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-48aabdca-87d7-4245-b599-1e465c9d797b {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2083.228602] env[63279]: DEBUG oslo_vmware.api [None req-55b3f25a-ae80-4a17-b3bc-af0b2acf3c86 tempest-ServerShowV254Test-1528218800 tempest-ServerShowV254Test-1528218800-project-member] Waiting for the task: (returnval){ [ 2083.228602] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52775efb-ca19-e687-5e64-d2a6d972b2e6" [ 2083.228602] env[63279]: _type = "Task" [ 2083.228602] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2083.235571] env[63279]: DEBUG oslo_vmware.api [None req-55b3f25a-ae80-4a17-b3bc-af0b2acf3c86 tempest-ServerShowV254Test-1528218800 tempest-ServerShowV254Test-1528218800-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52775efb-ca19-e687-5e64-d2a6d972b2e6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2083.371897] env[63279]: DEBUG oslo_vmware.api [None req-c861fd4b-0567-45ed-bffa-9cb9322793d2 tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] Task: {'id': task-2087440, 'name': PowerOnVM_Task, 'duration_secs': 0.429378} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2083.372196] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-c861fd4b-0567-45ed-bffa-9cb9322793d2 tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] [instance: df963c29-a1c4-4f28-be95-cafe3af4d2fa] Powered on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2083.372410] env[63279]: DEBUG nova.compute.manager [None req-c861fd4b-0567-45ed-bffa-9cb9322793d2 tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] [instance: df963c29-a1c4-4f28-be95-cafe3af4d2fa] Checking state {{(pid=63279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2083.373185] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cbdeb15f-def2-4fdb-a952-3b1d7851d2aa {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2083.479834] env[63279]: DEBUG oslo_concurrency.lockutils [None req-703b4c29-4773-4b34-915e-6bee4bb47aad tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2083.535918] env[63279]: DEBUG nova.network.neutron [req-a24767ea-257c-4255-8d24-4226c826f396 req-f2940349-6480-4ae4-997e-0ca342ba92aa service nova] [instance: 246f0945-7290-4cb7-a982-b17cb1573002] Updated VIF entry in instance network info cache for port 55fb7ecc-3067-4f87-af91-9742fa7d90b1. 
{{(pid=63279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2083.535918] env[63279]: DEBUG nova.network.neutron [req-a24767ea-257c-4255-8d24-4226c826f396 req-f2940349-6480-4ae4-997e-0ca342ba92aa service nova] [instance: 246f0945-7290-4cb7-a982-b17cb1573002] Updating instance_info_cache with network_info: [{"id": "55fb7ecc-3067-4f87-af91-9742fa7d90b1", "address": "fa:16:3e:c9:0a:93", "network": {"id": "583be367-206f-41d9-a0c2-e8f63f032a3a", "bridge": "br-int", "label": "tempest-ServerAddressesTestJSON-1847983531-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f4be111085794ed1859530a2411be01c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "89463199-7ddf-4ee7-b485-1629a75b4b8f", "external-id": "nsx-vlan-transportzone-302", "segmentation_id": 302, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap55fb7ecc-30", "ovs_interfaceid": "55fb7ecc-3067-4f87-af91-9742fa7d90b1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2083.714125] env[63279]: DEBUG oslo_vmware.api [None req-09490af0-3c5b-4db1-a4ea-0615d5cbf91e tempest-ServerAddressesTestJSON-1242830333 tempest-ServerAddressesTestJSON-1242830333-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]525cd340-a182-8019-975d-b78cc4292fb2, 'name': SearchDatastore_Task, 'duration_secs': 0.02684} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2083.714125] env[63279]: DEBUG oslo_concurrency.lockutils [None req-09490af0-3c5b-4db1-a4ea-0615d5cbf91e tempest-ServerAddressesTestJSON-1242830333 tempest-ServerAddressesTestJSON-1242830333-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2083.714125] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-09490af0-3c5b-4db1-a4ea-0615d5cbf91e tempest-ServerAddressesTestJSON-1242830333 tempest-ServerAddressesTestJSON-1242830333-project-member] [instance: 246f0945-7290-4cb7-a982-b17cb1573002] Processing image 30887889-e45b-4f67-8b3c-16216e594a90 {{(pid=63279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2083.714125] env[63279]: DEBUG oslo_concurrency.lockutils [None req-09490af0-3c5b-4db1-a4ea-0615d5cbf91e tempest-ServerAddressesTestJSON-1242830333 tempest-ServerAddressesTestJSON-1242830333-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2083.739062] env[63279]: DEBUG oslo_vmware.api [None req-55b3f25a-ae80-4a17-b3bc-af0b2acf3c86 tempest-ServerShowV254Test-1528218800 tempest-ServerShowV254Test-1528218800-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52775efb-ca19-e687-5e64-d2a6d972b2e6, 'name': SearchDatastore_Task, 'duration_secs': 0.024087} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2083.739495] env[63279]: DEBUG oslo_concurrency.lockutils [None req-55b3f25a-ae80-4a17-b3bc-af0b2acf3c86 tempest-ServerShowV254Test-1528218800 tempest-ServerShowV254Test-1528218800-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2083.739915] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-55b3f25a-ae80-4a17-b3bc-af0b2acf3c86 tempest-ServerShowV254Test-1528218800 tempest-ServerShowV254Test-1528218800-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] 4871421f-0015-4973-bb5f-c9042d411c82/4871421f-0015-4973-bb5f-c9042d411c82.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2083.743019] env[63279]: DEBUG oslo_concurrency.lockutils [None req-09490af0-3c5b-4db1-a4ea-0615d5cbf91e tempest-ServerAddressesTestJSON-1242830333 tempest-ServerAddressesTestJSON-1242830333-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2083.743019] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-09490af0-3c5b-4db1-a4ea-0615d5cbf91e tempest-ServerAddressesTestJSON-1242830333 tempest-ServerAddressesTestJSON-1242830333-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2083.743019] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7547cdf2-2331-4f9f-a14b-e9505ecd42ec {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2083.743019] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-03d904ae-e7fe-4a46-80f3-cdee8407641d {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2083.749923] env[63279]: DEBUG oslo_vmware.api [None req-55b3f25a-ae80-4a17-b3bc-af0b2acf3c86 tempest-ServerShowV254Test-1528218800 tempest-ServerShowV254Test-1528218800-project-member] Waiting for the task: (returnval){ [ 2083.749923] env[63279]: value = "task-2087441" [ 2083.749923] env[63279]: _type = "Task" [ 2083.749923] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2083.753826] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-09490af0-3c5b-4db1-a4ea-0615d5cbf91e tempest-ServerAddressesTestJSON-1242830333 tempest-ServerAddressesTestJSON-1242830333-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2083.754165] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-09490af0-3c5b-4db1-a4ea-0615d5cbf91e tempest-ServerAddressesTestJSON-1242830333 tempest-ServerAddressesTestJSON-1242830333-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2083.755233] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-437bc284-498b-4ca6-98bd-d7b4d1ef54e0 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2083.761509] env[63279]: DEBUG oslo_vmware.api [None req-55b3f25a-ae80-4a17-b3bc-af0b2acf3c86 tempest-ServerShowV254Test-1528218800 tempest-ServerShowV254Test-1528218800-project-member] Task: {'id': task-2087441, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2083.767017] env[63279]: DEBUG oslo_vmware.api [None req-09490af0-3c5b-4db1-a4ea-0615d5cbf91e tempest-ServerAddressesTestJSON-1242830333 tempest-ServerAddressesTestJSON-1242830333-project-member] Waiting for the task: (returnval){ [ 2083.767017] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52369bff-a6c8-c10c-3c46-85f67a176e5a" [ 2083.767017] env[63279]: _type = "Task" [ 2083.767017] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2083.776025] env[63279]: DEBUG oslo_vmware.api [None req-09490af0-3c5b-4db1-a4ea-0615d5cbf91e tempest-ServerAddressesTestJSON-1242830333 tempest-ServerAddressesTestJSON-1242830333-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52369bff-a6c8-c10c-3c46-85f67a176e5a, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2083.895457] env[63279]: DEBUG oslo_concurrency.lockutils [None req-c861fd4b-0567-45ed-bffa-9cb9322793d2 tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2084.039080] env[63279]: DEBUG oslo_concurrency.lockutils [req-a24767ea-257c-4255-8d24-4226c826f396 req-f2940349-6480-4ae4-997e-0ca342ba92aa service nova] Releasing lock "refresh_cache-246f0945-7290-4cb7-a982-b17cb1573002" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2084.263883] env[63279]: DEBUG oslo_vmware.api [None req-55b3f25a-ae80-4a17-b3bc-af0b2acf3c86 tempest-ServerShowV254Test-1528218800 tempest-ServerShowV254Test-1528218800-project-member] Task: {'id': task-2087441, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2084.281770] env[63279]: DEBUG oslo_vmware.api [None req-09490af0-3c5b-4db1-a4ea-0615d5cbf91e tempest-ServerAddressesTestJSON-1242830333 tempest-ServerAddressesTestJSON-1242830333-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52369bff-a6c8-c10c-3c46-85f67a176e5a, 'name': SearchDatastore_Task, 'duration_secs': 0.019035} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2084.282668] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1efeb8e0-6812-40a3-a7ee-70adcc81bad7 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2084.292860] env[63279]: DEBUG oslo_vmware.api [None req-09490af0-3c5b-4db1-a4ea-0615d5cbf91e tempest-ServerAddressesTestJSON-1242830333 tempest-ServerAddressesTestJSON-1242830333-project-member] Waiting for the task: (returnval){ [ 2084.292860] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]5216e131-7cdd-d8dd-8e45-7ca1096746ff" [ 2084.292860] env[63279]: _type = "Task" [ 2084.292860] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2084.302582] env[63279]: DEBUG oslo_vmware.api [None req-09490af0-3c5b-4db1-a4ea-0615d5cbf91e tempest-ServerAddressesTestJSON-1242830333 tempest-ServerAddressesTestJSON-1242830333-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]5216e131-7cdd-d8dd-8e45-7ca1096746ff, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2084.438892] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30c13b75-bd30-46e5-a0d3-d6a0c0260268 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2084.448705] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78c96c14-ca65-4452-9990-bd485bb31977 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2084.482958] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2612e416-541b-4b97-8423-8994109567c8 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2084.489898] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5c54e52-ab5a-4d91-97f8-fc4195a962d5 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2084.506835] env[63279]: DEBUG nova.compute.provider_tree [None req-106d6061-d6a3-483e-96fd-6e6c9f393126 tempest-ServersTestJSON-1697234190 tempest-ServersTestJSON-1697234190-project-member] Inventory has not changed in ProviderTree for provider: 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2084.728491] env[63279]: DEBUG oslo_concurrency.lockutils [None req-f4f94911-762d-4d53-97a7-f3731ce38593 tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] Acquiring lock "df963c29-a1c4-4f28-be95-cafe3af4d2fa" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2084.728591] env[63279]: DEBUG oslo_concurrency.lockutils [None req-f4f94911-762d-4d53-97a7-f3731ce38593 tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] Lock "df963c29-a1c4-4f28-be95-cafe3af4d2fa" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2084.728781] env[63279]: DEBUG oslo_concurrency.lockutils [None req-f4f94911-762d-4d53-97a7-f3731ce38593 tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] Acquiring lock "df963c29-a1c4-4f28-be95-cafe3af4d2fa-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2084.728942] env[63279]: DEBUG oslo_concurrency.lockutils [None req-f4f94911-762d-4d53-97a7-f3731ce38593 tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] Lock "df963c29-a1c4-4f28-be95-cafe3af4d2fa-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2084.729138] env[63279]: DEBUG oslo_concurrency.lockutils [None req-f4f94911-762d-4d53-97a7-f3731ce38593 tempest-ServerShowV247Test-509101738 
tempest-ServerShowV247Test-509101738-project-member] Lock "df963c29-a1c4-4f28-be95-cafe3af4d2fa-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2084.731725] env[63279]: INFO nova.compute.manager [None req-f4f94911-762d-4d53-97a7-f3731ce38593 tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] [instance: df963c29-a1c4-4f28-be95-cafe3af4d2fa] Terminating instance [ 2084.760768] env[63279]: DEBUG oslo_vmware.api [None req-55b3f25a-ae80-4a17-b3bc-af0b2acf3c86 tempest-ServerShowV254Test-1528218800 tempest-ServerShowV254Test-1528218800-project-member] Task: {'id': task-2087441, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.687731} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2084.761039] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-55b3f25a-ae80-4a17-b3bc-af0b2acf3c86 tempest-ServerShowV254Test-1528218800 tempest-ServerShowV254Test-1528218800-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] 4871421f-0015-4973-bb5f-c9042d411c82/4871421f-0015-4973-bb5f-c9042d411c82.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2084.761338] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-55b3f25a-ae80-4a17-b3bc-af0b2acf3c86 tempest-ServerShowV254Test-1528218800 tempest-ServerShowV254Test-1528218800-project-member] [instance: 4871421f-0015-4973-bb5f-c9042d411c82] Extending root virtual disk to 1048576 {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2084.761668] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-9aafef40-f72e-4f6e-97a9-618a970cb1e6 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2084.767421] env[63279]: DEBUG oslo_vmware.api [None req-55b3f25a-ae80-4a17-b3bc-af0b2acf3c86 tempest-ServerShowV254Test-1528218800 tempest-ServerShowV254Test-1528218800-project-member] Waiting for the task: (returnval){ [ 2084.767421] env[63279]: value = "task-2087442" [ 2084.767421] env[63279]: _type = "Task" [ 2084.767421] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2084.774826] env[63279]: DEBUG oslo_vmware.api [None req-55b3f25a-ae80-4a17-b3bc-af0b2acf3c86 tempest-ServerShowV254Test-1528218800 tempest-ServerShowV254Test-1528218800-project-member] Task: {'id': task-2087442, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2084.804144] env[63279]: DEBUG oslo_vmware.api [None req-09490af0-3c5b-4db1-a4ea-0615d5cbf91e tempest-ServerAddressesTestJSON-1242830333 tempest-ServerAddressesTestJSON-1242830333-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]5216e131-7cdd-d8dd-8e45-7ca1096746ff, 'name': SearchDatastore_Task, 'duration_secs': 0.052213} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2084.804560] env[63279]: DEBUG oslo_concurrency.lockutils [None req-09490af0-3c5b-4db1-a4ea-0615d5cbf91e tempest-ServerAddressesTestJSON-1242830333 tempest-ServerAddressesTestJSON-1242830333-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2084.804771] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-09490af0-3c5b-4db1-a4ea-0615d5cbf91e tempest-ServerAddressesTestJSON-1242830333 tempest-ServerAddressesTestJSON-1242830333-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] 246f0945-7290-4cb7-a982-b17cb1573002/246f0945-7290-4cb7-a982-b17cb1573002.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2084.805047] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0ed2f21b-a186-4f8e-8a0e-79406b17fc85 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2084.811977] env[63279]: DEBUG oslo_vmware.api [None req-09490af0-3c5b-4db1-a4ea-0615d5cbf91e tempest-ServerAddressesTestJSON-1242830333 tempest-ServerAddressesTestJSON-1242830333-project-member] Waiting for the task: (returnval){ [ 2084.811977] env[63279]: value = "task-2087443" [ 2084.811977] env[63279]: _type = "Task" [ 2084.811977] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2084.820075] env[63279]: DEBUG oslo_vmware.api [None req-09490af0-3c5b-4db1-a4ea-0615d5cbf91e tempest-ServerAddressesTestJSON-1242830333 tempest-ServerAddressesTestJSON-1242830333-project-member] Task: {'id': task-2087443, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2085.010655] env[63279]: DEBUG nova.scheduler.client.report [None req-106d6061-d6a3-483e-96fd-6e6c9f393126 tempest-ServersTestJSON-1697234190 tempest-ServersTestJSON-1697234190-project-member] Inventory has not changed for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2085.235919] env[63279]: DEBUG oslo_concurrency.lockutils [None req-f4f94911-762d-4d53-97a7-f3731ce38593 tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] Acquiring lock "refresh_cache-df963c29-a1c4-4f28-be95-cafe3af4d2fa" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2085.235919] env[63279]: DEBUG oslo_concurrency.lockutils [None req-f4f94911-762d-4d53-97a7-f3731ce38593 tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] Acquired lock "refresh_cache-df963c29-a1c4-4f28-be95-cafe3af4d2fa" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2085.235919] env[63279]: DEBUG nova.network.neutron [None req-f4f94911-762d-4d53-97a7-f3731ce38593 tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] [instance: df963c29-a1c4-4f28-be95-cafe3af4d2fa] Building network info cache for instance {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2085.278785] env[63279]: DEBUG oslo_vmware.api [None req-55b3f25a-ae80-4a17-b3bc-af0b2acf3c86 tempest-ServerShowV254Test-1528218800 tempest-ServerShowV254Test-1528218800-project-member] Task: {'id': task-2087442, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.063522} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2085.279170] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-55b3f25a-ae80-4a17-b3bc-af0b2acf3c86 tempest-ServerShowV254Test-1528218800 tempest-ServerShowV254Test-1528218800-project-member] [instance: 4871421f-0015-4973-bb5f-c9042d411c82] Extended root virtual disk {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2085.280322] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f0125af-f7fb-472c-bca1-39f16e1dd735 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2085.305795] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-55b3f25a-ae80-4a17-b3bc-af0b2acf3c86 tempest-ServerShowV254Test-1528218800 tempest-ServerShowV254Test-1528218800-project-member] [instance: 4871421f-0015-4973-bb5f-c9042d411c82] Reconfiguring VM instance instance-00000042 to attach disk [datastore1] 4871421f-0015-4973-bb5f-c9042d411c82/4871421f-0015-4973-bb5f-c9042d411c82.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2085.306208] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4a1820d6-f938-4090-837b-af8271e2de24 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2085.336078] env[63279]: DEBUG oslo_vmware.api [None req-09490af0-3c5b-4db1-a4ea-0615d5cbf91e tempest-ServerAddressesTestJSON-1242830333 tempest-ServerAddressesTestJSON-1242830333-project-member] Task: {'id': task-2087443, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2085.338066] env[63279]: DEBUG oslo_vmware.api [None req-55b3f25a-ae80-4a17-b3bc-af0b2acf3c86 tempest-ServerShowV254Test-1528218800 tempest-ServerShowV254Test-1528218800-project-member] Waiting for the task: (returnval){ [ 2085.338066] env[63279]: value = "task-2087444" [ 2085.338066] env[63279]: _type = "Task" [ 2085.338066] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2085.347310] env[63279]: DEBUG oslo_vmware.api [None req-55b3f25a-ae80-4a17-b3bc-af0b2acf3c86 tempest-ServerShowV254Test-1528218800 tempest-ServerShowV254Test-1528218800-project-member] Task: {'id': task-2087444, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2085.517150] env[63279]: DEBUG oslo_concurrency.lockutils [None req-106d6061-d6a3-483e-96fd-6e6c9f393126 tempest-ServersTestJSON-1697234190 tempest-ServersTestJSON-1697234190-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.572s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2085.517689] env[63279]: DEBUG nova.compute.manager [None req-106d6061-d6a3-483e-96fd-6e6c9f393126 tempest-ServersTestJSON-1697234190 tempest-ServersTestJSON-1697234190-project-member] [instance: c8b42e3b-b841-4b79-a4f3-ef62577d4902] Start building networks asynchronously for instance. 
{{(pid=63279) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 2085.520480] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 38.802s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2085.520686] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2085.520849] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=63279) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2085.521150] env[63279]: DEBUG oslo_concurrency.lockutils [None req-f8f6e0d5-0b27-4027-979b-11e64063ee2d tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 34.365s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2085.521358] env[63279]: DEBUG nova.objects.instance [None req-f8f6e0d5-0b27-4027-979b-11e64063ee2d tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Lazy-loading 'resources' on Instance uuid 58392790-b297-4894-8d81-e5cbda69872b {{(pid=63279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2085.523250] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-239a80db-0ee1-49f2-9f91-8982134da873 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2085.533267] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f6d006b-02c9-4515-a226-76bf1dd494f2 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2085.555020] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-466b4b83-ac66-481d-a48c-93598698da4c {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2085.560894] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23f08f98-b5ae-4a9b-b9f9-e89181928423 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2085.594099] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=179027MB free_disk=169GB free_vcpus=48 pci_devices=None {{(pid=63279) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2085.594316] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Acquiring lock "compute_resources" by 
"nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2085.764211] env[63279]: DEBUG nova.network.neutron [None req-f4f94911-762d-4d53-97a7-f3731ce38593 tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] [instance: df963c29-a1c4-4f28-be95-cafe3af4d2fa] Instance cache missing network info. {{(pid=63279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2085.837025] env[63279]: DEBUG oslo_vmware.api [None req-09490af0-3c5b-4db1-a4ea-0615d5cbf91e tempest-ServerAddressesTestJSON-1242830333 tempest-ServerAddressesTestJSON-1242830333-project-member] Task: {'id': task-2087443, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.566064} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2085.837025] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-09490af0-3c5b-4db1-a4ea-0615d5cbf91e tempest-ServerAddressesTestJSON-1242830333 tempest-ServerAddressesTestJSON-1242830333-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] 246f0945-7290-4cb7-a982-b17cb1573002/246f0945-7290-4cb7-a982-b17cb1573002.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2085.837025] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-09490af0-3c5b-4db1-a4ea-0615d5cbf91e tempest-ServerAddressesTestJSON-1242830333 tempest-ServerAddressesTestJSON-1242830333-project-member] [instance: 246f0945-7290-4cb7-a982-b17cb1573002] Extending root virtual disk to 1048576 {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2085.837025] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ebb9e327-45a2-455c-beb1-5abf9e5a480f {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2085.843796] env[63279]: DEBUG nova.network.neutron [None req-f4f94911-762d-4d53-97a7-f3731ce38593 tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] [instance: df963c29-a1c4-4f28-be95-cafe3af4d2fa] Updating instance_info_cache with network_info: [] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2085.845102] env[63279]: DEBUG oslo_vmware.api [None req-09490af0-3c5b-4db1-a4ea-0615d5cbf91e tempest-ServerAddressesTestJSON-1242830333 tempest-ServerAddressesTestJSON-1242830333-project-member] Waiting for the task: (returnval){ [ 2085.845102] env[63279]: value = "task-2087445" [ 2085.845102] env[63279]: _type = "Task" [ 2085.845102] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2085.855473] env[63279]: DEBUG oslo_vmware.api [None req-55b3f25a-ae80-4a17-b3bc-af0b2acf3c86 tempest-ServerShowV254Test-1528218800 tempest-ServerShowV254Test-1528218800-project-member] Task: {'id': task-2087444, 'name': ReconfigVM_Task, 'duration_secs': 0.498913} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2085.855473] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-55b3f25a-ae80-4a17-b3bc-af0b2acf3c86 tempest-ServerShowV254Test-1528218800 tempest-ServerShowV254Test-1528218800-project-member] [instance: 4871421f-0015-4973-bb5f-c9042d411c82] Reconfigured VM instance instance-00000042 to attach disk [datastore1] 4871421f-0015-4973-bb5f-c9042d411c82/4871421f-0015-4973-bb5f-c9042d411c82.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2085.855473] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-df52faf4-2914-450a-b7c5-461b33d9def5 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2085.858028] env[63279]: DEBUG oslo_vmware.api [None req-09490af0-3c5b-4db1-a4ea-0615d5cbf91e tempest-ServerAddressesTestJSON-1242830333 tempest-ServerAddressesTestJSON-1242830333-project-member] Task: {'id': task-2087445, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2085.863046] env[63279]: DEBUG oslo_vmware.api [None req-55b3f25a-ae80-4a17-b3bc-af0b2acf3c86 tempest-ServerShowV254Test-1528218800 tempest-ServerShowV254Test-1528218800-project-member] Waiting for the task: (returnval){ [ 2085.863046] env[63279]: value = "task-2087446" [ 2085.863046] env[63279]: _type = "Task" [ 2085.863046] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2085.872668] env[63279]: DEBUG oslo_vmware.api [None req-55b3f25a-ae80-4a17-b3bc-af0b2acf3c86 tempest-ServerShowV254Test-1528218800 tempest-ServerShowV254Test-1528218800-project-member] Task: {'id': task-2087446, 'name': Rename_Task} progress is 5%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2086.031787] env[63279]: DEBUG nova.compute.utils [None req-106d6061-d6a3-483e-96fd-6e6c9f393126 tempest-ServersTestJSON-1697234190 tempest-ServersTestJSON-1697234190-project-member] Using /dev/sd instead of None {{(pid=63279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2086.039119] env[63279]: DEBUG nova.compute.manager [None req-106d6061-d6a3-483e-96fd-6e6c9f393126 tempest-ServersTestJSON-1697234190 tempest-ServersTestJSON-1697234190-project-member] [instance: c8b42e3b-b841-4b79-a4f3-ef62577d4902] Allocating IP information in the background. 
{{(pid=63279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 2086.039119] env[63279]: DEBUG nova.network.neutron [None req-106d6061-d6a3-483e-96fd-6e6c9f393126 tempest-ServersTestJSON-1697234190 tempest-ServersTestJSON-1697234190-project-member] [instance: c8b42e3b-b841-4b79-a4f3-ef62577d4902] allocate_for_instance() {{(pid=63279) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2086.122125] env[63279]: DEBUG nova.policy [None req-106d6061-d6a3-483e-96fd-6e6c9f393126 tempest-ServersTestJSON-1697234190 tempest-ServersTestJSON-1697234190-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '4f7d00ef2af04a9fafe3b49eb0319ef8', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8964759ce46d43998d52095a843dbc2c', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63279) authorize /opt/stack/nova/nova/policy.py:192}} [ 2086.346892] env[63279]: DEBUG oslo_concurrency.lockutils [None req-f4f94911-762d-4d53-97a7-f3731ce38593 tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] Releasing lock "refresh_cache-df963c29-a1c4-4f28-be95-cafe3af4d2fa" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2086.347387] env[63279]: DEBUG nova.compute.manager [None req-f4f94911-762d-4d53-97a7-f3731ce38593 tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] [instance: df963c29-a1c4-4f28-be95-cafe3af4d2fa] Start destroying the instance on the hypervisor. {{(pid=63279) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2086.347583] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-f4f94911-762d-4d53-97a7-f3731ce38593 tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] [instance: df963c29-a1c4-4f28-be95-cafe3af4d2fa] Destroying instance {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2086.353183] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-137b4176-5b1c-4cc5-a405-35bbb397f78e {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2086.362773] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-f4f94911-762d-4d53-97a7-f3731ce38593 tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] [instance: df963c29-a1c4-4f28-be95-cafe3af4d2fa] Powering off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2086.365644] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-78e5bace-07b9-4963-9623-bf90244ade4a {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2086.369859] env[63279]: DEBUG oslo_vmware.api [None req-09490af0-3c5b-4db1-a4ea-0615d5cbf91e tempest-ServerAddressesTestJSON-1242830333 tempest-ServerAddressesTestJSON-1242830333-project-member] Task: {'id': task-2087445, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.067918} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2086.370357] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-09490af0-3c5b-4db1-a4ea-0615d5cbf91e tempest-ServerAddressesTestJSON-1242830333 tempest-ServerAddressesTestJSON-1242830333-project-member] [instance: 246f0945-7290-4cb7-a982-b17cb1573002] Extended root virtual disk {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2086.374445] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42408d7e-f844-472e-ade5-65f669a32276 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2086.380775] env[63279]: DEBUG oslo_vmware.api [None req-f4f94911-762d-4d53-97a7-f3731ce38593 tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] Waiting for the task: (returnval){ [ 2086.380775] env[63279]: value = "task-2087447" [ 2086.380775] env[63279]: _type = "Task" [ 2086.380775] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2086.389940] env[63279]: DEBUG oslo_vmware.api [None req-55b3f25a-ae80-4a17-b3bc-af0b2acf3c86 tempest-ServerShowV254Test-1528218800 tempest-ServerShowV254Test-1528218800-project-member] Task: {'id': task-2087446, 'name': Rename_Task, 'duration_secs': 0.199075} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2086.398530] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-09490af0-3c5b-4db1-a4ea-0615d5cbf91e tempest-ServerAddressesTestJSON-1242830333 tempest-ServerAddressesTestJSON-1242830333-project-member] [instance: 246f0945-7290-4cb7-a982-b17cb1573002] Reconfiguring VM instance instance-00000043 to attach disk [datastore1] 246f0945-7290-4cb7-a982-b17cb1573002/246f0945-7290-4cb7-a982-b17cb1573002.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2086.405303] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-55b3f25a-ae80-4a17-b3bc-af0b2acf3c86 tempest-ServerShowV254Test-1528218800 tempest-ServerShowV254Test-1528218800-project-member] [instance: 4871421f-0015-4973-bb5f-c9042d411c82] Powering on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2086.405568] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c24e5c87-444b-40bd-9f6d-c6b73bc19942 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2086.422455] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5185def7-0a9c-45f8-af0e-d1fef6a7a4c8 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2086.429887] env[63279]: DEBUG oslo_vmware.api [None req-f4f94911-762d-4d53-97a7-f3731ce38593 tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] Task: {'id': task-2087447, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2086.432157] env[63279]: DEBUG oslo_vmware.api [None req-09490af0-3c5b-4db1-a4ea-0615d5cbf91e tempest-ServerAddressesTestJSON-1242830333 tempest-ServerAddressesTestJSON-1242830333-project-member] Waiting for the task: (returnval){ [ 2086.432157] env[63279]: value = "task-2087449" [ 2086.432157] env[63279]: _type = "Task" [ 2086.432157] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2086.432441] env[63279]: DEBUG oslo_vmware.api [None req-55b3f25a-ae80-4a17-b3bc-af0b2acf3c86 tempest-ServerShowV254Test-1528218800 tempest-ServerShowV254Test-1528218800-project-member] Waiting for the task: (returnval){ [ 2086.432441] env[63279]: value = "task-2087448" [ 2086.432441] env[63279]: _type = "Task" [ 2086.432441] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2086.446933] env[63279]: DEBUG oslo_vmware.api [None req-09490af0-3c5b-4db1-a4ea-0615d5cbf91e tempest-ServerAddressesTestJSON-1242830333 tempest-ServerAddressesTestJSON-1242830333-project-member] Task: {'id': task-2087449, 'name': ReconfigVM_Task} progress is 10%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2086.447074] env[63279]: DEBUG oslo_vmware.api [None req-55b3f25a-ae80-4a17-b3bc-af0b2acf3c86 tempest-ServerShowV254Test-1528218800 tempest-ServerShowV254Test-1528218800-project-member] Task: {'id': task-2087448, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2086.542502] env[63279]: DEBUG nova.compute.manager [None req-106d6061-d6a3-483e-96fd-6e6c9f393126 tempest-ServersTestJSON-1697234190 tempest-ServersTestJSON-1697234190-project-member] [instance: c8b42e3b-b841-4b79-a4f3-ef62577d4902] Start building block device mappings for instance. 
{{(pid=63279) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 2086.637233] env[63279]: DEBUG nova.network.neutron [None req-106d6061-d6a3-483e-96fd-6e6c9f393126 tempest-ServersTestJSON-1697234190 tempest-ServersTestJSON-1697234190-project-member] [instance: c8b42e3b-b841-4b79-a4f3-ef62577d4902] Successfully created port: 2cc659c6-5797-4426-a92a-3924cc611395 {{(pid=63279) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2086.638830] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20f05af8-d560-4c45-a272-eb5d3e0a5f44 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2086.647233] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5bf80c00-9e63-443b-867d-e1b9729586d5 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2086.684065] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18616d15-19a7-4f2c-a3a8-a04bae7813e3 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2086.691913] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db35f465-696b-4ac5-9e50-0b8b6b60eb23 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2086.711026] env[63279]: DEBUG nova.compute.provider_tree [None req-f8f6e0d5-0b27-4027-979b-11e64063ee2d tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Inventory has not changed in ProviderTree for provider: 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2086.902022] env[63279]: DEBUG oslo_vmware.api [None req-f4f94911-762d-4d53-97a7-f3731ce38593 tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] Task: {'id': task-2087447, 'name': PowerOffVM_Task, 'duration_secs': 0.165339} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2086.902333] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-f4f94911-762d-4d53-97a7-f3731ce38593 tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] [instance: df963c29-a1c4-4f28-be95-cafe3af4d2fa] Powered off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2086.902514] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-f4f94911-762d-4d53-97a7-f3731ce38593 tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] [instance: df963c29-a1c4-4f28-be95-cafe3af4d2fa] Unregistering the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2086.902781] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-eb43afa1-ed9e-41cc-a18b-2677b7a69b40 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2086.930038] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-f4f94911-762d-4d53-97a7-f3731ce38593 tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] [instance: df963c29-a1c4-4f28-be95-cafe3af4d2fa] Unregistered the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2086.930273] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-f4f94911-762d-4d53-97a7-f3731ce38593 tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] [instance: df963c29-a1c4-4f28-be95-cafe3af4d2fa] Deleting contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2086.930466] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-f4f94911-762d-4d53-97a7-f3731ce38593 tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] Deleting the datastore file [datastore1] df963c29-a1c4-4f28-be95-cafe3af4d2fa {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2086.930734] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-93903328-fed2-4b94-a95f-4109b491a318 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2086.940427] env[63279]: DEBUG oslo_vmware.api [None req-f4f94911-762d-4d53-97a7-f3731ce38593 tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] Waiting for the task: (returnval){ [ 2086.940427] env[63279]: value = "task-2087451" [ 2086.940427] env[63279]: _type = "Task" [ 2086.940427] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2086.949369] env[63279]: DEBUG oslo_vmware.api [None req-09490af0-3c5b-4db1-a4ea-0615d5cbf91e tempest-ServerAddressesTestJSON-1242830333 tempest-ServerAddressesTestJSON-1242830333-project-member] Task: {'id': task-2087449, 'name': ReconfigVM_Task, 'duration_secs': 0.277224} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2086.949579] env[63279]: DEBUG oslo_vmware.api [None req-55b3f25a-ae80-4a17-b3bc-af0b2acf3c86 tempest-ServerShowV254Test-1528218800 tempest-ServerShowV254Test-1528218800-project-member] Task: {'id': task-2087448, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2086.950281] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-09490af0-3c5b-4db1-a4ea-0615d5cbf91e tempest-ServerAddressesTestJSON-1242830333 tempest-ServerAddressesTestJSON-1242830333-project-member] [instance: 246f0945-7290-4cb7-a982-b17cb1573002] Reconfigured VM instance instance-00000043 to attach disk [datastore1] 246f0945-7290-4cb7-a982-b17cb1573002/246f0945-7290-4cb7-a982-b17cb1573002.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2086.950922] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d2a3c043-6a7c-46e6-915f-5432e40d4fb5 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2086.955187] env[63279]: DEBUG oslo_vmware.api [None req-f4f94911-762d-4d53-97a7-f3731ce38593 tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] Task: {'id': task-2087451, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2086.960087] env[63279]: DEBUG oslo_vmware.api [None req-09490af0-3c5b-4db1-a4ea-0615d5cbf91e tempest-ServerAddressesTestJSON-1242830333 tempest-ServerAddressesTestJSON-1242830333-project-member] Waiting for the task: (returnval){ [ 2086.960087] env[63279]: value = "task-2087452" [ 2086.960087] env[63279]: _type = "Task" [ 2086.960087] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2086.968593] env[63279]: DEBUG oslo_vmware.api [None req-09490af0-3c5b-4db1-a4ea-0615d5cbf91e tempest-ServerAddressesTestJSON-1242830333 tempest-ServerAddressesTestJSON-1242830333-project-member] Task: {'id': task-2087452, 'name': Rename_Task} progress is 5%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2087.213950] env[63279]: DEBUG nova.scheduler.client.report [None req-f8f6e0d5-0b27-4027-979b-11e64063ee2d tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Inventory has not changed for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2087.443956] env[63279]: DEBUG oslo_vmware.api [None req-55b3f25a-ae80-4a17-b3bc-af0b2acf3c86 tempest-ServerShowV254Test-1528218800 tempest-ServerShowV254Test-1528218800-project-member] Task: {'id': task-2087448, 'name': PowerOnVM_Task, 'duration_secs': 0.554121} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2087.447330] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-55b3f25a-ae80-4a17-b3bc-af0b2acf3c86 tempest-ServerShowV254Test-1528218800 tempest-ServerShowV254Test-1528218800-project-member] [instance: 4871421f-0015-4973-bb5f-c9042d411c82] Powered on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2087.447548] env[63279]: DEBUG nova.compute.manager [None req-55b3f25a-ae80-4a17-b3bc-af0b2acf3c86 tempest-ServerShowV254Test-1528218800 tempest-ServerShowV254Test-1528218800-project-member] [instance: 4871421f-0015-4973-bb5f-c9042d411c82] Checking state {{(pid=63279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2087.448368] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7e0fab1-3fba-4dcc-9f34-f28eb8397163 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2087.460773] env[63279]: DEBUG oslo_vmware.api [None req-f4f94911-762d-4d53-97a7-f3731ce38593 tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] Task: {'id': task-2087451, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.207157} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2087.462366] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-f4f94911-762d-4d53-97a7-f3731ce38593 tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] Deleted the datastore file {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2087.462555] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-f4f94911-762d-4d53-97a7-f3731ce38593 tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] [instance: df963c29-a1c4-4f28-be95-cafe3af4d2fa] Deleted contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2087.462745] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-f4f94911-762d-4d53-97a7-f3731ce38593 tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] [instance: df963c29-a1c4-4f28-be95-cafe3af4d2fa] Instance destroyed {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2087.462921] env[63279]: INFO nova.compute.manager [None req-f4f94911-762d-4d53-97a7-f3731ce38593 tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] [instance: df963c29-a1c4-4f28-be95-cafe3af4d2fa] Took 1.12 seconds to destroy the instance on the hypervisor. [ 2087.463162] env[63279]: DEBUG oslo.service.loopingcall [None req-f4f94911-762d-4d53-97a7-f3731ce38593 tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2087.466317] env[63279]: DEBUG nova.compute.manager [-] [instance: df963c29-a1c4-4f28-be95-cafe3af4d2fa] Deallocating network for instance {{(pid=63279) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2087.466421] env[63279]: DEBUG nova.network.neutron [-] [instance: df963c29-a1c4-4f28-be95-cafe3af4d2fa] deallocate_for_instance() {{(pid=63279) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2087.476441] env[63279]: DEBUG oslo_vmware.api [None req-09490af0-3c5b-4db1-a4ea-0615d5cbf91e tempest-ServerAddressesTestJSON-1242830333 tempest-ServerAddressesTestJSON-1242830333-project-member] Task: {'id': task-2087452, 'name': Rename_Task, 'duration_secs': 0.140026} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2087.476704] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-09490af0-3c5b-4db1-a4ea-0615d5cbf91e tempest-ServerAddressesTestJSON-1242830333 tempest-ServerAddressesTestJSON-1242830333-project-member] [instance: 246f0945-7290-4cb7-a982-b17cb1573002] Powering on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2087.476933] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9afea8b2-106e-4a04-9080-c95ade6177ce {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2087.483882] env[63279]: DEBUG oslo_vmware.api [None req-09490af0-3c5b-4db1-a4ea-0615d5cbf91e tempest-ServerAddressesTestJSON-1242830333 tempest-ServerAddressesTestJSON-1242830333-project-member] Waiting for the task: (returnval){ [ 2087.483882] env[63279]: value = "task-2087456" [ 2087.483882] env[63279]: _type = "Task" [ 2087.483882] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2087.491757] env[63279]: DEBUG oslo_vmware.api [None req-09490af0-3c5b-4db1-a4ea-0615d5cbf91e tempest-ServerAddressesTestJSON-1242830333 tempest-ServerAddressesTestJSON-1242830333-project-member] Task: {'id': task-2087456, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2087.492945] env[63279]: DEBUG nova.network.neutron [-] [instance: df963c29-a1c4-4f28-be95-cafe3af4d2fa] Instance cache missing network info. {{(pid=63279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2087.554313] env[63279]: DEBUG nova.compute.manager [None req-106d6061-d6a3-483e-96fd-6e6c9f393126 tempest-ServersTestJSON-1697234190 tempest-ServersTestJSON-1697234190-project-member] [instance: c8b42e3b-b841-4b79-a4f3-ef62577d4902] Start spawning the instance on the hypervisor. 
{{(pid=63279) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 2087.582617] env[63279]: DEBUG nova.virt.hardware [None req-106d6061-d6a3-483e-96fd-6e6c9f393126 tempest-ServersTestJSON-1697234190 tempest-ServersTestJSON-1697234190-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-13T17:30:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-13T17:30:01Z,direct_url=,disk_format='vmdk',id=30887889-e45b-4f67-8b3c-16216e594a90,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a935e86eee0a4c38adfe0367d2097a61',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-13T17:30:02Z,virtual_size=,visibility=), allow threads: False {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2087.582956] env[63279]: DEBUG nova.virt.hardware [None req-106d6061-d6a3-483e-96fd-6e6c9f393126 tempest-ServersTestJSON-1697234190 tempest-ServersTestJSON-1697234190-project-member] Flavor limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2087.583028] env[63279]: DEBUG nova.virt.hardware [None req-106d6061-d6a3-483e-96fd-6e6c9f393126 tempest-ServersTestJSON-1697234190 tempest-ServersTestJSON-1697234190-project-member] Image limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2087.583215] env[63279]: DEBUG nova.virt.hardware [None req-106d6061-d6a3-483e-96fd-6e6c9f393126 tempest-ServersTestJSON-1697234190 tempest-ServersTestJSON-1697234190-project-member] Flavor pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2087.583364] env[63279]: DEBUG nova.virt.hardware [None req-106d6061-d6a3-483e-96fd-6e6c9f393126 tempest-ServersTestJSON-1697234190 tempest-ServersTestJSON-1697234190-project-member] Image pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2087.583516] env[63279]: DEBUG nova.virt.hardware [None req-106d6061-d6a3-483e-96fd-6e6c9f393126 tempest-ServersTestJSON-1697234190 tempest-ServersTestJSON-1697234190-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2087.583784] env[63279]: DEBUG nova.virt.hardware [None req-106d6061-d6a3-483e-96fd-6e6c9f393126 tempest-ServersTestJSON-1697234190 tempest-ServersTestJSON-1697234190-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 2087.583885] env[63279]: DEBUG nova.virt.hardware [None req-106d6061-d6a3-483e-96fd-6e6c9f393126 tempest-ServersTestJSON-1697234190 tempest-ServersTestJSON-1697234190-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 2087.584063] env[63279]: DEBUG nova.virt.hardware [None req-106d6061-d6a3-483e-96fd-6e6c9f393126 tempest-ServersTestJSON-1697234190 
tempest-ServersTestJSON-1697234190-project-member] Got 1 possible topologies {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2087.584233] env[63279]: DEBUG nova.virt.hardware [None req-106d6061-d6a3-483e-96fd-6e6c9f393126 tempest-ServersTestJSON-1697234190 tempest-ServersTestJSON-1697234190-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2087.584412] env[63279]: DEBUG nova.virt.hardware [None req-106d6061-d6a3-483e-96fd-6e6c9f393126 tempest-ServersTestJSON-1697234190 tempest-ServersTestJSON-1697234190-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2087.585307] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed0f90d8-c998-4148-889e-d814775b1962 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2087.595311] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-147e6048-58ae-4a03-be83-1252a0713cd2 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2087.721020] env[63279]: DEBUG oslo_concurrency.lockutils [None req-f8f6e0d5-0b27-4027-979b-11e64063ee2d tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.198s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2087.721497] env[63279]: DEBUG oslo_concurrency.lockutils [None req-531b773c-e5fb-4ac2-9085-d70799185be7 tempest-ServerShowV257Test-1874161788 tempest-ServerShowV257Test-1874161788-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 29.459s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2087.723362] env[63279]: INFO nova.compute.claims [None req-531b773c-e5fb-4ac2-9085-d70799185be7 tempest-ServerShowV257Test-1874161788 tempest-ServerShowV257Test-1874161788-project-member] [instance: f7f88f1a-a81a-4208-88d7-6a264e642ab1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2087.745115] env[63279]: INFO nova.scheduler.client.report [None req-f8f6e0d5-0b27-4027-979b-11e64063ee2d tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Deleted allocations for instance 58392790-b297-4894-8d81-e5cbda69872b [ 2087.968806] env[63279]: DEBUG oslo_concurrency.lockutils [None req-55b3f25a-ae80-4a17-b3bc-af0b2acf3c86 tempest-ServerShowV254Test-1528218800 tempest-ServerShowV254Test-1528218800-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2087.997400] env[63279]: DEBUG nova.network.neutron [-] [instance: df963c29-a1c4-4f28-be95-cafe3af4d2fa] Updating instance_info_cache with network_info: [] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2087.998539] env[63279]: DEBUG oslo_vmware.api [None 
req-09490af0-3c5b-4db1-a4ea-0615d5cbf91e tempest-ServerAddressesTestJSON-1242830333 tempest-ServerAddressesTestJSON-1242830333-project-member] Task: {'id': task-2087456, 'name': PowerOnVM_Task, 'duration_secs': 0.444977} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2087.999463] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-09490af0-3c5b-4db1-a4ea-0615d5cbf91e tempest-ServerAddressesTestJSON-1242830333 tempest-ServerAddressesTestJSON-1242830333-project-member] [instance: 246f0945-7290-4cb7-a982-b17cb1573002] Powered on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2087.999463] env[63279]: INFO nova.compute.manager [None req-09490af0-3c5b-4db1-a4ea-0615d5cbf91e tempest-ServerAddressesTestJSON-1242830333 tempest-ServerAddressesTestJSON-1242830333-project-member] [instance: 246f0945-7290-4cb7-a982-b17cb1573002] Took 8.29 seconds to spawn the instance on the hypervisor. [ 2087.999463] env[63279]: DEBUG nova.compute.manager [None req-09490af0-3c5b-4db1-a4ea-0615d5cbf91e tempest-ServerAddressesTestJSON-1242830333 tempest-ServerAddressesTestJSON-1242830333-project-member] [instance: 246f0945-7290-4cb7-a982-b17cb1573002] Checking state {{(pid=63279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2088.001065] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2bea2623-5990-49e9-a9b2-4c1a26d1df4a {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2088.181698] env[63279]: DEBUG nova.compute.manager [req-d51a29d8-952e-476f-aacd-68b3156426fd req-aeb88189-a715-4ba9-ac9e-435d67795d19 service nova] [instance: c8b42e3b-b841-4b79-a4f3-ef62577d4902] Received event network-vif-plugged-2cc659c6-5797-4426-a92a-3924cc611395 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2088.181924] env[63279]: DEBUG oslo_concurrency.lockutils [req-d51a29d8-952e-476f-aacd-68b3156426fd req-aeb88189-a715-4ba9-ac9e-435d67795d19 service nova] Acquiring lock "c8b42e3b-b841-4b79-a4f3-ef62577d4902-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2088.182224] env[63279]: DEBUG oslo_concurrency.lockutils [req-d51a29d8-952e-476f-aacd-68b3156426fd req-aeb88189-a715-4ba9-ac9e-435d67795d19 service nova] Lock "c8b42e3b-b841-4b79-a4f3-ef62577d4902-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2088.182413] env[63279]: DEBUG oslo_concurrency.lockutils [req-d51a29d8-952e-476f-aacd-68b3156426fd req-aeb88189-a715-4ba9-ac9e-435d67795d19 service nova] Lock "c8b42e3b-b841-4b79-a4f3-ef62577d4902-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2088.182608] env[63279]: DEBUG nova.compute.manager [req-d51a29d8-952e-476f-aacd-68b3156426fd req-aeb88189-a715-4ba9-ac9e-435d67795d19 service nova] [instance: c8b42e3b-b841-4b79-a4f3-ef62577d4902] No waiting events found dispatching network-vif-plugged-2cc659c6-5797-4426-a92a-3924cc611395 {{(pid=63279) pop_instance_event 
/opt/stack/nova/nova/compute/manager.py:322}} [ 2088.182742] env[63279]: WARNING nova.compute.manager [req-d51a29d8-952e-476f-aacd-68b3156426fd req-aeb88189-a715-4ba9-ac9e-435d67795d19 service nova] [instance: c8b42e3b-b841-4b79-a4f3-ef62577d4902] Received unexpected event network-vif-plugged-2cc659c6-5797-4426-a92a-3924cc611395 for instance with vm_state building and task_state spawning. [ 2088.254855] env[63279]: DEBUG oslo_concurrency.lockutils [None req-f8f6e0d5-0b27-4027-979b-11e64063ee2d tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Lock "58392790-b297-4894-8d81-e5cbda69872b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 46.052s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2088.315069] env[63279]: DEBUG nova.network.neutron [None req-106d6061-d6a3-483e-96fd-6e6c9f393126 tempest-ServersTestJSON-1697234190 tempest-ServersTestJSON-1697234190-project-member] [instance: c8b42e3b-b841-4b79-a4f3-ef62577d4902] Successfully updated port: 2cc659c6-5797-4426-a92a-3924cc611395 {{(pid=63279) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2088.377575] env[63279]: DEBUG oslo_concurrency.lockutils [None req-115ccca1-b77d-4cbf-8091-6f2f334a342f tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Acquiring lock "c1ac4af5-b01e-4175-844f-7a67b2ef7526" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2088.377808] env[63279]: DEBUG oslo_concurrency.lockutils [None req-115ccca1-b77d-4cbf-8091-6f2f334a342f tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Lock "c1ac4af5-b01e-4175-844f-7a67b2ef7526" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2088.378039] env[63279]: DEBUG oslo_concurrency.lockutils [None req-115ccca1-b77d-4cbf-8091-6f2f334a342f tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Acquiring lock "c1ac4af5-b01e-4175-844f-7a67b2ef7526-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2088.378719] env[63279]: DEBUG oslo_concurrency.lockutils [None req-115ccca1-b77d-4cbf-8091-6f2f334a342f tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Lock "c1ac4af5-b01e-4175-844f-7a67b2ef7526-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2088.378719] env[63279]: DEBUG oslo_concurrency.lockutils [None req-115ccca1-b77d-4cbf-8091-6f2f334a342f tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Lock "c1ac4af5-b01e-4175-844f-7a67b2ef7526-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2088.380439] env[63279]: 
INFO nova.compute.manager [None req-115ccca1-b77d-4cbf-8091-6f2f334a342f tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] [instance: c1ac4af5-b01e-4175-844f-7a67b2ef7526] Terminating instance [ 2088.500627] env[63279]: INFO nova.compute.manager [-] [instance: df963c29-a1c4-4f28-be95-cafe3af4d2fa] Took 1.03 seconds to deallocate network for instance. [ 2088.524960] env[63279]: INFO nova.compute.manager [None req-09490af0-3c5b-4db1-a4ea-0615d5cbf91e tempest-ServerAddressesTestJSON-1242830333 tempest-ServerAddressesTestJSON-1242830333-project-member] [instance: 246f0945-7290-4cb7-a982-b17cb1573002] Took 52.74 seconds to build instance. [ 2088.820493] env[63279]: DEBUG oslo_concurrency.lockutils [None req-106d6061-d6a3-483e-96fd-6e6c9f393126 tempest-ServersTestJSON-1697234190 tempest-ServersTestJSON-1697234190-project-member] Acquiring lock "refresh_cache-c8b42e3b-b841-4b79-a4f3-ef62577d4902" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2088.820493] env[63279]: DEBUG oslo_concurrency.lockutils [None req-106d6061-d6a3-483e-96fd-6e6c9f393126 tempest-ServersTestJSON-1697234190 tempest-ServersTestJSON-1697234190-project-member] Acquired lock "refresh_cache-c8b42e3b-b841-4b79-a4f3-ef62577d4902" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2088.820493] env[63279]: DEBUG nova.network.neutron [None req-106d6061-d6a3-483e-96fd-6e6c9f393126 tempest-ServersTestJSON-1697234190 tempest-ServersTestJSON-1697234190-project-member] [instance: c8b42e3b-b841-4b79-a4f3-ef62577d4902] Building network info cache for instance {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2088.884933] env[63279]: DEBUG nova.compute.manager [None req-115ccca1-b77d-4cbf-8091-6f2f334a342f tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] [instance: c1ac4af5-b01e-4175-844f-7a67b2ef7526] Start destroying the instance on the hypervisor. 
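The nova.virt.hardware entries above ("Build topologies for 1 vcpu(s) 1:1:1 ... Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)]") enumerate every sockets/cores/threads split of the flavor's vCPU count that fits the limits. An illustrative enumeration of the same idea, not Nova's actual implementation:

    import itertools

    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536,
                            max_threads=65536):
        # Every (sockets, cores, threads) triple whose product is the vCPU
        # count and which stays under the per-dimension ceilings.
        found = []
        for s, c, t in itertools.product(
                range(1, min(vcpus, max_sockets) + 1),
                range(1, min(vcpus, max_cores) + 1),
                range(1, min(vcpus, max_threads) + 1)):
            if s * c * t == vcpus:
                found.append((s, c, t))
        return found

    print(possible_topologies(1))  # [(1, 1, 1)], as for the m1.nano flavor above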
{{(pid=63279) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2088.885173] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-115ccca1-b77d-4cbf-8091-6f2f334a342f tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] [instance: c1ac4af5-b01e-4175-844f-7a67b2ef7526] Destroying instance {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2088.886069] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d298ef2b-5694-4492-95e2-9a19278c9bf0 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2088.893529] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-115ccca1-b77d-4cbf-8091-6f2f334a342f tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] [instance: c1ac4af5-b01e-4175-844f-7a67b2ef7526] Powering off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2088.895839] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-8ce65d17-e7eb-4fa4-9521-e7f7b012eaed {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2088.901846] env[63279]: DEBUG oslo_vmware.api [None req-115ccca1-b77d-4cbf-8091-6f2f334a342f tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Waiting for the task: (returnval){ [ 2088.901846] env[63279]: value = "task-2087457" [ 2088.901846] env[63279]: _type = "Task" [ 2088.901846] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2088.911067] env[63279]: DEBUG oslo_vmware.api [None req-115ccca1-b77d-4cbf-8091-6f2f334a342f tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Task: {'id': task-2087457, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2088.991220] env[63279]: DEBUG oslo_concurrency.lockutils [None req-70205c4c-5bd3-44be-b3d5-46d8934028a1 tempest-ServerAddressesTestJSON-1242830333 tempest-ServerAddressesTestJSON-1242830333-project-member] Acquiring lock "246f0945-7290-4cb7-a982-b17cb1573002" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2089.006634] env[63279]: DEBUG oslo_concurrency.lockutils [None req-f4f94911-762d-4d53-97a7-f3731ce38593 tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2089.026986] env[63279]: DEBUG oslo_concurrency.lockutils [None req-09490af0-3c5b-4db1-a4ea-0615d5cbf91e tempest-ServerAddressesTestJSON-1242830333 tempest-ServerAddressesTestJSON-1242830333-project-member] Lock "246f0945-7290-4cb7-a982-b17cb1573002" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 65.360s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2089.027345] env[63279]: DEBUG oslo_concurrency.lockutils [None req-70205c4c-5bd3-44be-b3d5-46d8934028a1 tempest-ServerAddressesTestJSON-1242830333 tempest-ServerAddressesTestJSON-1242830333-project-member] Lock "246f0945-7290-4cb7-a982-b17cb1573002" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.036s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2089.027611] env[63279]: DEBUG oslo_concurrency.lockutils [None req-70205c4c-5bd3-44be-b3d5-46d8934028a1 tempest-ServerAddressesTestJSON-1242830333 tempest-ServerAddressesTestJSON-1242830333-project-member] Acquiring lock "246f0945-7290-4cb7-a982-b17cb1573002-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2089.027869] env[63279]: DEBUG oslo_concurrency.lockutils [None req-70205c4c-5bd3-44be-b3d5-46d8934028a1 tempest-ServerAddressesTestJSON-1242830333 tempest-ServerAddressesTestJSON-1242830333-project-member] Lock "246f0945-7290-4cb7-a982-b17cb1573002-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2089.028092] env[63279]: DEBUG oslo_concurrency.lockutils [None req-70205c4c-5bd3-44be-b3d5-46d8934028a1 tempest-ServerAddressesTestJSON-1242830333 tempest-ServerAddressesTestJSON-1242830333-project-member] Lock "246f0945-7290-4cb7-a982-b17cb1573002-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2089.030778] env[63279]: INFO nova.compute.manager [None req-70205c4c-5bd3-44be-b3d5-46d8934028a1 tempest-ServerAddressesTestJSON-1242830333 tempest-ServerAddressesTestJSON-1242830333-project-member] [instance: 
246f0945-7290-4cb7-a982-b17cb1573002] Terminating instance [ 2089.058901] env[63279]: DEBUG oslo_concurrency.lockutils [None req-1cc9ff1f-1d62-43ca-8885-080f467e6ec3 tempest-ServerShowV254Test-1528218800 tempest-ServerShowV254Test-1528218800-project-member] Acquiring lock "4871421f-0015-4973-bb5f-c9042d411c82" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2089.059166] env[63279]: DEBUG oslo_concurrency.lockutils [None req-1cc9ff1f-1d62-43ca-8885-080f467e6ec3 tempest-ServerShowV254Test-1528218800 tempest-ServerShowV254Test-1528218800-project-member] Lock "4871421f-0015-4973-bb5f-c9042d411c82" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2089.059556] env[63279]: DEBUG oslo_concurrency.lockutils [None req-1cc9ff1f-1d62-43ca-8885-080f467e6ec3 tempest-ServerShowV254Test-1528218800 tempest-ServerShowV254Test-1528218800-project-member] Acquiring lock "4871421f-0015-4973-bb5f-c9042d411c82-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2089.059732] env[63279]: DEBUG oslo_concurrency.lockutils [None req-1cc9ff1f-1d62-43ca-8885-080f467e6ec3 tempest-ServerShowV254Test-1528218800 tempest-ServerShowV254Test-1528218800-project-member] Lock "4871421f-0015-4973-bb5f-c9042d411c82-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2089.059928] env[63279]: DEBUG oslo_concurrency.lockutils [None req-1cc9ff1f-1d62-43ca-8885-080f467e6ec3 tempest-ServerShowV254Test-1528218800 tempest-ServerShowV254Test-1528218800-project-member] Lock "4871421f-0015-4973-bb5f-c9042d411c82-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2089.064999] env[63279]: INFO nova.compute.manager [None req-1cc9ff1f-1d62-43ca-8885-080f467e6ec3 tempest-ServerShowV254Test-1528218800 tempest-ServerShowV254Test-1528218800-project-member] [instance: 4871421f-0015-4973-bb5f-c9042d411c82] Terminating instance [ 2089.142452] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2eb91583-b6ba-4cc9-b177-dbb624f730e1 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2089.150681] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e77fbdc-dbf3-4a64-bc06-f2742e0f7b6c {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2089.180857] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d4abd14-a1b5-496f-bc92-74dd5a3cdb08 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2089.188636] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-eff435a6-c469-46bb-afc2-5d1bbf0ed801 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2089.203726] env[63279]: DEBUG nova.compute.provider_tree [None req-531b773c-e5fb-4ac2-9085-d70799185be7 tempest-ServerShowV257Test-1874161788 tempest-ServerShowV257Test-1874161788-project-member] Inventory has not changed in ProviderTree for provider: 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2089.356144] env[63279]: DEBUG nova.network.neutron [None req-106d6061-d6a3-483e-96fd-6e6c9f393126 tempest-ServersTestJSON-1697234190 tempest-ServersTestJSON-1697234190-project-member] [instance: c8b42e3b-b841-4b79-a4f3-ef62577d4902] Instance cache missing network info. {{(pid=63279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2089.413364] env[63279]: DEBUG oslo_vmware.api [None req-115ccca1-b77d-4cbf-8091-6f2f334a342f tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Task: {'id': task-2087457, 'name': PowerOffVM_Task, 'duration_secs': 0.190134} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2089.416378] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-115ccca1-b77d-4cbf-8091-6f2f334a342f tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] [instance: c1ac4af5-b01e-4175-844f-7a67b2ef7526] Powered off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2089.416642] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-115ccca1-b77d-4cbf-8091-6f2f334a342f tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] [instance: c1ac4af5-b01e-4175-844f-7a67b2ef7526] Unregistering the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2089.416952] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-13a1d4cd-70b5-4d2e-99a7-4f811f10836f {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2089.535725] env[63279]: DEBUG nova.network.neutron [None req-106d6061-d6a3-483e-96fd-6e6c9f393126 tempest-ServersTestJSON-1697234190 tempest-ServersTestJSON-1697234190-project-member] [instance: c8b42e3b-b841-4b79-a4f3-ef62577d4902] Updating instance_info_cache with network_info: [{"id": "2cc659c6-5797-4426-a92a-3924cc611395", "address": "fa:16:3e:f4:07:b5", "network": {"id": "01008afc-f9d7-497e-9566-78b239e4fb8c", "bridge": "br-int", "label": "tempest-ServersTestJSON-523576344-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8964759ce46d43998d52095a843dbc2c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c4349e30-c086-4c24-9e0e-83996d808a1b", "external-id": "nsx-vlan-transportzone-266", "segmentation_id": 266, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2cc659c6-57", "ovs_interfaceid": 
"2cc659c6-5797-4426-a92a-3924cc611395", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2089.537533] env[63279]: DEBUG nova.compute.manager [None req-70205c4c-5bd3-44be-b3d5-46d8934028a1 tempest-ServerAddressesTestJSON-1242830333 tempest-ServerAddressesTestJSON-1242830333-project-member] [instance: 246f0945-7290-4cb7-a982-b17cb1573002] Start destroying the instance on the hypervisor. {{(pid=63279) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2089.537805] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-70205c4c-5bd3-44be-b3d5-46d8934028a1 tempest-ServerAddressesTestJSON-1242830333 tempest-ServerAddressesTestJSON-1242830333-project-member] [instance: 246f0945-7290-4cb7-a982-b17cb1573002] Destroying instance {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2089.538953] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8645e581-adca-4683-a3ee-d54460ca9510 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2089.546752] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-70205c4c-5bd3-44be-b3d5-46d8934028a1 tempest-ServerAddressesTestJSON-1242830333 tempest-ServerAddressesTestJSON-1242830333-project-member] [instance: 246f0945-7290-4cb7-a982-b17cb1573002] Powering off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2089.547519] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4ec046af-8a87-499c-88a4-4e0b2443491a {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2089.554164] env[63279]: DEBUG oslo_vmware.api [None req-70205c4c-5bd3-44be-b3d5-46d8934028a1 tempest-ServerAddressesTestJSON-1242830333 tempest-ServerAddressesTestJSON-1242830333-project-member] Waiting for the task: (returnval){ [ 2089.554164] env[63279]: value = "task-2087459" [ 2089.554164] env[63279]: _type = "Task" [ 2089.554164] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2089.561931] env[63279]: DEBUG oslo_vmware.api [None req-70205c4c-5bd3-44be-b3d5-46d8934028a1 tempest-ServerAddressesTestJSON-1242830333 tempest-ServerAddressesTestJSON-1242830333-project-member] Task: {'id': task-2087459, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2089.570941] env[63279]: DEBUG oslo_concurrency.lockutils [None req-1cc9ff1f-1d62-43ca-8885-080f467e6ec3 tempest-ServerShowV254Test-1528218800 tempest-ServerShowV254Test-1528218800-project-member] Acquiring lock "refresh_cache-4871421f-0015-4973-bb5f-c9042d411c82" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2089.571162] env[63279]: DEBUG oslo_concurrency.lockutils [None req-1cc9ff1f-1d62-43ca-8885-080f467e6ec3 tempest-ServerShowV254Test-1528218800 tempest-ServerShowV254Test-1528218800-project-member] Acquired lock "refresh_cache-4871421f-0015-4973-bb5f-c9042d411c82" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2089.571482] env[63279]: DEBUG nova.network.neutron [None req-1cc9ff1f-1d62-43ca-8885-080f467e6ec3 tempest-ServerShowV254Test-1528218800 tempest-ServerShowV254Test-1528218800-project-member] [instance: 4871421f-0015-4973-bb5f-c9042d411c82] Building network info cache for instance {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2089.707696] env[63279]: DEBUG nova.scheduler.client.report [None req-531b773c-e5fb-4ac2-9085-d70799185be7 tempest-ServerShowV257Test-1874161788 tempest-ServerShowV257Test-1874161788-project-member] Inventory has not changed for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2089.774673] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-115ccca1-b77d-4cbf-8091-6f2f334a342f tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] [instance: c1ac4af5-b01e-4175-844f-7a67b2ef7526] Unregistered the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2089.774922] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-115ccca1-b77d-4cbf-8091-6f2f334a342f tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] [instance: c1ac4af5-b01e-4175-844f-7a67b2ef7526] Deleting contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2089.775232] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-115ccca1-b77d-4cbf-8091-6f2f334a342f tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Deleting the datastore file [datastore1] c1ac4af5-b01e-4175-844f-7a67b2ef7526 {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2089.775421] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-afcc24ff-daa3-4953-aac9-54042a8934c7 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2089.782451] env[63279]: DEBUG oslo_vmware.api [None req-115ccca1-b77d-4cbf-8091-6f2f334a342f tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Waiting for the task: 
(returnval){ [ 2089.782451] env[63279]: value = "task-2087460" [ 2089.782451] env[63279]: _type = "Task" [ 2089.782451] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2089.791159] env[63279]: DEBUG oslo_vmware.api [None req-115ccca1-b77d-4cbf-8091-6f2f334a342f tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Task: {'id': task-2087460, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2090.040415] env[63279]: DEBUG oslo_concurrency.lockutils [None req-106d6061-d6a3-483e-96fd-6e6c9f393126 tempest-ServersTestJSON-1697234190 tempest-ServersTestJSON-1697234190-project-member] Releasing lock "refresh_cache-c8b42e3b-b841-4b79-a4f3-ef62577d4902" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2090.040765] env[63279]: DEBUG nova.compute.manager [None req-106d6061-d6a3-483e-96fd-6e6c9f393126 tempest-ServersTestJSON-1697234190 tempest-ServersTestJSON-1697234190-project-member] [instance: c8b42e3b-b841-4b79-a4f3-ef62577d4902] Instance network_info: |[{"id": "2cc659c6-5797-4426-a92a-3924cc611395", "address": "fa:16:3e:f4:07:b5", "network": {"id": "01008afc-f9d7-497e-9566-78b239e4fb8c", "bridge": "br-int", "label": "tempest-ServersTestJSON-523576344-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8964759ce46d43998d52095a843dbc2c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c4349e30-c086-4c24-9e0e-83996d808a1b", "external-id": "nsx-vlan-transportzone-266", "segmentation_id": 266, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2cc659c6-57", "ovs_interfaceid": "2cc659c6-5797-4426-a92a-3924cc611395", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 2090.041230] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-106d6061-d6a3-483e-96fd-6e6c9f393126 tempest-ServersTestJSON-1697234190 tempest-ServersTestJSON-1697234190-project-member] [instance: c8b42e3b-b841-4b79-a4f3-ef62577d4902] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f4:07:b5', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c4349e30-c086-4c24-9e0e-83996d808a1b', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '2cc659c6-5797-4426-a92a-3924cc611395', 'vif_model': 'vmxnet3'}] {{(pid=63279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2090.049285] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-106d6061-d6a3-483e-96fd-6e6c9f393126 tempest-ServersTestJSON-1697234190 tempest-ServersTestJSON-1697234190-project-member] Creating folder: Project (8964759ce46d43998d52095a843dbc2c). Parent ref: group-v427491. 
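A quick worked example of what the inventory reported above for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 implies for scheduling, using Placement's capacity rule as I understand it (capacity = (total - reserved) * allocation_ratio):

    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
    }

    for rc, inv in inventory.items():
        capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print(rc, capacity)
    # VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0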
{{(pid=63279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 2090.049599] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5bd935c4-e803-4e1a-8160-230aac70e2a0 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2090.060922] env[63279]: INFO nova.virt.vmwareapi.vm_util [None req-106d6061-d6a3-483e-96fd-6e6c9f393126 tempest-ServersTestJSON-1697234190 tempest-ServersTestJSON-1697234190-project-member] Created folder: Project (8964759ce46d43998d52095a843dbc2c) in parent group-v427491. [ 2090.061211] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-106d6061-d6a3-483e-96fd-6e6c9f393126 tempest-ServersTestJSON-1697234190 tempest-ServersTestJSON-1697234190-project-member] Creating folder: Instances. Parent ref: group-v427681. {{(pid=63279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 2090.064428] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6bb34780-207d-4f59-93cd-36678014f92e {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2090.066289] env[63279]: DEBUG oslo_vmware.api [None req-70205c4c-5bd3-44be-b3d5-46d8934028a1 tempest-ServerAddressesTestJSON-1242830333 tempest-ServerAddressesTestJSON-1242830333-project-member] Task: {'id': task-2087459, 'name': PowerOffVM_Task, 'duration_secs': 0.210398} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2090.066583] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-70205c4c-5bd3-44be-b3d5-46d8934028a1 tempest-ServerAddressesTestJSON-1242830333 tempest-ServerAddressesTestJSON-1242830333-project-member] [instance: 246f0945-7290-4cb7-a982-b17cb1573002] Powered off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2090.066758] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-70205c4c-5bd3-44be-b3d5-46d8934028a1 tempest-ServerAddressesTestJSON-1242830333 tempest-ServerAddressesTestJSON-1242830333-project-member] [instance: 246f0945-7290-4cb7-a982-b17cb1573002] Unregistering the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2090.067386] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b296b8e9-c398-4a47-9125-809d435ca3ee {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2090.076093] env[63279]: INFO nova.virt.vmwareapi.vm_util [None req-106d6061-d6a3-483e-96fd-6e6c9f393126 tempest-ServersTestJSON-1697234190 tempest-ServersTestJSON-1697234190-project-member] Created folder: Instances in parent group-v427681. [ 2090.076598] env[63279]: DEBUG oslo.service.loopingcall [None req-106d6061-d6a3-483e-96fd-6e6c9f393126 tempest-ServersTestJSON-1697234190 tempest-ServersTestJSON-1697234190-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2090.076822] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c8b42e3b-b841-4b79-a4f3-ef62577d4902] Creating VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2090.077835] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1027fd12-c2cb-408f-83c9-e0d3d49b7e9b {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2090.096961] env[63279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2090.096961] env[63279]: value = "task-2087465" [ 2090.096961] env[63279]: _type = "Task" [ 2090.096961] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2090.097994] env[63279]: DEBUG nova.network.neutron [None req-1cc9ff1f-1d62-43ca-8885-080f467e6ec3 tempest-ServerShowV254Test-1528218800 tempest-ServerShowV254Test-1528218800-project-member] [instance: 4871421f-0015-4973-bb5f-c9042d411c82] Instance cache missing network info. {{(pid=63279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2090.110916] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087465, 'name': CreateVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2090.154293] env[63279]: DEBUG nova.network.neutron [None req-1cc9ff1f-1d62-43ca-8885-080f467e6ec3 tempest-ServerShowV254Test-1528218800 tempest-ServerShowV254Test-1528218800-project-member] [instance: 4871421f-0015-4973-bb5f-c9042d411c82] Updating instance_info_cache with network_info: [] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2090.214750] env[63279]: DEBUG nova.compute.manager [req-5ba43761-1055-4a0c-852f-0a78c1ddccb6 req-960a2bae-1ceb-4d36-841c-9bd73b6db330 service nova] [instance: c8b42e3b-b841-4b79-a4f3-ef62577d4902] Received event network-changed-2cc659c6-5797-4426-a92a-3924cc611395 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2090.215099] env[63279]: DEBUG nova.compute.manager [req-5ba43761-1055-4a0c-852f-0a78c1ddccb6 req-960a2bae-1ceb-4d36-841c-9bd73b6db330 service nova] [instance: c8b42e3b-b841-4b79-a4f3-ef62577d4902] Refreshing instance network info cache due to event network-changed-2cc659c6-5797-4426-a92a-3924cc611395. 
{{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11655}} [ 2090.215193] env[63279]: DEBUG oslo_concurrency.lockutils [req-5ba43761-1055-4a0c-852f-0a78c1ddccb6 req-960a2bae-1ceb-4d36-841c-9bd73b6db330 service nova] Acquiring lock "refresh_cache-c8b42e3b-b841-4b79-a4f3-ef62577d4902" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2090.215341] env[63279]: DEBUG oslo_concurrency.lockutils [req-5ba43761-1055-4a0c-852f-0a78c1ddccb6 req-960a2bae-1ceb-4d36-841c-9bd73b6db330 service nova] Acquired lock "refresh_cache-c8b42e3b-b841-4b79-a4f3-ef62577d4902" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2090.215509] env[63279]: DEBUG nova.network.neutron [req-5ba43761-1055-4a0c-852f-0a78c1ddccb6 req-960a2bae-1ceb-4d36-841c-9bd73b6db330 service nova] [instance: c8b42e3b-b841-4b79-a4f3-ef62577d4902] Refreshing network info cache for port 2cc659c6-5797-4426-a92a-3924cc611395 {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2090.217574] env[63279]: DEBUG oslo_concurrency.lockutils [None req-531b773c-e5fb-4ac2-9085-d70799185be7 tempest-ServerShowV257Test-1874161788 tempest-ServerShowV257Test-1874161788-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.496s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2090.218056] env[63279]: DEBUG nova.compute.manager [None req-531b773c-e5fb-4ac2-9085-d70799185be7 tempest-ServerShowV257Test-1874161788 tempest-ServerShowV257Test-1874161788-project-member] [instance: f7f88f1a-a81a-4208-88d7-6a264e642ab1] Start building networks asynchronously for instance. {{(pid=63279) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 2090.223014] env[63279]: DEBUG oslo_concurrency.lockutils [None req-81ee3126-1583-4fad-aaad-b59e9f272102 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 30.300s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2090.223328] env[63279]: DEBUG nova.objects.instance [None req-81ee3126-1583-4fad-aaad-b59e9f272102 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Lazy-loading 'resources' on Instance uuid 7db0c32d-36a4-4452-bb07-06de0c93ab50 {{(pid=63279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2090.296176] env[63279]: DEBUG oslo_vmware.api [None req-115ccca1-b77d-4cbf-8091-6f2f334a342f tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Task: {'id': task-2087460, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.199307} completed successfully. 
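The 'Lock "compute_resources" acquired/released ... waited/held' and 'Acquiring lock "refresh_cache-..."' entries above come from oslo.concurrency's lockutils. A minimal sketch of both forms; the decorated function is an illustrative placeholder:

    from oslo_concurrency import lockutils

    @lockutils.synchronized('compute_resources')
    def update_usage():
        # Only one thread in this process runs this at a time; the decorator
        # emits the "acquired ... waited" / "released ... held" debug lines.
        pass

    # Context-manager form, as used for the per-instance refresh_cache locks:
    with lockutils.lock('refresh_cache-4871421f-0015-4973-bb5f-c9042d411c82'):
        pass

    update_usage()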
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2090.296496] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-115ccca1-b77d-4cbf-8091-6f2f334a342f tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Deleted the datastore file {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2090.296740] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-115ccca1-b77d-4cbf-8091-6f2f334a342f tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] [instance: c1ac4af5-b01e-4175-844f-7a67b2ef7526] Deleted contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2090.296942] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-115ccca1-b77d-4cbf-8091-6f2f334a342f tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] [instance: c1ac4af5-b01e-4175-844f-7a67b2ef7526] Instance destroyed {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2090.297140] env[63279]: INFO nova.compute.manager [None req-115ccca1-b77d-4cbf-8091-6f2f334a342f tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] [instance: c1ac4af5-b01e-4175-844f-7a67b2ef7526] Took 1.41 seconds to destroy the instance on the hypervisor. [ 2090.297388] env[63279]: DEBUG oslo.service.loopingcall [None req-115ccca1-b77d-4cbf-8091-6f2f334a342f tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2090.298028] env[63279]: DEBUG nova.compute.manager [-] [instance: c1ac4af5-b01e-4175-844f-7a67b2ef7526] Deallocating network for instance {{(pid=63279) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2090.298028] env[63279]: DEBUG nova.network.neutron [-] [instance: c1ac4af5-b01e-4175-844f-7a67b2ef7526] deallocate_for_instance() {{(pid=63279) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2090.438519] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-70205c4c-5bd3-44be-b3d5-46d8934028a1 tempest-ServerAddressesTestJSON-1242830333 tempest-ServerAddressesTestJSON-1242830333-project-member] [instance: 246f0945-7290-4cb7-a982-b17cb1573002] Unregistered the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2090.438969] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-70205c4c-5bd3-44be-b3d5-46d8934028a1 tempest-ServerAddressesTestJSON-1242830333 tempest-ServerAddressesTestJSON-1242830333-project-member] [instance: 246f0945-7290-4cb7-a982-b17cb1573002] Deleting contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2090.438969] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-70205c4c-5bd3-44be-b3d5-46d8934028a1 tempest-ServerAddressesTestJSON-1242830333 tempest-ServerAddressesTestJSON-1242830333-project-member] Deleting the datastore file [datastore1] 246f0945-7290-4cb7-a982-b17cb1573002 {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2090.439338] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with 
opID=oslo.vmware-b7687cab-0de5-4f12-a3b3-425205a110d8 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2090.447599] env[63279]: DEBUG oslo_vmware.api [None req-70205c4c-5bd3-44be-b3d5-46d8934028a1 tempest-ServerAddressesTestJSON-1242830333 tempest-ServerAddressesTestJSON-1242830333-project-member] Waiting for the task: (returnval){ [ 2090.447599] env[63279]: value = "task-2087466" [ 2090.447599] env[63279]: _type = "Task" [ 2090.447599] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2090.455665] env[63279]: DEBUG oslo_vmware.api [None req-70205c4c-5bd3-44be-b3d5-46d8934028a1 tempest-ServerAddressesTestJSON-1242830333 tempest-ServerAddressesTestJSON-1242830333-project-member] Task: {'id': task-2087466, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2090.610376] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087465, 'name': CreateVM_Task} progress is 25%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2090.656310] env[63279]: DEBUG oslo_concurrency.lockutils [None req-1cc9ff1f-1d62-43ca-8885-080f467e6ec3 tempest-ServerShowV254Test-1528218800 tempest-ServerShowV254Test-1528218800-project-member] Releasing lock "refresh_cache-4871421f-0015-4973-bb5f-c9042d411c82" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2090.656758] env[63279]: DEBUG nova.compute.manager [None req-1cc9ff1f-1d62-43ca-8885-080f467e6ec3 tempest-ServerShowV254Test-1528218800 tempest-ServerShowV254Test-1528218800-project-member] [instance: 4871421f-0015-4973-bb5f-c9042d411c82] Start destroying the instance on the hypervisor. 
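The "Received event network-vif-plugged-...", "No waiting events found dispatching ..." and "Received unexpected event ..." entries earlier in this section reflect Nova's prepare-then-pop handling of Neutron external events. A deliberately simplified stdlib illustration of that cycle, not Nova's InstanceEvents class:

    import threading

    class EventRegistry:
        def __init__(self):
            self._lock = threading.Lock()
            self._events = {}  # (instance_uuid, event_name) -> threading.Event

        def prepare(self, instance_uuid, event_name):
            with self._lock:
                ev = threading.Event()
                self._events[(instance_uuid, event_name)] = ev
                return ev

        def pop(self, instance_uuid, event_name):
            # Called when the event arrives; False means nothing was waiting,
            # i.e. the "unexpected event" warning case.
            with self._lock:
                ev = self._events.pop((instance_uuid, event_name), None)
            if ev is None:
                return False
            ev.set()
            return True

    registry = EventRegistry()
    waiter = registry.prepare('c8b42e3b', 'network-vif-plugged-2cc659c6')
    registry.pop('c8b42e3b', 'network-vif-plugged-2cc659c6')
    waiter.wait(timeout=1)  # returns immediately; the event was already popped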
{{(pid=63279) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2090.656960] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-1cc9ff1f-1d62-43ca-8885-080f467e6ec3 tempest-ServerShowV254Test-1528218800 tempest-ServerShowV254Test-1528218800-project-member] [instance: 4871421f-0015-4973-bb5f-c9042d411c82] Destroying instance {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2090.657856] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2516ed97-2447-4dfb-a47f-67470d45ef35 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2090.665209] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-1cc9ff1f-1d62-43ca-8885-080f467e6ec3 tempest-ServerShowV254Test-1528218800 tempest-ServerShowV254Test-1528218800-project-member] [instance: 4871421f-0015-4973-bb5f-c9042d411c82] Powering off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2090.665456] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-469438a1-1be7-456a-b90a-b932111f3c90 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2090.671738] env[63279]: DEBUG oslo_vmware.api [None req-1cc9ff1f-1d62-43ca-8885-080f467e6ec3 tempest-ServerShowV254Test-1528218800 tempest-ServerShowV254Test-1528218800-project-member] Waiting for the task: (returnval){ [ 2090.671738] env[63279]: value = "task-2087467" [ 2090.671738] env[63279]: _type = "Task" [ 2090.671738] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2090.681114] env[63279]: DEBUG oslo_vmware.api [None req-1cc9ff1f-1d62-43ca-8885-080f467e6ec3 tempest-ServerShowV254Test-1528218800 tempest-ServerShowV254Test-1528218800-project-member] Task: {'id': task-2087467, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2090.727129] env[63279]: DEBUG nova.compute.utils [None req-531b773c-e5fb-4ac2-9085-d70799185be7 tempest-ServerShowV257Test-1874161788 tempest-ServerShowV257Test-1874161788-project-member] Using /dev/sd instead of None {{(pid=63279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2090.731356] env[63279]: DEBUG nova.compute.manager [None req-531b773c-e5fb-4ac2-9085-d70799185be7 tempest-ServerShowV257Test-1874161788 tempest-ServerShowV257Test-1874161788-project-member] [instance: f7f88f1a-a81a-4208-88d7-6a264e642ab1] Not allocating networking since 'none' was specified. {{(pid=63279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 2090.962940] env[63279]: DEBUG oslo_vmware.api [None req-70205c4c-5bd3-44be-b3d5-46d8934028a1 tempest-ServerAddressesTestJSON-1242830333 tempest-ServerAddressesTestJSON-1242830333-project-member] Task: {'id': task-2087466, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2091.014999] env[63279]: DEBUG nova.network.neutron [req-5ba43761-1055-4a0c-852f-0a78c1ddccb6 req-960a2bae-1ceb-4d36-841c-9bd73b6db330 service nova] [instance: c8b42e3b-b841-4b79-a4f3-ef62577d4902] Updated VIF entry in instance network info cache for port 2cc659c6-5797-4426-a92a-3924cc611395. 
{{(pid=63279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2091.015787] env[63279]: DEBUG nova.network.neutron [req-5ba43761-1055-4a0c-852f-0a78c1ddccb6 req-960a2bae-1ceb-4d36-841c-9bd73b6db330 service nova] [instance: c8b42e3b-b841-4b79-a4f3-ef62577d4902] Updating instance_info_cache with network_info: [{"id": "2cc659c6-5797-4426-a92a-3924cc611395", "address": "fa:16:3e:f4:07:b5", "network": {"id": "01008afc-f9d7-497e-9566-78b239e4fb8c", "bridge": "br-int", "label": "tempest-ServersTestJSON-523576344-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8964759ce46d43998d52095a843dbc2c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c4349e30-c086-4c24-9e0e-83996d808a1b", "external-id": "nsx-vlan-transportzone-266", "segmentation_id": 266, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2cc659c6-57", "ovs_interfaceid": "2cc659c6-5797-4426-a92a-3924cc611395", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2091.110555] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087465, 'name': CreateVM_Task, 'duration_secs': 0.648567} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2091.112741] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c8b42e3b-b841-4b79-a4f3-ef62577d4902] Created VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2091.113564] env[63279]: DEBUG oslo_concurrency.lockutils [None req-106d6061-d6a3-483e-96fd-6e6c9f393126 tempest-ServersTestJSON-1697234190 tempest-ServersTestJSON-1697234190-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2091.113736] env[63279]: DEBUG oslo_concurrency.lockutils [None req-106d6061-d6a3-483e-96fd-6e6c9f393126 tempest-ServersTestJSON-1697234190 tempest-ServersTestJSON-1697234190-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2091.114057] env[63279]: DEBUG oslo_concurrency.lockutils [None req-106d6061-d6a3-483e-96fd-6e6c9f393126 tempest-ServersTestJSON-1697234190 tempest-ServersTestJSON-1697234190-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2091.114303] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-be668694-5b17-4b2c-86f8-c532badec169 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2091.118799] 
env[63279]: DEBUG oslo_vmware.api [None req-106d6061-d6a3-483e-96fd-6e6c9f393126 tempest-ServersTestJSON-1697234190 tempest-ServersTestJSON-1697234190-project-member] Waiting for the task: (returnval){ [ 2091.118799] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52867664-e591-b6e8-67c2-faf97016b56f" [ 2091.118799] env[63279]: _type = "Task" [ 2091.118799] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2091.128266] env[63279]: DEBUG oslo_vmware.api [None req-106d6061-d6a3-483e-96fd-6e6c9f393126 tempest-ServersTestJSON-1697234190 tempest-ServersTestJSON-1697234190-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52867664-e591-b6e8-67c2-faf97016b56f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2091.157492] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8cf9cb4e-368f-4612-8b84-3d627a0bc5a1 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2091.164781] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf7bb8ff-7ab2-44a1-a97b-7983c787366b {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2091.199033] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08ebe7de-3bb2-4f0e-928f-7ec4ba0294d5 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2091.209187] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57e4d2d6-f54c-441f-8ac3-c3c615508b6f {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2091.213342] env[63279]: DEBUG oslo_vmware.api [None req-1cc9ff1f-1d62-43ca-8885-080f467e6ec3 tempest-ServerShowV254Test-1528218800 tempest-ServerShowV254Test-1528218800-project-member] Task: {'id': task-2087467, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2091.222939] env[63279]: DEBUG nova.compute.provider_tree [None req-81ee3126-1583-4fad-aaad-b59e9f272102 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Inventory has not changed in ProviderTree for provider: 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2091.233056] env[63279]: DEBUG nova.compute.manager [None req-531b773c-e5fb-4ac2-9085-d70799185be7 tempest-ServerShowV257Test-1874161788 tempest-ServerShowV257Test-1874161788-project-member] [instance: f7f88f1a-a81a-4208-88d7-6a264e642ab1] Start building block device mappings for instance. 
{{(pid=63279) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 2091.279638] env[63279]: DEBUG nova.network.neutron [-] [instance: c1ac4af5-b01e-4175-844f-7a67b2ef7526] Updating instance_info_cache with network_info: [] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2091.458338] env[63279]: DEBUG oslo_vmware.api [None req-70205c4c-5bd3-44be-b3d5-46d8934028a1 tempest-ServerAddressesTestJSON-1242830333 tempest-ServerAddressesTestJSON-1242830333-project-member] Task: {'id': task-2087466, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.892392} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2091.458607] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-70205c4c-5bd3-44be-b3d5-46d8934028a1 tempest-ServerAddressesTestJSON-1242830333 tempest-ServerAddressesTestJSON-1242830333-project-member] Deleted the datastore file {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2091.458784] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-70205c4c-5bd3-44be-b3d5-46d8934028a1 tempest-ServerAddressesTestJSON-1242830333 tempest-ServerAddressesTestJSON-1242830333-project-member] [instance: 246f0945-7290-4cb7-a982-b17cb1573002] Deleted contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2091.458963] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-70205c4c-5bd3-44be-b3d5-46d8934028a1 tempest-ServerAddressesTestJSON-1242830333 tempest-ServerAddressesTestJSON-1242830333-project-member] [instance: 246f0945-7290-4cb7-a982-b17cb1573002] Instance destroyed {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2091.459164] env[63279]: INFO nova.compute.manager [None req-70205c4c-5bd3-44be-b3d5-46d8934028a1 tempest-ServerAddressesTestJSON-1242830333 tempest-ServerAddressesTestJSON-1242830333-project-member] [instance: 246f0945-7290-4cb7-a982-b17cb1573002] Took 1.92 seconds to destroy the instance on the hypervisor. [ 2091.459453] env[63279]: DEBUG oslo.service.loopingcall [None req-70205c4c-5bd3-44be-b3d5-46d8934028a1 tempest-ServerAddressesTestJSON-1242830333 tempest-ServerAddressesTestJSON-1242830333-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2091.459647] env[63279]: DEBUG nova.compute.manager [-] [instance: 246f0945-7290-4cb7-a982-b17cb1573002] Deallocating network for instance {{(pid=63279) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2091.459743] env[63279]: DEBUG nova.network.neutron [-] [instance: 246f0945-7290-4cb7-a982-b17cb1573002] deallocate_for_instance() {{(pid=63279) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2091.520613] env[63279]: DEBUG oslo_concurrency.lockutils [req-5ba43761-1055-4a0c-852f-0a78c1ddccb6 req-960a2bae-1ceb-4d36-841c-9bd73b6db330 service nova] Releasing lock "refresh_cache-c8b42e3b-b841-4b79-a4f3-ef62577d4902" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2091.629294] env[63279]: DEBUG oslo_vmware.api [None req-106d6061-d6a3-483e-96fd-6e6c9f393126 tempest-ServersTestJSON-1697234190 tempest-ServersTestJSON-1697234190-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52867664-e591-b6e8-67c2-faf97016b56f, 'name': SearchDatastore_Task, 'duration_secs': 0.021851} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2091.629655] env[63279]: DEBUG oslo_concurrency.lockutils [None req-106d6061-d6a3-483e-96fd-6e6c9f393126 tempest-ServersTestJSON-1697234190 tempest-ServersTestJSON-1697234190-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2091.629899] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-106d6061-d6a3-483e-96fd-6e6c9f393126 tempest-ServersTestJSON-1697234190 tempest-ServersTestJSON-1697234190-project-member] [instance: c8b42e3b-b841-4b79-a4f3-ef62577d4902] Processing image 30887889-e45b-4f67-8b3c-16216e594a90 {{(pid=63279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2091.630201] env[63279]: DEBUG oslo_concurrency.lockutils [None req-106d6061-d6a3-483e-96fd-6e6c9f393126 tempest-ServersTestJSON-1697234190 tempest-ServersTestJSON-1697234190-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2091.630462] env[63279]: DEBUG oslo_concurrency.lockutils [None req-106d6061-d6a3-483e-96fd-6e6c9f393126 tempest-ServersTestJSON-1697234190 tempest-ServersTestJSON-1697234190-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2091.630588] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-106d6061-d6a3-483e-96fd-6e6c9f393126 tempest-ServersTestJSON-1697234190 tempest-ServersTestJSON-1697234190-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2091.630889] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ee91790a-2ab0-4d7f-ac43-42d135041edc {{(pid=63279) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2091.640057] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-106d6061-d6a3-483e-96fd-6e6c9f393126 tempest-ServersTestJSON-1697234190 tempest-ServersTestJSON-1697234190-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2091.640305] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-106d6061-d6a3-483e-96fd-6e6c9f393126 tempest-ServersTestJSON-1697234190 tempest-ServersTestJSON-1697234190-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2091.641060] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fb2cda70-98bc-4771-8706-f341038bed66 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2091.646907] env[63279]: DEBUG oslo_vmware.api [None req-106d6061-d6a3-483e-96fd-6e6c9f393126 tempest-ServersTestJSON-1697234190 tempest-ServersTestJSON-1697234190-project-member] Waiting for the task: (returnval){ [ 2091.646907] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52d196d0-4f7e-047a-0a1e-4c58fe5bbe5f" [ 2091.646907] env[63279]: _type = "Task" [ 2091.646907] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2091.655043] env[63279]: DEBUG oslo_vmware.api [None req-106d6061-d6a3-483e-96fd-6e6c9f393126 tempest-ServersTestJSON-1697234190 tempest-ServersTestJSON-1697234190-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52d196d0-4f7e-047a-0a1e-4c58fe5bbe5f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2091.706906] env[63279]: DEBUG oslo_vmware.api [None req-1cc9ff1f-1d62-43ca-8885-080f467e6ec3 tempest-ServerShowV254Test-1528218800 tempest-ServerShowV254Test-1528218800-project-member] Task: {'id': task-2087467, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2091.723347] env[63279]: DEBUG nova.compute.manager [req-8dfd69b3-7d0c-4ffe-827c-d38761ce54cb req-82efeaa8-f558-4689-87e0-d9a2d567e596 service nova] [instance: 246f0945-7290-4cb7-a982-b17cb1573002] Received event network-vif-deleted-55fb7ecc-3067-4f87-af91-9742fa7d90b1 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2091.723552] env[63279]: INFO nova.compute.manager [req-8dfd69b3-7d0c-4ffe-827c-d38761ce54cb req-82efeaa8-f558-4689-87e0-d9a2d567e596 service nova] [instance: 246f0945-7290-4cb7-a982-b17cb1573002] Neutron deleted interface 55fb7ecc-3067-4f87-af91-9742fa7d90b1; detaching it from the instance and deleting it from the info cache [ 2091.723788] env[63279]: DEBUG nova.network.neutron [req-8dfd69b3-7d0c-4ffe-827c-d38761ce54cb req-82efeaa8-f558-4689-87e0-d9a2d567e596 service nova] [instance: 246f0945-7290-4cb7-a982-b17cb1573002] Updating instance_info_cache with network_info: [] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2091.725766] env[63279]: DEBUG nova.scheduler.client.report [None req-81ee3126-1583-4fad-aaad-b59e9f272102 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Inventory has not changed for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2091.782687] env[63279]: INFO nova.compute.manager [-] [instance: c1ac4af5-b01e-4175-844f-7a67b2ef7526] Took 1.48 seconds to deallocate network for instance. [ 2092.158462] env[63279]: DEBUG oslo_vmware.api [None req-106d6061-d6a3-483e-96fd-6e6c9f393126 tempest-ServersTestJSON-1697234190 tempest-ServersTestJSON-1697234190-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52d196d0-4f7e-047a-0a1e-4c58fe5bbe5f, 'name': SearchDatastore_Task, 'duration_secs': 0.014836} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2092.159271] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-26c4038e-c91e-4ddd-99f9-3d292444d3d0 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2092.167724] env[63279]: DEBUG oslo_vmware.api [None req-106d6061-d6a3-483e-96fd-6e6c9f393126 tempest-ServersTestJSON-1697234190 tempest-ServersTestJSON-1697234190-project-member] Waiting for the task: (returnval){ [ 2092.167724] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]521e97c5-9f5a-3d5a-04ab-446ef78b6a7a" [ 2092.167724] env[63279]: _type = "Task" [ 2092.167724] env[63279]: } to complete. 
{{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2092.175418] env[63279]: DEBUG oslo_vmware.api [None req-106d6061-d6a3-483e-96fd-6e6c9f393126 tempest-ServersTestJSON-1697234190 tempest-ServersTestJSON-1697234190-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]521e97c5-9f5a-3d5a-04ab-446ef78b6a7a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2092.205494] env[63279]: DEBUG oslo_vmware.api [None req-1cc9ff1f-1d62-43ca-8885-080f467e6ec3 tempest-ServerShowV254Test-1528218800 tempest-ServerShowV254Test-1528218800-project-member] Task: {'id': task-2087467, 'name': PowerOffVM_Task, 'duration_secs': 1.153514} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2092.205762] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-1cc9ff1f-1d62-43ca-8885-080f467e6ec3 tempest-ServerShowV254Test-1528218800 tempest-ServerShowV254Test-1528218800-project-member] [instance: 4871421f-0015-4973-bb5f-c9042d411c82] Powered off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2092.205930] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-1cc9ff1f-1d62-43ca-8885-080f467e6ec3 tempest-ServerShowV254Test-1528218800 tempest-ServerShowV254Test-1528218800-project-member] [instance: 4871421f-0015-4973-bb5f-c9042d411c82] Unregistering the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2092.206197] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-3f6312e5-dc62-4617-82df-ea1a0e5aabd9 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2092.207762] env[63279]: DEBUG nova.network.neutron [-] [instance: 246f0945-7290-4cb7-a982-b17cb1573002] Updating instance_info_cache with network_info: [] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2092.231052] env[63279]: DEBUG oslo_concurrency.lockutils [None req-81ee3126-1583-4fad-aaad-b59e9f272102 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.008s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2092.234360] env[63279]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-64443045-7249-42f4-9c08-8cde50d6903b {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2092.236539] env[63279]: DEBUG oslo_concurrency.lockutils [None req-8600dfea-b562-47c8-be02-b00ae841b764 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 32.264s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2092.238095] env[63279]: INFO nova.compute.claims [None req-8600dfea-b562-47c8-be02-b00ae841b764 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] [instance: f2a68d73-49d6-4b38-aff1-c2eb850f2ca6] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 
2092.241152] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-1cc9ff1f-1d62-43ca-8885-080f467e6ec3 tempest-ServerShowV254Test-1528218800 tempest-ServerShowV254Test-1528218800-project-member] [instance: 4871421f-0015-4973-bb5f-c9042d411c82] Unregistered the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2092.241356] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-1cc9ff1f-1d62-43ca-8885-080f467e6ec3 tempest-ServerShowV254Test-1528218800 tempest-ServerShowV254Test-1528218800-project-member] [instance: 4871421f-0015-4973-bb5f-c9042d411c82] Deleting contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2092.241545] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-1cc9ff1f-1d62-43ca-8885-080f467e6ec3 tempest-ServerShowV254Test-1528218800 tempest-ServerShowV254Test-1528218800-project-member] Deleting the datastore file [datastore1] 4871421f-0015-4973-bb5f-c9042d411c82 {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2092.243390] env[63279]: DEBUG nova.compute.manager [None req-531b773c-e5fb-4ac2-9085-d70799185be7 tempest-ServerShowV257Test-1874161788 tempest-ServerShowV257Test-1874161788-project-member] [instance: f7f88f1a-a81a-4208-88d7-6a264e642ab1] Start spawning the instance on the hypervisor. {{(pid=63279) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 2092.245232] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-30c22a66-e3c7-4395-bc62-fafebec3a969 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2092.248062] env[63279]: DEBUG nova.compute.manager [req-9ea08a35-82fc-49a4-95a8-093b81994678 req-eaa77615-b767-4294-8e91-abef6677d051 service nova] [instance: c1ac4af5-b01e-4175-844f-7a67b2ef7526] Received event network-vif-deleted-d61892fc-4ba7-43db-8d82-c981cf8c7008 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2092.253451] env[63279]: INFO nova.scheduler.client.report [None req-81ee3126-1583-4fad-aaad-b59e9f272102 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Deleted allocations for instance 7db0c32d-36a4-4452-bb07-06de0c93ab50 [ 2092.257831] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4429bd6e-262a-4180-90fc-06e66f1f4fa6 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2092.268411] env[63279]: DEBUG oslo_vmware.api [None req-1cc9ff1f-1d62-43ca-8885-080f467e6ec3 tempest-ServerShowV254Test-1528218800 tempest-ServerShowV254Test-1528218800-project-member] Waiting for the task: (returnval){ [ 2092.268411] env[63279]: value = "task-2087470" [ 2092.268411] env[63279]: _type = "Task" [ 2092.268411] env[63279]: } to complete. 
{{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2092.282156] env[63279]: DEBUG nova.virt.hardware [None req-531b773c-e5fb-4ac2-9085-d70799185be7 tempest-ServerShowV257Test-1874161788 tempest-ServerShowV257Test-1874161788-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-13T17:30:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-13T17:30:01Z,direct_url=,disk_format='vmdk',id=30887889-e45b-4f67-8b3c-16216e594a90,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a935e86eee0a4c38adfe0367d2097a61',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-13T17:30:02Z,virtual_size=,visibility=), allow threads: False {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2092.282429] env[63279]: DEBUG nova.virt.hardware [None req-531b773c-e5fb-4ac2-9085-d70799185be7 tempest-ServerShowV257Test-1874161788 tempest-ServerShowV257Test-1874161788-project-member] Flavor limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2092.282607] env[63279]: DEBUG nova.virt.hardware [None req-531b773c-e5fb-4ac2-9085-d70799185be7 tempest-ServerShowV257Test-1874161788 tempest-ServerShowV257Test-1874161788-project-member] Image limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2092.282824] env[63279]: DEBUG nova.virt.hardware [None req-531b773c-e5fb-4ac2-9085-d70799185be7 tempest-ServerShowV257Test-1874161788 tempest-ServerShowV257Test-1874161788-project-member] Flavor pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2092.282991] env[63279]: DEBUG nova.virt.hardware [None req-531b773c-e5fb-4ac2-9085-d70799185be7 tempest-ServerShowV257Test-1874161788 tempest-ServerShowV257Test-1874161788-project-member] Image pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2092.283352] env[63279]: DEBUG nova.virt.hardware [None req-531b773c-e5fb-4ac2-9085-d70799185be7 tempest-ServerShowV257Test-1874161788 tempest-ServerShowV257Test-1874161788-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2092.283609] env[63279]: DEBUG nova.virt.hardware [None req-531b773c-e5fb-4ac2-9085-d70799185be7 tempest-ServerShowV257Test-1874161788 tempest-ServerShowV257Test-1874161788-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 2092.283811] env[63279]: DEBUG nova.virt.hardware [None req-531b773c-e5fb-4ac2-9085-d70799185be7 tempest-ServerShowV257Test-1874161788 tempest-ServerShowV257Test-1874161788-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 2092.284018] env[63279]: DEBUG nova.virt.hardware [None 
req-531b773c-e5fb-4ac2-9085-d70799185be7 tempest-ServerShowV257Test-1874161788 tempest-ServerShowV257Test-1874161788-project-member] Got 1 possible topologies {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2092.284233] env[63279]: DEBUG nova.virt.hardware [None req-531b773c-e5fb-4ac2-9085-d70799185be7 tempest-ServerShowV257Test-1874161788 tempest-ServerShowV257Test-1874161788-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2092.284445] env[63279]: DEBUG nova.virt.hardware [None req-531b773c-e5fb-4ac2-9085-d70799185be7 tempest-ServerShowV257Test-1874161788 tempest-ServerShowV257Test-1874161788-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2092.285746] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a505b71-9dad-4ae4-97c3-2a724a4ca5bc {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2092.304408] env[63279]: DEBUG oslo_concurrency.lockutils [None req-115ccca1-b77d-4cbf-8091-6f2f334a342f tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2092.304801] env[63279]: DEBUG nova.compute.manager [req-8dfd69b3-7d0c-4ffe-827c-d38761ce54cb req-82efeaa8-f558-4689-87e0-d9a2d567e596 service nova] [instance: 246f0945-7290-4cb7-a982-b17cb1573002] Detach interface failed, port_id=55fb7ecc-3067-4f87-af91-9742fa7d90b1, reason: Instance 246f0945-7290-4cb7-a982-b17cb1573002 could not be found. {{(pid=63279) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11484}} [ 2092.305518] env[63279]: DEBUG oslo_vmware.api [None req-1cc9ff1f-1d62-43ca-8885-080f467e6ec3 tempest-ServerShowV254Test-1528218800 tempest-ServerShowV254Test-1528218800-project-member] Task: {'id': task-2087470, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2092.311651] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5067ef10-2b36-4904-ad9a-5932775173bf {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2092.326581] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-531b773c-e5fb-4ac2-9085-d70799185be7 tempest-ServerShowV257Test-1874161788 tempest-ServerShowV257Test-1874161788-project-member] [instance: f7f88f1a-a81a-4208-88d7-6a264e642ab1] Instance VIF info [] {{(pid=63279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2092.332333] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-531b773c-e5fb-4ac2-9085-d70799185be7 tempest-ServerShowV257Test-1874161788 tempest-ServerShowV257Test-1874161788-project-member] Creating folder: Project (7a9efcefc5554799a2fb3b8348ee92e3). Parent ref: group-v427491. 
{{(pid=63279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 2092.332978] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e3ada792-bb5c-459d-bb93-a85253956d33 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2092.341832] env[63279]: INFO nova.virt.vmwareapi.vm_util [None req-531b773c-e5fb-4ac2-9085-d70799185be7 tempest-ServerShowV257Test-1874161788 tempest-ServerShowV257Test-1874161788-project-member] Created folder: Project (7a9efcefc5554799a2fb3b8348ee92e3) in parent group-v427491. [ 2092.342101] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-531b773c-e5fb-4ac2-9085-d70799185be7 tempest-ServerShowV257Test-1874161788 tempest-ServerShowV257Test-1874161788-project-member] Creating folder: Instances. Parent ref: group-v427684. {{(pid=63279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 2092.342276] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-dc9fcc28-9f75-452b-a46b-1f66a5fb0735 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2092.351566] env[63279]: INFO nova.virt.vmwareapi.vm_util [None req-531b773c-e5fb-4ac2-9085-d70799185be7 tempest-ServerShowV257Test-1874161788 tempest-ServerShowV257Test-1874161788-project-member] Created folder: Instances in parent group-v427684. [ 2092.351795] env[63279]: DEBUG oslo.service.loopingcall [None req-531b773c-e5fb-4ac2-9085-d70799185be7 tempest-ServerShowV257Test-1874161788 tempest-ServerShowV257Test-1874161788-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2092.351980] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f7f88f1a-a81a-4208-88d7-6a264e642ab1] Creating VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2092.352207] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ebf4f24a-ca00-4613-913c-919e62840f02 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2092.368311] env[63279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2092.368311] env[63279]: value = "task-2087473" [ 2092.368311] env[63279]: _type = "Task" [ 2092.368311] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2092.375728] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087473, 'name': CreateVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2092.678500] env[63279]: DEBUG oslo_vmware.api [None req-106d6061-d6a3-483e-96fd-6e6c9f393126 tempest-ServersTestJSON-1697234190 tempest-ServersTestJSON-1697234190-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]521e97c5-9f5a-3d5a-04ab-446ef78b6a7a, 'name': SearchDatastore_Task, 'duration_secs': 0.009146} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2092.678858] env[63279]: DEBUG oslo_concurrency.lockutils [None req-106d6061-d6a3-483e-96fd-6e6c9f393126 tempest-ServersTestJSON-1697234190 tempest-ServersTestJSON-1697234190-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2092.679146] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-106d6061-d6a3-483e-96fd-6e6c9f393126 tempest-ServersTestJSON-1697234190 tempest-ServersTestJSON-1697234190-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] c8b42e3b-b841-4b79-a4f3-ef62577d4902/c8b42e3b-b841-4b79-a4f3-ef62577d4902.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2092.679447] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d2ff613f-878a-4a60-8c51-8f6a15a58e75 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2092.685735] env[63279]: DEBUG oslo_vmware.api [None req-106d6061-d6a3-483e-96fd-6e6c9f393126 tempest-ServersTestJSON-1697234190 tempest-ServersTestJSON-1697234190-project-member] Waiting for the task: (returnval){ [ 2092.685735] env[63279]: value = "task-2087474" [ 2092.685735] env[63279]: _type = "Task" [ 2092.685735] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2092.695091] env[63279]: DEBUG oslo_vmware.api [None req-106d6061-d6a3-483e-96fd-6e6c9f393126 tempest-ServersTestJSON-1697234190 tempest-ServersTestJSON-1697234190-project-member] Task: {'id': task-2087474, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2092.710516] env[63279]: INFO nova.compute.manager [-] [instance: 246f0945-7290-4cb7-a982-b17cb1573002] Took 1.25 seconds to deallocate network for instance. [ 2092.779192] env[63279]: DEBUG oslo_concurrency.lockutils [None req-81ee3126-1583-4fad-aaad-b59e9f272102 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Lock "7db0c32d-36a4-4452-bb07-06de0c93ab50" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 38.066s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2092.785228] env[63279]: DEBUG oslo_vmware.api [None req-1cc9ff1f-1d62-43ca-8885-080f467e6ec3 tempest-ServerShowV254Test-1528218800 tempest-ServerShowV254Test-1528218800-project-member] Task: {'id': task-2087470, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.085105} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2092.785728] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-1cc9ff1f-1d62-43ca-8885-080f467e6ec3 tempest-ServerShowV254Test-1528218800 tempest-ServerShowV254Test-1528218800-project-member] Deleted the datastore file {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2092.785924] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-1cc9ff1f-1d62-43ca-8885-080f467e6ec3 tempest-ServerShowV254Test-1528218800 tempest-ServerShowV254Test-1528218800-project-member] [instance: 4871421f-0015-4973-bb5f-c9042d411c82] Deleted contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2092.786125] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-1cc9ff1f-1d62-43ca-8885-080f467e6ec3 tempest-ServerShowV254Test-1528218800 tempest-ServerShowV254Test-1528218800-project-member] [instance: 4871421f-0015-4973-bb5f-c9042d411c82] Instance destroyed {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2092.786301] env[63279]: INFO nova.compute.manager [None req-1cc9ff1f-1d62-43ca-8885-080f467e6ec3 tempest-ServerShowV254Test-1528218800 tempest-ServerShowV254Test-1528218800-project-member] [instance: 4871421f-0015-4973-bb5f-c9042d411c82] Took 2.13 seconds to destroy the instance on the hypervisor. [ 2092.786574] env[63279]: DEBUG oslo.service.loopingcall [None req-1cc9ff1f-1d62-43ca-8885-080f467e6ec3 tempest-ServerShowV254Test-1528218800 tempest-ServerShowV254Test-1528218800-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2092.786769] env[63279]: DEBUG nova.compute.manager [-] [instance: 4871421f-0015-4973-bb5f-c9042d411c82] Deallocating network for instance {{(pid=63279) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2092.786862] env[63279]: DEBUG nova.network.neutron [-] [instance: 4871421f-0015-4973-bb5f-c9042d411c82] deallocate_for_instance() {{(pid=63279) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2092.801816] env[63279]: DEBUG nova.network.neutron [-] [instance: 4871421f-0015-4973-bb5f-c9042d411c82] Instance cache missing network info. {{(pid=63279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2092.880146] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087473, 'name': CreateVM_Task, 'duration_secs': 0.262702} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2092.880339] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f7f88f1a-a81a-4208-88d7-6a264e642ab1] Created VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2092.880900] env[63279]: DEBUG oslo_concurrency.lockutils [None req-531b773c-e5fb-4ac2-9085-d70799185be7 tempest-ServerShowV257Test-1874161788 tempest-ServerShowV257Test-1874161788-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2092.881169] env[63279]: DEBUG oslo_concurrency.lockutils [None req-531b773c-e5fb-4ac2-9085-d70799185be7 tempest-ServerShowV257Test-1874161788 tempest-ServerShowV257Test-1874161788-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2092.881575] env[63279]: DEBUG oslo_concurrency.lockutils [None req-531b773c-e5fb-4ac2-9085-d70799185be7 tempest-ServerShowV257Test-1874161788 tempest-ServerShowV257Test-1874161788-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2092.881920] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b6178793-d9cb-4627-9b76-c3642129f2c4 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2092.887168] env[63279]: DEBUG oslo_vmware.api [None req-531b773c-e5fb-4ac2-9085-d70799185be7 tempest-ServerShowV257Test-1874161788 tempest-ServerShowV257Test-1874161788-project-member] Waiting for the task: (returnval){ [ 2092.887168] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52a0f6c8-991c-a9b2-cf68-c1e5951b7b92" [ 2092.887168] env[63279]: _type = "Task" [ 2092.887168] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2092.900329] env[63279]: DEBUG oslo_vmware.api [None req-531b773c-e5fb-4ac2-9085-d70799185be7 tempest-ServerShowV257Test-1874161788 tempest-ServerShowV257Test-1874161788-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52a0f6c8-991c-a9b2-cf68-c1e5951b7b92, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2093.195582] env[63279]: DEBUG oslo_vmware.api [None req-106d6061-d6a3-483e-96fd-6e6c9f393126 tempest-ServersTestJSON-1697234190 tempest-ServersTestJSON-1697234190-project-member] Task: {'id': task-2087474, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.44556} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2093.195858] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-106d6061-d6a3-483e-96fd-6e6c9f393126 tempest-ServersTestJSON-1697234190 tempest-ServersTestJSON-1697234190-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] c8b42e3b-b841-4b79-a4f3-ef62577d4902/c8b42e3b-b841-4b79-a4f3-ef62577d4902.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2093.196087] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-106d6061-d6a3-483e-96fd-6e6c9f393126 tempest-ServersTestJSON-1697234190 tempest-ServersTestJSON-1697234190-project-member] [instance: c8b42e3b-b841-4b79-a4f3-ef62577d4902] Extending root virtual disk to 1048576 {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2093.196341] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-2dee848d-0e92-4378-a0a5-b9055178ff06 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2093.203218] env[63279]: DEBUG oslo_vmware.api [None req-106d6061-d6a3-483e-96fd-6e6c9f393126 tempest-ServersTestJSON-1697234190 tempest-ServersTestJSON-1697234190-project-member] Waiting for the task: (returnval){ [ 2093.203218] env[63279]: value = "task-2087475" [ 2093.203218] env[63279]: _type = "Task" [ 2093.203218] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2093.211571] env[63279]: DEBUG oslo_vmware.api [None req-106d6061-d6a3-483e-96fd-6e6c9f393126 tempest-ServersTestJSON-1697234190 tempest-ServersTestJSON-1697234190-project-member] Task: {'id': task-2087475, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2093.219769] env[63279]: DEBUG oslo_concurrency.lockutils [None req-70205c4c-5bd3-44be-b3d5-46d8934028a1 tempest-ServerAddressesTestJSON-1242830333 tempest-ServerAddressesTestJSON-1242830333-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2093.305018] env[63279]: DEBUG nova.network.neutron [-] [instance: 4871421f-0015-4973-bb5f-c9042d411c82] Updating instance_info_cache with network_info: [] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2093.397450] env[63279]: DEBUG oslo_vmware.api [None req-531b773c-e5fb-4ac2-9085-d70799185be7 tempest-ServerShowV257Test-1874161788 tempest-ServerShowV257Test-1874161788-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52a0f6c8-991c-a9b2-cf68-c1e5951b7b92, 'name': SearchDatastore_Task, 'duration_secs': 0.053333} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2093.398332] env[63279]: DEBUG oslo_concurrency.lockutils [None req-531b773c-e5fb-4ac2-9085-d70799185be7 tempest-ServerShowV257Test-1874161788 tempest-ServerShowV257Test-1874161788-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2093.398332] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-531b773c-e5fb-4ac2-9085-d70799185be7 tempest-ServerShowV257Test-1874161788 tempest-ServerShowV257Test-1874161788-project-member] [instance: f7f88f1a-a81a-4208-88d7-6a264e642ab1] Processing image 30887889-e45b-4f67-8b3c-16216e594a90 {{(pid=63279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2093.398332] env[63279]: DEBUG oslo_concurrency.lockutils [None req-531b773c-e5fb-4ac2-9085-d70799185be7 tempest-ServerShowV257Test-1874161788 tempest-ServerShowV257Test-1874161788-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2093.398514] env[63279]: DEBUG oslo_concurrency.lockutils [None req-531b773c-e5fb-4ac2-9085-d70799185be7 tempest-ServerShowV257Test-1874161788 tempest-ServerShowV257Test-1874161788-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2093.398546] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-531b773c-e5fb-4ac2-9085-d70799185be7 tempest-ServerShowV257Test-1874161788 tempest-ServerShowV257Test-1874161788-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2093.398801] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-41830389-55ec-4030-a446-873a2a89d7a6 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2093.411619] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-531b773c-e5fb-4ac2-9085-d70799185be7 tempest-ServerShowV257Test-1874161788 tempest-ServerShowV257Test-1874161788-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2093.411821] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-531b773c-e5fb-4ac2-9085-d70799185be7 tempest-ServerShowV257Test-1874161788 tempest-ServerShowV257Test-1874161788-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2093.412663] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e3d5a69b-0b6f-4a8c-82c0-99f1f2b6ec78 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2093.420511] env[63279]: DEBUG oslo_vmware.api [None req-531b773c-e5fb-4ac2-9085-d70799185be7 tempest-ServerShowV257Test-1874161788 tempest-ServerShowV257Test-1874161788-project-member] Waiting for the task: (returnval){ [ 2093.420511] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]5245fd4a-3645-e828-c5a1-04e7a956e3b4" [ 2093.420511] env[63279]: _type = "Task" [ 2093.420511] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2093.430476] env[63279]: DEBUG oslo_vmware.api [None req-531b773c-e5fb-4ac2-9085-d70799185be7 tempest-ServerShowV257Test-1874161788 tempest-ServerShowV257Test-1874161788-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]5245fd4a-3645-e828-c5a1-04e7a956e3b4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2093.651131] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2dfb14c-8bd6-4aa3-ad53-ae5246475e9f {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2093.658864] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-635300d8-8af7-4ba4-a3b9-b8c67739858c {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2093.690267] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1026e10-1b2f-4664-86a0-5d7acc62ffa4 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2093.698213] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e1d4e58-5815-443c-9732-70ddb2659263 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2093.716610] env[63279]: DEBUG nova.compute.provider_tree [None req-8600dfea-b562-47c8-be02-b00ae841b764 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Inventory has not changed in ProviderTree for provider: 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2093.720535] env[63279]: DEBUG oslo_vmware.api [None req-106d6061-d6a3-483e-96fd-6e6c9f393126 tempest-ServersTestJSON-1697234190 tempest-ServersTestJSON-1697234190-project-member] Task: {'id': task-2087475, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.063} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2093.720792] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-106d6061-d6a3-483e-96fd-6e6c9f393126 tempest-ServersTestJSON-1697234190 tempest-ServersTestJSON-1697234190-project-member] [instance: c8b42e3b-b841-4b79-a4f3-ef62577d4902] Extended root virtual disk {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2093.721662] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2faefcb-8885-46f6-8c0a-622d128d6971 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2093.743960] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-106d6061-d6a3-483e-96fd-6e6c9f393126 tempest-ServersTestJSON-1697234190 tempest-ServersTestJSON-1697234190-project-member] [instance: c8b42e3b-b841-4b79-a4f3-ef62577d4902] Reconfiguring VM instance instance-00000044 to attach disk [datastore1] c8b42e3b-b841-4b79-a4f3-ef62577d4902/c8b42e3b-b841-4b79-a4f3-ef62577d4902.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2093.745013] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8f2ded8f-0141-4212-b7f3-5e98f75bdc85 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2093.763970] env[63279]: DEBUG oslo_vmware.api [None req-106d6061-d6a3-483e-96fd-6e6c9f393126 tempest-ServersTestJSON-1697234190 tempest-ServersTestJSON-1697234190-project-member] Waiting for the task: (returnval){ [ 2093.763970] env[63279]: value = "task-2087476" [ 2093.763970] env[63279]: _type = "Task" [ 2093.763970] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2093.772269] env[63279]: DEBUG oslo_vmware.api [None req-106d6061-d6a3-483e-96fd-6e6c9f393126 tempest-ServersTestJSON-1697234190 tempest-ServersTestJSON-1697234190-project-member] Task: {'id': task-2087476, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2093.807302] env[63279]: INFO nova.compute.manager [-] [instance: 4871421f-0015-4973-bb5f-c9042d411c82] Took 1.02 seconds to deallocate network for instance. [ 2093.933907] env[63279]: DEBUG oslo_vmware.api [None req-531b773c-e5fb-4ac2-9085-d70799185be7 tempest-ServerShowV257Test-1874161788 tempest-ServerShowV257Test-1874161788-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]5245fd4a-3645-e828-c5a1-04e7a956e3b4, 'name': SearchDatastore_Task, 'duration_secs': 0.009801} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2093.934805] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f9339d5e-35bb-48cd-9940-c3f6261eba48 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2093.940794] env[63279]: DEBUG oslo_vmware.api [None req-531b773c-e5fb-4ac2-9085-d70799185be7 tempest-ServerShowV257Test-1874161788 tempest-ServerShowV257Test-1874161788-project-member] Waiting for the task: (returnval){ [ 2093.940794] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52d186cb-f0fd-1800-53fb-d8e697e029a4" [ 2093.940794] env[63279]: _type = "Task" [ 2093.940794] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2093.948928] env[63279]: DEBUG oslo_vmware.api [None req-531b773c-e5fb-4ac2-9085-d70799185be7 tempest-ServerShowV257Test-1874161788 tempest-ServerShowV257Test-1874161788-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52d186cb-f0fd-1800-53fb-d8e697e029a4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2094.218157] env[63279]: DEBUG nova.scheduler.client.report [None req-8600dfea-b562-47c8-be02-b00ae841b764 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Inventory has not changed for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2094.274018] env[63279]: DEBUG oslo_vmware.api [None req-106d6061-d6a3-483e-96fd-6e6c9f393126 tempest-ServersTestJSON-1697234190 tempest-ServersTestJSON-1697234190-project-member] Task: {'id': task-2087476, 'name': ReconfigVM_Task, 'duration_secs': 0.31328} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2094.274372] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-106d6061-d6a3-483e-96fd-6e6c9f393126 tempest-ServersTestJSON-1697234190 tempest-ServersTestJSON-1697234190-project-member] [instance: c8b42e3b-b841-4b79-a4f3-ef62577d4902] Reconfigured VM instance instance-00000044 to attach disk [datastore1] c8b42e3b-b841-4b79-a4f3-ef62577d4902/c8b42e3b-b841-4b79-a4f3-ef62577d4902.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2094.275000] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e7bda54f-ec69-4dfc-99af-31bbfd6c5157 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2094.281711] env[63279]: DEBUG oslo_vmware.api [None req-106d6061-d6a3-483e-96fd-6e6c9f393126 tempest-ServersTestJSON-1697234190 tempest-ServersTestJSON-1697234190-project-member] Waiting for the task: (returnval){ [ 2094.281711] env[63279]: value = "task-2087478" [ 2094.281711] env[63279]: _type = "Task" [ 2094.281711] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2094.290599] env[63279]: DEBUG oslo_vmware.api [None req-106d6061-d6a3-483e-96fd-6e6c9f393126 tempest-ServersTestJSON-1697234190 tempest-ServersTestJSON-1697234190-project-member] Task: {'id': task-2087478, 'name': Rename_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2094.316172] env[63279]: DEBUG oslo_concurrency.lockutils [None req-1cc9ff1f-1d62-43ca-8885-080f467e6ec3 tempest-ServerShowV254Test-1528218800 tempest-ServerShowV254Test-1528218800-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2094.453967] env[63279]: DEBUG oslo_vmware.api [None req-531b773c-e5fb-4ac2-9085-d70799185be7 tempest-ServerShowV257Test-1874161788 tempest-ServerShowV257Test-1874161788-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52d186cb-f0fd-1800-53fb-d8e697e029a4, 'name': SearchDatastore_Task, 'duration_secs': 0.009971} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2094.453967] env[63279]: DEBUG oslo_concurrency.lockutils [None req-531b773c-e5fb-4ac2-9085-d70799185be7 tempest-ServerShowV257Test-1874161788 tempest-ServerShowV257Test-1874161788-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2094.453967] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-531b773c-e5fb-4ac2-9085-d70799185be7 tempest-ServerShowV257Test-1874161788 tempest-ServerShowV257Test-1874161788-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] f7f88f1a-a81a-4208-88d7-6a264e642ab1/f7f88f1a-a81a-4208-88d7-6a264e642ab1.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2094.453967] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-089c1cbf-45e5-4caa-96cf-99591b7bbd76 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2094.459677] env[63279]: DEBUG oslo_vmware.api [None req-531b773c-e5fb-4ac2-9085-d70799185be7 tempest-ServerShowV257Test-1874161788 tempest-ServerShowV257Test-1874161788-project-member] Waiting for the task: (returnval){ [ 2094.459677] env[63279]: value = "task-2087479" [ 2094.459677] env[63279]: _type = "Task" [ 2094.459677] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2094.467575] env[63279]: DEBUG oslo_vmware.api [None req-531b773c-e5fb-4ac2-9085-d70799185be7 tempest-ServerShowV257Test-1874161788 tempest-ServerShowV257Test-1874161788-project-member] Task: {'id': task-2087479, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2094.725010] env[63279]: DEBUG oslo_concurrency.lockutils [None req-8600dfea-b562-47c8-be02-b00ae841b764 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.488s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2094.725711] env[63279]: DEBUG nova.compute.manager [None req-8600dfea-b562-47c8-be02-b00ae841b764 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] [instance: f2a68d73-49d6-4b38-aff1-c2eb850f2ca6] Start building networks asynchronously for instance. 
{{(pid=63279) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 2094.728698] env[63279]: DEBUG oslo_concurrency.lockutils [None req-9e49e054-a083-4e75-99cd-6d3ad402538a tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 33.523s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2094.792481] env[63279]: DEBUG oslo_vmware.api [None req-106d6061-d6a3-483e-96fd-6e6c9f393126 tempest-ServersTestJSON-1697234190 tempest-ServersTestJSON-1697234190-project-member] Task: {'id': task-2087478, 'name': Rename_Task, 'duration_secs': 0.2862} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2094.792779] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-106d6061-d6a3-483e-96fd-6e6c9f393126 tempest-ServersTestJSON-1697234190 tempest-ServersTestJSON-1697234190-project-member] [instance: c8b42e3b-b841-4b79-a4f3-ef62577d4902] Powering on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2094.793055] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c362bbc0-524b-49f5-9ef7-fe64d5189ce1 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2094.800419] env[63279]: DEBUG oslo_vmware.api [None req-106d6061-d6a3-483e-96fd-6e6c9f393126 tempest-ServersTestJSON-1697234190 tempest-ServersTestJSON-1697234190-project-member] Waiting for the task: (returnval){ [ 2094.800419] env[63279]: value = "task-2087480" [ 2094.800419] env[63279]: _type = "Task" [ 2094.800419] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2094.809173] env[63279]: DEBUG oslo_vmware.api [None req-106d6061-d6a3-483e-96fd-6e6c9f393126 tempest-ServersTestJSON-1697234190 tempest-ServersTestJSON-1697234190-project-member] Task: {'id': task-2087480, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2094.861691] env[63279]: DEBUG oslo_concurrency.lockutils [None req-88e1e34d-1334-4ce4-a237-c76a589e2359 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Acquiring lock "d6e40dbc-f20e-4164-b460-18de6ea72906" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2094.861964] env[63279]: DEBUG oslo_concurrency.lockutils [None req-88e1e34d-1334-4ce4-a237-c76a589e2359 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Lock "d6e40dbc-f20e-4164-b460-18de6ea72906" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2094.969585] env[63279]: DEBUG oslo_vmware.api [None req-531b773c-e5fb-4ac2-9085-d70799185be7 tempest-ServerShowV257Test-1874161788 tempest-ServerShowV257Test-1874161788-project-member] Task: {'id': task-2087479, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.48107} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2094.969813] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-531b773c-e5fb-4ac2-9085-d70799185be7 tempest-ServerShowV257Test-1874161788 tempest-ServerShowV257Test-1874161788-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] f7f88f1a-a81a-4208-88d7-6a264e642ab1/f7f88f1a-a81a-4208-88d7-6a264e642ab1.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2094.970042] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-531b773c-e5fb-4ac2-9085-d70799185be7 tempest-ServerShowV257Test-1874161788 tempest-ServerShowV257Test-1874161788-project-member] [instance: f7f88f1a-a81a-4208-88d7-6a264e642ab1] Extending root virtual disk to 1048576 {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2094.970320] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-681d173e-6a55-41c9-9c0b-a372e4ab262e {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2094.977085] env[63279]: DEBUG oslo_vmware.api [None req-531b773c-e5fb-4ac2-9085-d70799185be7 tempest-ServerShowV257Test-1874161788 tempest-ServerShowV257Test-1874161788-project-member] Waiting for the task: (returnval){ [ 2094.977085] env[63279]: value = "task-2087481" [ 2094.977085] env[63279]: _type = "Task" [ 2094.977085] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2094.984981] env[63279]: DEBUG oslo_vmware.api [None req-531b773c-e5fb-4ac2-9085-d70799185be7 tempest-ServerShowV257Test-1874161788 tempest-ServerShowV257Test-1874161788-project-member] Task: {'id': task-2087481, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2095.232015] env[63279]: DEBUG nova.compute.utils [None req-8600dfea-b562-47c8-be02-b00ae841b764 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Using /dev/sd instead of None {{(pid=63279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2095.233558] env[63279]: DEBUG nova.compute.manager [None req-8600dfea-b562-47c8-be02-b00ae841b764 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] [instance: f2a68d73-49d6-4b38-aff1-c2eb850f2ca6] Allocating IP information in the background. 
{{(pid=63279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 2095.233787] env[63279]: DEBUG nova.network.neutron [None req-8600dfea-b562-47c8-be02-b00ae841b764 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] [instance: f2a68d73-49d6-4b38-aff1-c2eb850f2ca6] allocate_for_instance() {{(pid=63279) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2095.237820] env[63279]: INFO nova.compute.claims [None req-9e49e054-a083-4e75-99cd-6d3ad402538a tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: b981ac83-6c23-4d44-bd28-12da30d746bd] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2095.244643] env[63279]: DEBUG nova.compute.manager [None req-8600dfea-b562-47c8-be02-b00ae841b764 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] [instance: f2a68d73-49d6-4b38-aff1-c2eb850f2ca6] Start building block device mappings for instance. {{(pid=63279) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 2095.311788] env[63279]: DEBUG oslo_vmware.api [None req-106d6061-d6a3-483e-96fd-6e6c9f393126 tempest-ServersTestJSON-1697234190 tempest-ServersTestJSON-1697234190-project-member] Task: {'id': task-2087480, 'name': PowerOnVM_Task, 'duration_secs': 0.466058} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2095.312111] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-106d6061-d6a3-483e-96fd-6e6c9f393126 tempest-ServersTestJSON-1697234190 tempest-ServersTestJSON-1697234190-project-member] [instance: c8b42e3b-b841-4b79-a4f3-ef62577d4902] Powered on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2095.312324] env[63279]: INFO nova.compute.manager [None req-106d6061-d6a3-483e-96fd-6e6c9f393126 tempest-ServersTestJSON-1697234190 tempest-ServersTestJSON-1697234190-project-member] [instance: c8b42e3b-b841-4b79-a4f3-ef62577d4902] Took 7.76 seconds to spawn the instance on the hypervisor. 
[ 2095.312509] env[63279]: DEBUG nova.compute.manager [None req-106d6061-d6a3-483e-96fd-6e6c9f393126 tempest-ServersTestJSON-1697234190 tempest-ServersTestJSON-1697234190-project-member] [instance: c8b42e3b-b841-4b79-a4f3-ef62577d4902] Checking state {{(pid=63279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2095.313526] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92ea7e62-36c4-4248-95fc-a9be58decb6a {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2095.323289] env[63279]: DEBUG nova.policy [None req-8600dfea-b562-47c8-be02-b00ae841b764 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '17fbc0127a5944ac933232873f282980', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '77ee2145dda94e2b85eeb7379ed98e26', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63279) authorize /opt/stack/nova/nova/policy.py:192}} [ 2095.365258] env[63279]: DEBUG nova.compute.manager [None req-88e1e34d-1334-4ce4-a237-c76a589e2359 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: d6e40dbc-f20e-4164-b460-18de6ea72906] Starting instance... {{(pid=63279) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 2095.487262] env[63279]: DEBUG oslo_vmware.api [None req-531b773c-e5fb-4ac2-9085-d70799185be7 tempest-ServerShowV257Test-1874161788 tempest-ServerShowV257Test-1874161788-project-member] Task: {'id': task-2087481, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.174086} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2095.491042] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-531b773c-e5fb-4ac2-9085-d70799185be7 tempest-ServerShowV257Test-1874161788 tempest-ServerShowV257Test-1874161788-project-member] [instance: f7f88f1a-a81a-4208-88d7-6a264e642ab1] Extended root virtual disk {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2095.491042] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ccbcf8cb-aa38-4f64-a083-0631c4d369df {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2095.510499] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-531b773c-e5fb-4ac2-9085-d70799185be7 tempest-ServerShowV257Test-1874161788 tempest-ServerShowV257Test-1874161788-project-member] [instance: f7f88f1a-a81a-4208-88d7-6a264e642ab1] Reconfiguring VM instance instance-00000045 to attach disk [datastore1] f7f88f1a-a81a-4208-88d7-6a264e642ab1/f7f88f1a-a81a-4208-88d7-6a264e642ab1.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2095.510607] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7cd3a565-70c1-4091-8e05-8ee72a4726a4 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2095.530641] env[63279]: DEBUG oslo_vmware.api [None req-531b773c-e5fb-4ac2-9085-d70799185be7 tempest-ServerShowV257Test-1874161788 tempest-ServerShowV257Test-1874161788-project-member] Waiting for the task: (returnval){ [ 2095.530641] env[63279]: value = "task-2087482" [ 2095.530641] env[63279]: _type = "Task" [ 2095.530641] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2095.538625] env[63279]: DEBUG oslo_vmware.api [None req-531b773c-e5fb-4ac2-9085-d70799185be7 tempest-ServerShowV257Test-1874161788 tempest-ServerShowV257Test-1874161788-project-member] Task: {'id': task-2087482, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2095.752405] env[63279]: INFO nova.compute.resource_tracker [None req-9e49e054-a083-4e75-99cd-6d3ad402538a tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: b981ac83-6c23-4d44-bd28-12da30d746bd] Updating resource usage from migration 44650656-4b9e-403b-9bcc-56f29df93e8b [ 2095.842888] env[63279]: DEBUG nova.network.neutron [None req-8600dfea-b562-47c8-be02-b00ae841b764 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] [instance: f2a68d73-49d6-4b38-aff1-c2eb850f2ca6] Successfully created port: 036111dc-8280-4649-98b1-f0319d92337f {{(pid=63279) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2095.846106] env[63279]: INFO nova.compute.manager [None req-106d6061-d6a3-483e-96fd-6e6c9f393126 tempest-ServersTestJSON-1697234190 tempest-ServersTestJSON-1697234190-project-member] [instance: c8b42e3b-b841-4b79-a4f3-ef62577d4902] Took 50.38 seconds to build instance. 
[ 2095.883517] env[63279]: DEBUG oslo_concurrency.lockutils [None req-88e1e34d-1334-4ce4-a237-c76a589e2359 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2096.044379] env[63279]: DEBUG oslo_vmware.api [None req-531b773c-e5fb-4ac2-9085-d70799185be7 tempest-ServerShowV257Test-1874161788 tempest-ServerShowV257Test-1874161788-project-member] Task: {'id': task-2087482, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2096.243977] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02335434-b9e2-4a70-b3d8-b340570389aa {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2096.251978] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74d486c2-67ee-4fb0-ad15-345d39d239e3 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2096.258513] env[63279]: DEBUG nova.compute.manager [None req-8600dfea-b562-47c8-be02-b00ae841b764 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] [instance: f2a68d73-49d6-4b38-aff1-c2eb850f2ca6] Start spawning the instance on the hypervisor. {{(pid=63279) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 2096.296377] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a93b6b6e-bd2c-47c1-bb6c-d99c7c3d917e {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2096.308082] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a570117-7c9d-4b55-b020-b8918983f340 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2096.311981] env[63279]: DEBUG nova.virt.hardware [None req-8600dfea-b562-47c8-be02-b00ae841b764 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-13T17:30:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-13T17:30:01Z,direct_url=,disk_format='vmdk',id=30887889-e45b-4f67-8b3c-16216e594a90,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a935e86eee0a4c38adfe0367d2097a61',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-13T17:30:02Z,virtual_size=,visibility=), allow threads: False {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2096.312338] env[63279]: DEBUG nova.virt.hardware [None req-8600dfea-b562-47c8-be02-b00ae841b764 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Flavor limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:352}} [ 2096.312568] env[63279]: DEBUG nova.virt.hardware [None req-8600dfea-b562-47c8-be02-b00ae841b764 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Image limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2096.312879] env[63279]: DEBUG nova.virt.hardware [None req-8600dfea-b562-47c8-be02-b00ae841b764 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Flavor pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2096.313099] env[63279]: DEBUG nova.virt.hardware [None req-8600dfea-b562-47c8-be02-b00ae841b764 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Image pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2096.313318] env[63279]: DEBUG nova.virt.hardware [None req-8600dfea-b562-47c8-be02-b00ae841b764 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2096.314515] env[63279]: DEBUG nova.virt.hardware [None req-8600dfea-b562-47c8-be02-b00ae841b764 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 2096.314515] env[63279]: DEBUG nova.virt.hardware [None req-8600dfea-b562-47c8-be02-b00ae841b764 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 2096.314515] env[63279]: DEBUG nova.virt.hardware [None req-8600dfea-b562-47c8-be02-b00ae841b764 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Got 1 possible topologies {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2096.314515] env[63279]: DEBUG nova.virt.hardware [None req-8600dfea-b562-47c8-be02-b00ae841b764 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2096.314515] env[63279]: DEBUG nova.virt.hardware [None req-8600dfea-b562-47c8-be02-b00ae841b764 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2096.318071] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2981454d-ae9b-4445-9615-32230ac7c279 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2096.331280] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd87705e-a0f4-40bc-a7e7-41ae1e6d482a {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2096.335013] env[63279]: DEBUG nova.compute.provider_tree [None 
req-9e49e054-a083-4e75-99cd-6d3ad402538a tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Inventory has not changed in ProviderTree for provider: 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2096.349537] env[63279]: DEBUG oslo_concurrency.lockutils [None req-106d6061-d6a3-483e-96fd-6e6c9f393126 tempest-ServersTestJSON-1697234190 tempest-ServersTestJSON-1697234190-project-member] Lock "c8b42e3b-b841-4b79-a4f3-ef62577d4902" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 65.587s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2096.541854] env[63279]: DEBUG oslo_vmware.api [None req-531b773c-e5fb-4ac2-9085-d70799185be7 tempest-ServerShowV257Test-1874161788 tempest-ServerShowV257Test-1874161788-project-member] Task: {'id': task-2087482, 'name': ReconfigVM_Task, 'duration_secs': 0.989704} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2096.543242] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-531b773c-e5fb-4ac2-9085-d70799185be7 tempest-ServerShowV257Test-1874161788 tempest-ServerShowV257Test-1874161788-project-member] [instance: f7f88f1a-a81a-4208-88d7-6a264e642ab1] Reconfigured VM instance instance-00000045 to attach disk [datastore1] f7f88f1a-a81a-4208-88d7-6a264e642ab1/f7f88f1a-a81a-4208-88d7-6a264e642ab1.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2096.543242] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-98b494c5-28b9-42f2-a73f-6e820bc10fef {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2096.550104] env[63279]: DEBUG oslo_vmware.api [None req-531b773c-e5fb-4ac2-9085-d70799185be7 tempest-ServerShowV257Test-1874161788 tempest-ServerShowV257Test-1874161788-project-member] Waiting for the task: (returnval){ [ 2096.550104] env[63279]: value = "task-2087484" [ 2096.550104] env[63279]: _type = "Task" [ 2096.550104] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2096.558962] env[63279]: DEBUG oslo_vmware.api [None req-531b773c-e5fb-4ac2-9085-d70799185be7 tempest-ServerShowV257Test-1874161788 tempest-ServerShowV257Test-1874161788-project-member] Task: {'id': task-2087484, 'name': Rename_Task} progress is 5%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2096.599619] env[63279]: DEBUG nova.compute.manager [req-422508f5-23f3-4492-8af4-adb0ad596c76 req-d437cfa4-ed09-4e23-be5a-398e7038f084 service nova] [instance: c8b42e3b-b841-4b79-a4f3-ef62577d4902] Received event network-changed-2cc659c6-5797-4426-a92a-3924cc611395 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2096.599619] env[63279]: DEBUG nova.compute.manager [req-422508f5-23f3-4492-8af4-adb0ad596c76 req-d437cfa4-ed09-4e23-be5a-398e7038f084 service nova] [instance: c8b42e3b-b841-4b79-a4f3-ef62577d4902] Refreshing instance network info cache due to event network-changed-2cc659c6-5797-4426-a92a-3924cc611395. 
{{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11655}} [ 2096.599619] env[63279]: DEBUG oslo_concurrency.lockutils [req-422508f5-23f3-4492-8af4-adb0ad596c76 req-d437cfa4-ed09-4e23-be5a-398e7038f084 service nova] Acquiring lock "refresh_cache-c8b42e3b-b841-4b79-a4f3-ef62577d4902" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2096.599619] env[63279]: DEBUG oslo_concurrency.lockutils [req-422508f5-23f3-4492-8af4-adb0ad596c76 req-d437cfa4-ed09-4e23-be5a-398e7038f084 service nova] Acquired lock "refresh_cache-c8b42e3b-b841-4b79-a4f3-ef62577d4902" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2096.599619] env[63279]: DEBUG nova.network.neutron [req-422508f5-23f3-4492-8af4-adb0ad596c76 req-d437cfa4-ed09-4e23-be5a-398e7038f084 service nova] [instance: c8b42e3b-b841-4b79-a4f3-ef62577d4902] Refreshing network info cache for port 2cc659c6-5797-4426-a92a-3924cc611395 {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2096.838515] env[63279]: DEBUG nova.scheduler.client.report [None req-9e49e054-a083-4e75-99cd-6d3ad402538a tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Inventory has not changed for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2097.062020] env[63279]: DEBUG oslo_vmware.api [None req-531b773c-e5fb-4ac2-9085-d70799185be7 tempest-ServerShowV257Test-1874161788 tempest-ServerShowV257Test-1874161788-project-member] Task: {'id': task-2087484, 'name': Rename_Task, 'duration_secs': 0.240891} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2097.062166] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-531b773c-e5fb-4ac2-9085-d70799185be7 tempest-ServerShowV257Test-1874161788 tempest-ServerShowV257Test-1874161788-project-member] [instance: f7f88f1a-a81a-4208-88d7-6a264e642ab1] Powering on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2097.063083] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-950fe4d0-5124-4485-982b-83a30612eda9 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2097.068974] env[63279]: DEBUG oslo_vmware.api [None req-531b773c-e5fb-4ac2-9085-d70799185be7 tempest-ServerShowV257Test-1874161788 tempest-ServerShowV257Test-1874161788-project-member] Waiting for the task: (returnval){ [ 2097.068974] env[63279]: value = "task-2087485" [ 2097.068974] env[63279]: _type = "Task" [ 2097.068974] env[63279]: } to complete. 
{{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2097.076866] env[63279]: DEBUG oslo_vmware.api [None req-531b773c-e5fb-4ac2-9085-d70799185be7 tempest-ServerShowV257Test-1874161788 tempest-ServerShowV257Test-1874161788-project-member] Task: {'id': task-2087485, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2097.345359] env[63279]: DEBUG oslo_concurrency.lockutils [None req-9e49e054-a083-4e75-99cd-6d3ad402538a tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.616s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2097.345582] env[63279]: INFO nova.compute.manager [None req-9e49e054-a083-4e75-99cd-6d3ad402538a tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: b981ac83-6c23-4d44-bd28-12da30d746bd] Migrating [ 2097.354972] env[63279]: DEBUG oslo_concurrency.lockutils [None req-2fc734b0-6cb8-42f2-afc5-bfa1456ea58d tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 35.758s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2097.357584] env[63279]: INFO nova.compute.claims [None req-2fc734b0-6cb8-42f2-afc5-bfa1456ea58d tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: fcdd3076-2b53-4850-9730-2f877e2cabfd] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2097.435161] env[63279]: DEBUG nova.network.neutron [req-422508f5-23f3-4492-8af4-adb0ad596c76 req-d437cfa4-ed09-4e23-be5a-398e7038f084 service nova] [instance: c8b42e3b-b841-4b79-a4f3-ef62577d4902] Updated VIF entry in instance network info cache for port 2cc659c6-5797-4426-a92a-3924cc611395. 
{{(pid=63279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2097.435551] env[63279]: DEBUG nova.network.neutron [req-422508f5-23f3-4492-8af4-adb0ad596c76 req-d437cfa4-ed09-4e23-be5a-398e7038f084 service nova] [instance: c8b42e3b-b841-4b79-a4f3-ef62577d4902] Updating instance_info_cache with network_info: [{"id": "2cc659c6-5797-4426-a92a-3924cc611395", "address": "fa:16:3e:f4:07:b5", "network": {"id": "01008afc-f9d7-497e-9566-78b239e4fb8c", "bridge": "br-int", "label": "tempest-ServersTestJSON-523576344-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.236", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8964759ce46d43998d52095a843dbc2c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c4349e30-c086-4c24-9e0e-83996d808a1b", "external-id": "nsx-vlan-transportzone-266", "segmentation_id": 266, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2cc659c6-57", "ovs_interfaceid": "2cc659c6-5797-4426-a92a-3924cc611395", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2097.535160] env[63279]: DEBUG nova.compute.manager [req-4e018d2b-d911-41d4-99e7-1d5f0a9729a9 req-e80a2c94-598d-45b4-badc-f84328e99143 service nova] [instance: f2a68d73-49d6-4b38-aff1-c2eb850f2ca6] Received event network-vif-plugged-036111dc-8280-4649-98b1-f0319d92337f {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2097.535337] env[63279]: DEBUG oslo_concurrency.lockutils [req-4e018d2b-d911-41d4-99e7-1d5f0a9729a9 req-e80a2c94-598d-45b4-badc-f84328e99143 service nova] Acquiring lock "f2a68d73-49d6-4b38-aff1-c2eb850f2ca6-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2097.535876] env[63279]: DEBUG oslo_concurrency.lockutils [req-4e018d2b-d911-41d4-99e7-1d5f0a9729a9 req-e80a2c94-598d-45b4-badc-f84328e99143 service nova] Lock "f2a68d73-49d6-4b38-aff1-c2eb850f2ca6-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2097.536247] env[63279]: DEBUG oslo_concurrency.lockutils [req-4e018d2b-d911-41d4-99e7-1d5f0a9729a9 req-e80a2c94-598d-45b4-badc-f84328e99143 service nova] Lock "f2a68d73-49d6-4b38-aff1-c2eb850f2ca6-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2097.536499] env[63279]: DEBUG nova.compute.manager [req-4e018d2b-d911-41d4-99e7-1d5f0a9729a9 req-e80a2c94-598d-45b4-badc-f84328e99143 service nova] [instance: f2a68d73-49d6-4b38-aff1-c2eb850f2ca6] No waiting events found dispatching network-vif-plugged-036111dc-8280-4649-98b1-f0319d92337f 
{{(pid=63279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 2097.536687] env[63279]: WARNING nova.compute.manager [req-4e018d2b-d911-41d4-99e7-1d5f0a9729a9 req-e80a2c94-598d-45b4-badc-f84328e99143 service nova] [instance: f2a68d73-49d6-4b38-aff1-c2eb850f2ca6] Received unexpected event network-vif-plugged-036111dc-8280-4649-98b1-f0319d92337f for instance with vm_state building and task_state spawning. [ 2097.567408] env[63279]: DEBUG nova.network.neutron [None req-8600dfea-b562-47c8-be02-b00ae841b764 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] [instance: f2a68d73-49d6-4b38-aff1-c2eb850f2ca6] Successfully updated port: 036111dc-8280-4649-98b1-f0319d92337f {{(pid=63279) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2097.583784] env[63279]: DEBUG oslo_vmware.api [None req-531b773c-e5fb-4ac2-9085-d70799185be7 tempest-ServerShowV257Test-1874161788 tempest-ServerShowV257Test-1874161788-project-member] Task: {'id': task-2087485, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2097.874611] env[63279]: DEBUG oslo_concurrency.lockutils [None req-9e49e054-a083-4e75-99cd-6d3ad402538a tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Acquiring lock "refresh_cache-b981ac83-6c23-4d44-bd28-12da30d746bd" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2097.874973] env[63279]: DEBUG oslo_concurrency.lockutils [None req-9e49e054-a083-4e75-99cd-6d3ad402538a tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Acquired lock "refresh_cache-b981ac83-6c23-4d44-bd28-12da30d746bd" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2097.874973] env[63279]: DEBUG nova.network.neutron [None req-9e49e054-a083-4e75-99cd-6d3ad402538a tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: b981ac83-6c23-4d44-bd28-12da30d746bd] Building network info cache for instance {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2097.939364] env[63279]: DEBUG oslo_concurrency.lockutils [req-422508f5-23f3-4492-8af4-adb0ad596c76 req-d437cfa4-ed09-4e23-be5a-398e7038f084 service nova] Releasing lock "refresh_cache-c8b42e3b-b841-4b79-a4f3-ef62577d4902" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2098.070160] env[63279]: DEBUG oslo_concurrency.lockutils [None req-8600dfea-b562-47c8-be02-b00ae841b764 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Acquiring lock "refresh_cache-f2a68d73-49d6-4b38-aff1-c2eb850f2ca6" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2098.070160] env[63279]: DEBUG oslo_concurrency.lockutils [None req-8600dfea-b562-47c8-be02-b00ae841b764 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Acquired lock "refresh_cache-f2a68d73-49d6-4b38-aff1-c2eb850f2ca6" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2098.070160] env[63279]: DEBUG nova.network.neutron [None req-8600dfea-b562-47c8-be02-b00ae841b764 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] [instance: 
f2a68d73-49d6-4b38-aff1-c2eb850f2ca6] Building network info cache for instance {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2098.081311] env[63279]: DEBUG oslo_vmware.api [None req-531b773c-e5fb-4ac2-9085-d70799185be7 tempest-ServerShowV257Test-1874161788 tempest-ServerShowV257Test-1874161788-project-member] Task: {'id': task-2087485, 'name': PowerOnVM_Task, 'duration_secs': 0.803322} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2098.081605] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-531b773c-e5fb-4ac2-9085-d70799185be7 tempest-ServerShowV257Test-1874161788 tempest-ServerShowV257Test-1874161788-project-member] [instance: f7f88f1a-a81a-4208-88d7-6a264e642ab1] Powered on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2098.081810] env[63279]: INFO nova.compute.manager [None req-531b773c-e5fb-4ac2-9085-d70799185be7 tempest-ServerShowV257Test-1874161788 tempest-ServerShowV257Test-1874161788-project-member] [instance: f7f88f1a-a81a-4208-88d7-6a264e642ab1] Took 5.84 seconds to spawn the instance on the hypervisor. [ 2098.081990] env[63279]: DEBUG nova.compute.manager [None req-531b773c-e5fb-4ac2-9085-d70799185be7 tempest-ServerShowV257Test-1874161788 tempest-ServerShowV257Test-1874161788-project-member] [instance: f7f88f1a-a81a-4208-88d7-6a264e642ab1] Checking state {{(pid=63279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2098.082779] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56186514-7554-4bfe-be48-d92908b26d18 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2098.606088] env[63279]: INFO nova.compute.manager [None req-531b773c-e5fb-4ac2-9085-d70799185be7 tempest-ServerShowV257Test-1874161788 tempest-ServerShowV257Test-1874161788-project-member] [instance: f7f88f1a-a81a-4208-88d7-6a264e642ab1] Took 40.36 seconds to build instance. 
[ 2098.609400] env[63279]: DEBUG nova.network.neutron [None req-9e49e054-a083-4e75-99cd-6d3ad402538a tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: b981ac83-6c23-4d44-bd28-12da30d746bd] Updating instance_info_cache with network_info: [{"id": "61682992-df73-44ba-b302-ba8e00c82f95", "address": "fa:16:3e:32:be:0b", "network": {"id": "d673eabe-0173-4c69-9163-88f4d673ca51", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1798523777-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7c98de1240f14b058e58f6a707096ef1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a555680e-4721-4509-97e4-ced9dc17c13e", "external-id": "nsx-vlan-transportzone-4", "segmentation_id": 4, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap61682992-df", "ovs_interfaceid": "61682992-df73-44ba-b302-ba8e00c82f95", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2098.627118] env[63279]: DEBUG nova.network.neutron [None req-8600dfea-b562-47c8-be02-b00ae841b764 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] [instance: f2a68d73-49d6-4b38-aff1-c2eb850f2ca6] Instance cache missing network info. 
{{(pid=63279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2098.759037] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b30ad024-b8c1-4a48-9b03-e3a416124f4e {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2098.770526] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91abe57b-d3b6-4576-9182-744d4d7bd21d {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2098.800705] env[63279]: DEBUG nova.network.neutron [None req-8600dfea-b562-47c8-be02-b00ae841b764 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] [instance: f2a68d73-49d6-4b38-aff1-c2eb850f2ca6] Updating instance_info_cache with network_info: [{"id": "036111dc-8280-4649-98b1-f0319d92337f", "address": "fa:16:3e:55:30:95", "network": {"id": "6d7c4c21-3389-4688-b373-329b2328f275", "bridge": "br-int", "label": "tempest-ImagesTestJSON-311425692-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "77ee2145dda94e2b85eeb7379ed98e26", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0df968ae-c1ef-4009-a0f4-6f2e799c2fda", "external-id": "nsx-vlan-transportzone-864", "segmentation_id": 864, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap036111dc-82", "ovs_interfaceid": "036111dc-8280-4649-98b1-f0319d92337f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2098.802439] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e530f1ec-7051-4c27-a27d-863903506fb9 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2098.810056] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05dcfa8e-9ac4-442f-8f56-398a421635fe {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2098.823937] env[63279]: DEBUG nova.compute.provider_tree [None req-2fc734b0-6cb8-42f2-afc5-bfa1456ea58d tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Inventory has not changed in ProviderTree for provider: 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2099.107930] env[63279]: DEBUG oslo_concurrency.lockutils [None req-531b773c-e5fb-4ac2-9085-d70799185be7 tempest-ServerShowV257Test-1874161788 tempest-ServerShowV257Test-1874161788-project-member] Lock "f7f88f1a-a81a-4208-88d7-6a264e642ab1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 65.789s {{(pid=63279) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2099.113646] env[63279]: DEBUG oslo_concurrency.lockutils [None req-9e49e054-a083-4e75-99cd-6d3ad402538a tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Releasing lock "refresh_cache-b981ac83-6c23-4d44-bd28-12da30d746bd" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2099.309612] env[63279]: DEBUG oslo_concurrency.lockutils [None req-8600dfea-b562-47c8-be02-b00ae841b764 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Releasing lock "refresh_cache-f2a68d73-49d6-4b38-aff1-c2eb850f2ca6" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2099.309866] env[63279]: DEBUG nova.compute.manager [None req-8600dfea-b562-47c8-be02-b00ae841b764 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] [instance: f2a68d73-49d6-4b38-aff1-c2eb850f2ca6] Instance network_info: |[{"id": "036111dc-8280-4649-98b1-f0319d92337f", "address": "fa:16:3e:55:30:95", "network": {"id": "6d7c4c21-3389-4688-b373-329b2328f275", "bridge": "br-int", "label": "tempest-ImagesTestJSON-311425692-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "77ee2145dda94e2b85eeb7379ed98e26", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0df968ae-c1ef-4009-a0f4-6f2e799c2fda", "external-id": "nsx-vlan-transportzone-864", "segmentation_id": 864, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap036111dc-82", "ovs_interfaceid": "036111dc-8280-4649-98b1-f0319d92337f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 2099.310306] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-8600dfea-b562-47c8-be02-b00ae841b764 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] [instance: f2a68d73-49d6-4b38-aff1-c2eb850f2ca6] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:55:30:95', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '0df968ae-c1ef-4009-a0f4-6f2e799c2fda', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '036111dc-8280-4649-98b1-f0319d92337f', 'vif_model': 'vmxnet3'}] {{(pid=63279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2099.318046] env[63279]: DEBUG oslo.service.loopingcall [None req-8600dfea-b562-47c8-be02-b00ae841b764 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2099.318275] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f2a68d73-49d6-4b38-aff1-c2eb850f2ca6] Creating VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2099.318540] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-4a6a844c-4260-4245-8fba-1b8ce72ab681 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2099.334504] env[63279]: DEBUG nova.scheduler.client.report [None req-2fc734b0-6cb8-42f2-afc5-bfa1456ea58d tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Inventory has not changed for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2099.342941] env[63279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2099.342941] env[63279]: value = "task-2087487" [ 2099.342941] env[63279]: _type = "Task" [ 2099.342941] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2099.351087] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087487, 'name': CreateVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2099.587840] env[63279]: DEBUG nova.compute.manager [req-9046f7eb-f103-43c5-a26d-21b9ceeb58f4 req-d9c9a3a2-d36a-4c39-9d9e-63559abc2882 service nova] [instance: f2a68d73-49d6-4b38-aff1-c2eb850f2ca6] Received event network-changed-036111dc-8280-4649-98b1-f0319d92337f {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2099.588018] env[63279]: DEBUG nova.compute.manager [req-9046f7eb-f103-43c5-a26d-21b9ceeb58f4 req-d9c9a3a2-d36a-4c39-9d9e-63559abc2882 service nova] [instance: f2a68d73-49d6-4b38-aff1-c2eb850f2ca6] Refreshing instance network info cache due to event network-changed-036111dc-8280-4649-98b1-f0319d92337f. 
{{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11655}} [ 2099.588247] env[63279]: DEBUG oslo_concurrency.lockutils [req-9046f7eb-f103-43c5-a26d-21b9ceeb58f4 req-d9c9a3a2-d36a-4c39-9d9e-63559abc2882 service nova] Acquiring lock "refresh_cache-f2a68d73-49d6-4b38-aff1-c2eb850f2ca6" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2099.588405] env[63279]: DEBUG oslo_concurrency.lockutils [req-9046f7eb-f103-43c5-a26d-21b9ceeb58f4 req-d9c9a3a2-d36a-4c39-9d9e-63559abc2882 service nova] Acquired lock "refresh_cache-f2a68d73-49d6-4b38-aff1-c2eb850f2ca6" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2099.588583] env[63279]: DEBUG nova.network.neutron [req-9046f7eb-f103-43c5-a26d-21b9ceeb58f4 req-d9c9a3a2-d36a-4c39-9d9e-63559abc2882 service nova] [instance: f2a68d73-49d6-4b38-aff1-c2eb850f2ca6] Refreshing network info cache for port 036111dc-8280-4649-98b1-f0319d92337f {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2099.839612] env[63279]: DEBUG oslo_concurrency.lockutils [None req-2fc734b0-6cb8-42f2-afc5-bfa1456ea58d tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.485s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2099.840173] env[63279]: DEBUG nova.compute.manager [None req-2fc734b0-6cb8-42f2-afc5-bfa1456ea58d tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: fcdd3076-2b53-4850-9730-2f877e2cabfd] Start building networks asynchronously for instance. {{(pid=63279) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 2099.842979] env[63279]: DEBUG oslo_concurrency.lockutils [None req-5e3f5540-3a91-4676-9ed7-83f8c35479b1 tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 35.807s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2099.844416] env[63279]: INFO nova.compute.claims [None req-5e3f5540-3a91-4676-9ed7-83f8c35479b1 tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] [instance: ee1b4746-49ac-425c-8219-4d54cb34abe0] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2099.856228] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087487, 'name': CreateVM_Task} progress is 99%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2099.883023] env[63279]: INFO nova.compute.manager [None req-9c2043fa-c480-4907-bbe4-d20e0c0c8ddf tempest-ServerShowV257Test-1874161788 tempest-ServerShowV257Test-1874161788-project-member] [instance: f7f88f1a-a81a-4208-88d7-6a264e642ab1] Rebuilding instance [ 2099.932055] env[63279]: DEBUG nova.compute.manager [None req-9c2043fa-c480-4907-bbe4-d20e0c0c8ddf tempest-ServerShowV257Test-1874161788 tempest-ServerShowV257Test-1874161788-project-member] [instance: f7f88f1a-a81a-4208-88d7-6a264e642ab1] Checking state {{(pid=63279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2099.932235] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f46e9aa-8311-4cd6-b9b8-08833c615ff9 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2100.349111] env[63279]: DEBUG nova.compute.utils [None req-2fc734b0-6cb8-42f2-afc5-bfa1456ea58d tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Using /dev/sd instead of None {{(pid=63279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2100.354025] env[63279]: DEBUG nova.compute.manager [None req-2fc734b0-6cb8-42f2-afc5-bfa1456ea58d tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: fcdd3076-2b53-4850-9730-2f877e2cabfd] Allocating IP information in the background. {{(pid=63279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 2100.354025] env[63279]: DEBUG nova.network.neutron [None req-2fc734b0-6cb8-42f2-afc5-bfa1456ea58d tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: fcdd3076-2b53-4850-9730-2f877e2cabfd] allocate_for_instance() {{(pid=63279) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2100.358296] env[63279]: DEBUG nova.network.neutron [req-9046f7eb-f103-43c5-a26d-21b9ceeb58f4 req-d9c9a3a2-d36a-4c39-9d9e-63559abc2882 service nova] [instance: f2a68d73-49d6-4b38-aff1-c2eb850f2ca6] Updated VIF entry in instance network info cache for port 036111dc-8280-4649-98b1-f0319d92337f. 
{{(pid=63279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2100.358728] env[63279]: DEBUG nova.network.neutron [req-9046f7eb-f103-43c5-a26d-21b9ceeb58f4 req-d9c9a3a2-d36a-4c39-9d9e-63559abc2882 service nova] [instance: f2a68d73-49d6-4b38-aff1-c2eb850f2ca6] Updating instance_info_cache with network_info: [{"id": "036111dc-8280-4649-98b1-f0319d92337f", "address": "fa:16:3e:55:30:95", "network": {"id": "6d7c4c21-3389-4688-b373-329b2328f275", "bridge": "br-int", "label": "tempest-ImagesTestJSON-311425692-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "77ee2145dda94e2b85eeb7379ed98e26", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0df968ae-c1ef-4009-a0f4-6f2e799c2fda", "external-id": "nsx-vlan-transportzone-864", "segmentation_id": 864, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap036111dc-82", "ovs_interfaceid": "036111dc-8280-4649-98b1-f0319d92337f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2100.370342] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087487, 'name': CreateVM_Task, 'duration_secs': 0.553475} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2100.371650] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f2a68d73-49d6-4b38-aff1-c2eb850f2ca6] Created VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2100.372547] env[63279]: DEBUG oslo_concurrency.lockutils [None req-8600dfea-b562-47c8-be02-b00ae841b764 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2100.372787] env[63279]: DEBUG oslo_concurrency.lockutils [None req-8600dfea-b562-47c8-be02-b00ae841b764 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2100.373240] env[63279]: DEBUG oslo_concurrency.lockutils [None req-8600dfea-b562-47c8-be02-b00ae841b764 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2100.373837] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-eb818e70-956c-43c7-bd14-32e3adc465fd {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2100.379747] env[63279]: 
DEBUG oslo_vmware.api [None req-8600dfea-b562-47c8-be02-b00ae841b764 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Waiting for the task: (returnval){ [ 2100.379747] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52405279-e63b-bc26-89ee-6919e04cc0f2" [ 2100.379747] env[63279]: _type = "Task" [ 2100.379747] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2100.388459] env[63279]: DEBUG oslo_vmware.api [None req-8600dfea-b562-47c8-be02-b00ae841b764 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52405279-e63b-bc26-89ee-6919e04cc0f2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2100.416377] env[63279]: DEBUG nova.policy [None req-2fc734b0-6cb8-42f2-afc5-bfa1456ea58d tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ae6ae670025f406e8bd0362124749c43', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f39174e9ff5649e0ade4391da383dfb2', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63279) authorize /opt/stack/nova/nova/policy.py:192}} [ 2100.631625] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24b7eb81-b33b-41ad-8329-da6158834a9b {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2100.658081] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-9e49e054-a083-4e75-99cd-6d3ad402538a tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: b981ac83-6c23-4d44-bd28-12da30d746bd] Updating instance 'b981ac83-6c23-4d44-bd28-12da30d746bd' progress to 0 {{(pid=63279) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2100.793971] env[63279]: DEBUG nova.network.neutron [None req-2fc734b0-6cb8-42f2-afc5-bfa1456ea58d tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: fcdd3076-2b53-4850-9730-2f877e2cabfd] Successfully created port: 7baa1106-63c3-49cd-8d80-54c763d1b2b3 {{(pid=63279) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2100.860106] env[63279]: DEBUG nova.compute.manager [None req-2fc734b0-6cb8-42f2-afc5-bfa1456ea58d tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: fcdd3076-2b53-4850-9730-2f877e2cabfd] Start building block device mappings for instance. 
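[editor's note] The update_instance_cache_with_nw_info record above logs the full network_info structure Nova caches per instance. As an aid to reading those entries, here is a small illustrative sketch (not Nova code) that walks such a structure and pulls out the per-VIF details; the field names are taken directly from the JSON logged above.

    # Illustrative sketch: summarize a network_info cache entry like the one logged above.
    def summarize_network_info(network_info):
        """Return (port_id, mac, [fixed_ips]) for each VIF in the cache entry."""
        summaries = []
        for vif in network_info:
            fixed_ips = [
                ip["address"]
                for subnet in vif["network"]["subnets"]
                for ip in subnet["ips"]
                if ip.get("type") == "fixed"
            ]
            summaries.append((vif["id"], vif["address"], fixed_ips))
        return summaries

    # With the cache update logged above this would yield:
    # [("036111dc-8280-4649-98b1-f0319d92337f", "fa:16:3e:55:30:95", ["192.168.128.8"])]
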
{{(pid=63279) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 2100.868033] env[63279]: DEBUG oslo_concurrency.lockutils [req-9046f7eb-f103-43c5-a26d-21b9ceeb58f4 req-d9c9a3a2-d36a-4c39-9d9e-63559abc2882 service nova] Releasing lock "refresh_cache-f2a68d73-49d6-4b38-aff1-c2eb850f2ca6" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2100.891580] env[63279]: DEBUG oslo_vmware.api [None req-8600dfea-b562-47c8-be02-b00ae841b764 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52405279-e63b-bc26-89ee-6919e04cc0f2, 'name': SearchDatastore_Task, 'duration_secs': 0.011276} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2100.891897] env[63279]: DEBUG oslo_concurrency.lockutils [None req-8600dfea-b562-47c8-be02-b00ae841b764 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2100.892150] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-8600dfea-b562-47c8-be02-b00ae841b764 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] [instance: f2a68d73-49d6-4b38-aff1-c2eb850f2ca6] Processing image 30887889-e45b-4f67-8b3c-16216e594a90 {{(pid=63279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2100.892380] env[63279]: DEBUG oslo_concurrency.lockutils [None req-8600dfea-b562-47c8-be02-b00ae841b764 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2100.892534] env[63279]: DEBUG oslo_concurrency.lockutils [None req-8600dfea-b562-47c8-be02-b00ae841b764 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2100.892712] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-8600dfea-b562-47c8-be02-b00ae841b764 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2100.892981] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1b1054e5-e562-4303-b013-c2a836e92574 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2100.905080] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-8600dfea-b562-47c8-be02-b00ae841b764 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2100.905493] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-8600dfea-b562-47c8-be02-b00ae841b764 tempest-ImagesTestJSON-604336972 
tempest-ImagesTestJSON-604336972-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2100.906295] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-10007826-0301-4674-92f2-e7e02e2fbec5 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2100.915320] env[63279]: DEBUG oslo_vmware.api [None req-8600dfea-b562-47c8-be02-b00ae841b764 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Waiting for the task: (returnval){ [ 2100.915320] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52b69d04-3c01-a7c8-7c28-585b9075eb2e" [ 2100.915320] env[63279]: _type = "Task" [ 2100.915320] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2100.924079] env[63279]: DEBUG oslo_vmware.api [None req-8600dfea-b562-47c8-be02-b00ae841b764 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52b69d04-3c01-a7c8-7c28-585b9075eb2e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2100.945146] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-9c2043fa-c480-4907-bbe4-d20e0c0c8ddf tempest-ServerShowV257Test-1874161788 tempest-ServerShowV257Test-1874161788-project-member] [instance: f7f88f1a-a81a-4208-88d7-6a264e642ab1] Powering off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2100.945404] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-8ce337ec-f917-45b4-acc1-e6e832fb6d2b {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2100.954768] env[63279]: DEBUG oslo_vmware.api [None req-9c2043fa-c480-4907-bbe4-d20e0c0c8ddf tempest-ServerShowV257Test-1874161788 tempest-ServerShowV257Test-1874161788-project-member] Waiting for the task: (returnval){ [ 2100.954768] env[63279]: value = "task-2087489" [ 2100.954768] env[63279]: _type = "Task" [ 2100.954768] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2100.967714] env[63279]: DEBUG oslo_vmware.api [None req-9c2043fa-c480-4907-bbe4-d20e0c0c8ddf tempest-ServerShowV257Test-1874161788 tempest-ServerShowV257Test-1874161788-project-member] Task: {'id': task-2087489, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2101.163881] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-9e49e054-a083-4e75-99cd-6d3ad402538a tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: b981ac83-6c23-4d44-bd28-12da30d746bd] Powering off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2101.164792] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-556a43bb-557f-45b5-8efb-1113f890a11c {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2101.171955] env[63279]: DEBUG oslo_vmware.api [None req-9e49e054-a083-4e75-99cd-6d3ad402538a tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Waiting for the task: (returnval){ [ 2101.171955] env[63279]: value = "task-2087490" [ 2101.171955] env[63279]: _type = "Task" [ 2101.171955] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2101.182966] env[63279]: DEBUG oslo_vmware.api [None req-9e49e054-a083-4e75-99cd-6d3ad402538a tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Task: {'id': task-2087490, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2101.267635] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc952296-8b05-4385-9663-92bc1d257d4a {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2101.275913] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9050d005-b711-45fb-a79e-63bae074f6c2 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2101.305109] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5d8a410-101c-4089-9e75-eb5134490b76 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2101.313270] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95268f2a-0b24-41e4-adf6-69580917bce0 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2101.328111] env[63279]: DEBUG nova.compute.provider_tree [None req-5e3f5540-3a91-4676-9ed7-83f8c35479b1 tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] Inventory has not changed in ProviderTree for provider: 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2101.426239] env[63279]: DEBUG oslo_vmware.api [None req-8600dfea-b562-47c8-be02-b00ae841b764 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52b69d04-3c01-a7c8-7c28-585b9075eb2e, 'name': SearchDatastore_Task, 'duration_secs': 0.009472} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2101.427123] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-934a5016-12d7-4bde-bdca-98f78d6a1fe4 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2101.433389] env[63279]: DEBUG oslo_vmware.api [None req-8600dfea-b562-47c8-be02-b00ae841b764 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Waiting for the task: (returnval){ [ 2101.433389] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]523d3fe7-47d4-1352-2c66-b6b098c98061" [ 2101.433389] env[63279]: _type = "Task" [ 2101.433389] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2101.442033] env[63279]: DEBUG oslo_vmware.api [None req-8600dfea-b562-47c8-be02-b00ae841b764 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]523d3fe7-47d4-1352-2c66-b6b098c98061, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2101.464029] env[63279]: DEBUG oslo_vmware.api [None req-9c2043fa-c480-4907-bbe4-d20e0c0c8ddf tempest-ServerShowV257Test-1874161788 tempest-ServerShowV257Test-1874161788-project-member] Task: {'id': task-2087489, 'name': PowerOffVM_Task, 'duration_secs': 0.299784} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2101.464316] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-9c2043fa-c480-4907-bbe4-d20e0c0c8ddf tempest-ServerShowV257Test-1874161788 tempest-ServerShowV257Test-1874161788-project-member] [instance: f7f88f1a-a81a-4208-88d7-6a264e642ab1] Powered off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2101.464562] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-9c2043fa-c480-4907-bbe4-d20e0c0c8ddf tempest-ServerShowV257Test-1874161788 tempest-ServerShowV257Test-1874161788-project-member] [instance: f7f88f1a-a81a-4208-88d7-6a264e642ab1] Destroying instance {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2101.465345] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a06c0755-26ff-484f-80f1-17b79a87d90f {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2101.472084] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-9c2043fa-c480-4907-bbe4-d20e0c0c8ddf tempest-ServerShowV257Test-1874161788 tempest-ServerShowV257Test-1874161788-project-member] [instance: f7f88f1a-a81a-4208-88d7-6a264e642ab1] Unregistering the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2101.472335] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-81140784-30b5-4eef-a659-2ff18dc992bb {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2101.552799] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-9c2043fa-c480-4907-bbe4-d20e0c0c8ddf tempest-ServerShowV257Test-1874161788 tempest-ServerShowV257Test-1874161788-project-member] [instance: f7f88f1a-a81a-4208-88d7-6a264e642ab1] Unregistered the 
VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2101.553011] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-9c2043fa-c480-4907-bbe4-d20e0c0c8ddf tempest-ServerShowV257Test-1874161788 tempest-ServerShowV257Test-1874161788-project-member] [instance: f7f88f1a-a81a-4208-88d7-6a264e642ab1] Deleting contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2101.553178] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-9c2043fa-c480-4907-bbe4-d20e0c0c8ddf tempest-ServerShowV257Test-1874161788 tempest-ServerShowV257Test-1874161788-project-member] Deleting the datastore file [datastore1] f7f88f1a-a81a-4208-88d7-6a264e642ab1 {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2101.553436] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-34fee903-bb1a-42e3-a2ed-3bd0fac74b35 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2101.560332] env[63279]: DEBUG oslo_vmware.api [None req-9c2043fa-c480-4907-bbe4-d20e0c0c8ddf tempest-ServerShowV257Test-1874161788 tempest-ServerShowV257Test-1874161788-project-member] Waiting for the task: (returnval){ [ 2101.560332] env[63279]: value = "task-2087492" [ 2101.560332] env[63279]: _type = "Task" [ 2101.560332] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2101.567908] env[63279]: DEBUG oslo_vmware.api [None req-9c2043fa-c480-4907-bbe4-d20e0c0c8ddf tempest-ServerShowV257Test-1874161788 tempest-ServerShowV257Test-1874161788-project-member] Task: {'id': task-2087492, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2101.681678] env[63279]: DEBUG oslo_vmware.api [None req-9e49e054-a083-4e75-99cd-6d3ad402538a tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Task: {'id': task-2087490, 'name': PowerOffVM_Task, 'duration_secs': 0.299642} completed successfully. 
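[editor's note] The repeated "Waiting for the task ... progress is N% ... completed successfully ... duration_secs" records above all come from the same poll-until-done pattern around vCenter tasks. The following is a minimal sketch of that pattern only; it is not the oslo.vmware implementation, and get_task_info is a hypothetical callable standing in for the vCenter TaskInfo lookup.

    import time

    # Minimal poll loop mirroring the wait_for_task / _poll_task records above.
    def wait_for_task(get_task_info, poll_interval=0.5):
        start = time.monotonic()
        while True:
            info = get_task_info()            # e.g. {'state': 'running', 'progress': 40}
            if info["state"] == "success":
                return time.monotonic() - start   # reported as duration_secs in the log
            if info["state"] == "error":
                raise RuntimeError(info.get("error", "task failed"))
            time.sleep(poll_interval)         # each pass logs "... progress is N%."
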
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2101.681968] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-9e49e054-a083-4e75-99cd-6d3ad402538a tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: b981ac83-6c23-4d44-bd28-12da30d746bd] Powered off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2101.682182] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-9e49e054-a083-4e75-99cd-6d3ad402538a tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: b981ac83-6c23-4d44-bd28-12da30d746bd] Updating instance 'b981ac83-6c23-4d44-bd28-12da30d746bd' progress to 17 {{(pid=63279) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2101.831723] env[63279]: DEBUG nova.scheduler.client.report [None req-5e3f5540-3a91-4676-9ed7-83f8c35479b1 tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] Inventory has not changed for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2101.868922] env[63279]: DEBUG nova.compute.manager [None req-2fc734b0-6cb8-42f2-afc5-bfa1456ea58d tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: fcdd3076-2b53-4850-9730-2f877e2cabfd] Start spawning the instance on the hypervisor. 
{{(pid=63279) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 2101.896119] env[63279]: DEBUG nova.virt.hardware [None req-2fc734b0-6cb8-42f2-afc5-bfa1456ea58d tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-13T17:30:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-13T17:30:01Z,direct_url=,disk_format='vmdk',id=30887889-e45b-4f67-8b3c-16216e594a90,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a935e86eee0a4c38adfe0367d2097a61',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-13T17:30:02Z,virtual_size=,visibility=), allow threads: False {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2101.896380] env[63279]: DEBUG nova.virt.hardware [None req-2fc734b0-6cb8-42f2-afc5-bfa1456ea58d tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Flavor limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2101.896547] env[63279]: DEBUG nova.virt.hardware [None req-2fc734b0-6cb8-42f2-afc5-bfa1456ea58d tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Image limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2101.896734] env[63279]: DEBUG nova.virt.hardware [None req-2fc734b0-6cb8-42f2-afc5-bfa1456ea58d tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Flavor pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2101.896886] env[63279]: DEBUG nova.virt.hardware [None req-2fc734b0-6cb8-42f2-afc5-bfa1456ea58d tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Image pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2101.897189] env[63279]: DEBUG nova.virt.hardware [None req-2fc734b0-6cb8-42f2-afc5-bfa1456ea58d tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2101.897455] env[63279]: DEBUG nova.virt.hardware [None req-2fc734b0-6cb8-42f2-afc5-bfa1456ea58d tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 2101.897631] env[63279]: DEBUG nova.virt.hardware [None req-2fc734b0-6cb8-42f2-afc5-bfa1456ea58d tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 2101.897854] env[63279]: DEBUG nova.virt.hardware [None 
req-2fc734b0-6cb8-42f2-afc5-bfa1456ea58d tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Got 1 possible topologies {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2101.897965] env[63279]: DEBUG nova.virt.hardware [None req-2fc734b0-6cb8-42f2-afc5-bfa1456ea58d tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2101.898160] env[63279]: DEBUG nova.virt.hardware [None req-2fc734b0-6cb8-42f2-afc5-bfa1456ea58d tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2101.899023] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b258c27b-67da-438e-a382-5614da9d2e45 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2101.907535] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88451f30-d04e-4717-bb07-8f47907141cf {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2101.943405] env[63279]: DEBUG oslo_vmware.api [None req-8600dfea-b562-47c8-be02-b00ae841b764 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]523d3fe7-47d4-1352-2c66-b6b098c98061, 'name': SearchDatastore_Task, 'duration_secs': 0.009898} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2101.943601] env[63279]: DEBUG oslo_concurrency.lockutils [None req-8600dfea-b562-47c8-be02-b00ae841b764 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2101.943859] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-8600dfea-b562-47c8-be02-b00ae841b764 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] f2a68d73-49d6-4b38-aff1-c2eb850f2ca6/f2a68d73-49d6-4b38-aff1-c2eb850f2ca6.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2101.944141] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e8309efc-3fa2-46e1-978c-1057b33d7727 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2101.950407] env[63279]: DEBUG oslo_vmware.api [None req-8600dfea-b562-47c8-be02-b00ae841b764 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Waiting for the task: (returnval){ [ 2101.950407] env[63279]: value = "task-2087493" [ 2101.950407] env[63279]: _type = "Task" [ 2101.950407] env[63279]: } to complete. 
{{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2101.957728] env[63279]: DEBUG oslo_vmware.api [None req-8600dfea-b562-47c8-be02-b00ae841b764 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Task: {'id': task-2087493, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2102.072358] env[63279]: DEBUG oslo_vmware.api [None req-9c2043fa-c480-4907-bbe4-d20e0c0c8ddf tempest-ServerShowV257Test-1874161788 tempest-ServerShowV257Test-1874161788-project-member] Task: {'id': task-2087492, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.092429} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2102.072602] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-9c2043fa-c480-4907-bbe4-d20e0c0c8ddf tempest-ServerShowV257Test-1874161788 tempest-ServerShowV257Test-1874161788-project-member] Deleted the datastore file {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2102.072787] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-9c2043fa-c480-4907-bbe4-d20e0c0c8ddf tempest-ServerShowV257Test-1874161788 tempest-ServerShowV257Test-1874161788-project-member] [instance: f7f88f1a-a81a-4208-88d7-6a264e642ab1] Deleted contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2102.072966] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-9c2043fa-c480-4907-bbe4-d20e0c0c8ddf tempest-ServerShowV257Test-1874161788 tempest-ServerShowV257Test-1874161788-project-member] [instance: f7f88f1a-a81a-4208-88d7-6a264e642ab1] Instance destroyed {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2102.189367] env[63279]: DEBUG nova.virt.hardware [None req-9e49e054-a083-4e75-99cd-6d3ad402538a tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-13T17:30:21Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=30887889-e45b-4f67-8b3c-16216e594a90,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2102.189962] env[63279]: DEBUG nova.virt.hardware [None req-9e49e054-a083-4e75-99cd-6d3ad402538a tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Flavor limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2102.189962] env[63279]: DEBUG nova.virt.hardware [None req-9e49e054-a083-4e75-99cd-6d3ad402538a tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Image limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2102.190213] env[63279]: DEBUG nova.virt.hardware [None req-9e49e054-a083-4e75-99cd-6d3ad402538a 
tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Flavor pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2102.190399] env[63279]: DEBUG nova.virt.hardware [None req-9e49e054-a083-4e75-99cd-6d3ad402538a tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Image pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2102.190556] env[63279]: DEBUG nova.virt.hardware [None req-9e49e054-a083-4e75-99cd-6d3ad402538a tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2102.190767] env[63279]: DEBUG nova.virt.hardware [None req-9e49e054-a083-4e75-99cd-6d3ad402538a tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 2102.190931] env[63279]: DEBUG nova.virt.hardware [None req-9e49e054-a083-4e75-99cd-6d3ad402538a tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 2102.191191] env[63279]: DEBUG nova.virt.hardware [None req-9e49e054-a083-4e75-99cd-6d3ad402538a tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Got 1 possible topologies {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2102.191383] env[63279]: DEBUG nova.virt.hardware [None req-9e49e054-a083-4e75-99cd-6d3ad402538a tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2102.191582] env[63279]: DEBUG nova.virt.hardware [None req-9e49e054-a083-4e75-99cd-6d3ad402538a tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2102.197616] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-da5fc6bc-6e2c-4d6a-be07-5fe135ec00c4 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2102.218966] env[63279]: DEBUG oslo_vmware.api [None req-9e49e054-a083-4e75-99cd-6d3ad402538a tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Waiting for the task: (returnval){ [ 2102.218966] env[63279]: value = "task-2087494" [ 2102.218966] env[63279]: _type = "Task" [ 2102.218966] env[63279]: } to complete. 
{{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2102.229375] env[63279]: DEBUG oslo_vmware.api [None req-9e49e054-a083-4e75-99cd-6d3ad402538a tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Task: {'id': task-2087494, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2102.309902] env[63279]: DEBUG nova.compute.manager [req-3ea797e2-75ae-454f-87ca-2dafd40f514c req-a873ff38-0e6a-4f39-8582-e00b5a6e89fd service nova] [instance: fcdd3076-2b53-4850-9730-2f877e2cabfd] Received event network-vif-plugged-7baa1106-63c3-49cd-8d80-54c763d1b2b3 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2102.310147] env[63279]: DEBUG oslo_concurrency.lockutils [req-3ea797e2-75ae-454f-87ca-2dafd40f514c req-a873ff38-0e6a-4f39-8582-e00b5a6e89fd service nova] Acquiring lock "fcdd3076-2b53-4850-9730-2f877e2cabfd-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2102.310427] env[63279]: DEBUG oslo_concurrency.lockutils [req-3ea797e2-75ae-454f-87ca-2dafd40f514c req-a873ff38-0e6a-4f39-8582-e00b5a6e89fd service nova] Lock "fcdd3076-2b53-4850-9730-2f877e2cabfd-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2102.310636] env[63279]: DEBUG oslo_concurrency.lockutils [req-3ea797e2-75ae-454f-87ca-2dafd40f514c req-a873ff38-0e6a-4f39-8582-e00b5a6e89fd service nova] Lock "fcdd3076-2b53-4850-9730-2f877e2cabfd-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2102.310865] env[63279]: DEBUG nova.compute.manager [req-3ea797e2-75ae-454f-87ca-2dafd40f514c req-a873ff38-0e6a-4f39-8582-e00b5a6e89fd service nova] [instance: fcdd3076-2b53-4850-9730-2f877e2cabfd] No waiting events found dispatching network-vif-plugged-7baa1106-63c3-49cd-8d80-54c763d1b2b3 {{(pid=63279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 2102.311231] env[63279]: WARNING nova.compute.manager [req-3ea797e2-75ae-454f-87ca-2dafd40f514c req-a873ff38-0e6a-4f39-8582-e00b5a6e89fd service nova] [instance: fcdd3076-2b53-4850-9730-2f877e2cabfd] Received unexpected event network-vif-plugged-7baa1106-63c3-49cd-8d80-54c763d1b2b3 for instance with vm_state building and task_state spawning. [ 2102.337867] env[63279]: DEBUG oslo_concurrency.lockutils [None req-5e3f5540-3a91-4676-9ed7-83f8c35479b1 tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.495s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2102.338448] env[63279]: DEBUG nova.compute.manager [None req-5e3f5540-3a91-4676-9ed7-83f8c35479b1 tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] [instance: ee1b4746-49ac-425c-8219-4d54cb34abe0] Start building networks asynchronously for instance. 
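[editor's note] The "Acquiring lock ... acquired ... released ... held N s" lines above (for example the per-instance "<uuid>-events" lock) are emitted by oslo.concurrency's lock wrapper. A hedged usage sketch, assuming only that lockutils.lock() is usable as a context manager as suggested by the lockutils.py paths in the log; the function below is hypothetical:

    from oslo_concurrency import lockutils

    # Hypothetical critical section guarded the way the "-events" lock is used above.
    def pop_instance_event(instance_uuid):
        with lockutils.lock(f"{instance_uuid}-events"):
            # look up and pop any waiting external event for this instance
            pass
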
{{(pid=63279) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 2102.342567] env[63279]: DEBUG oslo_concurrency.lockutils [None req-17dbdfc2-2d7b-4361-bf16-3bfecc8b107b tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 34.368s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2102.342657] env[63279]: DEBUG nova.objects.instance [None req-17dbdfc2-2d7b-4361-bf16-3bfecc8b107b tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] Lazy-loading 'resources' on Instance uuid 795560b4-ccdc-4012-8130-042dcb94085f {{(pid=63279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2102.436380] env[63279]: DEBUG nova.network.neutron [None req-2fc734b0-6cb8-42f2-afc5-bfa1456ea58d tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: fcdd3076-2b53-4850-9730-2f877e2cabfd] Successfully updated port: 7baa1106-63c3-49cd-8d80-54c763d1b2b3 {{(pid=63279) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2102.461515] env[63279]: DEBUG oslo_vmware.api [None req-8600dfea-b562-47c8-be02-b00ae841b764 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Task: {'id': task-2087493, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2102.729295] env[63279]: DEBUG oslo_vmware.api [None req-9e49e054-a083-4e75-99cd-6d3ad402538a tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Task: {'id': task-2087494, 'name': ReconfigVM_Task, 'duration_secs': 0.180396} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2102.729616] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-9e49e054-a083-4e75-99cd-6d3ad402538a tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: b981ac83-6c23-4d44-bd28-12da30d746bd] Updating instance 'b981ac83-6c23-4d44-bd28-12da30d746bd' progress to 33 {{(pid=63279) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2102.849383] env[63279]: DEBUG nova.compute.utils [None req-5e3f5540-3a91-4676-9ed7-83f8c35479b1 tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] Using /dev/sd instead of None {{(pid=63279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2102.851910] env[63279]: DEBUG nova.compute.manager [None req-5e3f5540-3a91-4676-9ed7-83f8c35479b1 tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] [instance: ee1b4746-49ac-425c-8219-4d54cb34abe0] Allocating IP information in the background. 
{{(pid=63279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 2102.851910] env[63279]: DEBUG nova.network.neutron [None req-5e3f5540-3a91-4676-9ed7-83f8c35479b1 tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] [instance: ee1b4746-49ac-425c-8219-4d54cb34abe0] allocate_for_instance() {{(pid=63279) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2102.893414] env[63279]: DEBUG nova.policy [None req-5e3f5540-3a91-4676-9ed7-83f8c35479b1 tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '90b0038da16e48ab8f7951f25a8eaa72', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4f07bad15ea5419cbecc5840b4e96d01', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63279) authorize /opt/stack/nova/nova/policy.py:192}} [ 2102.939363] env[63279]: DEBUG oslo_concurrency.lockutils [None req-2fc734b0-6cb8-42f2-afc5-bfa1456ea58d tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Acquiring lock "refresh_cache-fcdd3076-2b53-4850-9730-2f877e2cabfd" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2102.940585] env[63279]: DEBUG oslo_concurrency.lockutils [None req-2fc734b0-6cb8-42f2-afc5-bfa1456ea58d tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Acquired lock "refresh_cache-fcdd3076-2b53-4850-9730-2f877e2cabfd" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2102.940585] env[63279]: DEBUG nova.network.neutron [None req-2fc734b0-6cb8-42f2-afc5-bfa1456ea58d tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: fcdd3076-2b53-4850-9730-2f877e2cabfd] Building network info cache for instance {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2102.963588] env[63279]: DEBUG oslo_vmware.api [None req-8600dfea-b562-47c8-be02-b00ae841b764 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Task: {'id': task-2087493, 'name': CopyVirtualDisk_Task} progress is 100%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2103.112921] env[63279]: DEBUG nova.virt.hardware [None req-9c2043fa-c480-4907-bbe4-d20e0c0c8ddf tempest-ServerShowV257Test-1874161788 tempest-ServerShowV257Test-1874161788-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-13T17:30:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-13T17:30:01Z,direct_url=,disk_format='vmdk',id=30887889-e45b-4f67-8b3c-16216e594a90,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a935e86eee0a4c38adfe0367d2097a61',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-13T17:30:02Z,virtual_size=,visibility=), allow threads: False {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2103.113194] env[63279]: DEBUG nova.virt.hardware [None req-9c2043fa-c480-4907-bbe4-d20e0c0c8ddf tempest-ServerShowV257Test-1874161788 tempest-ServerShowV257Test-1874161788-project-member] Flavor limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2103.113357] env[63279]: DEBUG nova.virt.hardware [None req-9c2043fa-c480-4907-bbe4-d20e0c0c8ddf tempest-ServerShowV257Test-1874161788 tempest-ServerShowV257Test-1874161788-project-member] Image limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2103.113561] env[63279]: DEBUG nova.virt.hardware [None req-9c2043fa-c480-4907-bbe4-d20e0c0c8ddf tempest-ServerShowV257Test-1874161788 tempest-ServerShowV257Test-1874161788-project-member] Flavor pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2103.113685] env[63279]: DEBUG nova.virt.hardware [None req-9c2043fa-c480-4907-bbe4-d20e0c0c8ddf tempest-ServerShowV257Test-1874161788 tempest-ServerShowV257Test-1874161788-project-member] Image pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2103.113871] env[63279]: DEBUG nova.virt.hardware [None req-9c2043fa-c480-4907-bbe4-d20e0c0c8ddf tempest-ServerShowV257Test-1874161788 tempest-ServerShowV257Test-1874161788-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2103.114133] env[63279]: DEBUG nova.virt.hardware [None req-9c2043fa-c480-4907-bbe4-d20e0c0c8ddf tempest-ServerShowV257Test-1874161788 tempest-ServerShowV257Test-1874161788-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 2103.114346] env[63279]: DEBUG nova.virt.hardware [None req-9c2043fa-c480-4907-bbe4-d20e0c0c8ddf tempest-ServerShowV257Test-1874161788 tempest-ServerShowV257Test-1874161788-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 2103.114543] env[63279]: DEBUG nova.virt.hardware [None 
req-9c2043fa-c480-4907-bbe4-d20e0c0c8ddf tempest-ServerShowV257Test-1874161788 tempest-ServerShowV257Test-1874161788-project-member] Got 1 possible topologies {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2103.114717] env[63279]: DEBUG nova.virt.hardware [None req-9c2043fa-c480-4907-bbe4-d20e0c0c8ddf tempest-ServerShowV257Test-1874161788 tempest-ServerShowV257Test-1874161788-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2103.114922] env[63279]: DEBUG nova.virt.hardware [None req-9c2043fa-c480-4907-bbe4-d20e0c0c8ddf tempest-ServerShowV257Test-1874161788 tempest-ServerShowV257Test-1874161788-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2103.115771] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-305da582-c54f-48f5-8485-b650ea196582 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2103.128917] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e113290-e34f-47e8-a96c-a2a4c9e75c0f {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2103.146495] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-9c2043fa-c480-4907-bbe4-d20e0c0c8ddf tempest-ServerShowV257Test-1874161788 tempest-ServerShowV257Test-1874161788-project-member] [instance: f7f88f1a-a81a-4208-88d7-6a264e642ab1] Instance VIF info [] {{(pid=63279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2103.154109] env[63279]: DEBUG oslo.service.loopingcall [None req-9c2043fa-c480-4907-bbe4-d20e0c0c8ddf tempest-ServerShowV257Test-1874161788 tempest-ServerShowV257Test-1874161788-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2103.155278] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f7f88f1a-a81a-4208-88d7-6a264e642ab1] Creating VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2103.155698] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-afdcb8e0-e881-44a1-80e2-f0e8351dbb58 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2103.172896] env[63279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2103.172896] env[63279]: value = "task-2087496" [ 2103.172896] env[63279]: _type = "Task" [ 2103.172896] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2103.182483] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087496, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2103.238775] env[63279]: DEBUG nova.virt.hardware [None req-9e49e054-a083-4e75-99cd-6d3ad402538a tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-13T17:30:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=30887889-e45b-4f67-8b3c-16216e594a90,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2103.239045] env[63279]: DEBUG nova.virt.hardware [None req-9e49e054-a083-4e75-99cd-6d3ad402538a tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Flavor limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2103.239276] env[63279]: DEBUG nova.virt.hardware [None req-9e49e054-a083-4e75-99cd-6d3ad402538a tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Image limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2103.239451] env[63279]: DEBUG nova.virt.hardware [None req-9e49e054-a083-4e75-99cd-6d3ad402538a tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Flavor pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2103.239604] env[63279]: DEBUG nova.virt.hardware [None req-9e49e054-a083-4e75-99cd-6d3ad402538a tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Image pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2103.239754] env[63279]: DEBUG nova.virt.hardware [None req-9e49e054-a083-4e75-99cd-6d3ad402538a tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2103.239957] env[63279]: DEBUG nova.virt.hardware [None req-9e49e054-a083-4e75-99cd-6d3ad402538a tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 2103.240159] env[63279]: DEBUG nova.virt.hardware [None req-9e49e054-a083-4e75-99cd-6d3ad402538a tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 2103.240397] env[63279]: DEBUG nova.virt.hardware [None req-9e49e054-a083-4e75-99cd-6d3ad402538a tempest-ServerDiskConfigTestJSON-726033015 
tempest-ServerDiskConfigTestJSON-726033015-project-member] Got 1 possible topologies {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2103.240591] env[63279]: DEBUG nova.virt.hardware [None req-9e49e054-a083-4e75-99cd-6d3ad402538a tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2103.240779] env[63279]: DEBUG nova.virt.hardware [None req-9e49e054-a083-4e75-99cd-6d3ad402538a tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2103.246535] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-9e49e054-a083-4e75-99cd-6d3ad402538a tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: b981ac83-6c23-4d44-bd28-12da30d746bd] Reconfiguring VM instance instance-0000003b to detach disk 2000 {{(pid=63279) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 2103.247916] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9ff0574b-f407-4bd6-a067-0c419b850b3a {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2103.269695] env[63279]: DEBUG oslo_vmware.api [None req-9e49e054-a083-4e75-99cd-6d3ad402538a tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Waiting for the task: (returnval){ [ 2103.269695] env[63279]: value = "task-2087497" [ 2103.269695] env[63279]: _type = "Task" [ 2103.269695] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2103.281420] env[63279]: DEBUG oslo_vmware.api [None req-9e49e054-a083-4e75-99cd-6d3ad402538a tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Task: {'id': task-2087497, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2103.281420] env[63279]: DEBUG nova.network.neutron [None req-5e3f5540-3a91-4676-9ed7-83f8c35479b1 tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] [instance: ee1b4746-49ac-425c-8219-4d54cb34abe0] Successfully created port: a7b4791a-48da-4f2c-a8f0-debeceec3225 {{(pid=63279) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2103.353700] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd43aa05-e344-40cd-947a-c908b71ceb1f {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2103.356837] env[63279]: DEBUG nova.compute.manager [None req-5e3f5540-3a91-4676-9ed7-83f8c35479b1 tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] [instance: ee1b4746-49ac-425c-8219-4d54cb34abe0] Start building block device mappings for instance. 
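[editor's note] The nova.virt.hardware records above ("Build topologies for 1 vcpu(s) 1:1:1", "Got 1 possible topologies", "Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)]") describe a search over sockets*cores*threads factorizations of the vCPU count under the flavor/image limits. An illustrative sketch of that idea only, not nova.virt.hardware itself:

    # Enumerate sockets*cores*threads factorizations of vcpus within the given limits.
    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
        topologies = []
        for sockets in range(1, min(vcpus, max_sockets) + 1):
            for cores in range(1, min(vcpus, max_cores) + 1):
                for threads in range(1, min(vcpus, max_threads) + 1):
                    if sockets * cores * threads == vcpus:
                        topologies.append((sockets, cores, threads))
        return topologies

    # possible_topologies(1) -> [(1, 1, 1)], matching "Got 1 possible topologies" above.
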
{{(pid=63279) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 2103.366062] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3a08b62-b5f0-411b-9c41-714376ac5d4f {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2103.402937] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66211c57-0918-45ef-91c7-714de55c092c {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2103.414822] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3558c6d1-fc05-4579-8a3d-24e877c67914 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2103.431618] env[63279]: DEBUG nova.compute.provider_tree [None req-17dbdfc2-2d7b-4361-bf16-3bfecc8b107b tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] Inventory has not changed in ProviderTree for provider: 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2103.461769] env[63279]: DEBUG oslo_vmware.api [None req-8600dfea-b562-47c8-be02-b00ae841b764 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Task: {'id': task-2087493, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.48051} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2103.462268] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-8600dfea-b562-47c8-be02-b00ae841b764 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] f2a68d73-49d6-4b38-aff1-c2eb850f2ca6/f2a68d73-49d6-4b38-aff1-c2eb850f2ca6.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2103.462268] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-8600dfea-b562-47c8-be02-b00ae841b764 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] [instance: f2a68d73-49d6-4b38-aff1-c2eb850f2ca6] Extending root virtual disk to 1048576 {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2103.462634] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-528487f5-abfa-49ab-8627-2e11bbd15860 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2103.468557] env[63279]: DEBUG oslo_vmware.api [None req-8600dfea-b562-47c8-be02-b00ae841b764 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Waiting for the task: (returnval){ [ 2103.468557] env[63279]: value = "task-2087498" [ 2103.468557] env[63279]: _type = "Task" [ 2103.468557] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2103.481178] env[63279]: DEBUG oslo_vmware.api [None req-8600dfea-b562-47c8-be02-b00ae841b764 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Task: {'id': task-2087498, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2103.499461] env[63279]: DEBUG nova.network.neutron [None req-2fc734b0-6cb8-42f2-afc5-bfa1456ea58d tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: fcdd3076-2b53-4850-9730-2f877e2cabfd] Instance cache missing network info. {{(pid=63279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2103.684726] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087496, 'name': CreateVM_Task} progress is 99%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2103.685619] env[63279]: DEBUG nova.network.neutron [None req-2fc734b0-6cb8-42f2-afc5-bfa1456ea58d tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: fcdd3076-2b53-4850-9730-2f877e2cabfd] Updating instance_info_cache with network_info: [{"id": "7baa1106-63c3-49cd-8d80-54c763d1b2b3", "address": "fa:16:3e:cc:16:0a", "network": {"id": "82ca068b-acea-4f60-b0b3-68ea1e08aebe", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-571699353-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f39174e9ff5649e0ade4391da383dfb2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "57d4be17-536f-4a81-bea9-6547bd50f4a3", "external-id": "nsx-vlan-transportzone-163", "segmentation_id": 163, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7baa1106-63", "ovs_interfaceid": "7baa1106-63c3-49cd-8d80-54c763d1b2b3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2103.779527] env[63279]: DEBUG oslo_vmware.api [None req-9e49e054-a083-4e75-99cd-6d3ad402538a tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Task: {'id': task-2087497, 'name': ReconfigVM_Task, 'duration_secs': 0.185443} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2103.779768] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-9e49e054-a083-4e75-99cd-6d3ad402538a tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: b981ac83-6c23-4d44-bd28-12da30d746bd] Reconfigured VM instance instance-0000003b to detach disk 2000 {{(pid=63279) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 2103.780544] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ddc8c3bf-d9b9-4aba-8ebf-7d3c65e6580d {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2103.802131] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-9e49e054-a083-4e75-99cd-6d3ad402538a tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: b981ac83-6c23-4d44-bd28-12da30d746bd] Reconfiguring VM instance instance-0000003b to attach disk [datastore1] b981ac83-6c23-4d44-bd28-12da30d746bd/b981ac83-6c23-4d44-bd28-12da30d746bd.vmdk or device None with type thin {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2103.802379] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b29fa709-feb4-4158-b5e2-d36e34d67143 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2103.820027] env[63279]: DEBUG oslo_vmware.api [None req-9e49e054-a083-4e75-99cd-6d3ad402538a tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Waiting for the task: (returnval){ [ 2103.820027] env[63279]: value = "task-2087499" [ 2103.820027] env[63279]: _type = "Task" [ 2103.820027] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2103.828303] env[63279]: DEBUG oslo_vmware.api [None req-9e49e054-a083-4e75-99cd-6d3ad402538a tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Task: {'id': task-2087499, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2103.934719] env[63279]: DEBUG nova.scheduler.client.report [None req-17dbdfc2-2d7b-4361-bf16-3bfecc8b107b tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] Inventory has not changed for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2103.978604] env[63279]: DEBUG oslo_vmware.api [None req-8600dfea-b562-47c8-be02-b00ae841b764 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Task: {'id': task-2087498, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.06666} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2103.978604] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-8600dfea-b562-47c8-be02-b00ae841b764 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] [instance: f2a68d73-49d6-4b38-aff1-c2eb850f2ca6] Extended root virtual disk {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2103.979849] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9a8edcd-7f53-4300-9a54-e34d0898b879 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2104.002314] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-8600dfea-b562-47c8-be02-b00ae841b764 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] [instance: f2a68d73-49d6-4b38-aff1-c2eb850f2ca6] Reconfiguring VM instance instance-00000046 to attach disk [datastore1] f2a68d73-49d6-4b38-aff1-c2eb850f2ca6/f2a68d73-49d6-4b38-aff1-c2eb850f2ca6.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2104.002314] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-38fa2fbd-685e-4d31-8cc2-09a826b949c6 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2104.022847] env[63279]: DEBUG oslo_vmware.api [None req-8600dfea-b562-47c8-be02-b00ae841b764 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Waiting for the task: (returnval){ [ 2104.022847] env[63279]: value = "task-2087500" [ 2104.022847] env[63279]: _type = "Task" [ 2104.022847] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2104.031294] env[63279]: DEBUG oslo_vmware.api [None req-8600dfea-b562-47c8-be02-b00ae841b764 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Task: {'id': task-2087500, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2104.184296] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087496, 'name': CreateVM_Task} progress is 99%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2104.187942] env[63279]: DEBUG oslo_concurrency.lockutils [None req-2fc734b0-6cb8-42f2-afc5-bfa1456ea58d tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Releasing lock "refresh_cache-fcdd3076-2b53-4850-9730-2f877e2cabfd" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2104.188400] env[63279]: DEBUG nova.compute.manager [None req-2fc734b0-6cb8-42f2-afc5-bfa1456ea58d tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: fcdd3076-2b53-4850-9730-2f877e2cabfd] Instance network_info: |[{"id": "7baa1106-63c3-49cd-8d80-54c763d1b2b3", "address": "fa:16:3e:cc:16:0a", "network": {"id": "82ca068b-acea-4f60-b0b3-68ea1e08aebe", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-571699353-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f39174e9ff5649e0ade4391da383dfb2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "57d4be17-536f-4a81-bea9-6547bd50f4a3", "external-id": "nsx-vlan-transportzone-163", "segmentation_id": 163, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7baa1106-63", "ovs_interfaceid": "7baa1106-63c3-49cd-8d80-54c763d1b2b3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 2104.188762] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-2fc734b0-6cb8-42f2-afc5-bfa1456ea58d tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: fcdd3076-2b53-4850-9730-2f877e2cabfd] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:cc:16:0a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '57d4be17-536f-4a81-bea9-6547bd50f4a3', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '7baa1106-63c3-49cd-8d80-54c763d1b2b3', 'vif_model': 'vmxnet3'}] {{(pid=63279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2104.196163] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-2fc734b0-6cb8-42f2-afc5-bfa1456ea58d tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Creating folder: Project (f39174e9ff5649e0ade4391da383dfb2). Parent ref: group-v427491. 
{{(pid=63279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 2104.196429] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-48c96c17-4dfd-4d09-b573-99e3c57fb1bc {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2104.205750] env[63279]: INFO nova.virt.vmwareapi.vm_util [None req-2fc734b0-6cb8-42f2-afc5-bfa1456ea58d tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Created folder: Project (f39174e9ff5649e0ade4391da383dfb2) in parent group-v427491. [ 2104.205938] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-2fc734b0-6cb8-42f2-afc5-bfa1456ea58d tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Creating folder: Instances. Parent ref: group-v427690. {{(pid=63279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 2104.206227] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5f7175c8-110f-486f-9f9d-10fa5fc115a2 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2104.214738] env[63279]: INFO nova.virt.vmwareapi.vm_util [None req-2fc734b0-6cb8-42f2-afc5-bfa1456ea58d tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Created folder: Instances in parent group-v427690. [ 2104.214974] env[63279]: DEBUG oslo.service.loopingcall [None req-2fc734b0-6cb8-42f2-afc5-bfa1456ea58d tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2104.215183] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fcdd3076-2b53-4850-9730-2f877e2cabfd] Creating VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2104.215403] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-557816de-baf3-4dd7-b526-74e84e931ea9 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2104.235157] env[63279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2104.235157] env[63279]: value = "task-2087503" [ 2104.235157] env[63279]: _type = "Task" [ 2104.235157] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2104.242979] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087503, 'name': CreateVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2104.331238] env[63279]: DEBUG oslo_vmware.api [None req-9e49e054-a083-4e75-99cd-6d3ad402538a tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Task: {'id': task-2087499, 'name': ReconfigVM_Task, 'duration_secs': 0.28029} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2104.331578] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-9e49e054-a083-4e75-99cd-6d3ad402538a tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: b981ac83-6c23-4d44-bd28-12da30d746bd] Reconfigured VM instance instance-0000003b to attach disk [datastore1] b981ac83-6c23-4d44-bd28-12da30d746bd/b981ac83-6c23-4d44-bd28-12da30d746bd.vmdk or device None with type thin {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2104.332039] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-9e49e054-a083-4e75-99cd-6d3ad402538a tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: b981ac83-6c23-4d44-bd28-12da30d746bd] Updating instance 'b981ac83-6c23-4d44-bd28-12da30d746bd' progress to 50 {{(pid=63279) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2104.352990] env[63279]: DEBUG nova.compute.manager [req-de41aa34-ec00-4db0-b491-1a76687d59b3 req-006afef4-591a-4d22-bf44-3c7c874f7d15 service nova] [instance: fcdd3076-2b53-4850-9730-2f877e2cabfd] Received event network-changed-7baa1106-63c3-49cd-8d80-54c763d1b2b3 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2104.353220] env[63279]: DEBUG nova.compute.manager [req-de41aa34-ec00-4db0-b491-1a76687d59b3 req-006afef4-591a-4d22-bf44-3c7c874f7d15 service nova] [instance: fcdd3076-2b53-4850-9730-2f877e2cabfd] Refreshing instance network info cache due to event network-changed-7baa1106-63c3-49cd-8d80-54c763d1b2b3. {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11655}} [ 2104.353448] env[63279]: DEBUG oslo_concurrency.lockutils [req-de41aa34-ec00-4db0-b491-1a76687d59b3 req-006afef4-591a-4d22-bf44-3c7c874f7d15 service nova] Acquiring lock "refresh_cache-fcdd3076-2b53-4850-9730-2f877e2cabfd" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2104.353597] env[63279]: DEBUG oslo_concurrency.lockutils [req-de41aa34-ec00-4db0-b491-1a76687d59b3 req-006afef4-591a-4d22-bf44-3c7c874f7d15 service nova] Acquired lock "refresh_cache-fcdd3076-2b53-4850-9730-2f877e2cabfd" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2104.353837] env[63279]: DEBUG nova.network.neutron [req-de41aa34-ec00-4db0-b491-1a76687d59b3 req-006afef4-591a-4d22-bf44-3c7c874f7d15 service nova] [instance: fcdd3076-2b53-4850-9730-2f877e2cabfd] Refreshing network info cache for port 7baa1106-63c3-49cd-8d80-54c763d1b2b3 {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2104.365973] env[63279]: DEBUG nova.compute.manager [None req-5e3f5540-3a91-4676-9ed7-83f8c35479b1 tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] [instance: ee1b4746-49ac-425c-8219-4d54cb34abe0] Start spawning the instance on the hypervisor. 
{{(pid=63279) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 2104.395983] env[63279]: DEBUG nova.virt.hardware [None req-5e3f5540-3a91-4676-9ed7-83f8c35479b1 tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-13T17:30:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-13T17:30:01Z,direct_url=,disk_format='vmdk',id=30887889-e45b-4f67-8b3c-16216e594a90,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a935e86eee0a4c38adfe0367d2097a61',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-13T17:30:02Z,virtual_size=,visibility=), allow threads: False {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2104.396268] env[63279]: DEBUG nova.virt.hardware [None req-5e3f5540-3a91-4676-9ed7-83f8c35479b1 tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] Flavor limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2104.396435] env[63279]: DEBUG nova.virt.hardware [None req-5e3f5540-3a91-4676-9ed7-83f8c35479b1 tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] Image limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2104.396623] env[63279]: DEBUG nova.virt.hardware [None req-5e3f5540-3a91-4676-9ed7-83f8c35479b1 tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] Flavor pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2104.396774] env[63279]: DEBUG nova.virt.hardware [None req-5e3f5540-3a91-4676-9ed7-83f8c35479b1 tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] Image pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2104.396925] env[63279]: DEBUG nova.virt.hardware [None req-5e3f5540-3a91-4676-9ed7-83f8c35479b1 tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2104.397217] env[63279]: DEBUG nova.virt.hardware [None req-5e3f5540-3a91-4676-9ed7-83f8c35479b1 tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 2104.397419] env[63279]: DEBUG nova.virt.hardware [None req-5e3f5540-3a91-4676-9ed7-83f8c35479b1 tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 2104.397596] env[63279]: DEBUG nova.virt.hardware [None 
req-5e3f5540-3a91-4676-9ed7-83f8c35479b1 tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] Got 1 possible topologies {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2104.397777] env[63279]: DEBUG nova.virt.hardware [None req-5e3f5540-3a91-4676-9ed7-83f8c35479b1 tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2104.397958] env[63279]: DEBUG nova.virt.hardware [None req-5e3f5540-3a91-4676-9ed7-83f8c35479b1 tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2104.399102] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6cfbe6cf-e2a0-437a-a0ea-e63af1d5b679 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2104.407151] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5de20a3-decd-4f78-98ff-7d4fe1e445be {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2104.440148] env[63279]: DEBUG oslo_concurrency.lockutils [None req-17dbdfc2-2d7b-4361-bf16-3bfecc8b107b tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.098s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2104.442680] env[63279]: DEBUG oslo_concurrency.lockutils [None req-01b37c68-092e-4554-92a7-7422378b5349 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 28.799s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2104.444412] env[63279]: INFO nova.compute.claims [None req-01b37c68-092e-4554-92a7-7422378b5349 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] [instance: 668436f9-94e9-48c2-a3d4-3df7bbcf5775] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2104.470218] env[63279]: INFO nova.scheduler.client.report [None req-17dbdfc2-2d7b-4361-bf16-3bfecc8b107b tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] Deleted allocations for instance 795560b4-ccdc-4012-8130-042dcb94085f [ 2104.535188] env[63279]: DEBUG oslo_vmware.api [None req-8600dfea-b562-47c8-be02-b00ae841b764 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Task: {'id': task-2087500, 'name': ReconfigVM_Task, 'duration_secs': 0.362249} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2104.535479] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-8600dfea-b562-47c8-be02-b00ae841b764 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] [instance: f2a68d73-49d6-4b38-aff1-c2eb850f2ca6] Reconfigured VM instance instance-00000046 to attach disk [datastore1] f2a68d73-49d6-4b38-aff1-c2eb850f2ca6/f2a68d73-49d6-4b38-aff1-c2eb850f2ca6.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2104.536422] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-4939746f-77df-4d57-a624-ed5f8dbb69fb {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2104.544555] env[63279]: DEBUG oslo_vmware.api [None req-8600dfea-b562-47c8-be02-b00ae841b764 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Waiting for the task: (returnval){ [ 2104.544555] env[63279]: value = "task-2087504" [ 2104.544555] env[63279]: _type = "Task" [ 2104.544555] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2104.686785] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087496, 'name': CreateVM_Task, 'duration_secs': 1.371272} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2104.686785] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f7f88f1a-a81a-4208-88d7-6a264e642ab1] Created VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2104.686785] env[63279]: DEBUG oslo_concurrency.lockutils [None req-9c2043fa-c480-4907-bbe4-d20e0c0c8ddf tempest-ServerShowV257Test-1874161788 tempest-ServerShowV257Test-1874161788-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2104.686785] env[63279]: DEBUG oslo_concurrency.lockutils [None req-9c2043fa-c480-4907-bbe4-d20e0c0c8ddf tempest-ServerShowV257Test-1874161788 tempest-ServerShowV257Test-1874161788-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2104.686785] env[63279]: DEBUG oslo_concurrency.lockutils [None req-9c2043fa-c480-4907-bbe4-d20e0c0c8ddf tempest-ServerShowV257Test-1874161788 tempest-ServerShowV257Test-1874161788-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2104.686785] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a42b3fc9-0560-4e47-8798-c87f0c14c5c9 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2104.691281] env[63279]: DEBUG oslo_vmware.api [None req-9c2043fa-c480-4907-bbe4-d20e0c0c8ddf tempest-ServerShowV257Test-1874161788 tempest-ServerShowV257Test-1874161788-project-member] Waiting for the task: (returnval){ [ 2104.691281] env[63279]: value = 
"session[527edb51-86b9-2668-51b3-676f2d4ed098]523ec70d-2f06-16dc-b752-7370624191d1" [ 2104.691281] env[63279]: _type = "Task" [ 2104.691281] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2104.699670] env[63279]: DEBUG oslo_vmware.api [None req-9c2043fa-c480-4907-bbe4-d20e0c0c8ddf tempest-ServerShowV257Test-1874161788 tempest-ServerShowV257Test-1874161788-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]523ec70d-2f06-16dc-b752-7370624191d1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2104.744942] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087503, 'name': CreateVM_Task} progress is 99%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2104.839606] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e45b917-9004-4e59-b58f-dd2e03088665 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2104.861700] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88adc4f2-3e56-449d-907e-8c54d929bbee {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2104.881763] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-9e49e054-a083-4e75-99cd-6d3ad402538a tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: b981ac83-6c23-4d44-bd28-12da30d746bd] Updating instance 'b981ac83-6c23-4d44-bd28-12da30d746bd' progress to 67 {{(pid=63279) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2104.934105] env[63279]: DEBUG nova.network.neutron [None req-5e3f5540-3a91-4676-9ed7-83f8c35479b1 tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] [instance: ee1b4746-49ac-425c-8219-4d54cb34abe0] Successfully updated port: a7b4791a-48da-4f2c-a8f0-debeceec3225 {{(pid=63279) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2104.980222] env[63279]: DEBUG oslo_concurrency.lockutils [None req-17dbdfc2-2d7b-4361-bf16-3bfecc8b107b tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] Lock "795560b4-ccdc-4012-8130-042dcb94085f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 40.776s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2105.056521] env[63279]: DEBUG oslo_vmware.api [None req-8600dfea-b562-47c8-be02-b00ae841b764 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Task: {'id': task-2087504, 'name': Rename_Task, 'duration_secs': 0.222689} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2105.056521] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-8600dfea-b562-47c8-be02-b00ae841b764 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] [instance: f2a68d73-49d6-4b38-aff1-c2eb850f2ca6] Powering on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2105.056521] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1c1d0e45-8035-4155-8984-bddfb78b73b6 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2105.063263] env[63279]: DEBUG oslo_vmware.api [None req-8600dfea-b562-47c8-be02-b00ae841b764 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Waiting for the task: (returnval){ [ 2105.063263] env[63279]: value = "task-2087505" [ 2105.063263] env[63279]: _type = "Task" [ 2105.063263] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2105.069844] env[63279]: DEBUG oslo_vmware.api [None req-8600dfea-b562-47c8-be02-b00ae841b764 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Task: {'id': task-2087505, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2105.129085] env[63279]: DEBUG nova.network.neutron [req-de41aa34-ec00-4db0-b491-1a76687d59b3 req-006afef4-591a-4d22-bf44-3c7c874f7d15 service nova] [instance: fcdd3076-2b53-4850-9730-2f877e2cabfd] Updated VIF entry in instance network info cache for port 7baa1106-63c3-49cd-8d80-54c763d1b2b3. 
{{(pid=63279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2105.130725] env[63279]: DEBUG nova.network.neutron [req-de41aa34-ec00-4db0-b491-1a76687d59b3 req-006afef4-591a-4d22-bf44-3c7c874f7d15 service nova] [instance: fcdd3076-2b53-4850-9730-2f877e2cabfd] Updating instance_info_cache with network_info: [{"id": "7baa1106-63c3-49cd-8d80-54c763d1b2b3", "address": "fa:16:3e:cc:16:0a", "network": {"id": "82ca068b-acea-4f60-b0b3-68ea1e08aebe", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-571699353-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f39174e9ff5649e0ade4391da383dfb2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "57d4be17-536f-4a81-bea9-6547bd50f4a3", "external-id": "nsx-vlan-transportzone-163", "segmentation_id": 163, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7baa1106-63", "ovs_interfaceid": "7baa1106-63c3-49cd-8d80-54c763d1b2b3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2105.204194] env[63279]: DEBUG oslo_vmware.api [None req-9c2043fa-c480-4907-bbe4-d20e0c0c8ddf tempest-ServerShowV257Test-1874161788 tempest-ServerShowV257Test-1874161788-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]523ec70d-2f06-16dc-b752-7370624191d1, 'name': SearchDatastore_Task, 'duration_secs': 0.01519} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2105.204341] env[63279]: DEBUG oslo_concurrency.lockutils [None req-9c2043fa-c480-4907-bbe4-d20e0c0c8ddf tempest-ServerShowV257Test-1874161788 tempest-ServerShowV257Test-1874161788-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2105.204535] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-9c2043fa-c480-4907-bbe4-d20e0c0c8ddf tempest-ServerShowV257Test-1874161788 tempest-ServerShowV257Test-1874161788-project-member] [instance: f7f88f1a-a81a-4208-88d7-6a264e642ab1] Processing image 30887889-e45b-4f67-8b3c-16216e594a90 {{(pid=63279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2105.204783] env[63279]: DEBUG oslo_concurrency.lockutils [None req-9c2043fa-c480-4907-bbe4-d20e0c0c8ddf tempest-ServerShowV257Test-1874161788 tempest-ServerShowV257Test-1874161788-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2105.204935] env[63279]: DEBUG oslo_concurrency.lockutils [None req-9c2043fa-c480-4907-bbe4-d20e0c0c8ddf tempest-ServerShowV257Test-1874161788 tempest-ServerShowV257Test-1874161788-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2105.205130] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-9c2043fa-c480-4907-bbe4-d20e0c0c8ddf tempest-ServerShowV257Test-1874161788 tempest-ServerShowV257Test-1874161788-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2105.205799] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4719bcd9-4f5b-4ebf-8de3-6df79b32d5da {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2105.214198] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-9c2043fa-c480-4907-bbe4-d20e0c0c8ddf tempest-ServerShowV257Test-1874161788 tempest-ServerShowV257Test-1874161788-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2105.214394] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-9c2043fa-c480-4907-bbe4-d20e0c0c8ddf tempest-ServerShowV257Test-1874161788 tempest-ServerShowV257Test-1874161788-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2105.215147] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-90360f33-d99a-46fa-8f0b-0f603c930a07 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2105.220874] env[63279]: DEBUG oslo_vmware.api [None req-9c2043fa-c480-4907-bbe4-d20e0c0c8ddf tempest-ServerShowV257Test-1874161788 tempest-ServerShowV257Test-1874161788-project-member] Waiting for the task: (returnval){ [ 2105.220874] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52dab27f-04f4-9ec6-6549-1b13fccb5b24" [ 2105.220874] env[63279]: _type = "Task" [ 2105.220874] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2105.228721] env[63279]: DEBUG oslo_vmware.api [None req-9c2043fa-c480-4907-bbe4-d20e0c0c8ddf tempest-ServerShowV257Test-1874161788 tempest-ServerShowV257Test-1874161788-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52dab27f-04f4-9ec6-6549-1b13fccb5b24, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2105.245681] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087503, 'name': CreateVM_Task, 'duration_secs': 0.523346} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2105.245920] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fcdd3076-2b53-4850-9730-2f877e2cabfd] Created VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2105.246674] env[63279]: DEBUG oslo_concurrency.lockutils [None req-2fc734b0-6cb8-42f2-afc5-bfa1456ea58d tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2105.246848] env[63279]: DEBUG oslo_concurrency.lockutils [None req-2fc734b0-6cb8-42f2-afc5-bfa1456ea58d tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2105.247315] env[63279]: DEBUG oslo_concurrency.lockutils [None req-2fc734b0-6cb8-42f2-afc5-bfa1456ea58d tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2105.247579] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6bb7757e-b5f5-495a-a525-6bc99e2249aa {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2105.252824] env[63279]: DEBUG oslo_vmware.api [None req-2fc734b0-6cb8-42f2-afc5-bfa1456ea58d tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Waiting for the task: (returnval){ [ 2105.252824] env[63279]: value = 
"session[527edb51-86b9-2668-51b3-676f2d4ed098]521b3974-0cce-0082-b118-cafd1362ffe0" [ 2105.252824] env[63279]: _type = "Task" [ 2105.252824] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2105.260760] env[63279]: DEBUG oslo_vmware.api [None req-2fc734b0-6cb8-42f2-afc5-bfa1456ea58d tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]521b3974-0cce-0082-b118-cafd1362ffe0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2105.436480] env[63279]: DEBUG nova.network.neutron [None req-9e49e054-a083-4e75-99cd-6d3ad402538a tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: b981ac83-6c23-4d44-bd28-12da30d746bd] Port 61682992-df73-44ba-b302-ba8e00c82f95 binding to destination host cpu-1 is already ACTIVE {{(pid=63279) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 2105.441606] env[63279]: DEBUG oslo_concurrency.lockutils [None req-5e3f5540-3a91-4676-9ed7-83f8c35479b1 tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] Acquiring lock "refresh_cache-ee1b4746-49ac-425c-8219-4d54cb34abe0" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2105.441748] env[63279]: DEBUG oslo_concurrency.lockutils [None req-5e3f5540-3a91-4676-9ed7-83f8c35479b1 tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] Acquired lock "refresh_cache-ee1b4746-49ac-425c-8219-4d54cb34abe0" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2105.441895] env[63279]: DEBUG nova.network.neutron [None req-5e3f5540-3a91-4676-9ed7-83f8c35479b1 tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] [instance: ee1b4746-49ac-425c-8219-4d54cb34abe0] Building network info cache for instance {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2105.554261] env[63279]: DEBUG oslo_concurrency.lockutils [None req-4110a38f-8529-4f8e-8573-c35840515d98 tempest-ServersTestBootFromVolume-1861685268 tempest-ServersTestBootFromVolume-1861685268-project-member] Acquiring lock "861e4118-6134-40cf-91cb-865b6ee9f347" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2105.554734] env[63279]: DEBUG oslo_concurrency.lockutils [None req-4110a38f-8529-4f8e-8573-c35840515d98 tempest-ServersTestBootFromVolume-1861685268 tempest-ServersTestBootFromVolume-1861685268-project-member] Lock "861e4118-6134-40cf-91cb-865b6ee9f347" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2105.574896] env[63279]: DEBUG oslo_vmware.api [None req-8600dfea-b562-47c8-be02-b00ae841b764 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Task: {'id': task-2087505, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2105.634933] env[63279]: DEBUG oslo_concurrency.lockutils [req-de41aa34-ec00-4db0-b491-1a76687d59b3 req-006afef4-591a-4d22-bf44-3c7c874f7d15 service nova] Releasing lock "refresh_cache-fcdd3076-2b53-4850-9730-2f877e2cabfd" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2105.741096] env[63279]: DEBUG oslo_vmware.api [None req-9c2043fa-c480-4907-bbe4-d20e0c0c8ddf tempest-ServerShowV257Test-1874161788 tempest-ServerShowV257Test-1874161788-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52dab27f-04f4-9ec6-6549-1b13fccb5b24, 'name': SearchDatastore_Task, 'duration_secs': 0.008618} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2105.741096] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9bb15515-23de-4551-a466-1ffbdc5cfa7c {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2105.749548] env[63279]: DEBUG oslo_vmware.api [None req-9c2043fa-c480-4907-bbe4-d20e0c0c8ddf tempest-ServerShowV257Test-1874161788 tempest-ServerShowV257Test-1874161788-project-member] Waiting for the task: (returnval){ [ 2105.749548] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]5293efca-98a8-ade1-8568-2374204f94b7" [ 2105.749548] env[63279]: _type = "Task" [ 2105.749548] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2105.762816] env[63279]: DEBUG oslo_vmware.api [None req-9c2043fa-c480-4907-bbe4-d20e0c0c8ddf tempest-ServerShowV257Test-1874161788 tempest-ServerShowV257Test-1874161788-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]5293efca-98a8-ade1-8568-2374204f94b7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2105.769878] env[63279]: DEBUG oslo_vmware.api [None req-2fc734b0-6cb8-42f2-afc5-bfa1456ea58d tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]521b3974-0cce-0082-b118-cafd1362ffe0, 'name': SearchDatastore_Task, 'duration_secs': 0.009246} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2105.770298] env[63279]: DEBUG oslo_concurrency.lockutils [None req-2fc734b0-6cb8-42f2-afc5-bfa1456ea58d tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2105.770622] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-2fc734b0-6cb8-42f2-afc5-bfa1456ea58d tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: fcdd3076-2b53-4850-9730-2f877e2cabfd] Processing image 30887889-e45b-4f67-8b3c-16216e594a90 {{(pid=63279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2105.770933] env[63279]: DEBUG oslo_concurrency.lockutils [None req-2fc734b0-6cb8-42f2-afc5-bfa1456ea58d tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2105.925982] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3ee5872-2352-447f-8a3f-506fbaec2c80 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2105.934637] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5541df24-8f4d-4635-bab0-cff48d0a6c19 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2105.982050] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-560e488c-a6db-4cc2-84d6-fed6154ea750 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2105.993292] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93e041ee-a012-4086-a787-c9a559a43f84 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2106.014918] env[63279]: DEBUG nova.compute.provider_tree [None req-01b37c68-092e-4554-92a7-7422378b5349 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Inventory has not changed in ProviderTree for provider: 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2106.040598] env[63279]: DEBUG nova.network.neutron [None req-5e3f5540-3a91-4676-9ed7-83f8c35479b1 tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] [instance: ee1b4746-49ac-425c-8219-4d54cb34abe0] Instance cache missing network info. {{(pid=63279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2106.061226] env[63279]: DEBUG nova.compute.manager [None req-4110a38f-8529-4f8e-8573-c35840515d98 tempest-ServersTestBootFromVolume-1861685268 tempest-ServersTestBootFromVolume-1861685268-project-member] [instance: 861e4118-6134-40cf-91cb-865b6ee9f347] Starting instance... 
{{(pid=63279) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 2106.074923] env[63279]: DEBUG oslo_vmware.api [None req-8600dfea-b562-47c8-be02-b00ae841b764 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Task: {'id': task-2087505, 'name': PowerOnVM_Task, 'duration_secs': 1.001187} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2106.077284] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-8600dfea-b562-47c8-be02-b00ae841b764 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] [instance: f2a68d73-49d6-4b38-aff1-c2eb850f2ca6] Powered on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2106.077284] env[63279]: INFO nova.compute.manager [None req-8600dfea-b562-47c8-be02-b00ae841b764 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] [instance: f2a68d73-49d6-4b38-aff1-c2eb850f2ca6] Took 9.82 seconds to spawn the instance on the hypervisor. [ 2106.077284] env[63279]: DEBUG nova.compute.manager [None req-8600dfea-b562-47c8-be02-b00ae841b764 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] [instance: f2a68d73-49d6-4b38-aff1-c2eb850f2ca6] Checking state {{(pid=63279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2106.077773] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a04bd0f-0864-43a7-9a24-7d7a37df7c83 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2106.156889] env[63279]: DEBUG oslo_concurrency.lockutils [None req-04f7e4ed-d7ff-4e9a-9fc5-7e015957e936 tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] Acquiring lock "cd00cb0e-30e5-4a0c-8612-ea92e5e32edd" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2106.157069] env[63279]: DEBUG oslo_concurrency.lockutils [None req-04f7e4ed-d7ff-4e9a-9fc5-7e015957e936 tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] Lock "cd00cb0e-30e5-4a0c-8612-ea92e5e32edd" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2106.157339] env[63279]: DEBUG oslo_concurrency.lockutils [None req-04f7e4ed-d7ff-4e9a-9fc5-7e015957e936 tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] Acquiring lock "cd00cb0e-30e5-4a0c-8612-ea92e5e32edd-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2106.157549] env[63279]: DEBUG oslo_concurrency.lockutils [None req-04f7e4ed-d7ff-4e9a-9fc5-7e015957e936 tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] Lock "cd00cb0e-30e5-4a0c-8612-ea92e5e32edd-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63279) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2106.157729] env[63279]: DEBUG oslo_concurrency.lockutils [None req-04f7e4ed-d7ff-4e9a-9fc5-7e015957e936 tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] Lock "cd00cb0e-30e5-4a0c-8612-ea92e5e32edd-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2106.160033] env[63279]: INFO nova.compute.manager [None req-04f7e4ed-d7ff-4e9a-9fc5-7e015957e936 tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] [instance: cd00cb0e-30e5-4a0c-8612-ea92e5e32edd] Terminating instance [ 2106.234739] env[63279]: DEBUG oslo_concurrency.lockutils [None req-87c40409-e3eb-4bc2-bdc5-ee97999f97b7 tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] Acquiring lock "fd9b1666-8e06-4ed0-9187-05a40e136a1d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2106.235019] env[63279]: DEBUG oslo_concurrency.lockutils [None req-87c40409-e3eb-4bc2-bdc5-ee97999f97b7 tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] Lock "fd9b1666-8e06-4ed0-9187-05a40e136a1d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2106.235236] env[63279]: DEBUG oslo_concurrency.lockutils [None req-87c40409-e3eb-4bc2-bdc5-ee97999f97b7 tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] Acquiring lock "fd9b1666-8e06-4ed0-9187-05a40e136a1d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2106.235428] env[63279]: DEBUG oslo_concurrency.lockutils [None req-87c40409-e3eb-4bc2-bdc5-ee97999f97b7 tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] Lock "fd9b1666-8e06-4ed0-9187-05a40e136a1d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2106.235603] env[63279]: DEBUG oslo_concurrency.lockutils [None req-87c40409-e3eb-4bc2-bdc5-ee97999f97b7 tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] Lock "fd9b1666-8e06-4ed0-9187-05a40e136a1d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2106.238360] env[63279]: INFO nova.compute.manager [None req-87c40409-e3eb-4bc2-bdc5-ee97999f97b7 tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] [instance: fd9b1666-8e06-4ed0-9187-05a40e136a1d] Terminating instance [ 2106.256694] env[63279]: DEBUG nova.network.neutron [None 
req-5e3f5540-3a91-4676-9ed7-83f8c35479b1 tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] [instance: ee1b4746-49ac-425c-8219-4d54cb34abe0] Updating instance_info_cache with network_info: [{"id": "a7b4791a-48da-4f2c-a8f0-debeceec3225", "address": "fa:16:3e:6d:22:31", "network": {"id": "9cfd684f-63e8-44aa-8569-0ab02c790458", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-318205966-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4f07bad15ea5419cbecc5840b4e96d01", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dcf5c3f7-4e33-4f21-b323-3673930b789c", "external-id": "nsx-vlan-transportzone-983", "segmentation_id": 983, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa7b4791a-48", "ovs_interfaceid": "a7b4791a-48da-4f2c-a8f0-debeceec3225", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2106.265269] env[63279]: DEBUG oslo_vmware.api [None req-9c2043fa-c480-4907-bbe4-d20e0c0c8ddf tempest-ServerShowV257Test-1874161788 tempest-ServerShowV257Test-1874161788-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]5293efca-98a8-ade1-8568-2374204f94b7, 'name': SearchDatastore_Task, 'duration_secs': 0.010805} completed successfully. 
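The lock lines above (the "cd00cb0e-...-" and "...-events" locks taken by ComputeManager.terminate_instance and InstanceEvents.clear_events_for_instance, and the "[datastore1] devstack-image-cache_base/..." lock around the image cache) all follow oslo.concurrency's synchronized/lock pattern: one named lock per instance UUID or per cached image serializes concurrent work on that object. A minimal sketch of that pattern, not Nova's actual code; the UUID is copied from the log and do_terminate_instance here is only a stand-in:

    from oslo_concurrency import lockutils

    INSTANCE_UUID = "cd00cb0e-30e5-4a0c-8612-ea92e5e32edd"  # value taken from the log above

    def terminate_instance(instance_uuid):
        # One in-process lock per instance UUID serializes concurrent
        # operations on the same instance; acquiring and releasing it is
        # what produces the "Acquiring lock ... by ...do_terminate_instance"
        # and "Lock ... released" DEBUG lines.
        @lockutils.synchronized(instance_uuid)
        def do_terminate_instance():
            # shutdown / cleanup would run here while the lock is held
            return "terminated %s" % instance_uuid

        return do_terminate_instance()

    print(terminate_instance(INSTANCE_UUID))

The "waited N s" and "held N s" figures in the lock lines are measured by that same wrapper around acquire and release.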
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2106.265970] env[63279]: DEBUG oslo_concurrency.lockutils [None req-9c2043fa-c480-4907-bbe4-d20e0c0c8ddf tempest-ServerShowV257Test-1874161788 tempest-ServerShowV257Test-1874161788-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2106.266551] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-9c2043fa-c480-4907-bbe4-d20e0c0c8ddf tempest-ServerShowV257Test-1874161788 tempest-ServerShowV257Test-1874161788-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] f7f88f1a-a81a-4208-88d7-6a264e642ab1/f7f88f1a-a81a-4208-88d7-6a264e642ab1.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2106.266969] env[63279]: DEBUG oslo_concurrency.lockutils [None req-2fc734b0-6cb8-42f2-afc5-bfa1456ea58d tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2106.267292] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-2fc734b0-6cb8-42f2-afc5-bfa1456ea58d tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2106.267905] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-018dc965-83f3-4b14-a7a3-0a71051a5c5a {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2106.270879] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-341e92bc-b39f-4b9c-8bb8-0aff523eafcb {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2106.280579] env[63279]: DEBUG oslo_vmware.api [None req-9c2043fa-c480-4907-bbe4-d20e0c0c8ddf tempest-ServerShowV257Test-1874161788 tempest-ServerShowV257Test-1874161788-project-member] Waiting for the task: (returnval){ [ 2106.280579] env[63279]: value = "task-2087506" [ 2106.280579] env[63279]: _type = "Task" [ 2106.280579] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2106.285514] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-2fc734b0-6cb8-42f2-afc5-bfa1456ea58d tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2106.288493] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-2fc734b0-6cb8-42f2-afc5-bfa1456ea58d tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2106.289978] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e8611cc6-e69e-4e61-9e11-cabcb894bfc0 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2106.295741] env[63279]: DEBUG oslo_vmware.api [None req-9c2043fa-c480-4907-bbe4-d20e0c0c8ddf tempest-ServerShowV257Test-1874161788 tempest-ServerShowV257Test-1874161788-project-member] Task: {'id': task-2087506, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2106.298811] env[63279]: DEBUG oslo_vmware.api [None req-2fc734b0-6cb8-42f2-afc5-bfa1456ea58d tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Waiting for the task: (returnval){ [ 2106.298811] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]521717fb-420b-7b10-e9ed-c7888d6a9582" [ 2106.298811] env[63279]: _type = "Task" [ 2106.298811] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2106.306836] env[63279]: DEBUG oslo_vmware.api [None req-2fc734b0-6cb8-42f2-afc5-bfa1456ea58d tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]521717fb-420b-7b10-e9ed-c7888d6a9582, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2106.381993] env[63279]: DEBUG nova.compute.manager [req-fa64a289-dd5b-480e-a353-b87d0888ba18 req-46efa5ad-79b0-4959-b1f7-23249730dee5 service nova] [instance: ee1b4746-49ac-425c-8219-4d54cb34abe0] Received event network-vif-plugged-a7b4791a-48da-4f2c-a8f0-debeceec3225 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2106.382302] env[63279]: DEBUG oslo_concurrency.lockutils [req-fa64a289-dd5b-480e-a353-b87d0888ba18 req-46efa5ad-79b0-4959-b1f7-23249730dee5 service nova] Acquiring lock "ee1b4746-49ac-425c-8219-4d54cb34abe0-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2106.382624] env[63279]: DEBUG oslo_concurrency.lockutils [req-fa64a289-dd5b-480e-a353-b87d0888ba18 req-46efa5ad-79b0-4959-b1f7-23249730dee5 service nova] Lock "ee1b4746-49ac-425c-8219-4d54cb34abe0-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2106.383038] env[63279]: DEBUG oslo_concurrency.lockutils [req-fa64a289-dd5b-480e-a353-b87d0888ba18 req-46efa5ad-79b0-4959-b1f7-23249730dee5 service nova] Lock "ee1b4746-49ac-425c-8219-4d54cb34abe0-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2106.383095] env[63279]: DEBUG nova.compute.manager [req-fa64a289-dd5b-480e-a353-b87d0888ba18 req-46efa5ad-79b0-4959-b1f7-23249730dee5 service nova] [instance: ee1b4746-49ac-425c-8219-4d54cb34abe0] No waiting events found dispatching 
network-vif-plugged-a7b4791a-48da-4f2c-a8f0-debeceec3225 {{(pid=63279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 2106.383325] env[63279]: WARNING nova.compute.manager [req-fa64a289-dd5b-480e-a353-b87d0888ba18 req-46efa5ad-79b0-4959-b1f7-23249730dee5 service nova] [instance: ee1b4746-49ac-425c-8219-4d54cb34abe0] Received unexpected event network-vif-plugged-a7b4791a-48da-4f2c-a8f0-debeceec3225 for instance with vm_state building and task_state spawning. [ 2106.383578] env[63279]: DEBUG nova.compute.manager [req-fa64a289-dd5b-480e-a353-b87d0888ba18 req-46efa5ad-79b0-4959-b1f7-23249730dee5 service nova] [instance: ee1b4746-49ac-425c-8219-4d54cb34abe0] Received event network-changed-a7b4791a-48da-4f2c-a8f0-debeceec3225 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2106.383844] env[63279]: DEBUG nova.compute.manager [req-fa64a289-dd5b-480e-a353-b87d0888ba18 req-46efa5ad-79b0-4959-b1f7-23249730dee5 service nova] [instance: ee1b4746-49ac-425c-8219-4d54cb34abe0] Refreshing instance network info cache due to event network-changed-a7b4791a-48da-4f2c-a8f0-debeceec3225. {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11655}} [ 2106.383922] env[63279]: DEBUG oslo_concurrency.lockutils [req-fa64a289-dd5b-480e-a353-b87d0888ba18 req-46efa5ad-79b0-4959-b1f7-23249730dee5 service nova] Acquiring lock "refresh_cache-ee1b4746-49ac-425c-8219-4d54cb34abe0" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2106.496816] env[63279]: DEBUG oslo_concurrency.lockutils [None req-9e49e054-a083-4e75-99cd-6d3ad402538a tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Acquiring lock "b981ac83-6c23-4d44-bd28-12da30d746bd-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2106.497092] env[63279]: DEBUG oslo_concurrency.lockutils [None req-9e49e054-a083-4e75-99cd-6d3ad402538a tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Lock "b981ac83-6c23-4d44-bd28-12da30d746bd-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2106.497304] env[63279]: DEBUG oslo_concurrency.lockutils [None req-9e49e054-a083-4e75-99cd-6d3ad402538a tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Lock "b981ac83-6c23-4d44-bd28-12da30d746bd-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2106.518531] env[63279]: DEBUG nova.scheduler.client.report [None req-01b37c68-092e-4554-92a7-7422378b5349 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Inventory has not changed for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 
'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2106.589689] env[63279]: DEBUG oslo_concurrency.lockutils [None req-4110a38f-8529-4f8e-8573-c35840515d98 tempest-ServersTestBootFromVolume-1861685268 tempest-ServersTestBootFromVolume-1861685268-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2106.603597] env[63279]: INFO nova.compute.manager [None req-8600dfea-b562-47c8-be02-b00ae841b764 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] [instance: f2a68d73-49d6-4b38-aff1-c2eb850f2ca6] Took 46.65 seconds to build instance. [ 2106.663703] env[63279]: DEBUG nova.compute.manager [None req-04f7e4ed-d7ff-4e9a-9fc5-7e015957e936 tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] [instance: cd00cb0e-30e5-4a0c-8612-ea92e5e32edd] Start destroying the instance on the hypervisor. {{(pid=63279) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2106.664139] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-04f7e4ed-d7ff-4e9a-9fc5-7e015957e936 tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] [instance: cd00cb0e-30e5-4a0c-8612-ea92e5e32edd] Destroying instance {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2106.665848] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ebc36e0-a6fd-494a-99c1-4b29df7b5219 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2106.677684] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-04f7e4ed-d7ff-4e9a-9fc5-7e015957e936 tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] [instance: cd00cb0e-30e5-4a0c-8612-ea92e5e32edd] Powering off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2106.678012] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-78d192eb-24a2-45d7-a5b9-ef324179a22a {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2106.685231] env[63279]: DEBUG oslo_vmware.api [None req-04f7e4ed-d7ff-4e9a-9fc5-7e015957e936 tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] Waiting for the task: (returnval){ [ 2106.685231] env[63279]: value = "task-2087507" [ 2106.685231] env[63279]: _type = "Task" [ 2106.685231] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2106.694587] env[63279]: DEBUG oslo_vmware.api [None req-04f7e4ed-d7ff-4e9a-9fc5-7e015957e936 tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] Task: {'id': task-2087507, 'name': PowerOffVM_Task} progress is 0%. 
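The inventory payload reported for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 above is what bounds scheduling on this node: Placement's usable capacity per resource class is (total - reserved) * allocation_ratio. A small worked example using only the logged numbers (standard Placement arithmetic, not code from Nova):

    # Inventory as logged for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137.
    inventory = {
        "VCPU":      {"total": 48,     "reserved": 0,   "allocation_ratio": 4.0},
        "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0},
        "DISK_GB":   {"total": 400,    "reserved": 0,   "allocation_ratio": 1.0},
    }

    def capacity(inv):
        # Effective capacity per resource class: (total - reserved) * allocation_ratio.
        return {rc: (v["total"] - v["reserved"]) * v["allocation_ratio"]
                for rc, v in inv.items()}

    print(capacity(inventory))
    # -> {'VCPU': 192.0, 'MEMORY_MB': 196078.0, 'DISK_GB': 400.0}

The max_unit values in the same payload (16 VCPU, 65530 MB, 169 GB) additionally cap what a single allocation may request, independent of the allocation ratio.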
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2106.743710] env[63279]: DEBUG nova.compute.manager [None req-87c40409-e3eb-4bc2-bdc5-ee97999f97b7 tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] [instance: fd9b1666-8e06-4ed0-9187-05a40e136a1d] Start destroying the instance on the hypervisor. {{(pid=63279) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2106.743934] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-87c40409-e3eb-4bc2-bdc5-ee97999f97b7 tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] [instance: fd9b1666-8e06-4ed0-9187-05a40e136a1d] Destroying instance {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2106.744938] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-137f3d7a-1ca8-43f1-8a6c-63fd81aefc79 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2106.752956] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-87c40409-e3eb-4bc2-bdc5-ee97999f97b7 tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] [instance: fd9b1666-8e06-4ed0-9187-05a40e136a1d] Powering off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2106.753280] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-31e5ee5d-1fc1-408e-95c7-dc2e5d262bcd {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2106.760195] env[63279]: DEBUG oslo_vmware.api [None req-87c40409-e3eb-4bc2-bdc5-ee97999f97b7 tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] Waiting for the task: (returnval){ [ 2106.760195] env[63279]: value = "task-2087508" [ 2106.760195] env[63279]: _type = "Task" [ 2106.760195] env[63279]: } to complete. 
{{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2106.769955] env[63279]: DEBUG oslo_concurrency.lockutils [None req-5e3f5540-3a91-4676-9ed7-83f8c35479b1 tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] Releasing lock "refresh_cache-ee1b4746-49ac-425c-8219-4d54cb34abe0" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2106.770354] env[63279]: DEBUG nova.compute.manager [None req-5e3f5540-3a91-4676-9ed7-83f8c35479b1 tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] [instance: ee1b4746-49ac-425c-8219-4d54cb34abe0] Instance network_info: |[{"id": "a7b4791a-48da-4f2c-a8f0-debeceec3225", "address": "fa:16:3e:6d:22:31", "network": {"id": "9cfd684f-63e8-44aa-8569-0ab02c790458", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-318205966-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4f07bad15ea5419cbecc5840b4e96d01", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dcf5c3f7-4e33-4f21-b323-3673930b789c", "external-id": "nsx-vlan-transportzone-983", "segmentation_id": 983, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa7b4791a-48", "ovs_interfaceid": "a7b4791a-48da-4f2c-a8f0-debeceec3225", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 2106.770793] env[63279]: DEBUG oslo_vmware.api [None req-87c40409-e3eb-4bc2-bdc5-ee97999f97b7 tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] Task: {'id': task-2087508, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2106.771153] env[63279]: DEBUG oslo_concurrency.lockutils [req-fa64a289-dd5b-480e-a353-b87d0888ba18 req-46efa5ad-79b0-4959-b1f7-23249730dee5 service nova] Acquired lock "refresh_cache-ee1b4746-49ac-425c-8219-4d54cb34abe0" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2106.771356] env[63279]: DEBUG nova.network.neutron [req-fa64a289-dd5b-480e-a353-b87d0888ba18 req-46efa5ad-79b0-4959-b1f7-23249730dee5 service nova] [instance: ee1b4746-49ac-425c-8219-4d54cb34abe0] Refreshing network info cache for port a7b4791a-48da-4f2c-a8f0-debeceec3225 {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2106.772769] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-5e3f5540-3a91-4676-9ed7-83f8c35479b1 tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] [instance: ee1b4746-49ac-425c-8219-4d54cb34abe0] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:6d:22:31', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'dcf5c3f7-4e33-4f21-b323-3673930b789c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a7b4791a-48da-4f2c-a8f0-debeceec3225', 'vif_model': 'vmxnet3'}] {{(pid=63279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2106.781521] env[63279]: DEBUG oslo.service.loopingcall [None req-5e3f5540-3a91-4676-9ed7-83f8c35479b1 tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2106.782251] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ee1b4746-49ac-425c-8219-4d54cb34abe0] Creating VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2106.787056] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5e90bb4b-695d-4ed5-b94e-d0eb371d1b08 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2106.811569] env[63279]: DEBUG oslo_vmware.api [None req-9c2043fa-c480-4907-bbe4-d20e0c0c8ddf tempest-ServerShowV257Test-1874161788 tempest-ServerShowV257Test-1874161788-project-member] Task: {'id': task-2087506, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2106.813245] env[63279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2106.813245] env[63279]: value = "task-2087509" [ 2106.813245] env[63279]: _type = "Task" [ 2106.813245] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2106.820382] env[63279]: DEBUG oslo_vmware.api [None req-2fc734b0-6cb8-42f2-afc5-bfa1456ea58d tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]521717fb-420b-7b10-e9ed-c7888d6a9582, 'name': SearchDatastore_Task, 'duration_secs': 0.018298} completed successfully. 
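The recurring "Waiting for the task ... to complete", "progress is N%" and "completed successfully" triplets above come from polling vCenter task objects until they reach a terminal state; the real loop lives in oslo.vmware's wait_for_task/_poll_task. A rough, self-contained sketch of that polling shape only; fetch_task_info is a hypothetical stub standing in for the PropertyCollector read of the task's state and progress:

    import time
    from itertools import count

    _poll_counter = count(1)

    def fetch_task_info(task_ref):
        # Hypothetical stub for reading the task's info.state / info.progress
        # from vCenter; it reports success on the third poll.
        n = next(_poll_counter)
        if n < 3:
            return {"state": "running", "progress": n * 40}
        return {"state": "success", "progress": 100}

    def wait_for_task(task_ref, interval=0.5):
        # Poll until the task reaches a terminal state, logging progress along
        # the way -- the loop shape behind the _poll_task DEBUG lines above.
        while True:
            info = fetch_task_info(task_ref)
            if info["state"] == "success":
                print("Task %s completed successfully." % task_ref)
                return info
            if info["state"] == "error":
                raise RuntimeError("Task %s failed" % task_ref)
            print("Task %s progress is %d%%." % (task_ref, info["progress"]))
            time.sleep(interval)

    wait_for_task("task-2087506")

The library's version runs on a fixed-interval loop and also reports the duration_secs values seen in the "completed successfully" lines.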
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2106.821523] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-228a070e-0f18-401b-abe7-5f2056809cec {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2106.827060] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087509, 'name': CreateVM_Task} progress is 5%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2106.830263] env[63279]: DEBUG oslo_vmware.api [None req-2fc734b0-6cb8-42f2-afc5-bfa1456ea58d tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Waiting for the task: (returnval){ [ 2106.830263] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]521a01a8-5bce-d028-7ef2-d81b6c4c7cde" [ 2106.830263] env[63279]: _type = "Task" [ 2106.830263] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2106.839615] env[63279]: DEBUG oslo_vmware.api [None req-2fc734b0-6cb8-42f2-afc5-bfa1456ea58d tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]521a01a8-5bce-d028-7ef2-d81b6c4c7cde, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2107.024795] env[63279]: DEBUG oslo_concurrency.lockutils [None req-01b37c68-092e-4554-92a7-7422378b5349 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.582s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2107.025551] env[63279]: DEBUG nova.compute.manager [None req-01b37c68-092e-4554-92a7-7422378b5349 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] [instance: 668436f9-94e9-48c2-a3d4-3df7bbcf5775] Start building networks asynchronously for instance. 
{{(pid=63279) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 2107.028523] env[63279]: DEBUG oslo_concurrency.lockutils [None req-934ceb37-5239-47c1-8678-9d43df040bef tempest-ServerMetadataTestJSON-1974700823 tempest-ServerMetadataTestJSON-1974700823-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 30.581s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2107.030445] env[63279]: INFO nova.compute.claims [None req-934ceb37-5239-47c1-8678-9d43df040bef tempest-ServerMetadataTestJSON-1974700823 tempest-ServerMetadataTestJSON-1974700823-project-member] [instance: a0697601-46ae-48ce-a3e1-3c4b81fc1f95] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2107.105904] env[63279]: DEBUG oslo_concurrency.lockutils [None req-8600dfea-b562-47c8-be02-b00ae841b764 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Lock "f2a68d73-49d6-4b38-aff1-c2eb850f2ca6" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 72.807s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2107.195609] env[63279]: DEBUG oslo_vmware.api [None req-04f7e4ed-d7ff-4e9a-9fc5-7e015957e936 tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] Task: {'id': task-2087507, 'name': PowerOffVM_Task, 'duration_secs': 0.287184} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2107.195892] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-04f7e4ed-d7ff-4e9a-9fc5-7e015957e936 tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] [instance: cd00cb0e-30e5-4a0c-8612-ea92e5e32edd] Powered off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2107.196080] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-04f7e4ed-d7ff-4e9a-9fc5-7e015957e936 tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] [instance: cd00cb0e-30e5-4a0c-8612-ea92e5e32edd] Unregistering the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2107.196339] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-723ad8d1-aff2-411f-808d-94aeae041a0b {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2107.270012] env[63279]: DEBUG oslo_vmware.api [None req-87c40409-e3eb-4bc2-bdc5-ee97999f97b7 tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] Task: {'id': task-2087508, 'name': PowerOffVM_Task, 'duration_secs': 0.260215} completed successfully. 
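The terminate path for instances cd00cb0e-... and fd9b1666-... above follows a fixed order on the VMware side: power the VM off (PowerOffVM_Task), then unregister it (UnregisterVM), before the remaining cleanup. A compact sketch of that ordering with stubbed calls; the function names are illustrative, not the driver's API:

    def power_off_vm(vm_ref):
        print("PowerOffVM_Task issued for %s" % vm_ref)   # "Powering off the VM" in the log

    def unregister_vm(vm_ref):
        print("UnregisterVM issued for %s" % vm_ref)      # "Unregistering the VM" in the log

    def destroy_instance(vm_ref):
        # Order matters: the VM is powered off first, then unregistered,
        # and only after that can its files be removed from the datastore.
        power_off_vm(vm_ref)
        unregister_vm(vm_ref)
        # ... datastore file cleanup would follow here ...

    destroy_instance("cd00cb0e-30e5-4a0c-8612-ea92e5e32edd")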
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2107.270323] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-87c40409-e3eb-4bc2-bdc5-ee97999f97b7 tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] [instance: fd9b1666-8e06-4ed0-9187-05a40e136a1d] Powered off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2107.270500] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-87c40409-e3eb-4bc2-bdc5-ee97999f97b7 tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] [instance: fd9b1666-8e06-4ed0-9187-05a40e136a1d] Unregistering the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2107.270746] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e8aec48f-efc2-4ffc-a1c7-7062ef00d39e {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2107.294773] env[63279]: DEBUG oslo_vmware.api [None req-9c2043fa-c480-4907-bbe4-d20e0c0c8ddf tempest-ServerShowV257Test-1874161788 tempest-ServerShowV257Test-1874161788-project-member] Task: {'id': task-2087506, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.545956} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2107.295071] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-9c2043fa-c480-4907-bbe4-d20e0c0c8ddf tempest-ServerShowV257Test-1874161788 tempest-ServerShowV257Test-1874161788-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] f7f88f1a-a81a-4208-88d7-6a264e642ab1/f7f88f1a-a81a-4208-88d7-6a264e642ab1.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2107.295305] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-9c2043fa-c480-4907-bbe4-d20e0c0c8ddf tempest-ServerShowV257Test-1874161788 tempest-ServerShowV257Test-1874161788-project-member] [instance: f7f88f1a-a81a-4208-88d7-6a264e642ab1] Extending root virtual disk to 1048576 {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2107.295555] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-73e2018b-d843-4eb7-b59b-9e2141df1408 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2107.301109] env[63279]: DEBUG oslo_vmware.api [None req-9c2043fa-c480-4907-bbe4-d20e0c0c8ddf tempest-ServerShowV257Test-1874161788 tempest-ServerShowV257Test-1874161788-project-member] Waiting for the task: (returnval){ [ 2107.301109] env[63279]: value = "task-2087512" [ 2107.301109] env[63279]: _type = "Task" [ 2107.301109] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2107.309241] env[63279]: DEBUG oslo_vmware.api [None req-9c2043fa-c480-4907-bbe4-d20e0c0c8ddf tempest-ServerShowV257Test-1874161788 tempest-ServerShowV257Test-1874161788-project-member] Task: {'id': task-2087512, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2107.321465] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087509, 'name': CreateVM_Task} progress is 25%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2107.341062] env[63279]: DEBUG oslo_vmware.api [None req-2fc734b0-6cb8-42f2-afc5-bfa1456ea58d tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]521a01a8-5bce-d028-7ef2-d81b6c4c7cde, 'name': SearchDatastore_Task, 'duration_secs': 0.020292} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2107.341062] env[63279]: DEBUG oslo_concurrency.lockutils [None req-2fc734b0-6cb8-42f2-afc5-bfa1456ea58d tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2107.341250] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-2fc734b0-6cb8-42f2-afc5-bfa1456ea58d tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] fcdd3076-2b53-4850-9730-2f877e2cabfd/fcdd3076-2b53-4850-9730-2f877e2cabfd.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2107.341533] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b4c6fbd0-d3ad-4bbe-b143-b0597c446f54 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2107.347966] env[63279]: DEBUG oslo_vmware.api [None req-2fc734b0-6cb8-42f2-afc5-bfa1456ea58d tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Waiting for the task: (returnval){ [ 2107.347966] env[63279]: value = "task-2087513" [ 2107.347966] env[63279]: _type = "Task" [ 2107.347966] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2107.355432] env[63279]: DEBUG oslo_vmware.api [None req-2fc734b0-6cb8-42f2-afc5-bfa1456ea58d tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Task: {'id': task-2087513, 'name': CopyVirtualDisk_Task} progress is 0%. 
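The spawn path above reuses the datastore image cache: the cached VMDK for image 30887889-... is copied into the new instance's folder (CopyVirtualDisk_Task) and the root disk is then extended to the flavor's root_gb (ExtendVirtualDisk_Task). A sketch of the path layout and size arithmetic only; the path scheme is taken from the log, root_gb=1 matches the m1.nano flavor dumped near the end of this section for another instance of the same run, and the 1048576 in "Extending root virtual disk to 1048576" is consistent with the size being passed in KB:

    IMAGE_ID = "30887889-e45b-4f67-8b3c-16216e594a90"
    INSTANCE_UUID = "fcdd3076-2b53-4850-9730-2f877e2cabfd"
    DATASTORE = "datastore1"
    ROOT_GB = 1  # m1.nano flavor

    def cached_image_path(datastore, image_id):
        # [datastore1] devstack-image-cache_base/<image-id>/<image-id>.vmdk
        return "[%s] devstack-image-cache_base/%s/%s.vmdk" % (datastore, image_id, image_id)

    def instance_disk_path(datastore, uuid):
        # [datastore1] <instance-uuid>/<instance-uuid>.vmdk
        return "[%s] %s/%s.vmdk" % (datastore, uuid, uuid)

    src = cached_image_path(DATASTORE, IMAGE_ID)
    dst = instance_disk_path(DATASTORE, INSTANCE_UUID)
    print("CopyVirtualDisk %s -> %s" % (src, dst))
    print("ExtendVirtualDisk to %d KB" % (ROOT_GB * 1024 * 1024))  # 1048576 KB = 1 GiB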
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2107.533776] env[63279]: DEBUG oslo_concurrency.lockutils [None req-9e49e054-a083-4e75-99cd-6d3ad402538a tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Acquiring lock "refresh_cache-b981ac83-6c23-4d44-bd28-12da30d746bd" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2107.534069] env[63279]: DEBUG oslo_concurrency.lockutils [None req-9e49e054-a083-4e75-99cd-6d3ad402538a tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Acquired lock "refresh_cache-b981ac83-6c23-4d44-bd28-12da30d746bd" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2107.534324] env[63279]: DEBUG nova.network.neutron [None req-9e49e054-a083-4e75-99cd-6d3ad402538a tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: b981ac83-6c23-4d44-bd28-12da30d746bd] Building network info cache for instance {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2107.536739] env[63279]: DEBUG nova.compute.utils [None req-01b37c68-092e-4554-92a7-7422378b5349 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Using /dev/sd instead of None {{(pid=63279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2107.541389] env[63279]: DEBUG nova.compute.manager [None req-01b37c68-092e-4554-92a7-7422378b5349 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] [instance: 668436f9-94e9-48c2-a3d4-3df7bbcf5775] Allocating IP information in the background. {{(pid=63279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 2107.541389] env[63279]: DEBUG nova.network.neutron [None req-01b37c68-092e-4554-92a7-7422378b5349 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] [instance: 668436f9-94e9-48c2-a3d4-3df7bbcf5775] allocate_for_instance() {{(pid=63279) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2107.597247] env[63279]: DEBUG nova.policy [None req-01b37c68-092e-4554-92a7-7422378b5349 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'bfdf4a1d3ff3404fbc0bb9d3cc75a6dd', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '651b8183237a4e6dbef36aa2fb419f1b', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63279) authorize /opt/stack/nova/nova/policy.py:192}} [ 2107.602191] env[63279]: DEBUG nova.network.neutron [req-fa64a289-dd5b-480e-a353-b87d0888ba18 req-46efa5ad-79b0-4959-b1f7-23249730dee5 service nova] [instance: ee1b4746-49ac-425c-8219-4d54cb34abe0] Updated VIF entry in instance network info cache for port a7b4791a-48da-4f2c-a8f0-debeceec3225. 
{{(pid=63279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2107.602580] env[63279]: DEBUG nova.network.neutron [req-fa64a289-dd5b-480e-a353-b87d0888ba18 req-46efa5ad-79b0-4959-b1f7-23249730dee5 service nova] [instance: ee1b4746-49ac-425c-8219-4d54cb34abe0] Updating instance_info_cache with network_info: [{"id": "a7b4791a-48da-4f2c-a8f0-debeceec3225", "address": "fa:16:3e:6d:22:31", "network": {"id": "9cfd684f-63e8-44aa-8569-0ab02c790458", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-318205966-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4f07bad15ea5419cbecc5840b4e96d01", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dcf5c3f7-4e33-4f21-b323-3673930b789c", "external-id": "nsx-vlan-transportzone-983", "segmentation_id": 983, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa7b4791a-48", "ovs_interfaceid": "a7b4791a-48da-4f2c-a8f0-debeceec3225", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2107.814043] env[63279]: DEBUG oslo_vmware.api [None req-9c2043fa-c480-4907-bbe4-d20e0c0c8ddf tempest-ServerShowV257Test-1874161788 tempest-ServerShowV257Test-1874161788-project-member] Task: {'id': task-2087512, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.067949} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2107.814043] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-9c2043fa-c480-4907-bbe4-d20e0c0c8ddf tempest-ServerShowV257Test-1874161788 tempest-ServerShowV257Test-1874161788-project-member] [instance: f7f88f1a-a81a-4208-88d7-6a264e642ab1] Extended root virtual disk {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2107.818347] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c056378e-9e15-465c-8853-0ba2f5c59bfe {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2107.826546] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087509, 'name': CreateVM_Task} progress is 25%. 
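The "Instance VIF info" entry logged earlier by build_virtual_machine is a direct projection of the network_info element repeated above: the port id becomes iface_id, the port's MAC becomes mac_address, and the NSX logical-switch id from details becomes the opaque network reference. A rough illustration of that mapping, not Nova's code; only fields visible in the log are used, with values copied from it:

    # One element of the network_info list, trimmed to the fields used below.
    vif = {
        "id": "a7b4791a-48da-4f2c-a8f0-debeceec3225",
        "address": "fa:16:3e:6d:22:31",
        "network": {"bridge": "br-int"},
        "details": {"nsx-logical-switch-id": "dcf5c3f7-4e33-4f21-b323-3673930b789c"},
        "vnic_type": "normal",
    }

    def vif_info_from_network_info(vif):
        # Mirrors the logged "Instance VIF info [...]" structure.
        return {
            "network_name": vif["network"]["bridge"],
            "mac_address": vif["address"],
            "network_ref": {
                "type": "OpaqueNetwork",
                "network-id": vif["details"]["nsx-logical-switch-id"],
                "network-type": "nsx.LogicalSwitch",
                "use-external-id": True,
            },
            "iface_id": vif["id"],
            "vif_model": "vmxnet3",
        }

    print(vif_info_from_network_info(vif))

The CreateVM_Task lines that follow in the log are the step that consumes this VIF info when the VM and its vmxnet3 NIC are created on the ESX host.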
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2107.843067] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-9c2043fa-c480-4907-bbe4-d20e0c0c8ddf tempest-ServerShowV257Test-1874161788 tempest-ServerShowV257Test-1874161788-project-member] [instance: f7f88f1a-a81a-4208-88d7-6a264e642ab1] Reconfiguring VM instance instance-00000045 to attach disk [datastore1] f7f88f1a-a81a-4208-88d7-6a264e642ab1/f7f88f1a-a81a-4208-88d7-6a264e642ab1.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2107.843408] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3f0707c6-f930-4df3-b94a-1ced8832f16d {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2107.868053] env[63279]: DEBUG oslo_vmware.api [None req-2fc734b0-6cb8-42f2-afc5-bfa1456ea58d tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Task: {'id': task-2087513, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2107.869538] env[63279]: DEBUG oslo_vmware.api [None req-9c2043fa-c480-4907-bbe4-d20e0c0c8ddf tempest-ServerShowV257Test-1874161788 tempest-ServerShowV257Test-1874161788-project-member] Waiting for the task: (returnval){ [ 2107.869538] env[63279]: value = "task-2087514" [ 2107.869538] env[63279]: _type = "Task" [ 2107.869538] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2107.877839] env[63279]: DEBUG oslo_vmware.api [None req-9c2043fa-c480-4907-bbe4-d20e0c0c8ddf tempest-ServerShowV257Test-1874161788 tempest-ServerShowV257Test-1874161788-project-member] Task: {'id': task-2087514, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2107.948339] env[63279]: DEBUG nova.network.neutron [None req-01b37c68-092e-4554-92a7-7422378b5349 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] [instance: 668436f9-94e9-48c2-a3d4-3df7bbcf5775] Successfully created port: fa1d70e2-1628-429b-a1b6-a29d406767dd {{(pid=63279) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2108.041773] env[63279]: DEBUG nova.compute.manager [None req-01b37c68-092e-4554-92a7-7422378b5349 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] [instance: 668436f9-94e9-48c2-a3d4-3df7bbcf5775] Start building block device mappings for instance. 
{{(pid=63279) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 2108.106135] env[63279]: DEBUG oslo_concurrency.lockutils [req-fa64a289-dd5b-480e-a353-b87d0888ba18 req-46efa5ad-79b0-4959-b1f7-23249730dee5 service nova] Releasing lock "refresh_cache-ee1b4746-49ac-425c-8219-4d54cb34abe0" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2108.117890] env[63279]: DEBUG nova.compute.manager [None req-ffc6fb71-2f93-4848-b384-1a69f6471293 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] [instance: f2a68d73-49d6-4b38-aff1-c2eb850f2ca6] Checking state {{(pid=63279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2108.118788] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19d5c934-3d28-480b-af61-75da26173e2f {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2108.329747] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087509, 'name': CreateVM_Task} progress is 25%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2108.352715] env[63279]: DEBUG nova.network.neutron [None req-9e49e054-a083-4e75-99cd-6d3ad402538a tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: b981ac83-6c23-4d44-bd28-12da30d746bd] Updating instance_info_cache with network_info: [{"id": "61682992-df73-44ba-b302-ba8e00c82f95", "address": "fa:16:3e:32:be:0b", "network": {"id": "d673eabe-0173-4c69-9163-88f4d673ca51", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1798523777-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7c98de1240f14b058e58f6a707096ef1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a555680e-4721-4509-97e4-ced9dc17c13e", "external-id": "nsx-vlan-transportzone-4", "segmentation_id": 4, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap61682992-df", "ovs_interfaceid": "61682992-df73-44ba-b302-ba8e00c82f95", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2108.371785] env[63279]: DEBUG oslo_vmware.api [None req-2fc734b0-6cb8-42f2-afc5-bfa1456ea58d tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Task: {'id': task-2087513, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.564297} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2108.374520] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-2fc734b0-6cb8-42f2-afc5-bfa1456ea58d tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] fcdd3076-2b53-4850-9730-2f877e2cabfd/fcdd3076-2b53-4850-9730-2f877e2cabfd.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2108.374741] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-2fc734b0-6cb8-42f2-afc5-bfa1456ea58d tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: fcdd3076-2b53-4850-9730-2f877e2cabfd] Extending root virtual disk to 1048576 {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2108.375033] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-9725a501-b55e-46c3-b733-04e3cdedc365 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2108.383697] env[63279]: DEBUG oslo_vmware.api [None req-9c2043fa-c480-4907-bbe4-d20e0c0c8ddf tempest-ServerShowV257Test-1874161788 tempest-ServerShowV257Test-1874161788-project-member] Task: {'id': task-2087514, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2108.385030] env[63279]: DEBUG oslo_vmware.api [None req-2fc734b0-6cb8-42f2-afc5-bfa1456ea58d tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Waiting for the task: (returnval){ [ 2108.385030] env[63279]: value = "task-2087515" [ 2108.385030] env[63279]: _type = "Task" [ 2108.385030] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2108.395398] env[63279]: DEBUG oslo_vmware.api [None req-2fc734b0-6cb8-42f2-afc5-bfa1456ea58d tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Task: {'id': task-2087515, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2108.519741] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd03e886-9a27-46a7-a715-97b39b32aca1 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2108.527558] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe029029-a9f0-4a48-a6cf-bbc4f64cdcf3 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2108.561981] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2ccd32b-2853-45bb-b1df-1a78505b5e79 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2108.569916] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4994dadf-6bbe-4626-96e2-14d98bfde03c {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2108.584806] env[63279]: DEBUG nova.compute.provider_tree [None req-934ceb37-5239-47c1-8678-9d43df040bef tempest-ServerMetadataTestJSON-1974700823 tempest-ServerMetadataTestJSON-1974700823-project-member] Inventory has not changed in ProviderTree for provider: 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2108.633888] env[63279]: INFO nova.compute.manager [None req-ffc6fb71-2f93-4848-b384-1a69f6471293 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] [instance: f2a68d73-49d6-4b38-aff1-c2eb850f2ca6] instance snapshotting [ 2108.637193] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed5ca347-e0d9-430f-85e7-4aa780046bde {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2108.658844] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01ab9c38-4021-44df-a5a7-cc16c12ef2e1 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2108.824300] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087509, 'name': CreateVM_Task} progress is 25%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2108.854808] env[63279]: DEBUG oslo_concurrency.lockutils [None req-9e49e054-a083-4e75-99cd-6d3ad402538a tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Releasing lock "refresh_cache-b981ac83-6c23-4d44-bd28-12da30d746bd" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2108.881645] env[63279]: DEBUG oslo_vmware.api [None req-9c2043fa-c480-4907-bbe4-d20e0c0c8ddf tempest-ServerShowV257Test-1874161788 tempest-ServerShowV257Test-1874161788-project-member] Task: {'id': task-2087514, 'name': ReconfigVM_Task, 'duration_secs': 0.758333} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2108.882055] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-9c2043fa-c480-4907-bbe4-d20e0c0c8ddf tempest-ServerShowV257Test-1874161788 tempest-ServerShowV257Test-1874161788-project-member] [instance: f7f88f1a-a81a-4208-88d7-6a264e642ab1] Reconfigured VM instance instance-00000045 to attach disk [datastore1] f7f88f1a-a81a-4208-88d7-6a264e642ab1/f7f88f1a-a81a-4208-88d7-6a264e642ab1.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2108.882575] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c53b1e78-2c14-433e-bbed-d648024e9166 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2108.890026] env[63279]: DEBUG oslo_vmware.api [None req-9c2043fa-c480-4907-bbe4-d20e0c0c8ddf tempest-ServerShowV257Test-1874161788 tempest-ServerShowV257Test-1874161788-project-member] Waiting for the task: (returnval){ [ 2108.890026] env[63279]: value = "task-2087516" [ 2108.890026] env[63279]: _type = "Task" [ 2108.890026] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2108.895888] env[63279]: DEBUG oslo_vmware.api [None req-2fc734b0-6cb8-42f2-afc5-bfa1456ea58d tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Task: {'id': task-2087515, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.06983} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2108.896517] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-2fc734b0-6cb8-42f2-afc5-bfa1456ea58d tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: fcdd3076-2b53-4850-9730-2f877e2cabfd] Extended root virtual disk {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2108.897310] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0245c71d-9853-441b-b497-d010f7193d12 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2108.903067] env[63279]: DEBUG oslo_vmware.api [None req-9c2043fa-c480-4907-bbe4-d20e0c0c8ddf tempest-ServerShowV257Test-1874161788 tempest-ServerShowV257Test-1874161788-project-member] Task: {'id': task-2087516, 'name': Rename_Task} progress is 6%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2108.923714] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-2fc734b0-6cb8-42f2-afc5-bfa1456ea58d tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: fcdd3076-2b53-4850-9730-2f877e2cabfd] Reconfiguring VM instance instance-00000047 to attach disk [datastore1] fcdd3076-2b53-4850-9730-2f877e2cabfd/fcdd3076-2b53-4850-9730-2f877e2cabfd.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2108.924351] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-dd3019bf-5df5-417b-ac29-333f1f7a4d67 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2108.945017] env[63279]: DEBUG oslo_vmware.api [None req-2fc734b0-6cb8-42f2-afc5-bfa1456ea58d tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Waiting for the task: (returnval){ [ 2108.945017] env[63279]: value = "task-2087517" [ 2108.945017] env[63279]: _type = "Task" [ 2108.945017] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2108.953334] env[63279]: DEBUG oslo_vmware.api [None req-2fc734b0-6cb8-42f2-afc5-bfa1456ea58d tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Task: {'id': task-2087517, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2109.066813] env[63279]: DEBUG nova.compute.manager [None req-01b37c68-092e-4554-92a7-7422378b5349 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] [instance: 668436f9-94e9-48c2-a3d4-3df7bbcf5775] Start spawning the instance on the hypervisor. 
{{(pid=63279) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 2109.087850] env[63279]: DEBUG nova.scheduler.client.report [None req-934ceb37-5239-47c1-8678-9d43df040bef tempest-ServerMetadataTestJSON-1974700823 tempest-ServerMetadataTestJSON-1974700823-project-member] Inventory has not changed for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2109.093565] env[63279]: DEBUG nova.virt.hardware [None req-01b37c68-092e-4554-92a7-7422378b5349 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-13T17:30:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-13T17:30:01Z,direct_url=,disk_format='vmdk',id=30887889-e45b-4f67-8b3c-16216e594a90,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a935e86eee0a4c38adfe0367d2097a61',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-13T17:30:02Z,virtual_size=,visibility=), allow threads: False {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2109.093805] env[63279]: DEBUG nova.virt.hardware [None req-01b37c68-092e-4554-92a7-7422378b5349 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Flavor limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2109.093966] env[63279]: DEBUG nova.virt.hardware [None req-01b37c68-092e-4554-92a7-7422378b5349 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Image limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2109.094173] env[63279]: DEBUG nova.virt.hardware [None req-01b37c68-092e-4554-92a7-7422378b5349 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Flavor pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2109.094325] env[63279]: DEBUG nova.virt.hardware [None req-01b37c68-092e-4554-92a7-7422378b5349 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Image pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2109.094479] env[63279]: DEBUG nova.virt.hardware [None req-01b37c68-092e-4554-92a7-7422378b5349 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63279) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2109.094683] env[63279]: DEBUG nova.virt.hardware [None req-01b37c68-092e-4554-92a7-7422378b5349 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 2109.094844] env[63279]: DEBUG nova.virt.hardware [None req-01b37c68-092e-4554-92a7-7422378b5349 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 2109.095029] env[63279]: DEBUG nova.virt.hardware [None req-01b37c68-092e-4554-92a7-7422378b5349 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Got 1 possible topologies {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2109.095206] env[63279]: DEBUG nova.virt.hardware [None req-01b37c68-092e-4554-92a7-7422378b5349 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2109.095385] env[63279]: DEBUG nova.virt.hardware [None req-01b37c68-092e-4554-92a7-7422378b5349 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2109.096229] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-026c1e75-f538-4425-9741-cfb4ac62b88d {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2109.106834] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0776a719-b286-45dc-8778-948687cb2cee {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2109.171146] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-ffc6fb71-2f93-4848-b384-1a69f6471293 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] [instance: f2a68d73-49d6-4b38-aff1-c2eb850f2ca6] Creating Snapshot of the VM instance {{(pid=63279) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 2109.171473] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-37b358f0-1522-4174-8aab-5faf436852b5 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2109.178541] env[63279]: DEBUG oslo_vmware.api [None req-ffc6fb71-2f93-4848-b384-1a69f6471293 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Waiting for the task: (returnval){ [ 2109.178541] env[63279]: value = "task-2087518" [ 2109.178541] env[63279]: _type = "Task" [ 2109.178541] env[63279]: } to complete. 
{{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2109.187620] env[63279]: DEBUG oslo_vmware.api [None req-ffc6fb71-2f93-4848-b384-1a69f6471293 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Task: {'id': task-2087518, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2109.324339] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087509, 'name': CreateVM_Task} progress is 25%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2109.377391] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f2165be-0482-47f2-b0c1-ca613c93ab72 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2109.400135] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc31f8d2-91cb-4403-a1d7-cfef7a57d2ca {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2109.407746] env[63279]: DEBUG oslo_vmware.api [None req-9c2043fa-c480-4907-bbe4-d20e0c0c8ddf tempest-ServerShowV257Test-1874161788 tempest-ServerShowV257Test-1874161788-project-member] Task: {'id': task-2087516, 'name': Rename_Task, 'duration_secs': 0.131623} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2109.410158] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-9c2043fa-c480-4907-bbe4-d20e0c0c8ddf tempest-ServerShowV257Test-1874161788 tempest-ServerShowV257Test-1874161788-project-member] [instance: f7f88f1a-a81a-4208-88d7-6a264e642ab1] Powering on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2109.410535] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-9e49e054-a083-4e75-99cd-6d3ad402538a tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: b981ac83-6c23-4d44-bd28-12da30d746bd] Updating instance 'b981ac83-6c23-4d44-bd28-12da30d746bd' progress to 83 {{(pid=63279) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2109.414112] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d85bade1-f2c2-447d-9111-f74541b2fdc8 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2109.421090] env[63279]: DEBUG oslo_vmware.api [None req-9c2043fa-c480-4907-bbe4-d20e0c0c8ddf tempest-ServerShowV257Test-1874161788 tempest-ServerShowV257Test-1874161788-project-member] Waiting for the task: (returnval){ [ 2109.421090] env[63279]: value = "task-2087519" [ 2109.421090] env[63279]: _type = "Task" [ 2109.421090] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2109.429443] env[63279]: DEBUG oslo_vmware.api [None req-9c2043fa-c480-4907-bbe4-d20e0c0c8ddf tempest-ServerShowV257Test-1874161788 tempest-ServerShowV257Test-1874161788-project-member] Task: {'id': task-2087519, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2109.456293] env[63279]: DEBUG oslo_vmware.api [None req-2fc734b0-6cb8-42f2-afc5-bfa1456ea58d tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Task: {'id': task-2087517, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2109.603022] env[63279]: DEBUG oslo_concurrency.lockutils [None req-934ceb37-5239-47c1-8678-9d43df040bef tempest-ServerMetadataTestJSON-1974700823 tempest-ServerMetadataTestJSON-1974700823-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.572s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2109.603022] env[63279]: DEBUG nova.compute.manager [None req-934ceb37-5239-47c1-8678-9d43df040bef tempest-ServerMetadataTestJSON-1974700823 tempest-ServerMetadataTestJSON-1974700823-project-member] [instance: a0697601-46ae-48ce-a3e1-3c4b81fc1f95] Start building networks asynchronously for instance. {{(pid=63279) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 2109.604183] env[63279]: DEBUG oslo_concurrency.lockutils [None req-0abf34c8-85b0-43dd-ac7c-864a19d7c008 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 32.854s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2109.604597] env[63279]: DEBUG nova.objects.instance [None req-0abf34c8-85b0-43dd-ac7c-864a19d7c008 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Lazy-loading 'resources' on Instance uuid 2f5e22f6-ba70-4848-965b-eb1553115323 {{(pid=63279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2109.688077] env[63279]: DEBUG oslo_vmware.api [None req-ffc6fb71-2f93-4848-b384-1a69f6471293 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Task: {'id': task-2087518, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2109.827354] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087509, 'name': CreateVM_Task} progress is 25%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2109.918228] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-9e49e054-a083-4e75-99cd-6d3ad402538a tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: b981ac83-6c23-4d44-bd28-12da30d746bd] Powering on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2109.918562] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-851d3983-6315-4ddd-9e7c-94b7589d90cc {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2109.926142] env[63279]: DEBUG oslo_vmware.api [None req-9e49e054-a083-4e75-99cd-6d3ad402538a tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Waiting for the task: (returnval){ [ 2109.926142] env[63279]: value = "task-2087520" [ 2109.926142] env[63279]: _type = "Task" [ 2109.926142] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2109.932857] env[63279]: DEBUG oslo_vmware.api [None req-9c2043fa-c480-4907-bbe4-d20e0c0c8ddf tempest-ServerShowV257Test-1874161788 tempest-ServerShowV257Test-1874161788-project-member] Task: {'id': task-2087519, 'name': PowerOnVM_Task} progress is 100%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2109.937593] env[63279]: DEBUG oslo_vmware.api [None req-9e49e054-a083-4e75-99cd-6d3ad402538a tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Task: {'id': task-2087520, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2109.953864] env[63279]: DEBUG oslo_vmware.api [None req-2fc734b0-6cb8-42f2-afc5-bfa1456ea58d tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Task: {'id': task-2087517, 'name': ReconfigVM_Task} progress is 99%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2110.107972] env[63279]: DEBUG nova.compute.utils [None req-934ceb37-5239-47c1-8678-9d43df040bef tempest-ServerMetadataTestJSON-1974700823 tempest-ServerMetadataTestJSON-1974700823-project-member] Using /dev/sd instead of None {{(pid=63279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2110.109741] env[63279]: DEBUG nova.objects.instance [None req-0abf34c8-85b0-43dd-ac7c-864a19d7c008 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Lazy-loading 'numa_topology' on Instance uuid 2f5e22f6-ba70-4848-965b-eb1553115323 {{(pid=63279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2110.114211] env[63279]: DEBUG nova.compute.manager [None req-934ceb37-5239-47c1-8678-9d43df040bef tempest-ServerMetadataTestJSON-1974700823 tempest-ServerMetadataTestJSON-1974700823-project-member] [instance: a0697601-46ae-48ce-a3e1-3c4b81fc1f95] Allocating IP information in the background. 
{{(pid=63279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 2110.114395] env[63279]: DEBUG nova.network.neutron [None req-934ceb37-5239-47c1-8678-9d43df040bef tempest-ServerMetadataTestJSON-1974700823 tempest-ServerMetadataTestJSON-1974700823-project-member] [instance: a0697601-46ae-48ce-a3e1-3c4b81fc1f95] allocate_for_instance() {{(pid=63279) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2110.155668] env[63279]: DEBUG nova.policy [None req-934ceb37-5239-47c1-8678-9d43df040bef tempest-ServerMetadataTestJSON-1974700823 tempest-ServerMetadataTestJSON-1974700823-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'fa50825418fe43369d1a2e8fc78864f3', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c730dba579254c899494d328e1f8605e', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63279) authorize /opt/stack/nova/nova/policy.py:192}} [ 2110.190625] env[63279]: DEBUG oslo_vmware.api [None req-ffc6fb71-2f93-4848-b384-1a69f6471293 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Task: {'id': task-2087518, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2110.327668] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087509, 'name': CreateVM_Task} progress is 25%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2110.440233] env[63279]: DEBUG oslo_vmware.api [None req-9c2043fa-c480-4907-bbe4-d20e0c0c8ddf tempest-ServerShowV257Test-1874161788 tempest-ServerShowV257Test-1874161788-project-member] Task: {'id': task-2087519, 'name': PowerOnVM_Task, 'duration_secs': 0.959739} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2110.449017] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-9c2043fa-c480-4907-bbe4-d20e0c0c8ddf tempest-ServerShowV257Test-1874161788 tempest-ServerShowV257Test-1874161788-project-member] [instance: f7f88f1a-a81a-4208-88d7-6a264e642ab1] Powered on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2110.449017] env[63279]: DEBUG nova.compute.manager [None req-9c2043fa-c480-4907-bbe4-d20e0c0c8ddf tempest-ServerShowV257Test-1874161788 tempest-ServerShowV257Test-1874161788-project-member] [instance: f7f88f1a-a81a-4208-88d7-6a264e642ab1] Checking state {{(pid=63279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2110.449017] env[63279]: DEBUG oslo_vmware.api [None req-9e49e054-a083-4e75-99cd-6d3ad402538a tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Task: {'id': task-2087520, 'name': PowerOnVM_Task} progress is 33%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2110.449017] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dae592cb-2114-4e12-b4bd-17f98dcdd36e {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2110.466195] env[63279]: DEBUG oslo_vmware.api [None req-2fc734b0-6cb8-42f2-afc5-bfa1456ea58d tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Task: {'id': task-2087517, 'name': ReconfigVM_Task, 'duration_secs': 1.412083} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2110.467518] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-2fc734b0-6cb8-42f2-afc5-bfa1456ea58d tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: fcdd3076-2b53-4850-9730-2f877e2cabfd] Reconfigured VM instance instance-00000047 to attach disk [datastore1] fcdd3076-2b53-4850-9730-2f877e2cabfd/fcdd3076-2b53-4850-9730-2f877e2cabfd.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2110.469683] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f5d115c5-1247-4df1-acf6-c89880b766c2 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2110.475940] env[63279]: DEBUG oslo_vmware.api [None req-2fc734b0-6cb8-42f2-afc5-bfa1456ea58d tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Waiting for the task: (returnval){ [ 2110.475940] env[63279]: value = "task-2087521" [ 2110.475940] env[63279]: _type = "Task" [ 2110.475940] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2110.485203] env[63279]: DEBUG oslo_vmware.api [None req-2fc734b0-6cb8-42f2-afc5-bfa1456ea58d tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Task: {'id': task-2087521, 'name': Rename_Task} progress is 5%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2110.519396] env[63279]: DEBUG nova.network.neutron [None req-934ceb37-5239-47c1-8678-9d43df040bef tempest-ServerMetadataTestJSON-1974700823 tempest-ServerMetadataTestJSON-1974700823-project-member] [instance: a0697601-46ae-48ce-a3e1-3c4b81fc1f95] Successfully created port: d7d74026-baa1-4909-9860-a7d5ebe08169 {{(pid=63279) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2110.614754] env[63279]: DEBUG nova.compute.manager [None req-934ceb37-5239-47c1-8678-9d43df040bef tempest-ServerMetadataTestJSON-1974700823 tempest-ServerMetadataTestJSON-1974700823-project-member] [instance: a0697601-46ae-48ce-a3e1-3c4b81fc1f95] Start building block device mappings for instance. 
{{(pid=63279) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 2110.617668] env[63279]: DEBUG nova.objects.base [None req-0abf34c8-85b0-43dd-ac7c-864a19d7c008 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Object Instance<2f5e22f6-ba70-4848-965b-eb1553115323> lazy-loaded attributes: resources,numa_topology {{(pid=63279) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 2110.693799] env[63279]: DEBUG oslo_vmware.api [None req-ffc6fb71-2f93-4848-b384-1a69f6471293 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Task: {'id': task-2087518, 'name': CreateSnapshot_Task, 'duration_secs': 1.206154} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2110.694133] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-ffc6fb71-2f93-4848-b384-1a69f6471293 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] [instance: f2a68d73-49d6-4b38-aff1-c2eb850f2ca6] Created Snapshot of the VM instance {{(pid=63279) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 2110.695082] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee2a2f36-fe6b-4265-a869-2dc82a3eb8a1 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2110.830936] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087509, 'name': CreateVM_Task} progress is 25%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2110.940895] env[63279]: DEBUG oslo_vmware.api [None req-9e49e054-a083-4e75-99cd-6d3ad402538a tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Task: {'id': task-2087520, 'name': PowerOnVM_Task} progress is 89%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2110.976430] env[63279]: DEBUG oslo_concurrency.lockutils [None req-9c2043fa-c480-4907-bbe4-d20e0c0c8ddf tempest-ServerShowV257Test-1874161788 tempest-ServerShowV257Test-1874161788-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2110.989268] env[63279]: DEBUG oslo_vmware.api [None req-2fc734b0-6cb8-42f2-afc5-bfa1456ea58d tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Task: {'id': task-2087521, 'name': Rename_Task, 'duration_secs': 0.195883} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2110.989589] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-2fc734b0-6cb8-42f2-afc5-bfa1456ea58d tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: fcdd3076-2b53-4850-9730-2f877e2cabfd] Powering on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2110.989845] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-544c7f07-57e5-4e92-9e2b-4a46a78cfa53 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2110.998304] env[63279]: DEBUG oslo_vmware.api [None req-2fc734b0-6cb8-42f2-afc5-bfa1456ea58d tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Waiting for the task: (returnval){ [ 2110.998304] env[63279]: value = "task-2087522" [ 2110.998304] env[63279]: _type = "Task" [ 2110.998304] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2111.009084] env[63279]: DEBUG oslo_vmware.api [None req-2fc734b0-6cb8-42f2-afc5-bfa1456ea58d tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Task: {'id': task-2087522, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2111.096977] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d25e8296-d428-4b75-b77d-d7fbe7698b75 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2111.104538] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e5e8372-1b92-4a43-b935-e2abbdee62fa {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2111.139223] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-adf68fd0-46ab-45ad-bfc8-e871df214f97 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2111.147387] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-950c332d-9dec-48a6-835f-47de07776cc0 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2111.161029] env[63279]: DEBUG nova.compute.provider_tree [None req-0abf34c8-85b0-43dd-ac7c-864a19d7c008 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Inventory has not changed in ProviderTree for provider: 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2111.219506] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-ffc6fb71-2f93-4848-b384-1a69f6471293 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] [instance: f2a68d73-49d6-4b38-aff1-c2eb850f2ca6] Creating linked-clone VM from snapshot {{(pid=63279) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 2111.219785] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-d365c4f0-aaf1-4916-9b03-edac83275cb1 
{{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2111.228080] env[63279]: DEBUG oslo_vmware.api [None req-ffc6fb71-2f93-4848-b384-1a69f6471293 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Waiting for the task: (returnval){ [ 2111.228080] env[63279]: value = "task-2087523" [ 2111.228080] env[63279]: _type = "Task" [ 2111.228080] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2111.237795] env[63279]: DEBUG oslo_vmware.api [None req-ffc6fb71-2f93-4848-b384-1a69f6471293 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Task: {'id': task-2087523, 'name': CloneVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2111.329905] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087509, 'name': CreateVM_Task} progress is 25%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2111.439037] env[63279]: DEBUG oslo_vmware.api [None req-9e49e054-a083-4e75-99cd-6d3ad402538a tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Task: {'id': task-2087520, 'name': PowerOnVM_Task, 'duration_secs': 1.072025} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2111.439375] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-9e49e054-a083-4e75-99cd-6d3ad402538a tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: b981ac83-6c23-4d44-bd28-12da30d746bd] Powered on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2111.439602] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-9e49e054-a083-4e75-99cd-6d3ad402538a tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: b981ac83-6c23-4d44-bd28-12da30d746bd] Updating instance 'b981ac83-6c23-4d44-bd28-12da30d746bd' progress to 100 {{(pid=63279) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2111.511334] env[63279]: DEBUG oslo_vmware.api [None req-2fc734b0-6cb8-42f2-afc5-bfa1456ea58d tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Task: {'id': task-2087522, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2111.644387] env[63279]: DEBUG nova.compute.manager [None req-934ceb37-5239-47c1-8678-9d43df040bef tempest-ServerMetadataTestJSON-1974700823 tempest-ServerMetadataTestJSON-1974700823-project-member] [instance: a0697601-46ae-48ce-a3e1-3c4b81fc1f95] Start spawning the instance on the hypervisor. 
{{(pid=63279) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 2111.665106] env[63279]: DEBUG nova.scheduler.client.report [None req-0abf34c8-85b0-43dd-ac7c-864a19d7c008 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Inventory has not changed for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2111.676705] env[63279]: DEBUG nova.virt.hardware [None req-934ceb37-5239-47c1-8678-9d43df040bef tempest-ServerMetadataTestJSON-1974700823 tempest-ServerMetadataTestJSON-1974700823-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-13T17:30:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-13T17:30:01Z,direct_url=,disk_format='vmdk',id=30887889-e45b-4f67-8b3c-16216e594a90,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a935e86eee0a4c38adfe0367d2097a61',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-13T17:30:02Z,virtual_size=,visibility=), allow threads: False {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2111.677017] env[63279]: DEBUG nova.virt.hardware [None req-934ceb37-5239-47c1-8678-9d43df040bef tempest-ServerMetadataTestJSON-1974700823 tempest-ServerMetadataTestJSON-1974700823-project-member] Flavor limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2111.677185] env[63279]: DEBUG nova.virt.hardware [None req-934ceb37-5239-47c1-8678-9d43df040bef tempest-ServerMetadataTestJSON-1974700823 tempest-ServerMetadataTestJSON-1974700823-project-member] Image limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2111.677481] env[63279]: DEBUG nova.virt.hardware [None req-934ceb37-5239-47c1-8678-9d43df040bef tempest-ServerMetadataTestJSON-1974700823 tempest-ServerMetadataTestJSON-1974700823-project-member] Flavor pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2111.677636] env[63279]: DEBUG nova.virt.hardware [None req-934ceb37-5239-47c1-8678-9d43df040bef tempest-ServerMetadataTestJSON-1974700823 tempest-ServerMetadataTestJSON-1974700823-project-member] Image pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2111.677804] env[63279]: DEBUG nova.virt.hardware [None req-934ceb37-5239-47c1-8678-9d43df040bef tempest-ServerMetadataTestJSON-1974700823 tempest-ServerMetadataTestJSON-1974700823-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2111.678055] env[63279]: 
DEBUG nova.virt.hardware [None req-934ceb37-5239-47c1-8678-9d43df040bef tempest-ServerMetadataTestJSON-1974700823 tempest-ServerMetadataTestJSON-1974700823-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 2111.678233] env[63279]: DEBUG nova.virt.hardware [None req-934ceb37-5239-47c1-8678-9d43df040bef tempest-ServerMetadataTestJSON-1974700823 tempest-ServerMetadataTestJSON-1974700823-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 2111.678421] env[63279]: DEBUG nova.virt.hardware [None req-934ceb37-5239-47c1-8678-9d43df040bef tempest-ServerMetadataTestJSON-1974700823 tempest-ServerMetadataTestJSON-1974700823-project-member] Got 1 possible topologies {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2111.678622] env[63279]: DEBUG nova.virt.hardware [None req-934ceb37-5239-47c1-8678-9d43df040bef tempest-ServerMetadataTestJSON-1974700823 tempest-ServerMetadataTestJSON-1974700823-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2111.678810] env[63279]: DEBUG nova.virt.hardware [None req-934ceb37-5239-47c1-8678-9d43df040bef tempest-ServerMetadataTestJSON-1974700823 tempest-ServerMetadataTestJSON-1974700823-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2111.679815] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8109d78-2b73-49c4-a49d-2fc9c607957e {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2111.688209] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f07d738e-3127-4e61-9606-990cf492b813 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2111.737199] env[63279]: DEBUG oslo_vmware.api [None req-ffc6fb71-2f93-4848-b384-1a69f6471293 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Task: {'id': task-2087523, 'name': CloneVM_Task} progress is 94%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2111.827966] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087509, 'name': CreateVM_Task} progress is 25%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2111.954290] env[63279]: DEBUG oslo_concurrency.lockutils [None req-cac71d6b-40f6-4627-b031-84d6a90fb5d5 tempest-ServerShowV257Test-1874161788 tempest-ServerShowV257Test-1874161788-project-member] Acquiring lock "f7f88f1a-a81a-4208-88d7-6a264e642ab1" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2111.954624] env[63279]: DEBUG oslo_concurrency.lockutils [None req-cac71d6b-40f6-4627-b031-84d6a90fb5d5 tempest-ServerShowV257Test-1874161788 tempest-ServerShowV257Test-1874161788-project-member] Lock "f7f88f1a-a81a-4208-88d7-6a264e642ab1" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.001s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2111.954870] env[63279]: DEBUG oslo_concurrency.lockutils [None req-cac71d6b-40f6-4627-b031-84d6a90fb5d5 tempest-ServerShowV257Test-1874161788 tempest-ServerShowV257Test-1874161788-project-member] Acquiring lock "f7f88f1a-a81a-4208-88d7-6a264e642ab1-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2111.955093] env[63279]: DEBUG oslo_concurrency.lockutils [None req-cac71d6b-40f6-4627-b031-84d6a90fb5d5 tempest-ServerShowV257Test-1874161788 tempest-ServerShowV257Test-1874161788-project-member] Lock "f7f88f1a-a81a-4208-88d7-6a264e642ab1-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2111.955296] env[63279]: DEBUG oslo_concurrency.lockutils [None req-cac71d6b-40f6-4627-b031-84d6a90fb5d5 tempest-ServerShowV257Test-1874161788 tempest-ServerShowV257Test-1874161788-project-member] Lock "f7f88f1a-a81a-4208-88d7-6a264e642ab1-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2111.960576] env[63279]: INFO nova.compute.manager [None req-cac71d6b-40f6-4627-b031-84d6a90fb5d5 tempest-ServerShowV257Test-1874161788 tempest-ServerShowV257Test-1874161788-project-member] [instance: f7f88f1a-a81a-4208-88d7-6a264e642ab1] Terminating instance [ 2112.008472] env[63279]: DEBUG oslo_vmware.api [None req-2fc734b0-6cb8-42f2-afc5-bfa1456ea58d tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Task: {'id': task-2087522, 'name': PowerOnVM_Task, 'duration_secs': 0.874854} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2112.008743] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-2fc734b0-6cb8-42f2-afc5-bfa1456ea58d tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: fcdd3076-2b53-4850-9730-2f877e2cabfd] Powered on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2112.008946] env[63279]: INFO nova.compute.manager [None req-2fc734b0-6cb8-42f2-afc5-bfa1456ea58d tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: fcdd3076-2b53-4850-9730-2f877e2cabfd] Took 10.14 seconds to spawn the instance on the hypervisor. [ 2112.009140] env[63279]: DEBUG nova.compute.manager [None req-2fc734b0-6cb8-42f2-afc5-bfa1456ea58d tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: fcdd3076-2b53-4850-9730-2f877e2cabfd] Checking state {{(pid=63279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2112.011039] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe0fb39e-977c-45a1-a643-c322f46faddc {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2112.171044] env[63279]: DEBUG oslo_concurrency.lockutils [None req-0abf34c8-85b0-43dd-ac7c-864a19d7c008 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.567s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2112.176073] env[63279]: DEBUG oslo_concurrency.lockutils [None req-d227dc54-bb84-4c5b-a990-c7df7fd31923 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: waited 33.657s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2112.238726] env[63279]: DEBUG oslo_vmware.api [None req-ffc6fb71-2f93-4848-b384-1a69f6471293 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Task: {'id': task-2087523, 'name': CloneVM_Task} progress is 94%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2112.329194] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087509, 'name': CreateVM_Task} progress is 25%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2112.463298] env[63279]: DEBUG oslo_concurrency.lockutils [None req-cac71d6b-40f6-4627-b031-84d6a90fb5d5 tempest-ServerShowV257Test-1874161788 tempest-ServerShowV257Test-1874161788-project-member] Acquiring lock "refresh_cache-f7f88f1a-a81a-4208-88d7-6a264e642ab1" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2112.463608] env[63279]: DEBUG oslo_concurrency.lockutils [None req-cac71d6b-40f6-4627-b031-84d6a90fb5d5 tempest-ServerShowV257Test-1874161788 tempest-ServerShowV257Test-1874161788-project-member] Acquired lock "refresh_cache-f7f88f1a-a81a-4208-88d7-6a264e642ab1" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2112.463905] env[63279]: DEBUG nova.network.neutron [None req-cac71d6b-40f6-4627-b031-84d6a90fb5d5 tempest-ServerShowV257Test-1874161788 tempest-ServerShowV257Test-1874161788-project-member] [instance: f7f88f1a-a81a-4208-88d7-6a264e642ab1] Building network info cache for instance {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2112.527535] env[63279]: INFO nova.compute.manager [None req-2fc734b0-6cb8-42f2-afc5-bfa1456ea58d tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: fcdd3076-2b53-4850-9730-2f877e2cabfd] Took 50.95 seconds to build instance. [ 2112.684335] env[63279]: DEBUG nova.objects.instance [None req-d227dc54-bb84-4c5b-a990-c7df7fd31923 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Lazy-loading 'migration_context' on Instance uuid f6a5d157-d58c-4c7e-b6e8-f2dc7aaebd9b {{(pid=63279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2112.685640] env[63279]: DEBUG oslo_concurrency.lockutils [None req-0abf34c8-85b0-43dd-ac7c-864a19d7c008 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Lock "2f5e22f6-ba70-4848-965b-eb1553115323" "released" by "nova.compute.manager.ComputeManager.shelve_instance.<locals>.do_shelve_instance" :: held 59.109s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2112.686889] env[63279]: DEBUG oslo_concurrency.lockutils [None req-2641ecb3-368c-4954-9833-7470f0184c80 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Lock "2f5e22f6-ba70-4848-965b-eb1553115323" acquired by "nova.compute.manager.ComputeManager.unshelve_instance.<locals>.do_unshelve_instance" :: waited 34.523s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2112.688313] env[63279]: INFO nova.compute.manager [None req-2641ecb3-368c-4954-9833-7470f0184c80 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] [instance: 2f5e22f6-ba70-4848-965b-eb1553115323] Unshelving [ 2112.741701] env[63279]: DEBUG oslo_vmware.api [None req-ffc6fb71-2f93-4848-b384-1a69f6471293 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Task: {'id': task-2087523, 'name': CloneVM_Task} progress is 94%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2112.833900] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087509, 'name': CreateVM_Task} progress is 25%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2112.877076] env[63279]: DEBUG nova.compute.manager [req-669265c1-d7c2-425c-b5cc-a3cbe31c2349 req-01170928-39bc-4719-b23e-f8998b3c209e service nova] [instance: fcdd3076-2b53-4850-9730-2f877e2cabfd] Received event network-changed-7baa1106-63c3-49cd-8d80-54c763d1b2b3 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2112.877076] env[63279]: DEBUG nova.compute.manager [req-669265c1-d7c2-425c-b5cc-a3cbe31c2349 req-01170928-39bc-4719-b23e-f8998b3c209e service nova] [instance: fcdd3076-2b53-4850-9730-2f877e2cabfd] Refreshing instance network info cache due to event network-changed-7baa1106-63c3-49cd-8d80-54c763d1b2b3. {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11655}} [ 2112.877859] env[63279]: DEBUG oslo_concurrency.lockutils [req-669265c1-d7c2-425c-b5cc-a3cbe31c2349 req-01170928-39bc-4719-b23e-f8998b3c209e service nova] Acquiring lock "refresh_cache-fcdd3076-2b53-4850-9730-2f877e2cabfd" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2112.877859] env[63279]: DEBUG oslo_concurrency.lockutils [req-669265c1-d7c2-425c-b5cc-a3cbe31c2349 req-01170928-39bc-4719-b23e-f8998b3c209e service nova] Acquired lock "refresh_cache-fcdd3076-2b53-4850-9730-2f877e2cabfd" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2112.877859] env[63279]: DEBUG nova.network.neutron [req-669265c1-d7c2-425c-b5cc-a3cbe31c2349 req-01170928-39bc-4719-b23e-f8998b3c209e service nova] [instance: fcdd3076-2b53-4850-9730-2f877e2cabfd] Refreshing network info cache for port 7baa1106-63c3-49cd-8d80-54c763d1b2b3 {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2112.985235] env[63279]: DEBUG nova.network.neutron [None req-cac71d6b-40f6-4627-b031-84d6a90fb5d5 tempest-ServerShowV257Test-1874161788 tempest-ServerShowV257Test-1874161788-project-member] [instance: f7f88f1a-a81a-4208-88d7-6a264e642ab1] Instance cache missing network info. {{(pid=63279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2113.029381] env[63279]: DEBUG oslo_concurrency.lockutils [None req-2fc734b0-6cb8-42f2-afc5-bfa1456ea58d tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Lock "fcdd3076-2b53-4850-9730-2f877e2cabfd" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 73.213s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2113.071833] env[63279]: DEBUG nova.network.neutron [None req-cac71d6b-40f6-4627-b031-84d6a90fb5d5 tempest-ServerShowV257Test-1874161788 tempest-ServerShowV257Test-1874161788-project-member] [instance: f7f88f1a-a81a-4208-88d7-6a264e642ab1] Updating instance_info_cache with network_info: [] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2113.239376] env[63279]: DEBUG oslo_vmware.api [None req-ffc6fb71-2f93-4848-b384-1a69f6471293 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Task: {'id': task-2087523, 'name': CloneVM_Task} progress is 94%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2113.331648] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087509, 'name': CreateVM_Task} progress is 25%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2113.574635] env[63279]: DEBUG oslo_concurrency.lockutils [None req-cac71d6b-40f6-4627-b031-84d6a90fb5d5 tempest-ServerShowV257Test-1874161788 tempest-ServerShowV257Test-1874161788-project-member] Releasing lock "refresh_cache-f7f88f1a-a81a-4208-88d7-6a264e642ab1" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2113.575284] env[63279]: DEBUG nova.compute.manager [None req-cac71d6b-40f6-4627-b031-84d6a90fb5d5 tempest-ServerShowV257Test-1874161788 tempest-ServerShowV257Test-1874161788-project-member] [instance: f7f88f1a-a81a-4208-88d7-6a264e642ab1] Start destroying the instance on the hypervisor. {{(pid=63279) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2113.575742] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-cac71d6b-40f6-4627-b031-84d6a90fb5d5 tempest-ServerShowV257Test-1874161788 tempest-ServerShowV257Test-1874161788-project-member] [instance: f7f88f1a-a81a-4208-88d7-6a264e642ab1] Destroying instance {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2113.577573] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da92adc8-b24b-4a40-b5e8-59aa459d559a {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2113.586082] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-cac71d6b-40f6-4627-b031-84d6a90fb5d5 tempest-ServerShowV257Test-1874161788 tempest-ServerShowV257Test-1874161788-project-member] [instance: f7f88f1a-a81a-4208-88d7-6a264e642ab1] Powering off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2113.586523] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e0fba551-3ce3-4a66-a2e3-a44a7df6397d {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2113.598119] env[63279]: DEBUG oslo_vmware.api [None req-cac71d6b-40f6-4627-b031-84d6a90fb5d5 tempest-ServerShowV257Test-1874161788 tempest-ServerShowV257Test-1874161788-project-member] Waiting for the task: (returnval){ [ 2113.598119] env[63279]: value = "task-2087524" [ 2113.598119] env[63279]: _type = "Task" [ 2113.598119] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2113.610685] env[63279]: DEBUG oslo_vmware.api [None req-cac71d6b-40f6-4627-b031-84d6a90fb5d5 tempest-ServerShowV257Test-1874161788 tempest-ServerShowV257Test-1874161788-project-member] Task: {'id': task-2087524, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2113.639950] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b05e1a5-9a8f-4f47-a61b-394b0dc3fb8f {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2113.647989] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff19f2e0-bd16-4968-ae2c-c9169dc9a154 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2113.687018] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1322e3e-4925-4ce4-a6b3-e5f8bb6386d9 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2113.696392] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3db73d7-9474-4337-9df3-c66b62b5947e {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2113.718217] env[63279]: DEBUG nova.compute.provider_tree [None req-d227dc54-bb84-4c5b-a990-c7df7fd31923 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Inventory has not changed in ProviderTree for provider: 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2113.720485] env[63279]: DEBUG oslo_concurrency.lockutils [None req-2641ecb3-368c-4954-9833-7470f0184c80 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2113.740862] env[63279]: DEBUG oslo_vmware.api [None req-ffc6fb71-2f93-4848-b384-1a69f6471293 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Task: {'id': task-2087523, 'name': CloneVM_Task} progress is 94%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2113.830806] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087509, 'name': CreateVM_Task} progress is 25%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2113.879934] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-04f7e4ed-d7ff-4e9a-9fc5-7e015957e936 tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] [instance: cd00cb0e-30e5-4a0c-8612-ea92e5e32edd] Unregistered the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2113.880236] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-04f7e4ed-d7ff-4e9a-9fc5-7e015957e936 tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] [instance: cd00cb0e-30e5-4a0c-8612-ea92e5e32edd] Deleting contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2113.880455] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-04f7e4ed-d7ff-4e9a-9fc5-7e015957e936 tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] Deleting the datastore file [datastore1] cd00cb0e-30e5-4a0c-8612-ea92e5e32edd {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2113.881419] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-124a0a0b-eebe-4fe1-b81c-abf55f5ccbee {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2113.883641] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-87c40409-e3eb-4bc2-bdc5-ee97999f97b7 tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] [instance: fd9b1666-8e06-4ed0-9187-05a40e136a1d] Unregistered the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2113.883942] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-87c40409-e3eb-4bc2-bdc5-ee97999f97b7 tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] [instance: fd9b1666-8e06-4ed0-9187-05a40e136a1d] Deleting contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2113.884186] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-87c40409-e3eb-4bc2-bdc5-ee97999f97b7 tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] Deleting the datastore file [datastore1] fd9b1666-8e06-4ed0-9187-05a40e136a1d {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2113.884476] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ef7cf1f6-7386-42ed-a50b-96931b5bed09 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2113.891539] env[63279]: DEBUG oslo_vmware.api [None req-04f7e4ed-d7ff-4e9a-9fc5-7e015957e936 tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] Waiting for the task: (returnval){ [ 2113.891539] env[63279]: value = "task-2087525" [ 2113.891539] env[63279]: _type = "Task" [ 2113.891539] env[63279]: } to complete. 
{{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2113.897024] env[63279]: DEBUG oslo_vmware.api [None req-87c40409-e3eb-4bc2-bdc5-ee97999f97b7 tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] Waiting for the task: (returnval){ [ 2113.897024] env[63279]: value = "task-2087526" [ 2113.897024] env[63279]: _type = "Task" [ 2113.897024] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2113.908794] env[63279]: DEBUG oslo_vmware.api [None req-04f7e4ed-d7ff-4e9a-9fc5-7e015957e936 tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] Task: {'id': task-2087525, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2113.914018] env[63279]: DEBUG oslo_vmware.api [None req-87c40409-e3eb-4bc2-bdc5-ee97999f97b7 tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] Task: {'id': task-2087526, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2113.930018] env[63279]: DEBUG oslo_concurrency.lockutils [None req-bb47e242-d054-4951-a4e2-4a2b181effb2 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Acquiring lock "b981ac83-6c23-4d44-bd28-12da30d746bd" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2113.930018] env[63279]: DEBUG oslo_concurrency.lockutils [None req-bb47e242-d054-4951-a4e2-4a2b181effb2 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Lock "b981ac83-6c23-4d44-bd28-12da30d746bd" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.001s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2113.930018] env[63279]: DEBUG nova.compute.manager [None req-bb47e242-d054-4951-a4e2-4a2b181effb2 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: b981ac83-6c23-4d44-bd28-12da30d746bd] Going to confirm migration 3 {{(pid=63279) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:5250}} [ 2113.985452] env[63279]: DEBUG nova.network.neutron [req-669265c1-d7c2-425c-b5cc-a3cbe31c2349 req-01170928-39bc-4719-b23e-f8998b3c209e service nova] [instance: fcdd3076-2b53-4850-9730-2f877e2cabfd] Updated VIF entry in instance network info cache for port 7baa1106-63c3-49cd-8d80-54c763d1b2b3. 
{{(pid=63279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2113.986131] env[63279]: DEBUG nova.network.neutron [req-669265c1-d7c2-425c-b5cc-a3cbe31c2349 req-01170928-39bc-4719-b23e-f8998b3c209e service nova] [instance: fcdd3076-2b53-4850-9730-2f877e2cabfd] Updating instance_info_cache with network_info: [{"id": "7baa1106-63c3-49cd-8d80-54c763d1b2b3", "address": "fa:16:3e:cc:16:0a", "network": {"id": "82ca068b-acea-4f60-b0b3-68ea1e08aebe", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-571699353-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.165", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f39174e9ff5649e0ade4391da383dfb2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "57d4be17-536f-4a81-bea9-6547bd50f4a3", "external-id": "nsx-vlan-transportzone-163", "segmentation_id": 163, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7baa1106-63", "ovs_interfaceid": "7baa1106-63c3-49cd-8d80-54c763d1b2b3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2114.106343] env[63279]: DEBUG oslo_vmware.api [None req-cac71d6b-40f6-4627-b031-84d6a90fb5d5 tempest-ServerShowV257Test-1874161788 tempest-ServerShowV257Test-1874161788-project-member] Task: {'id': task-2087524, 'name': PowerOffVM_Task, 'duration_secs': 0.125031} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2114.106615] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-cac71d6b-40f6-4627-b031-84d6a90fb5d5 tempest-ServerShowV257Test-1874161788 tempest-ServerShowV257Test-1874161788-project-member] [instance: f7f88f1a-a81a-4208-88d7-6a264e642ab1] Powered off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2114.106818] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-cac71d6b-40f6-4627-b031-84d6a90fb5d5 tempest-ServerShowV257Test-1874161788 tempest-ServerShowV257Test-1874161788-project-member] [instance: f7f88f1a-a81a-4208-88d7-6a264e642ab1] Unregistering the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2114.107053] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e962a0cf-64ce-4c56-88bf-826673c7f573 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2114.132740] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-cac71d6b-40f6-4627-b031-84d6a90fb5d5 tempest-ServerShowV257Test-1874161788 tempest-ServerShowV257Test-1874161788-project-member] [instance: f7f88f1a-a81a-4208-88d7-6a264e642ab1] Unregistered the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2114.132995] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-cac71d6b-40f6-4627-b031-84d6a90fb5d5 tempest-ServerShowV257Test-1874161788 tempest-ServerShowV257Test-1874161788-project-member] [instance: f7f88f1a-a81a-4208-88d7-6a264e642ab1] Deleting contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2114.133202] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-cac71d6b-40f6-4627-b031-84d6a90fb5d5 tempest-ServerShowV257Test-1874161788 tempest-ServerShowV257Test-1874161788-project-member] Deleting the datastore file [datastore1] f7f88f1a-a81a-4208-88d7-6a264e642ab1 {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2114.133478] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-472c842c-0617-4f8e-8cdb-225a4190b9d3 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2114.140088] env[63279]: DEBUG oslo_vmware.api [None req-cac71d6b-40f6-4627-b031-84d6a90fb5d5 tempest-ServerShowV257Test-1874161788 tempest-ServerShowV257Test-1874161788-project-member] Waiting for the task: (returnval){ [ 2114.140088] env[63279]: value = "task-2087528" [ 2114.140088] env[63279]: _type = "Task" [ 2114.140088] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2114.148943] env[63279]: DEBUG oslo_vmware.api [None req-cac71d6b-40f6-4627-b031-84d6a90fb5d5 tempest-ServerShowV257Test-1874161788 tempest-ServerShowV257Test-1874161788-project-member] Task: {'id': task-2087528, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2114.222344] env[63279]: DEBUG nova.scheduler.client.report [None req-d227dc54-bb84-4c5b-a990-c7df7fd31923 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Inventory has not changed for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2114.243062] env[63279]: DEBUG oslo_vmware.api [None req-ffc6fb71-2f93-4848-b384-1a69f6471293 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Task: {'id': task-2087523, 'name': CloneVM_Task} progress is 94%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2114.333417] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087509, 'name': CreateVM_Task, 'duration_secs': 7.273491} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2114.333751] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ee1b4746-49ac-425c-8219-4d54cb34abe0] Created VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2114.336017] env[63279]: DEBUG oslo_concurrency.lockutils [None req-5e3f5540-3a91-4676-9ed7-83f8c35479b1 tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2114.336017] env[63279]: DEBUG oslo_concurrency.lockutils [None req-5e3f5540-3a91-4676-9ed7-83f8c35479b1 tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2114.336017] env[63279]: DEBUG oslo_concurrency.lockutils [None req-5e3f5540-3a91-4676-9ed7-83f8c35479b1 tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2114.336017] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-532dcb29-b45b-4c45-9bb5-a03e1316853f {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2114.343210] env[63279]: DEBUG oslo_vmware.api [None req-5e3f5540-3a91-4676-9ed7-83f8c35479b1 tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] Waiting for the task: (returnval){ [ 2114.343210] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52836306-8c12-00c2-ba8a-5e8e69b19cf8" [ 2114.343210] env[63279]: _type = "Task" [ 
2114.343210] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2114.351465] env[63279]: DEBUG oslo_vmware.api [None req-5e3f5540-3a91-4676-9ed7-83f8c35479b1 tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52836306-8c12-00c2-ba8a-5e8e69b19cf8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2114.407186] env[63279]: DEBUG oslo_vmware.api [None req-04f7e4ed-d7ff-4e9a-9fc5-7e015957e936 tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] Task: {'id': task-2087525, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.173805} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2114.407883] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-04f7e4ed-d7ff-4e9a-9fc5-7e015957e936 tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] Deleted the datastore file {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2114.408162] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-04f7e4ed-d7ff-4e9a-9fc5-7e015957e936 tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] [instance: cd00cb0e-30e5-4a0c-8612-ea92e5e32edd] Deleted contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2114.408400] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-04f7e4ed-d7ff-4e9a-9fc5-7e015957e936 tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] [instance: cd00cb0e-30e5-4a0c-8612-ea92e5e32edd] Instance destroyed {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2114.408627] env[63279]: INFO nova.compute.manager [None req-04f7e4ed-d7ff-4e9a-9fc5-7e015957e936 tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] [instance: cd00cb0e-30e5-4a0c-8612-ea92e5e32edd] Took 7.74 seconds to destroy the instance on the hypervisor. [ 2114.408943] env[63279]: DEBUG oslo.service.loopingcall [None req-04f7e4ed-d7ff-4e9a-9fc5-7e015957e936 tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2114.409205] env[63279]: DEBUG nova.compute.manager [-] [instance: cd00cb0e-30e5-4a0c-8612-ea92e5e32edd] Deallocating network for instance {{(pid=63279) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2114.409482] env[63279]: DEBUG nova.network.neutron [-] [instance: cd00cb0e-30e5-4a0c-8612-ea92e5e32edd] deallocate_for_instance() {{(pid=63279) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2114.416304] env[63279]: DEBUG oslo_vmware.api [None req-87c40409-e3eb-4bc2-bdc5-ee97999f97b7 tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] Task: {'id': task-2087526, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.168793} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2114.416304] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-87c40409-e3eb-4bc2-bdc5-ee97999f97b7 tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] Deleted the datastore file {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2114.416304] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-87c40409-e3eb-4bc2-bdc5-ee97999f97b7 tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] [instance: fd9b1666-8e06-4ed0-9187-05a40e136a1d] Deleted contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2114.416304] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-87c40409-e3eb-4bc2-bdc5-ee97999f97b7 tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] [instance: fd9b1666-8e06-4ed0-9187-05a40e136a1d] Instance destroyed {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2114.416304] env[63279]: INFO nova.compute.manager [None req-87c40409-e3eb-4bc2-bdc5-ee97999f97b7 tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] [instance: fd9b1666-8e06-4ed0-9187-05a40e136a1d] Took 7.67 seconds to destroy the instance on the hypervisor. [ 2114.416634] env[63279]: DEBUG oslo.service.loopingcall [None req-87c40409-e3eb-4bc2-bdc5-ee97999f97b7 tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2114.416634] env[63279]: DEBUG nova.compute.manager [-] [instance: fd9b1666-8e06-4ed0-9187-05a40e136a1d] Deallocating network for instance {{(pid=63279) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2114.416634] env[63279]: DEBUG nova.network.neutron [-] [instance: fd9b1666-8e06-4ed0-9187-05a40e136a1d] deallocate_for_instance() {{(pid=63279) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2114.490403] env[63279]: DEBUG oslo_concurrency.lockutils [req-669265c1-d7c2-425c-b5cc-a3cbe31c2349 req-01170928-39bc-4719-b23e-f8998b3c209e service nova] Releasing lock "refresh_cache-fcdd3076-2b53-4850-9730-2f877e2cabfd" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2114.491760] env[63279]: DEBUG nova.compute.manager [req-50e04ffb-663c-423c-b7be-f9005e112e91 req-a467233d-1b64-4c19-b1d4-9a56b82ad23c service nova] [instance: 668436f9-94e9-48c2-a3d4-3df7bbcf5775] Received event network-vif-plugged-fa1d70e2-1628-429b-a1b6-a29d406767dd {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2114.491873] env[63279]: DEBUG oslo_concurrency.lockutils [req-50e04ffb-663c-423c-b7be-f9005e112e91 req-a467233d-1b64-4c19-b1d4-9a56b82ad23c service nova] Acquiring lock "668436f9-94e9-48c2-a3d4-3df7bbcf5775-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2114.492495] env[63279]: DEBUG oslo_concurrency.lockutils [req-50e04ffb-663c-423c-b7be-f9005e112e91 req-a467233d-1b64-4c19-b1d4-9a56b82ad23c service nova] Lock "668436f9-94e9-48c2-a3d4-3df7bbcf5775-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2114.492736] env[63279]: DEBUG oslo_concurrency.lockutils [req-50e04ffb-663c-423c-b7be-f9005e112e91 req-a467233d-1b64-4c19-b1d4-9a56b82ad23c service nova] Lock "668436f9-94e9-48c2-a3d4-3df7bbcf5775-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.001s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2114.492960] env[63279]: DEBUG nova.compute.manager [req-50e04ffb-663c-423c-b7be-f9005e112e91 req-a467233d-1b64-4c19-b1d4-9a56b82ad23c service nova] [instance: 668436f9-94e9-48c2-a3d4-3df7bbcf5775] No waiting events found dispatching network-vif-plugged-fa1d70e2-1628-429b-a1b6-a29d406767dd {{(pid=63279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 2114.493188] env[63279]: WARNING nova.compute.manager [req-50e04ffb-663c-423c-b7be-f9005e112e91 req-a467233d-1b64-4c19-b1d4-9a56b82ad23c service nova] [instance: 668436f9-94e9-48c2-a3d4-3df7bbcf5775] Received unexpected event network-vif-plugged-fa1d70e2-1628-429b-a1b6-a29d406767dd for instance with vm_state building and task_state spawning. 
[ 2114.546758] env[63279]: DEBUG oslo_concurrency.lockutils [None req-bb47e242-d054-4951-a4e2-4a2b181effb2 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Acquiring lock "refresh_cache-b981ac83-6c23-4d44-bd28-12da30d746bd" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2114.546942] env[63279]: DEBUG oslo_concurrency.lockutils [None req-bb47e242-d054-4951-a4e2-4a2b181effb2 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Acquired lock "refresh_cache-b981ac83-6c23-4d44-bd28-12da30d746bd" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2114.547139] env[63279]: DEBUG nova.network.neutron [None req-bb47e242-d054-4951-a4e2-4a2b181effb2 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: b981ac83-6c23-4d44-bd28-12da30d746bd] Building network info cache for instance {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2114.547405] env[63279]: DEBUG nova.objects.instance [None req-bb47e242-d054-4951-a4e2-4a2b181effb2 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Lazy-loading 'info_cache' on Instance uuid b981ac83-6c23-4d44-bd28-12da30d746bd {{(pid=63279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2114.649807] env[63279]: DEBUG oslo_vmware.api [None req-cac71d6b-40f6-4627-b031-84d6a90fb5d5 tempest-ServerShowV257Test-1874161788 tempest-ServerShowV257Test-1874161788-project-member] Task: {'id': task-2087528, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.113809} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2114.650342] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-cac71d6b-40f6-4627-b031-84d6a90fb5d5 tempest-ServerShowV257Test-1874161788 tempest-ServerShowV257Test-1874161788-project-member] Deleted the datastore file {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2114.650545] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-cac71d6b-40f6-4627-b031-84d6a90fb5d5 tempest-ServerShowV257Test-1874161788 tempest-ServerShowV257Test-1874161788-project-member] [instance: f7f88f1a-a81a-4208-88d7-6a264e642ab1] Deleted contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2114.650730] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-cac71d6b-40f6-4627-b031-84d6a90fb5d5 tempest-ServerShowV257Test-1874161788 tempest-ServerShowV257Test-1874161788-project-member] [instance: f7f88f1a-a81a-4208-88d7-6a264e642ab1] Instance destroyed {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2114.650909] env[63279]: INFO nova.compute.manager [None req-cac71d6b-40f6-4627-b031-84d6a90fb5d5 tempest-ServerShowV257Test-1874161788 tempest-ServerShowV257Test-1874161788-project-member] [instance: f7f88f1a-a81a-4208-88d7-6a264e642ab1] Took 1.08 seconds to destroy the instance on the hypervisor. [ 2114.651187] env[63279]: DEBUG oslo.service.loopingcall [None req-cac71d6b-40f6-4627-b031-84d6a90fb5d5 tempest-ServerShowV257Test-1874161788 tempest-ServerShowV257Test-1874161788-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2114.651379] env[63279]: DEBUG nova.compute.manager [-] [instance: f7f88f1a-a81a-4208-88d7-6a264e642ab1] Deallocating network for instance {{(pid=63279) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2114.651476] env[63279]: DEBUG nova.network.neutron [-] [instance: f7f88f1a-a81a-4208-88d7-6a264e642ab1] deallocate_for_instance() {{(pid=63279) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2114.682872] env[63279]: DEBUG nova.network.neutron [None req-01b37c68-092e-4554-92a7-7422378b5349 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] [instance: 668436f9-94e9-48c2-a3d4-3df7bbcf5775] Successfully updated port: fa1d70e2-1628-429b-a1b6-a29d406767dd {{(pid=63279) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2114.684688] env[63279]: DEBUG nova.network.neutron [-] [instance: f7f88f1a-a81a-4208-88d7-6a264e642ab1] Instance cache missing network info. {{(pid=63279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2114.740776] env[63279]: DEBUG oslo_vmware.api [None req-ffc6fb71-2f93-4848-b384-1a69f6471293 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Task: {'id': task-2087523, 'name': CloneVM_Task, 'duration_secs': 3.314938} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2114.741155] env[63279]: INFO nova.virt.vmwareapi.vmops [None req-ffc6fb71-2f93-4848-b384-1a69f6471293 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] [instance: f2a68d73-49d6-4b38-aff1-c2eb850f2ca6] Created linked-clone VM from snapshot [ 2114.741795] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73b43b01-ece7-414f-9eae-afeb1121fafd {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2114.749254] env[63279]: DEBUG nova.virt.vmwareapi.images [None req-ffc6fb71-2f93-4848-b384-1a69f6471293 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] [instance: f2a68d73-49d6-4b38-aff1-c2eb850f2ca6] Uploading image d2d04a47-fc99-4ff0-adbc-a27616252dd0 {{(pid=63279) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 2114.779846] env[63279]: DEBUG oslo_vmware.rw_handles [None req-ffc6fb71-2f93-4848-b384-1a69f6471293 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 2114.779846] env[63279]: value = "vm-427695" [ 2114.779846] env[63279]: _type = "VirtualMachine" [ 2114.779846] env[63279]: }. 
{{(pid=63279) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 2114.780191] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-2757a565-0e6a-4c7b-8ccc-fb61452f62e2 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2114.787592] env[63279]: DEBUG oslo_vmware.rw_handles [None req-ffc6fb71-2f93-4848-b384-1a69f6471293 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Lease: (returnval){ [ 2114.787592] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]5237cad6-6872-4633-77ec-6d3f21c08062" [ 2114.787592] env[63279]: _type = "HttpNfcLease" [ 2114.787592] env[63279]: } obtained for exporting VM: (result){ [ 2114.787592] env[63279]: value = "vm-427695" [ 2114.787592] env[63279]: _type = "VirtualMachine" [ 2114.787592] env[63279]: }. {{(pid=63279) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 2114.787838] env[63279]: DEBUG oslo_vmware.api [None req-ffc6fb71-2f93-4848-b384-1a69f6471293 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Waiting for the lease: (returnval){ [ 2114.787838] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]5237cad6-6872-4633-77ec-6d3f21c08062" [ 2114.787838] env[63279]: _type = "HttpNfcLease" [ 2114.787838] env[63279]: } to be ready. {{(pid=63279) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 2114.796778] env[63279]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 2114.796778] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]5237cad6-6872-4633-77ec-6d3f21c08062" [ 2114.796778] env[63279]: _type = "HttpNfcLease" [ 2114.796778] env[63279]: } is initializing. {{(pid=63279) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 2114.852424] env[63279]: DEBUG oslo_vmware.api [None req-5e3f5540-3a91-4676-9ed7-83f8c35479b1 tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52836306-8c12-00c2-ba8a-5e8e69b19cf8, 'name': SearchDatastore_Task, 'duration_secs': 0.009735} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2114.852811] env[63279]: DEBUG oslo_concurrency.lockutils [None req-5e3f5540-3a91-4676-9ed7-83f8c35479b1 tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2114.853105] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-5e3f5540-3a91-4676-9ed7-83f8c35479b1 tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] [instance: ee1b4746-49ac-425c-8219-4d54cb34abe0] Processing image 30887889-e45b-4f67-8b3c-16216e594a90 {{(pid=63279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2114.853386] env[63279]: DEBUG oslo_concurrency.lockutils [None req-5e3f5540-3a91-4676-9ed7-83f8c35479b1 tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2114.853577] env[63279]: DEBUG oslo_concurrency.lockutils [None req-5e3f5540-3a91-4676-9ed7-83f8c35479b1 tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2114.853824] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-5e3f5540-3a91-4676-9ed7-83f8c35479b1 tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2114.854121] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-142b0727-2785-41fd-99e7-164827de2360 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2114.864966] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-5e3f5540-3a91-4676-9ed7-83f8c35479b1 tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2114.864966] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-5e3f5540-3a91-4676-9ed7-83f8c35479b1 tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2114.864966] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-98e5aead-45de-4f26-a158-ef7d3d1840d1 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2114.872459] env[63279]: DEBUG oslo_vmware.api [None req-5e3f5540-3a91-4676-9ed7-83f8c35479b1 tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] Waiting for the task: (returnval){ [ 2114.872459] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]521af2f9-7dba-d27f-bed0-a6a2560c2d08" [ 2114.872459] env[63279]: _type = "Task" [ 2114.872459] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2114.880905] env[63279]: DEBUG oslo_vmware.api [None req-5e3f5540-3a91-4676-9ed7-83f8c35479b1 tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]521af2f9-7dba-d27f-bed0-a6a2560c2d08, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2114.908617] env[63279]: DEBUG nova.compute.manager [req-301da454-829a-47ac-8836-dad93b36e03b req-203d61b9-1ca6-469b-8249-d874df0a13f5 service nova] [instance: a0697601-46ae-48ce-a3e1-3c4b81fc1f95] Received event network-vif-plugged-d7d74026-baa1-4909-9860-a7d5ebe08169 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2114.908850] env[63279]: DEBUG oslo_concurrency.lockutils [req-301da454-829a-47ac-8836-dad93b36e03b req-203d61b9-1ca6-469b-8249-d874df0a13f5 service nova] Acquiring lock "a0697601-46ae-48ce-a3e1-3c4b81fc1f95-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2114.909186] env[63279]: DEBUG oslo_concurrency.lockutils [req-301da454-829a-47ac-8836-dad93b36e03b req-203d61b9-1ca6-469b-8249-d874df0a13f5 service nova] Lock "a0697601-46ae-48ce-a3e1-3c4b81fc1f95-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2114.909536] env[63279]: DEBUG oslo_concurrency.lockutils [req-301da454-829a-47ac-8836-dad93b36e03b req-203d61b9-1ca6-469b-8249-d874df0a13f5 service nova] Lock "a0697601-46ae-48ce-a3e1-3c4b81fc1f95-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2114.909726] env[63279]: DEBUG nova.compute.manager [req-301da454-829a-47ac-8836-dad93b36e03b req-203d61b9-1ca6-469b-8249-d874df0a13f5 service nova] [instance: a0697601-46ae-48ce-a3e1-3c4b81fc1f95] No waiting events found dispatching network-vif-plugged-d7d74026-baa1-4909-9860-a7d5ebe08169 {{(pid=63279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 2114.909937] env[63279]: WARNING nova.compute.manager [req-301da454-829a-47ac-8836-dad93b36e03b req-203d61b9-1ca6-469b-8249-d874df0a13f5 service nova] [instance: a0697601-46ae-48ce-a3e1-3c4b81fc1f95] Received unexpected event 
network-vif-plugged-d7d74026-baa1-4909-9860-a7d5ebe08169 for instance with vm_state building and task_state spawning. [ 2114.910160] env[63279]: DEBUG nova.compute.manager [req-301da454-829a-47ac-8836-dad93b36e03b req-203d61b9-1ca6-469b-8249-d874df0a13f5 service nova] [instance: fd9b1666-8e06-4ed0-9187-05a40e136a1d] Received event network-vif-deleted-c7c4391f-94e6-42ba-9742-b3c94c1a79dc {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2114.910405] env[63279]: INFO nova.compute.manager [req-301da454-829a-47ac-8836-dad93b36e03b req-203d61b9-1ca6-469b-8249-d874df0a13f5 service nova] [instance: fd9b1666-8e06-4ed0-9187-05a40e136a1d] Neutron deleted interface c7c4391f-94e6-42ba-9742-b3c94c1a79dc; detaching it from the instance and deleting it from the info cache [ 2114.910611] env[63279]: DEBUG nova.network.neutron [req-301da454-829a-47ac-8836-dad93b36e03b req-203d61b9-1ca6-469b-8249-d874df0a13f5 service nova] [instance: fd9b1666-8e06-4ed0-9187-05a40e136a1d] Updating instance_info_cache with network_info: [] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2114.937079] env[63279]: DEBUG nova.network.neutron [None req-934ceb37-5239-47c1-8678-9d43df040bef tempest-ServerMetadataTestJSON-1974700823 tempest-ServerMetadataTestJSON-1974700823-project-member] [instance: a0697601-46ae-48ce-a3e1-3c4b81fc1f95] Successfully updated port: d7d74026-baa1-4909-9860-a7d5ebe08169 {{(pid=63279) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2115.187669] env[63279]: DEBUG oslo_concurrency.lockutils [None req-01b37c68-092e-4554-92a7-7422378b5349 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Acquiring lock "refresh_cache-668436f9-94e9-48c2-a3d4-3df7bbcf5775" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2115.187848] env[63279]: DEBUG oslo_concurrency.lockutils [None req-01b37c68-092e-4554-92a7-7422378b5349 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Acquired lock "refresh_cache-668436f9-94e9-48c2-a3d4-3df7bbcf5775" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2115.188009] env[63279]: DEBUG nova.network.neutron [None req-01b37c68-092e-4554-92a7-7422378b5349 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] [instance: 668436f9-94e9-48c2-a3d4-3df7bbcf5775] Building network info cache for instance {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2115.189156] env[63279]: DEBUG nova.network.neutron [-] [instance: f7f88f1a-a81a-4208-88d7-6a264e642ab1] Updating instance_info_cache with network_info: [] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2115.234131] env[63279]: DEBUG oslo_concurrency.lockutils [None req-d227dc54-bb84-4c5b-a990-c7df7fd31923 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: held 3.058s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2115.240471] env[63279]: DEBUG oslo_concurrency.lockutils [None req-d83e173e-3a00-46ae-8eed-94284cfc23f8 
tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 32.369s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2115.241760] env[63279]: INFO nova.compute.claims [None req-d83e173e-3a00-46ae-8eed-94284cfc23f8 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] [instance: 4ce17bdb-4bed-4e06-af13-e4097b55e17d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2115.288804] env[63279]: DEBUG nova.network.neutron [-] [instance: cd00cb0e-30e5-4a0c-8612-ea92e5e32edd] Updating instance_info_cache with network_info: [] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2115.298079] env[63279]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 2115.298079] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]5237cad6-6872-4633-77ec-6d3f21c08062" [ 2115.298079] env[63279]: _type = "HttpNfcLease" [ 2115.298079] env[63279]: } is ready. {{(pid=63279) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 2115.298079] env[63279]: DEBUG oslo_vmware.rw_handles [None req-ffc6fb71-2f93-4848-b384-1a69f6471293 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 2115.298079] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]5237cad6-6872-4633-77ec-6d3f21c08062" [ 2115.298079] env[63279]: _type = "HttpNfcLease" [ 2115.298079] env[63279]: }. {{(pid=63279) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 2115.298999] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa41941b-3282-4fed-8848-d351fe7933f6 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2115.307683] env[63279]: DEBUG oslo_vmware.rw_handles [None req-ffc6fb71-2f93-4848-b384-1a69f6471293 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5280ec89-3902-fd45-d233-4275ba1570cc/disk-0.vmdk from lease info. {{(pid=63279) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 2115.307870] env[63279]: DEBUG oslo_vmware.rw_handles [None req-ffc6fb71-2f93-4848-b384-1a69f6471293 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5280ec89-3902-fd45-d233-4275ba1570cc/disk-0.vmdk for reading. 
{{(pid=63279) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 2115.370891] env[63279]: DEBUG nova.network.neutron [-] [instance: fd9b1666-8e06-4ed0-9187-05a40e136a1d] Updating instance_info_cache with network_info: [] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2115.382943] env[63279]: DEBUG oslo_vmware.api [None req-5e3f5540-3a91-4676-9ed7-83f8c35479b1 tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]521af2f9-7dba-d27f-bed0-a6a2560c2d08, 'name': SearchDatastore_Task, 'duration_secs': 0.009033} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2115.384099] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7e995f32-8804-448e-8b5f-f19244b50cbe {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2115.390261] env[63279]: DEBUG oslo_vmware.api [None req-5e3f5540-3a91-4676-9ed7-83f8c35479b1 tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] Waiting for the task: (returnval){ [ 2115.390261] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52fdb1f4-e466-dc30-4d4c-284f5893d61a" [ 2115.390261] env[63279]: _type = "Task" [ 2115.390261] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2115.400167] env[63279]: DEBUG oslo_vmware.api [None req-5e3f5540-3a91-4676-9ed7-83f8c35479b1 tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52fdb1f4-e466-dc30-4d4c-284f5893d61a, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2115.413458] env[63279]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-7a3e2e13-eff1-4f50-a42f-8d22b911582d {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2115.422715] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf5377fa-df66-4a0c-926b-05de4473fd33 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2115.440687] env[63279]: DEBUG oslo_concurrency.lockutils [None req-934ceb37-5239-47c1-8678-9d43df040bef tempest-ServerMetadataTestJSON-1974700823 tempest-ServerMetadataTestJSON-1974700823-project-member] Acquiring lock "refresh_cache-a0697601-46ae-48ce-a3e1-3c4b81fc1f95" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2115.440687] env[63279]: DEBUG oslo_concurrency.lockutils [None req-934ceb37-5239-47c1-8678-9d43df040bef tempest-ServerMetadataTestJSON-1974700823 tempest-ServerMetadataTestJSON-1974700823-project-member] Acquired lock "refresh_cache-a0697601-46ae-48ce-a3e1-3c4b81fc1f95" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2115.440687] env[63279]: DEBUG nova.network.neutron [None req-934ceb37-5239-47c1-8678-9d43df040bef tempest-ServerMetadataTestJSON-1974700823 tempest-ServerMetadataTestJSON-1974700823-project-member] [instance: a0697601-46ae-48ce-a3e1-3c4b81fc1f95] Building network info cache for instance {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2115.442017] env[63279]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-b62715ab-34b2-4404-9499-a09700ea4083 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2115.465607] env[63279]: DEBUG nova.compute.manager [req-301da454-829a-47ac-8836-dad93b36e03b req-203d61b9-1ca6-469b-8249-d874df0a13f5 service nova] [instance: fd9b1666-8e06-4ed0-9187-05a40e136a1d] Detach interface failed, port_id=c7c4391f-94e6-42ba-9742-b3c94c1a79dc, reason: Instance fd9b1666-8e06-4ed0-9187-05a40e136a1d could not be found. {{(pid=63279) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11484}} [ 2115.694391] env[63279]: INFO nova.compute.manager [-] [instance: f7f88f1a-a81a-4208-88d7-6a264e642ab1] Took 1.04 seconds to deallocate network for instance. [ 2115.735157] env[63279]: DEBUG nova.network.neutron [None req-01b37c68-092e-4554-92a7-7422378b5349 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] [instance: 668436f9-94e9-48c2-a3d4-3df7bbcf5775] Instance cache missing network info. {{(pid=63279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2115.794128] env[63279]: INFO nova.compute.manager [-] [instance: cd00cb0e-30e5-4a0c-8612-ea92e5e32edd] Took 1.38 seconds to deallocate network for instance. [ 2115.873182] env[63279]: INFO nova.compute.manager [-] [instance: fd9b1666-8e06-4ed0-9187-05a40e136a1d] Took 1.46 seconds to deallocate network for instance. 
[ 2115.880607] env[63279]: DEBUG nova.network.neutron [None req-bb47e242-d054-4951-a4e2-4a2b181effb2 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: b981ac83-6c23-4d44-bd28-12da30d746bd] Updating instance_info_cache with network_info: [{"id": "61682992-df73-44ba-b302-ba8e00c82f95", "address": "fa:16:3e:32:be:0b", "network": {"id": "d673eabe-0173-4c69-9163-88f4d673ca51", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1798523777-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7c98de1240f14b058e58f6a707096ef1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a555680e-4721-4509-97e4-ced9dc17c13e", "external-id": "nsx-vlan-transportzone-4", "segmentation_id": 4, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap61682992-df", "ovs_interfaceid": "61682992-df73-44ba-b302-ba8e00c82f95", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2115.907616] env[63279]: DEBUG oslo_vmware.api [None req-5e3f5540-3a91-4676-9ed7-83f8c35479b1 tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52fdb1f4-e466-dc30-4d4c-284f5893d61a, 'name': SearchDatastore_Task, 'duration_secs': 0.00949} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2115.907616] env[63279]: DEBUG oslo_concurrency.lockutils [None req-5e3f5540-3a91-4676-9ed7-83f8c35479b1 tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2115.907616] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-5e3f5540-3a91-4676-9ed7-83f8c35479b1 tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] ee1b4746-49ac-425c-8219-4d54cb34abe0/ee1b4746-49ac-425c-8219-4d54cb34abe0.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2115.907616] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c1e169d5-27b4-4f08-8a2b-4d0c02173741 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2115.914801] env[63279]: DEBUG oslo_vmware.api [None req-5e3f5540-3a91-4676-9ed7-83f8c35479b1 tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] Waiting for the task: (returnval){ [ 2115.914801] env[63279]: value = "task-2087530" [ 2115.914801] env[63279]: _type = "Task" [ 2115.914801] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2115.925440] env[63279]: DEBUG oslo_vmware.api [None req-5e3f5540-3a91-4676-9ed7-83f8c35479b1 tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] Task: {'id': task-2087530, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2115.989789] env[63279]: DEBUG nova.network.neutron [None req-01b37c68-092e-4554-92a7-7422378b5349 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] [instance: 668436f9-94e9-48c2-a3d4-3df7bbcf5775] Updating instance_info_cache with network_info: [{"id": "fa1d70e2-1628-429b-a1b6-a29d406767dd", "address": "fa:16:3e:92:3b:e1", "network": {"id": "4f155366-ade7-4d4b-8fad-a2b55798980f", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-669492310-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "651b8183237a4e6dbef36aa2fb419f1b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "50cf0a70-948d-4611-af05-94c1483064ed", "external-id": "nsx-vlan-transportzone-536", "segmentation_id": 536, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfa1d70e2-16", "ovs_interfaceid": "fa1d70e2-1628-429b-a1b6-a29d406767dd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2116.004105] env[63279]: DEBUG nova.network.neutron [None req-934ceb37-5239-47c1-8678-9d43df040bef tempest-ServerMetadataTestJSON-1974700823 tempest-ServerMetadataTestJSON-1974700823-project-member] [instance: a0697601-46ae-48ce-a3e1-3c4b81fc1f95] Instance cache missing network info. 
{{(pid=63279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2116.200122] env[63279]: DEBUG nova.network.neutron [None req-934ceb37-5239-47c1-8678-9d43df040bef tempest-ServerMetadataTestJSON-1974700823 tempest-ServerMetadataTestJSON-1974700823-project-member] [instance: a0697601-46ae-48ce-a3e1-3c4b81fc1f95] Updating instance_info_cache with network_info: [{"id": "d7d74026-baa1-4909-9860-a7d5ebe08169", "address": "fa:16:3e:0a:3d:21", "network": {"id": "0c1f594e-4288-4945-84f2-a8c803e3d242", "bridge": "br-int", "label": "tempest-ServerMetadataTestJSON-1209546001-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c730dba579254c899494d328e1f8605e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "63e45f61-1d9b-4660-8d25-89fb68d45cd3", "external-id": "nsx-vlan-transportzone-43", "segmentation_id": 43, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd7d74026-ba", "ovs_interfaceid": "d7d74026-baa1-4909-9860-a7d5ebe08169", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2116.206138] env[63279]: DEBUG oslo_concurrency.lockutils [None req-cac71d6b-40f6-4627-b031-84d6a90fb5d5 tempest-ServerShowV257Test-1874161788 tempest-ServerShowV257Test-1874161788-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2116.303200] env[63279]: DEBUG oslo_concurrency.lockutils [None req-04f7e4ed-d7ff-4e9a-9fc5-7e015957e936 tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2116.381068] env[63279]: DEBUG oslo_concurrency.lockutils [None req-87c40409-e3eb-4bc2-bdc5-ee97999f97b7 tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2116.384361] env[63279]: DEBUG oslo_concurrency.lockutils [None req-bb47e242-d054-4951-a4e2-4a2b181effb2 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Releasing lock "refresh_cache-b981ac83-6c23-4d44-bd28-12da30d746bd" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2116.384820] env[63279]: DEBUG nova.objects.instance [None req-bb47e242-d054-4951-a4e2-4a2b181effb2 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Lazy-loading 'migration_context' on Instance uuid 
b981ac83-6c23-4d44-bd28-12da30d746bd {{(pid=63279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2116.426041] env[63279]: DEBUG oslo_vmware.api [None req-5e3f5540-3a91-4676-9ed7-83f8c35479b1 tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] Task: {'id': task-2087530, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2116.492944] env[63279]: DEBUG oslo_concurrency.lockutils [None req-01b37c68-092e-4554-92a7-7422378b5349 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Releasing lock "refresh_cache-668436f9-94e9-48c2-a3d4-3df7bbcf5775" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2116.494398] env[63279]: DEBUG nova.compute.manager [None req-01b37c68-092e-4554-92a7-7422378b5349 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] [instance: 668436f9-94e9-48c2-a3d4-3df7bbcf5775] Instance network_info: |[{"id": "fa1d70e2-1628-429b-a1b6-a29d406767dd", "address": "fa:16:3e:92:3b:e1", "network": {"id": "4f155366-ade7-4d4b-8fad-a2b55798980f", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-669492310-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "651b8183237a4e6dbef36aa2fb419f1b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "50cf0a70-948d-4611-af05-94c1483064ed", "external-id": "nsx-vlan-transportzone-536", "segmentation_id": 536, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfa1d70e2-16", "ovs_interfaceid": "fa1d70e2-1628-429b-a1b6-a29d406767dd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 2116.494650] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-01b37c68-092e-4554-92a7-7422378b5349 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] [instance: 668436f9-94e9-48c2-a3d4-3df7bbcf5775] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:92:3b:e1', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '50cf0a70-948d-4611-af05-94c1483064ed', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'fa1d70e2-1628-429b-a1b6-a29d406767dd', 'vif_model': 'vmxnet3'}] {{(pid=63279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2116.501870] env[63279]: DEBUG oslo.service.loopingcall [None req-01b37c68-092e-4554-92a7-7422378b5349 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
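The oslo.service.loopingcall entry above waits for nova.virt.vmwareapi.vm_util.create_vm to return. The exact wrapper Nova wires in is not visible from the log; as a hedged sketch, the general looping-call pattern that module offers looks like the following, where wait_until_done and check_created are made-up names.

    from oslo_service import loopingcall

    def wait_until_done(check_created, interval=0.5):
        # check_created() is a hypothetical callable: it returns None while the
        # operation is still in flight and a result object once it is finished.
        def _poll():
            result = check_created()
            if result is not None:
                # Raising LoopingCallDone stops the loop and carries the value out.
                raise loopingcall.LoopingCallDone(result)

        timer = loopingcall.FixedIntervalLoopingCall(_poll)
        # Run _poll every `interval` seconds until it raises LoopingCallDone,
        # then hand back the value it carried.
        return timer.start(interval=interval).wait()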
{{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2116.504693] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 668436f9-94e9-48c2-a3d4-3df7bbcf5775] Creating VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2116.505403] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ab136680-8745-471e-b744-d5178c360c00 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2116.528869] env[63279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2116.528869] env[63279]: value = "task-2087531" [ 2116.528869] env[63279]: _type = "Task" [ 2116.528869] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2116.537598] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087531, 'name': CreateVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2116.564191] env[63279]: DEBUG nova.compute.manager [req-b37880b0-f6f6-428a-85c9-15586a6c7aa0 req-c07c546d-e505-4e03-82e4-a03c3287962a service nova] [instance: 668436f9-94e9-48c2-a3d4-3df7bbcf5775] Received event network-changed-fa1d70e2-1628-429b-a1b6-a29d406767dd {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2116.565302] env[63279]: DEBUG nova.compute.manager [req-b37880b0-f6f6-428a-85c9-15586a6c7aa0 req-c07c546d-e505-4e03-82e4-a03c3287962a service nova] [instance: 668436f9-94e9-48c2-a3d4-3df7bbcf5775] Refreshing instance network info cache due to event network-changed-fa1d70e2-1628-429b-a1b6-a29d406767dd. 
{{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11655}} [ 2116.565302] env[63279]: DEBUG oslo_concurrency.lockutils [req-b37880b0-f6f6-428a-85c9-15586a6c7aa0 req-c07c546d-e505-4e03-82e4-a03c3287962a service nova] Acquiring lock "refresh_cache-668436f9-94e9-48c2-a3d4-3df7bbcf5775" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2116.565302] env[63279]: DEBUG oslo_concurrency.lockutils [req-b37880b0-f6f6-428a-85c9-15586a6c7aa0 req-c07c546d-e505-4e03-82e4-a03c3287962a service nova] Acquired lock "refresh_cache-668436f9-94e9-48c2-a3d4-3df7bbcf5775" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2116.565302] env[63279]: DEBUG nova.network.neutron [req-b37880b0-f6f6-428a-85c9-15586a6c7aa0 req-c07c546d-e505-4e03-82e4-a03c3287962a service nova] [instance: 668436f9-94e9-48c2-a3d4-3df7bbcf5775] Refreshing network info cache for port fa1d70e2-1628-429b-a1b6-a29d406767dd {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2116.707712] env[63279]: DEBUG oslo_concurrency.lockutils [None req-934ceb37-5239-47c1-8678-9d43df040bef tempest-ServerMetadataTestJSON-1974700823 tempest-ServerMetadataTestJSON-1974700823-project-member] Releasing lock "refresh_cache-a0697601-46ae-48ce-a3e1-3c4b81fc1f95" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2116.708073] env[63279]: DEBUG nova.compute.manager [None req-934ceb37-5239-47c1-8678-9d43df040bef tempest-ServerMetadataTestJSON-1974700823 tempest-ServerMetadataTestJSON-1974700823-project-member] [instance: a0697601-46ae-48ce-a3e1-3c4b81fc1f95] Instance network_info: |[{"id": "d7d74026-baa1-4909-9860-a7d5ebe08169", "address": "fa:16:3e:0a:3d:21", "network": {"id": "0c1f594e-4288-4945-84f2-a8c803e3d242", "bridge": "br-int", "label": "tempest-ServerMetadataTestJSON-1209546001-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c730dba579254c899494d328e1f8605e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "63e45f61-1d9b-4660-8d25-89fb68d45cd3", "external-id": "nsx-vlan-transportzone-43", "segmentation_id": 43, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd7d74026-ba", "ovs_interfaceid": "d7d74026-baa1-4909-9860-a7d5ebe08169", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 2116.708514] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-934ceb37-5239-47c1-8678-9d43df040bef tempest-ServerMetadataTestJSON-1974700823 tempest-ServerMetadataTestJSON-1974700823-project-member] [instance: a0697601-46ae-48ce-a3e1-3c4b81fc1f95] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:0a:3d:21', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '63e45f61-1d9b-4660-8d25-89fb68d45cd3', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 
'd7d74026-baa1-4909-9860-a7d5ebe08169', 'vif_model': 'vmxnet3'}] {{(pid=63279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2116.716342] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-934ceb37-5239-47c1-8678-9d43df040bef tempest-ServerMetadataTestJSON-1974700823 tempest-ServerMetadataTestJSON-1974700823-project-member] Creating folder: Project (c730dba579254c899494d328e1f8605e). Parent ref: group-v427491. {{(pid=63279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 2116.719524] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-df58a6ef-3eb7-4a45-8010-df9a066fc0bd {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2116.731102] env[63279]: INFO nova.virt.vmwareapi.vm_util [None req-934ceb37-5239-47c1-8678-9d43df040bef tempest-ServerMetadataTestJSON-1974700823 tempest-ServerMetadataTestJSON-1974700823-project-member] Created folder: Project (c730dba579254c899494d328e1f8605e) in parent group-v427491. [ 2116.731320] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-934ceb37-5239-47c1-8678-9d43df040bef tempest-ServerMetadataTestJSON-1974700823 tempest-ServerMetadataTestJSON-1974700823-project-member] Creating folder: Instances. Parent ref: group-v427697. {{(pid=63279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 2116.733894] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-70b5f782-d581-4d36-9530-b71f0a0ec0f4 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2116.743085] env[63279]: INFO nova.virt.vmwareapi.vm_util [None req-934ceb37-5239-47c1-8678-9d43df040bef tempest-ServerMetadataTestJSON-1974700823 tempest-ServerMetadataTestJSON-1974700823-project-member] Created folder: Instances in parent group-v427697. [ 2116.743327] env[63279]: DEBUG oslo.service.loopingcall [None req-934ceb37-5239-47c1-8678-9d43df040bef tempest-ServerMetadataTestJSON-1974700823 tempest-ServerMetadataTestJSON-1974700823-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2116.743519] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a0697601-46ae-48ce-a3e1-3c4b81fc1f95] Creating VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2116.743918] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-120c91fd-f432-41a6-9041-f05306347faa {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2116.767022] env[63279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2116.767022] env[63279]: value = "task-2087534" [ 2116.767022] env[63279]: _type = "Task" [ 2116.767022] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2116.777401] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087534, 'name': CreateVM_Task} progress is 0%. 
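The repeated "Task: {...} progress is N%." lines come from oslo.vmware polling vCenter tasks until they finish or fail. The sketch below is a schematic stand-in for that polling behaviour, not oslo.vmware's implementation; get_task_info is a hypothetical callable.

    import time

    class TaskFailed(Exception):
        pass

    def poll_task(get_task_info, interval=0.5):
        # get_task_info() is a hypothetical callable returning a dict with a
        # 'state' of 'queued', 'running', 'success' or 'error', plus 'progress'.
        while True:
            info = get_task_info()
            if info["state"] == "success":
                return info
            if info["state"] == "error":
                raise TaskFailed(info.get("error", "task failed"))
            # Still queued or running: report progress and try again.
            print("progress is %s%%" % info.get("progress", 0))
            time.sleep(interval)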
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2116.778367] env[63279]: INFO nova.compute.manager [None req-d227dc54-bb84-4c5b-a990-c7df7fd31923 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] [instance: f6a5d157-d58c-4c7e-b6e8-f2dc7aaebd9b] Swapping old allocation on dict_keys(['0ba7c625-a0fc-4d3c-b804-196d00f00137']) held by migration 6396462b-1d31-43f6-bcb9-97273bcbac6a for instance [ 2116.792747] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dee9b097-2408-4fe6-b527-da0b11ea85b0 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2116.800610] env[63279]: DEBUG nova.scheduler.client.report [None req-d227dc54-bb84-4c5b-a990-c7df7fd31923 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Overwriting current allocation {'allocations': {'0ba7c625-a0fc-4d3c-b804-196d00f00137': {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}, 'generation': 105}}, 'project_id': '0b41258d6a444b558b3f3256f2f7d6eb', 'user_id': 'a111f7e262b04ab9b1bc14397ce09b08', 'consumer_generation': 1} on consumer f6a5d157-d58c-4c7e-b6e8-f2dc7aaebd9b {{(pid=63279) move_allocations /opt/stack/nova/nova/scheduler/client/report.py:2036}} [ 2116.803391] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0dfb970-4ed8-487c-bbe0-93604276d68f {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2116.838970] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6bdaeaf7-f400-45e2-9860-17993d7b75d2 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2116.847970] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-011e4364-a2f0-4734-89a7-43a3fe75a398 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2116.864126] env[63279]: DEBUG nova.compute.provider_tree [None req-d83e173e-3a00-46ae-8eed-94284cfc23f8 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Inventory has not changed in ProviderTree for provider: 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2116.888008] env[63279]: DEBUG nova.objects.base [None req-bb47e242-d054-4951-a4e2-4a2b181effb2 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Object Instance lazy-loaded attributes: info_cache,migration_context {{(pid=63279) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 2116.889220] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19f32e5f-3170-45a6-9234-3cb62d751b62 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2116.910884] env[63279]: DEBUG oslo_concurrency.lockutils [None req-d227dc54-bb84-4c5b-a990-c7df7fd31923 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Acquiring lock "refresh_cache-f6a5d157-d58c-4c7e-b6e8-f2dc7aaebd9b" {{(pid=63279) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2116.911173] env[63279]: DEBUG oslo_concurrency.lockutils [None req-d227dc54-bb84-4c5b-a990-c7df7fd31923 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Acquired lock "refresh_cache-f6a5d157-d58c-4c7e-b6e8-f2dc7aaebd9b" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2116.911413] env[63279]: DEBUG nova.network.neutron [None req-d227dc54-bb84-4c5b-a990-c7df7fd31923 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] [instance: f6a5d157-d58c-4c7e-b6e8-f2dc7aaebd9b] Building network info cache for instance {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2116.912601] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-021c48b8-ebd7-4b22-84fb-4251f08b9806 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2116.920338] env[63279]: DEBUG oslo_vmware.api [None req-bb47e242-d054-4951-a4e2-4a2b181effb2 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Waiting for the task: (returnval){ [ 2116.920338] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]523515e8-3388-8525-be67-0356f9393f6e" [ 2116.920338] env[63279]: _type = "Task" [ 2116.920338] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2116.928034] env[63279]: DEBUG oslo_vmware.api [None req-5e3f5540-3a91-4676-9ed7-83f8c35479b1 tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] Task: {'id': task-2087530, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.552921} completed successfully. 
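The "Swapping old allocation ... held by migration ..." and "Overwriting current allocation ..." entries a little further up show the payload shape the scheduler report client sends to Placement when it moves an instance's allocation off a migration consumer. The helper below only rebuilds that logged dict shape from its parts; it is schematic and omits the actual HTTP call, microversion handling and error checking.

    def build_move_allocation(rp_uuid, resources, project_id, user_id,
                              consumer_generation):
        # Rebuilds the dict shape shown in the "Overwriting current allocation" line.
        return {
            "allocations": {rp_uuid: {"resources": resources}},
            "project_id": project_id,
            "user_id": user_id,
            "consumer_generation": consumer_generation,
        }

    payload = build_move_allocation(
        "0ba7c625-a0fc-4d3c-b804-196d00f00137",
        {"DISK_GB": 1, "MEMORY_MB": 256, "VCPU": 1},
        project_id="0b41258d6a444b558b3f3256f2f7d6eb",
        user_id="a111f7e262b04ab9b1bc14397ce09b08",
        consumer_generation=1,
    )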
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2116.928800] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-5e3f5540-3a91-4676-9ed7-83f8c35479b1 tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] ee1b4746-49ac-425c-8219-4d54cb34abe0/ee1b4746-49ac-425c-8219-4d54cb34abe0.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2116.929522] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-5e3f5540-3a91-4676-9ed7-83f8c35479b1 tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] [instance: ee1b4746-49ac-425c-8219-4d54cb34abe0] Extending root virtual disk to 1048576 {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2116.929522] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-8630b25f-f221-441f-a748-ccb7f73925cb {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2116.934900] env[63279]: DEBUG oslo_vmware.api [None req-bb47e242-d054-4951-a4e2-4a2b181effb2 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]523515e8-3388-8525-be67-0356f9393f6e, 'name': SearchDatastore_Task, 'duration_secs': 0.007351} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2116.935606] env[63279]: DEBUG oslo_concurrency.lockutils [None req-bb47e242-d054-4951-a4e2-4a2b181effb2 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2116.940298] env[63279]: DEBUG oslo_vmware.api [None req-5e3f5540-3a91-4676-9ed7-83f8c35479b1 tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] Waiting for the task: (returnval){ [ 2116.940298] env[63279]: value = "task-2087535" [ 2116.940298] env[63279]: _type = "Task" [ 2116.940298] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2116.945393] env[63279]: DEBUG nova.compute.manager [req-dfeaad7b-f095-452a-a595-0a087721c1fe req-71eca863-fd3f-4d5e-81cf-a86d81a233de service nova] [instance: a0697601-46ae-48ce-a3e1-3c4b81fc1f95] Received event network-changed-d7d74026-baa1-4909-9860-a7d5ebe08169 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2116.945585] env[63279]: DEBUG nova.compute.manager [req-dfeaad7b-f095-452a-a595-0a087721c1fe req-71eca863-fd3f-4d5e-81cf-a86d81a233de service nova] [instance: a0697601-46ae-48ce-a3e1-3c4b81fc1f95] Refreshing instance network info cache due to event network-changed-d7d74026-baa1-4909-9860-a7d5ebe08169. 
{{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11655}} [ 2116.945800] env[63279]: DEBUG oslo_concurrency.lockutils [req-dfeaad7b-f095-452a-a595-0a087721c1fe req-71eca863-fd3f-4d5e-81cf-a86d81a233de service nova] Acquiring lock "refresh_cache-a0697601-46ae-48ce-a3e1-3c4b81fc1f95" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2116.945947] env[63279]: DEBUG oslo_concurrency.lockutils [req-dfeaad7b-f095-452a-a595-0a087721c1fe req-71eca863-fd3f-4d5e-81cf-a86d81a233de service nova] Acquired lock "refresh_cache-a0697601-46ae-48ce-a3e1-3c4b81fc1f95" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2116.946129] env[63279]: DEBUG nova.network.neutron [req-dfeaad7b-f095-452a-a595-0a087721c1fe req-71eca863-fd3f-4d5e-81cf-a86d81a233de service nova] [instance: a0697601-46ae-48ce-a3e1-3c4b81fc1f95] Refreshing network info cache for port d7d74026-baa1-4909-9860-a7d5ebe08169 {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2116.953795] env[63279]: DEBUG oslo_vmware.api [None req-5e3f5540-3a91-4676-9ed7-83f8c35479b1 tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] Task: {'id': task-2087535, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2117.039564] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087531, 'name': CreateVM_Task} progress is 99%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2117.280188] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087534, 'name': CreateVM_Task, 'duration_secs': 0.473578} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2117.280398] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a0697601-46ae-48ce-a3e1-3c4b81fc1f95] Created VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2117.281202] env[63279]: DEBUG oslo_concurrency.lockutils [None req-934ceb37-5239-47c1-8678-9d43df040bef tempest-ServerMetadataTestJSON-1974700823 tempest-ServerMetadataTestJSON-1974700823-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2117.281394] env[63279]: DEBUG oslo_concurrency.lockutils [None req-934ceb37-5239-47c1-8678-9d43df040bef tempest-ServerMetadataTestJSON-1974700823 tempest-ServerMetadataTestJSON-1974700823-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2117.281798] env[63279]: DEBUG oslo_concurrency.lockutils [None req-934ceb37-5239-47c1-8678-9d43df040bef tempest-ServerMetadataTestJSON-1974700823 tempest-ServerMetadataTestJSON-1974700823-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2117.282086] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5d232571-faba-47c0-a4ff-ec387903c990 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2117.287105] env[63279]: DEBUG oslo_vmware.api [None req-934ceb37-5239-47c1-8678-9d43df040bef tempest-ServerMetadataTestJSON-1974700823 tempest-ServerMetadataTestJSON-1974700823-project-member] Waiting for the task: (returnval){ [ 2117.287105] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52098517-8efc-3e85-a9bd-8a0d67b0f54d" [ 2117.287105] env[63279]: _type = "Task" [ 2117.287105] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2117.295149] env[63279]: DEBUG oslo_vmware.api [None req-934ceb37-5239-47c1-8678-9d43df040bef tempest-ServerMetadataTestJSON-1974700823 tempest-ServerMetadataTestJSON-1974700823-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52098517-8efc-3e85-a9bd-8a0d67b0f54d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2117.332651] env[63279]: DEBUG nova.network.neutron [req-b37880b0-f6f6-428a-85c9-15586a6c7aa0 req-c07c546d-e505-4e03-82e4-a03c3287962a service nova] [instance: 668436f9-94e9-48c2-a3d4-3df7bbcf5775] Updated VIF entry in instance network info cache for port fa1d70e2-1628-429b-a1b6-a29d406767dd. 
{{(pid=63279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2117.333875] env[63279]: DEBUG nova.network.neutron [req-b37880b0-f6f6-428a-85c9-15586a6c7aa0 req-c07c546d-e505-4e03-82e4-a03c3287962a service nova] [instance: 668436f9-94e9-48c2-a3d4-3df7bbcf5775] Updating instance_info_cache with network_info: [{"id": "fa1d70e2-1628-429b-a1b6-a29d406767dd", "address": "fa:16:3e:92:3b:e1", "network": {"id": "4f155366-ade7-4d4b-8fad-a2b55798980f", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-669492310-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "651b8183237a4e6dbef36aa2fb419f1b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "50cf0a70-948d-4611-af05-94c1483064ed", "external-id": "nsx-vlan-transportzone-536", "segmentation_id": 536, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfa1d70e2-16", "ovs_interfaceid": "fa1d70e2-1628-429b-a1b6-a29d406767dd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2117.371252] env[63279]: DEBUG nova.scheduler.client.report [None req-d83e173e-3a00-46ae-8eed-94284cfc23f8 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Inventory has not changed for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2117.451674] env[63279]: DEBUG oslo_vmware.api [None req-5e3f5540-3a91-4676-9ed7-83f8c35479b1 tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] Task: {'id': task-2087535, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.10502} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2117.454194] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-5e3f5540-3a91-4676-9ed7-83f8c35479b1 tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] [instance: ee1b4746-49ac-425c-8219-4d54cb34abe0] Extended root virtual disk {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2117.455271] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d45c877-e749-4c96-b66f-a6aa719cbb88 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2117.478696] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-5e3f5540-3a91-4676-9ed7-83f8c35479b1 tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] [instance: ee1b4746-49ac-425c-8219-4d54cb34abe0] Reconfiguring VM instance instance-00000048 to attach disk [datastore1] ee1b4746-49ac-425c-8219-4d54cb34abe0/ee1b4746-49ac-425c-8219-4d54cb34abe0.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2117.479012] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c3522b61-5843-4209-b6a2-0bce2f74a491 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2117.501321] env[63279]: DEBUG oslo_vmware.api [None req-5e3f5540-3a91-4676-9ed7-83f8c35479b1 tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] Waiting for the task: (returnval){ [ 2117.501321] env[63279]: value = "task-2087536" [ 2117.501321] env[63279]: _type = "Task" [ 2117.501321] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2117.509839] env[63279]: DEBUG oslo_vmware.api [None req-5e3f5540-3a91-4676-9ed7-83f8c35479b1 tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] Task: {'id': task-2087536, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2117.541045] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087531, 'name': CreateVM_Task, 'duration_secs': 0.588118} completed successfully. 
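The "Extending root virtual disk to 1048576" / "Extended root virtual disk" steps above are consistent with a 1 GiB root disk expressed in KiB (the flavor in this run has root_gb=1). A one-line arithmetic check, with a helper name of our own choosing:

    def root_disk_size_kib(root_gb):
        # 1 GiB expressed in KiB: 1 * 1024 * 1024 == 1048576, the value logged above.
        return root_gb * 1024 * 1024

    assert root_disk_size_kib(1) == 1048576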
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2117.541266] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 668436f9-94e9-48c2-a3d4-3df7bbcf5775] Created VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2117.542024] env[63279]: DEBUG oslo_concurrency.lockutils [None req-01b37c68-092e-4554-92a7-7422378b5349 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2117.798324] env[63279]: DEBUG oslo_vmware.api [None req-934ceb37-5239-47c1-8678-9d43df040bef tempest-ServerMetadataTestJSON-1974700823 tempest-ServerMetadataTestJSON-1974700823-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52098517-8efc-3e85-a9bd-8a0d67b0f54d, 'name': SearchDatastore_Task, 'duration_secs': 0.009445} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2117.798324] env[63279]: DEBUG oslo_concurrency.lockutils [None req-934ceb37-5239-47c1-8678-9d43df040bef tempest-ServerMetadataTestJSON-1974700823 tempest-ServerMetadataTestJSON-1974700823-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2117.798324] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-934ceb37-5239-47c1-8678-9d43df040bef tempest-ServerMetadataTestJSON-1974700823 tempest-ServerMetadataTestJSON-1974700823-project-member] [instance: a0697601-46ae-48ce-a3e1-3c4b81fc1f95] Processing image 30887889-e45b-4f67-8b3c-16216e594a90 {{(pid=63279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2117.801313] env[63279]: DEBUG oslo_concurrency.lockutils [None req-934ceb37-5239-47c1-8678-9d43df040bef tempest-ServerMetadataTestJSON-1974700823 tempest-ServerMetadataTestJSON-1974700823-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2117.801313] env[63279]: DEBUG oslo_concurrency.lockutils [None req-934ceb37-5239-47c1-8678-9d43df040bef tempest-ServerMetadataTestJSON-1974700823 tempest-ServerMetadataTestJSON-1974700823-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2117.801313] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-934ceb37-5239-47c1-8678-9d43df040bef tempest-ServerMetadataTestJSON-1974700823 tempest-ServerMetadataTestJSON-1974700823-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2117.801313] env[63279]: DEBUG oslo_concurrency.lockutils [None req-01b37c68-092e-4554-92a7-7422378b5349 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Acquired lock "[datastore1] 
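The Acquiring/Acquired/Releasing lines around the image-cache, refresh_cache and compute_resources locks are emitted by oslo.concurrency's lock helpers. A minimal usage sketch follows; the decorated function and the lock names in it are illustrative only.

    from oslo_concurrency import lockutils

    @lockutils.synchronized("compute_resources")
    def update_usage_example():
        # Only one thread in this process runs this body at a time.
        pass

    # The same named locks can also be taken explicitly as context managers:
    with lockutils.lock("refresh_cache-<instance-uuid>"):
        update_usage_example()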
devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2117.801965] env[63279]: DEBUG oslo_concurrency.lockutils [None req-01b37c68-092e-4554-92a7-7422378b5349 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2117.801965] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1a90b105-b7cc-4bdc-b08d-aa0483edca72 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2117.801965] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3b2f5968-0ffd-4f3a-a2ba-06576da3016b {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2117.806670] env[63279]: DEBUG oslo_vmware.api [None req-01b37c68-092e-4554-92a7-7422378b5349 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Waiting for the task: (returnval){ [ 2117.806670] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]523a2232-6484-3eba-c817-ca6b23c91c61" [ 2117.806670] env[63279]: _type = "Task" [ 2117.806670] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2117.811637] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-934ceb37-5239-47c1-8678-9d43df040bef tempest-ServerMetadataTestJSON-1974700823 tempest-ServerMetadataTestJSON-1974700823-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2117.811637] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-934ceb37-5239-47c1-8678-9d43df040bef tempest-ServerMetadataTestJSON-1974700823 tempest-ServerMetadataTestJSON-1974700823-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2117.812412] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ad482c5b-7d57-4b6e-b2e5-b10bf95e6d2f {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2117.819481] env[63279]: DEBUG oslo_vmware.api [None req-01b37c68-092e-4554-92a7-7422378b5349 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]523a2232-6484-3eba-c817-ca6b23c91c61, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2117.822465] env[63279]: DEBUG oslo_vmware.api [None req-934ceb37-5239-47c1-8678-9d43df040bef tempest-ServerMetadataTestJSON-1974700823 tempest-ServerMetadataTestJSON-1974700823-project-member] Waiting for the task: (returnval){ [ 2117.822465] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]524dc97a-b080-5948-8955-5637d3384861" [ 2117.822465] env[63279]: _type = "Task" [ 2117.822465] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2117.826284] env[63279]: DEBUG nova.network.neutron [None req-d227dc54-bb84-4c5b-a990-c7df7fd31923 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] [instance: f6a5d157-d58c-4c7e-b6e8-f2dc7aaebd9b] Updating instance_info_cache with network_info: [{"id": "cbbfde33-a0b6-4403-8a1e-d688a0a7147b", "address": "fa:16:3e:47:7e:05", "network": {"id": "1e875730-8e0e-4907-a6d2-776025ed7ab9", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.114", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "a935e86eee0a4c38adfe0367d2097a61", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "32028d02-abaa-4071-bc65-1460f5c772a8", "external-id": "nsx-vlan-transportzone-558", "segmentation_id": 558, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcbbfde33-a0", "ovs_interfaceid": "cbbfde33-a0b6-4403-8a1e-d688a0a7147b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2117.832367] env[63279]: DEBUG oslo_vmware.api [None req-934ceb37-5239-47c1-8678-9d43df040bef tempest-ServerMetadataTestJSON-1974700823 tempest-ServerMetadataTestJSON-1974700823-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]524dc97a-b080-5948-8955-5637d3384861, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2117.839254] env[63279]: DEBUG oslo_concurrency.lockutils [req-b37880b0-f6f6-428a-85c9-15586a6c7aa0 req-c07c546d-e505-4e03-82e4-a03c3287962a service nova] Releasing lock "refresh_cache-668436f9-94e9-48c2-a3d4-3df7bbcf5775" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2117.839549] env[63279]: DEBUG nova.compute.manager [req-b37880b0-f6f6-428a-85c9-15586a6c7aa0 req-c07c546d-e505-4e03-82e4-a03c3287962a service nova] [instance: cd00cb0e-30e5-4a0c-8612-ea92e5e32edd] Received event network-vif-deleted-4aff58e1-ec9c-4691-a371-76a7d2e41303 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2117.872026] env[63279]: DEBUG nova.network.neutron [req-dfeaad7b-f095-452a-a595-0a087721c1fe req-71eca863-fd3f-4d5e-81cf-a86d81a233de service nova] [instance: a0697601-46ae-48ce-a3e1-3c4b81fc1f95] Updated VIF entry in instance network info cache for port d7d74026-baa1-4909-9860-a7d5ebe08169. {{(pid=63279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2117.872026] env[63279]: DEBUG nova.network.neutron [req-dfeaad7b-f095-452a-a595-0a087721c1fe req-71eca863-fd3f-4d5e-81cf-a86d81a233de service nova] [instance: a0697601-46ae-48ce-a3e1-3c4b81fc1f95] Updating instance_info_cache with network_info: [{"id": "d7d74026-baa1-4909-9860-a7d5ebe08169", "address": "fa:16:3e:0a:3d:21", "network": {"id": "0c1f594e-4288-4945-84f2-a8c803e3d242", "bridge": "br-int", "label": "tempest-ServerMetadataTestJSON-1209546001-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c730dba579254c899494d328e1f8605e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "63e45f61-1d9b-4660-8d25-89fb68d45cd3", "external-id": "nsx-vlan-transportzone-43", "segmentation_id": 43, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd7d74026-ba", "ovs_interfaceid": "d7d74026-baa1-4909-9860-a7d5ebe08169", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2117.876755] env[63279]: DEBUG oslo_concurrency.lockutils [None req-d83e173e-3a00-46ae-8eed-94284cfc23f8 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.637s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2117.877263] env[63279]: DEBUG nova.compute.manager [None req-d83e173e-3a00-46ae-8eed-94284cfc23f8 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] [instance: 4ce17bdb-4bed-4e06-af13-e4097b55e17d] Start building networks asynchronously for instance. 
{{(pid=63279) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 2117.879908] env[63279]: DEBUG oslo_concurrency.lockutils [None req-703b4c29-4773-4b34-915e-6bee4bb47aad tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 34.401s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2117.881328] env[63279]: INFO nova.compute.claims [None req-703b4c29-4773-4b34-915e-6bee4bb47aad tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] [instance: a6758131-030e-4b33-a2c9-8864055a5bec] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2118.011180] env[63279]: DEBUG oslo_vmware.api [None req-5e3f5540-3a91-4676-9ed7-83f8c35479b1 tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] Task: {'id': task-2087536, 'name': ReconfigVM_Task, 'duration_secs': 0.287685} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2118.011518] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-5e3f5540-3a91-4676-9ed7-83f8c35479b1 tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] [instance: ee1b4746-49ac-425c-8219-4d54cb34abe0] Reconfigured VM instance instance-00000048 to attach disk [datastore1] ee1b4746-49ac-425c-8219-4d54cb34abe0/ee1b4746-49ac-425c-8219-4d54cb34abe0.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2118.012109] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-5c31f4fe-b3e0-41c9-9296-45f19e207477 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2118.022998] env[63279]: DEBUG oslo_vmware.api [None req-5e3f5540-3a91-4676-9ed7-83f8c35479b1 tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] Waiting for the task: (returnval){ [ 2118.022998] env[63279]: value = "task-2087537" [ 2118.022998] env[63279]: _type = "Task" [ 2118.022998] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2118.031453] env[63279]: DEBUG oslo_vmware.api [None req-5e3f5540-3a91-4676-9ed7-83f8c35479b1 tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] Task: {'id': task-2087537, 'name': Rename_Task} progress is 5%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2118.319120] env[63279]: DEBUG oslo_vmware.api [None req-01b37c68-092e-4554-92a7-7422378b5349 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]523a2232-6484-3eba-c817-ca6b23c91c61, 'name': SearchDatastore_Task, 'duration_secs': 0.009562} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2118.319552] env[63279]: DEBUG oslo_concurrency.lockutils [None req-01b37c68-092e-4554-92a7-7422378b5349 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2118.319725] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-01b37c68-092e-4554-92a7-7422378b5349 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] [instance: 668436f9-94e9-48c2-a3d4-3df7bbcf5775] Processing image 30887889-e45b-4f67-8b3c-16216e594a90 {{(pid=63279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2118.320028] env[63279]: DEBUG oslo_concurrency.lockutils [None req-01b37c68-092e-4554-92a7-7422378b5349 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2118.333108] env[63279]: DEBUG oslo_concurrency.lockutils [None req-d227dc54-bb84-4c5b-a990-c7df7fd31923 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Releasing lock "refresh_cache-f6a5d157-d58c-4c7e-b6e8-f2dc7aaebd9b" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2118.333616] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-d227dc54-bb84-4c5b-a990-c7df7fd31923 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] [instance: f6a5d157-d58c-4c7e-b6e8-f2dc7aaebd9b] Powering off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2118.333956] env[63279]: DEBUG oslo_vmware.api [None req-934ceb37-5239-47c1-8678-9d43df040bef tempest-ServerMetadataTestJSON-1974700823 tempest-ServerMetadataTestJSON-1974700823-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]524dc97a-b080-5948-8955-5637d3384861, 'name': SearchDatastore_Task, 'duration_secs': 0.008775} completed successfully. 
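Paths such as "[datastore1] devstack-image-cache_base/<image>/<image>.vmdk" follow the usual "[datastore] relative/path" form. The helper below reproduces that string form only; it is a hypothetical stand-in, not the datastore-path helper the driver actually uses.

    import posixpath

    def ds_path(datastore, *parts):
        # Produces the "[datastore] relative/path" string form seen in the log.
        return "[%s] %s" % (datastore, posixpath.join(*parts))

    image = "30887889-e45b-4f67-8b3c-16216e594a90"
    cached_vmdk = ds_path("datastore1", "devstack-image-cache_base",
                          image, image + ".vmdk")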
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2118.334235] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2613a8b3-3a3e-4c4c-ad5c-27bc5bd5e9be {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2118.336640] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-814898b4-2335-466a-8b49-1af81ad2cbcc {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2118.343327] env[63279]: DEBUG oslo_vmware.api [None req-934ceb37-5239-47c1-8678-9d43df040bef tempest-ServerMetadataTestJSON-1974700823 tempest-ServerMetadataTestJSON-1974700823-project-member] Waiting for the task: (returnval){ [ 2118.343327] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]5235d935-d872-e16d-0df4-b711432bcdb8" [ 2118.343327] env[63279]: _type = "Task" [ 2118.343327] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2118.344922] env[63279]: DEBUG oslo_vmware.api [None req-d227dc54-bb84-4c5b-a990-c7df7fd31923 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Waiting for the task: (returnval){ [ 2118.344922] env[63279]: value = "task-2087538" [ 2118.344922] env[63279]: _type = "Task" [ 2118.344922] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2118.367374] env[63279]: DEBUG oslo_vmware.api [None req-934ceb37-5239-47c1-8678-9d43df040bef tempest-ServerMetadataTestJSON-1974700823 tempest-ServerMetadataTestJSON-1974700823-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]5235d935-d872-e16d-0df4-b711432bcdb8, 'name': SearchDatastore_Task, 'duration_secs': 0.015728} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2118.371241] env[63279]: DEBUG oslo_concurrency.lockutils [None req-934ceb37-5239-47c1-8678-9d43df040bef tempest-ServerMetadataTestJSON-1974700823 tempest-ServerMetadataTestJSON-1974700823-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2118.371557] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-934ceb37-5239-47c1-8678-9d43df040bef tempest-ServerMetadataTestJSON-1974700823 tempest-ServerMetadataTestJSON-1974700823-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] a0697601-46ae-48ce-a3e1-3c4b81fc1f95/a0697601-46ae-48ce-a3e1-3c4b81fc1f95.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2118.371892] env[63279]: DEBUG oslo_vmware.api [None req-d227dc54-bb84-4c5b-a990-c7df7fd31923 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Task: {'id': task-2087538, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2118.372639] env[63279]: DEBUG oslo_concurrency.lockutils [None req-01b37c68-092e-4554-92a7-7422378b5349 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2118.372877] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-01b37c68-092e-4554-92a7-7422378b5349 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2118.373142] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-686d2bb7-39a4-49ea-bc29-bb17eef80144 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2118.375280] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2c2be26b-4d4a-47e7-969b-4e241ef25551 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2118.377576] env[63279]: DEBUG oslo_concurrency.lockutils [req-dfeaad7b-f095-452a-a595-0a087721c1fe req-71eca863-fd3f-4d5e-81cf-a86d81a233de service nova] Releasing lock "refresh_cache-a0697601-46ae-48ce-a3e1-3c4b81fc1f95" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2118.383118] env[63279]: DEBUG oslo_vmware.api [None req-934ceb37-5239-47c1-8678-9d43df040bef tempest-ServerMetadataTestJSON-1974700823 tempest-ServerMetadataTestJSON-1974700823-project-member] Waiting for the task: (returnval){ [ 2118.383118] env[63279]: value = "task-2087539" [ 2118.383118] env[63279]: _type = "Task" [ 2118.383118] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2118.388540] env[63279]: DEBUG nova.compute.utils [None req-d83e173e-3a00-46ae-8eed-94284cfc23f8 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Using /dev/sd instead of None {{(pid=63279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2118.394137] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-01b37c68-092e-4554-92a7-7422378b5349 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2118.394137] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-01b37c68-092e-4554-92a7-7422378b5349 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2118.394137] env[63279]: DEBUG nova.compute.manager [None req-d83e173e-3a00-46ae-8eed-94284cfc23f8 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] [instance: 4ce17bdb-4bed-4e06-af13-e4097b55e17d] Allocating IP information in the background. {{(pid=63279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 2118.394137] env[63279]: DEBUG nova.network.neutron [None req-d83e173e-3a00-46ae-8eed-94284cfc23f8 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] [instance: 4ce17bdb-4bed-4e06-af13-e4097b55e17d] allocate_for_instance() {{(pid=63279) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2118.395758] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-601ef90a-2c01-4907-81ed-d696e62101c7 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2118.404124] env[63279]: DEBUG oslo_vmware.api [None req-934ceb37-5239-47c1-8678-9d43df040bef tempest-ServerMetadataTestJSON-1974700823 tempest-ServerMetadataTestJSON-1974700823-project-member] Task: {'id': task-2087539, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2118.408221] env[63279]: DEBUG oslo_vmware.api [None req-01b37c68-092e-4554-92a7-7422378b5349 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Waiting for the task: (returnval){ [ 2118.408221] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]5221ffb7-e41e-507b-5b7f-cc27685d633d" [ 2118.408221] env[63279]: _type = "Task" [ 2118.408221] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2118.418832] env[63279]: DEBUG oslo_vmware.api [None req-01b37c68-092e-4554-92a7-7422378b5349 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]5221ffb7-e41e-507b-5b7f-cc27685d633d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2118.449883] env[63279]: DEBUG nova.policy [None req-d83e173e-3a00-46ae-8eed-94284cfc23f8 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3c15683ebb604c9ba7d18e112476a8a5', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '1fc9b60ae304455097b8be9a276796fa', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63279) authorize /opt/stack/nova/nova/policy.py:192}} [ 2118.532168] env[63279]: DEBUG oslo_vmware.api [None req-5e3f5540-3a91-4676-9ed7-83f8c35479b1 tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] Task: {'id': task-2087537, 'name': Rename_Task, 'duration_secs': 0.160516} completed successfully. 
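The "Policy check for network:attach_external_network failed" entry above shows an oslo.policy authorization being evaluated against the request's credentials. A self-contained sketch of such a check follows; the rule default and credentials here are illustrative, not Nova's real policy definitions.

    from oslo_config import cfg
    from oslo_policy import policy

    cfg.CONF([], project="example")  # parse an empty config for this sketch
    enforcer = policy.Enforcer(cfg.CONF)
    enforcer.register_default(
        policy.RuleDefault("network:attach_external_network", "role:admin"))

    creds = {"roles": ["member", "reader"], "project_id": "example-project"}
    allowed = enforcer.enforce("network:attach_external_network", {}, creds)
    # allowed is False for a member/reader token, matching the failed check above.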
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2118.532597] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-5e3f5540-3a91-4676-9ed7-83f8c35479b1 tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] [instance: ee1b4746-49ac-425c-8219-4d54cb34abe0] Powering on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2118.532899] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9351bde8-af8b-4b73-91fc-c4ba90496025 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2118.539397] env[63279]: DEBUG oslo_vmware.api [None req-5e3f5540-3a91-4676-9ed7-83f8c35479b1 tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] Waiting for the task: (returnval){ [ 2118.539397] env[63279]: value = "task-2087540" [ 2118.539397] env[63279]: _type = "Task" [ 2118.539397] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2118.547703] env[63279]: DEBUG oslo_vmware.api [None req-5e3f5540-3a91-4676-9ed7-83f8c35479b1 tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] Task: {'id': task-2087540, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2118.837302] env[63279]: DEBUG nova.network.neutron [None req-d83e173e-3a00-46ae-8eed-94284cfc23f8 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] [instance: 4ce17bdb-4bed-4e06-af13-e4097b55e17d] Successfully created port: 72b975f4-3f92-4d65-8d8f-119eeb59c2ab {{(pid=63279) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2118.868022] env[63279]: DEBUG oslo_vmware.api [None req-d227dc54-bb84-4c5b-a990-c7df7fd31923 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Task: {'id': task-2087538, 'name': PowerOffVM_Task, 'duration_secs': 0.190531} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2118.868314] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-d227dc54-bb84-4c5b-a990-c7df7fd31923 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] [instance: f6a5d157-d58c-4c7e-b6e8-f2dc7aaebd9b] Powered off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2118.868990] env[63279]: DEBUG nova.virt.hardware [None req-d227dc54-bb84-4c5b-a990-c7df7fd31923 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-13T17:53:56Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='ca62e09f-97fd-4b30-aed4-3874eb2dace8',id=29,is_public=True,memory_mb=192,name='tempest-test_resize_flavor_-212803514',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=30887889-e45b-4f67-8b3c-16216e594a90,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2118.869229] env[63279]: DEBUG nova.virt.hardware [None req-d227dc54-bb84-4c5b-a990-c7df7fd31923 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Flavor limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2118.869424] env[63279]: DEBUG nova.virt.hardware [None req-d227dc54-bb84-4c5b-a990-c7df7fd31923 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Image limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2118.869630] env[63279]: DEBUG nova.virt.hardware [None req-d227dc54-bb84-4c5b-a990-c7df7fd31923 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Flavor pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2118.869784] env[63279]: DEBUG nova.virt.hardware [None req-d227dc54-bb84-4c5b-a990-c7df7fd31923 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Image pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2118.869934] env[63279]: DEBUG nova.virt.hardware [None req-d227dc54-bb84-4c5b-a990-c7df7fd31923 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2118.870160] env[63279]: DEBUG nova.virt.hardware [None req-d227dc54-bb84-4c5b-a990-c7df7fd31923 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 2118.870350] env[63279]: DEBUG nova.virt.hardware [None req-d227dc54-bb84-4c5b-a990-c7df7fd31923 tempest-MigrationsAdminTest-1378842037 
tempest-MigrationsAdminTest-1378842037-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 2118.870544] env[63279]: DEBUG nova.virt.hardware [None req-d227dc54-bb84-4c5b-a990-c7df7fd31923 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Got 1 possible topologies {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2118.870726] env[63279]: DEBUG nova.virt.hardware [None req-d227dc54-bb84-4c5b-a990-c7df7fd31923 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2118.870895] env[63279]: DEBUG nova.virt.hardware [None req-d227dc54-bb84-4c5b-a990-c7df7fd31923 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2118.880538] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b3611415-8332-4f6b-8131-930c935d90b1 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2118.896734] env[63279]: DEBUG nova.compute.manager [None req-d83e173e-3a00-46ae-8eed-94284cfc23f8 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] [instance: 4ce17bdb-4bed-4e06-af13-e4097b55e17d] Start building block device mappings for instance. {{(pid=63279) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 2118.913409] env[63279]: DEBUG oslo_vmware.api [None req-d227dc54-bb84-4c5b-a990-c7df7fd31923 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Waiting for the task: (returnval){ [ 2118.913409] env[63279]: value = "task-2087541" [ 2118.913409] env[63279]: _type = "Task" [ 2118.913409] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2118.928246] env[63279]: DEBUG oslo_vmware.api [None req-934ceb37-5239-47c1-8678-9d43df040bef tempest-ServerMetadataTestJSON-1974700823 tempest-ServerMetadataTestJSON-1974700823-project-member] Task: {'id': task-2087539, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2118.936277] env[63279]: DEBUG oslo_vmware.api [None req-01b37c68-092e-4554-92a7-7422378b5349 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]5221ffb7-e41e-507b-5b7f-cc27685d633d, 'name': SearchDatastore_Task, 'duration_secs': 0.013055} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2118.940928] env[63279]: DEBUG oslo_vmware.api [None req-d227dc54-bb84-4c5b-a990-c7df7fd31923 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Task: {'id': task-2087541, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2118.941237] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-913a3f27-5816-4b1c-9c2d-2de6ba7aad7b {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2118.952050] env[63279]: DEBUG oslo_vmware.api [None req-01b37c68-092e-4554-92a7-7422378b5349 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Waiting for the task: (returnval){ [ 2118.952050] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]528b1901-5005-63a4-42c6-ffa5710cf23e" [ 2118.952050] env[63279]: _type = "Task" [ 2118.952050] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2118.964058] env[63279]: DEBUG oslo_vmware.api [None req-01b37c68-092e-4554-92a7-7422378b5349 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]528b1901-5005-63a4-42c6-ffa5710cf23e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2119.048910] env[63279]: DEBUG oslo_vmware.api [None req-5e3f5540-3a91-4676-9ed7-83f8c35479b1 tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] Task: {'id': task-2087540, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2119.406128] env[63279]: DEBUG oslo_vmware.api [None req-934ceb37-5239-47c1-8678-9d43df040bef tempest-ServerMetadataTestJSON-1974700823 tempest-ServerMetadataTestJSON-1974700823-project-member] Task: {'id': task-2087539, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.903102} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2119.406668] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-934ceb37-5239-47c1-8678-9d43df040bef tempest-ServerMetadataTestJSON-1974700823 tempest-ServerMetadataTestJSON-1974700823-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] a0697601-46ae-48ce-a3e1-3c4b81fc1f95/a0697601-46ae-48ce-a3e1-3c4b81fc1f95.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2119.406835] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-934ceb37-5239-47c1-8678-9d43df040bef tempest-ServerMetadataTestJSON-1974700823 tempest-ServerMetadataTestJSON-1974700823-project-member] [instance: a0697601-46ae-48ce-a3e1-3c4b81fc1f95] Extending root virtual disk to 1048576 {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2119.407211] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b59b0de4-5b66-4b42-8bef-70338c285418 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2119.419664] env[63279]: DEBUG oslo_vmware.api [None req-934ceb37-5239-47c1-8678-9d43df040bef tempest-ServerMetadataTestJSON-1974700823 tempest-ServerMetadataTestJSON-1974700823-project-member] Waiting for the task: (returnval){ [ 2119.419664] env[63279]: value = "task-2087542" [ 2119.419664] env[63279]: _type = "Task" [ 2119.419664] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2119.425191] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4eae6c91-a3c5-426d-b607-097d6dbfb7a1 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2119.438264] env[63279]: DEBUG oslo_vmware.api [None req-d227dc54-bb84-4c5b-a990-c7df7fd31923 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Task: {'id': task-2087541, 'name': ReconfigVM_Task, 'duration_secs': 0.307339} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2119.444761] env[63279]: DEBUG oslo_vmware.api [None req-934ceb37-5239-47c1-8678-9d43df040bef tempest-ServerMetadataTestJSON-1974700823 tempest-ServerMetadataTestJSON-1974700823-project-member] Task: {'id': task-2087542, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2119.445812] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f424cb10-691e-40bf-8028-d5f2d779b24a {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2119.449718] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2eaa89d-cc6f-4267-9f0e-151703e0a640 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2119.480761] env[63279]: DEBUG nova.virt.hardware [None req-d227dc54-bb84-4c5b-a990-c7df7fd31923 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-13T17:53:56Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='ca62e09f-97fd-4b30-aed4-3874eb2dace8',id=29,is_public=True,memory_mb=192,name='tempest-test_resize_flavor_-212803514',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=30887889-e45b-4f67-8b3c-16216e594a90,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2119.480948] env[63279]: DEBUG nova.virt.hardware [None req-d227dc54-bb84-4c5b-a990-c7df7fd31923 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Flavor limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2119.481148] env[63279]: DEBUG nova.virt.hardware [None req-d227dc54-bb84-4c5b-a990-c7df7fd31923 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Image limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2119.481357] env[63279]: DEBUG nova.virt.hardware [None req-d227dc54-bb84-4c5b-a990-c7df7fd31923 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Flavor pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2119.481514] env[63279]: DEBUG nova.virt.hardware [None req-d227dc54-bb84-4c5b-a990-c7df7fd31923 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Image pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2119.481670] env[63279]: DEBUG nova.virt.hardware [None req-d227dc54-bb84-4c5b-a990-c7df7fd31923 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2119.481900] env[63279]: DEBUG nova.virt.hardware [None req-d227dc54-bb84-4c5b-a990-c7df7fd31923 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63279) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:573}} [ 2119.482109] env[63279]: DEBUG nova.virt.hardware [None req-d227dc54-bb84-4c5b-a990-c7df7fd31923 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 2119.482424] env[63279]: DEBUG nova.virt.hardware [None req-d227dc54-bb84-4c5b-a990-c7df7fd31923 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Got 1 possible topologies {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2119.482658] env[63279]: DEBUG nova.virt.hardware [None req-d227dc54-bb84-4c5b-a990-c7df7fd31923 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2119.482867] env[63279]: DEBUG nova.virt.hardware [None req-d227dc54-bb84-4c5b-a990-c7df7fd31923 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2119.517720] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c5ea270d-6d3f-4491-8ec6-036b97448399 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2119.528587] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07f56cf7-2521-4906-9364-88e5f61017de {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2119.532750] env[63279]: DEBUG oslo_vmware.api [None req-01b37c68-092e-4554-92a7-7422378b5349 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]528b1901-5005-63a4-42c6-ffa5710cf23e, 'name': SearchDatastore_Task, 'duration_secs': 0.076677} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2119.534303] env[63279]: DEBUG oslo_concurrency.lockutils [None req-01b37c68-092e-4554-92a7-7422378b5349 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2119.534737] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-01b37c68-092e-4554-92a7-7422378b5349 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] 668436f9-94e9-48c2-a3d4-3df7bbcf5775/668436f9-94e9-48c2-a3d4-3df7bbcf5775.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2119.537363] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0bc06582-3204-4cc9-98d2-9fe592582179 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2119.544015] env[63279]: DEBUG oslo_vmware.api [None req-d227dc54-bb84-4c5b-a990-c7df7fd31923 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Waiting for the task: (returnval){ [ 2119.544015] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]526aa760-fd28-52c0-7a46-1af880226279" [ 2119.544015] env[63279]: _type = "Task" [ 2119.544015] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2119.560927] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9945b6d-aaca-4a79-b8dd-6617d7d8d903 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2119.569854] env[63279]: DEBUG oslo_vmware.api [None req-01b37c68-092e-4554-92a7-7422378b5349 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Waiting for the task: (returnval){ [ 2119.569854] env[63279]: value = "task-2087543" [ 2119.569854] env[63279]: _type = "Task" [ 2119.569854] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2119.597054] env[63279]: DEBUG oslo_vmware.api [None req-5e3f5540-3a91-4676-9ed7-83f8c35479b1 tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] Task: {'id': task-2087540, 'name': PowerOnVM_Task, 'duration_secs': 0.997933} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2119.597629] env[63279]: DEBUG nova.compute.provider_tree [None req-703b4c29-4773-4b34-915e-6bee4bb47aad tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Inventory has not changed in ProviderTree for provider: 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2119.599120] env[63279]: DEBUG oslo_vmware.api [None req-d227dc54-bb84-4c5b-a990-c7df7fd31923 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]526aa760-fd28-52c0-7a46-1af880226279, 'name': SearchDatastore_Task, 'duration_secs': 0.008655} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2119.600148] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-5e3f5540-3a91-4676-9ed7-83f8c35479b1 tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] [instance: ee1b4746-49ac-425c-8219-4d54cb34abe0] Powered on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2119.600386] env[63279]: INFO nova.compute.manager [None req-5e3f5540-3a91-4676-9ed7-83f8c35479b1 tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] [instance: ee1b4746-49ac-425c-8219-4d54cb34abe0] Took 15.23 seconds to spawn the instance on the hypervisor. [ 2119.600647] env[63279]: DEBUG nova.compute.manager [None req-5e3f5540-3a91-4676-9ed7-83f8c35479b1 tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] [instance: ee1b4746-49ac-425c-8219-4d54cb34abe0] Checking state {{(pid=63279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2119.606420] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-d227dc54-bb84-4c5b-a990-c7df7fd31923 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] [instance: f6a5d157-d58c-4c7e-b6e8-f2dc7aaebd9b] Reconfiguring VM instance instance-00000036 to detach disk 2000 {{(pid=63279) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 2119.610950] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12d32cd3-dabe-49ec-9610-d27ecd677e91 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2119.613635] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d9691710-246f-4769-b60f-bd663eb1cc2f {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2119.627529] env[63279]: DEBUG oslo_vmware.api [None req-01b37c68-092e-4554-92a7-7422378b5349 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Task: {'id': task-2087543, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2119.636374] env[63279]: DEBUG oslo_vmware.api [None req-d227dc54-bb84-4c5b-a990-c7df7fd31923 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Waiting for the task: (returnval){ [ 2119.636374] env[63279]: value = "task-2087544" [ 2119.636374] env[63279]: _type = "Task" [ 2119.636374] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2119.645683] env[63279]: DEBUG oslo_vmware.api [None req-d227dc54-bb84-4c5b-a990-c7df7fd31923 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Task: {'id': task-2087544, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2119.930071] env[63279]: DEBUG nova.compute.manager [None req-d83e173e-3a00-46ae-8eed-94284cfc23f8 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] [instance: 4ce17bdb-4bed-4e06-af13-e4097b55e17d] Start spawning the instance on the hypervisor. {{(pid=63279) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 2119.938338] env[63279]: DEBUG oslo_vmware.api [None req-934ceb37-5239-47c1-8678-9d43df040bef tempest-ServerMetadataTestJSON-1974700823 tempest-ServerMetadataTestJSON-1974700823-project-member] Task: {'id': task-2087542, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.089085} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2119.938655] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-934ceb37-5239-47c1-8678-9d43df040bef tempest-ServerMetadataTestJSON-1974700823 tempest-ServerMetadataTestJSON-1974700823-project-member] [instance: a0697601-46ae-48ce-a3e1-3c4b81fc1f95] Extended root virtual disk {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2119.939528] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8fb3181-f2de-4d8f-9aa2-e2474d2f538f {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2119.963644] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-934ceb37-5239-47c1-8678-9d43df040bef tempest-ServerMetadataTestJSON-1974700823 tempest-ServerMetadataTestJSON-1974700823-project-member] [instance: a0697601-46ae-48ce-a3e1-3c4b81fc1f95] Reconfiguring VM instance instance-0000004a to attach disk [datastore1] a0697601-46ae-48ce-a3e1-3c4b81fc1f95/a0697601-46ae-48ce-a3e1-3c4b81fc1f95.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2119.966177] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d5f1fa6e-18ea-4012-ad5f-529a0f074232 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2119.987123] env[63279]: DEBUG oslo_vmware.api [None req-934ceb37-5239-47c1-8678-9d43df040bef tempest-ServerMetadataTestJSON-1974700823 tempest-ServerMetadataTestJSON-1974700823-project-member] Waiting for the task: (returnval){ [ 2119.987123] env[63279]: value = "task-2087545" [ 2119.987123] env[63279]: _type = "Task" [ 2119.987123] env[63279]: } to complete. 
{{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2119.989448] env[63279]: DEBUG nova.virt.hardware [None req-d83e173e-3a00-46ae-8eed-94284cfc23f8 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-13T17:30:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-13T17:30:01Z,direct_url=,disk_format='vmdk',id=30887889-e45b-4f67-8b3c-16216e594a90,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a935e86eee0a4c38adfe0367d2097a61',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-13T17:30:02Z,virtual_size=,visibility=), allow threads: False {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2119.989697] env[63279]: DEBUG nova.virt.hardware [None req-d83e173e-3a00-46ae-8eed-94284cfc23f8 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Flavor limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2119.989876] env[63279]: DEBUG nova.virt.hardware [None req-d83e173e-3a00-46ae-8eed-94284cfc23f8 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Image limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2119.990226] env[63279]: DEBUG nova.virt.hardware [None req-d83e173e-3a00-46ae-8eed-94284cfc23f8 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Flavor pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2119.990226] env[63279]: DEBUG nova.virt.hardware [None req-d83e173e-3a00-46ae-8eed-94284cfc23f8 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Image pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2119.990395] env[63279]: DEBUG nova.virt.hardware [None req-d83e173e-3a00-46ae-8eed-94284cfc23f8 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2119.990618] env[63279]: DEBUG nova.virt.hardware [None req-d83e173e-3a00-46ae-8eed-94284cfc23f8 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 2119.990781] env[63279]: DEBUG nova.virt.hardware [None req-d83e173e-3a00-46ae-8eed-94284cfc23f8 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63279) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 2119.990954] env[63279]: DEBUG nova.virt.hardware [None req-d83e173e-3a00-46ae-8eed-94284cfc23f8 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Got 1 possible topologies {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2119.991137] env[63279]: DEBUG nova.virt.hardware [None req-d83e173e-3a00-46ae-8eed-94284cfc23f8 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2119.991318] env[63279]: DEBUG nova.virt.hardware [None req-d83e173e-3a00-46ae-8eed-94284cfc23f8 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2119.992232] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dea89c2a-5d46-42cf-95cc-64410e124cfc {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2120.006781] env[63279]: DEBUG oslo_vmware.api [None req-934ceb37-5239-47c1-8678-9d43df040bef tempest-ServerMetadataTestJSON-1974700823 tempest-ServerMetadataTestJSON-1974700823-project-member] Task: {'id': task-2087545, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2120.007766] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3aae7e8e-df8e-468b-b445-55acf584e1e3 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2120.081094] env[63279]: DEBUG oslo_vmware.api [None req-01b37c68-092e-4554-92a7-7422378b5349 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Task: {'id': task-2087543, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2120.101378] env[63279]: DEBUG nova.scheduler.client.report [None req-703b4c29-4773-4b34-915e-6bee4bb47aad tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Inventory has not changed for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2120.151618] env[63279]: DEBUG oslo_vmware.api [None req-d227dc54-bb84-4c5b-a990-c7df7fd31923 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Task: {'id': task-2087544, 'name': ReconfigVM_Task, 'duration_secs': 0.249641} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2120.152222] env[63279]: INFO nova.compute.manager [None req-5e3f5540-3a91-4676-9ed7-83f8c35479b1 tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] [instance: ee1b4746-49ac-425c-8219-4d54cb34abe0] Took 56.14 seconds to build instance. [ 2120.153127] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-d227dc54-bb84-4c5b-a990-c7df7fd31923 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] [instance: f6a5d157-d58c-4c7e-b6e8-f2dc7aaebd9b] Reconfigured VM instance instance-00000036 to detach disk 2000 {{(pid=63279) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 2120.154366] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae8a6e89-6ea6-4487-b56b-02677055fec0 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2120.176912] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-d227dc54-bb84-4c5b-a990-c7df7fd31923 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] [instance: f6a5d157-d58c-4c7e-b6e8-f2dc7aaebd9b] Reconfiguring VM instance instance-00000036 to attach disk [datastore1] f6a5d157-d58c-4c7e-b6e8-f2dc7aaebd9b/f6a5d157-d58c-4c7e-b6e8-f2dc7aaebd9b.vmdk or device None with type thin {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2120.177936] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6a31da21-4a92-412d-a53d-fecd80d29aff {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2120.199027] env[63279]: DEBUG oslo_vmware.api [None req-d227dc54-bb84-4c5b-a990-c7df7fd31923 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Waiting for the task: (returnval){ [ 2120.199027] env[63279]: value = "task-2087546" [ 2120.199027] env[63279]: _type = "Task" [ 2120.199027] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2120.205663] env[63279]: DEBUG oslo_vmware.api [None req-d227dc54-bb84-4c5b-a990-c7df7fd31923 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Task: {'id': task-2087546, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2120.501802] env[63279]: DEBUG oslo_vmware.api [None req-934ceb37-5239-47c1-8678-9d43df040bef tempest-ServerMetadataTestJSON-1974700823 tempest-ServerMetadataTestJSON-1974700823-project-member] Task: {'id': task-2087545, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2120.572734] env[63279]: DEBUG nova.compute.manager [req-e5bbfdf3-cb84-45f1-8624-aae7f4d89f53 req-12459014-cfa1-477c-8e38-d2c5cab0e58b service nova] [instance: 4ce17bdb-4bed-4e06-af13-e4097b55e17d] Received event network-vif-plugged-72b975f4-3f92-4d65-8d8f-119eeb59c2ab {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2120.573016] env[63279]: DEBUG oslo_concurrency.lockutils [req-e5bbfdf3-cb84-45f1-8624-aae7f4d89f53 req-12459014-cfa1-477c-8e38-d2c5cab0e58b service nova] Acquiring lock "4ce17bdb-4bed-4e06-af13-e4097b55e17d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2120.573281] env[63279]: DEBUG oslo_concurrency.lockutils [req-e5bbfdf3-cb84-45f1-8624-aae7f4d89f53 req-12459014-cfa1-477c-8e38-d2c5cab0e58b service nova] Lock "4ce17bdb-4bed-4e06-af13-e4097b55e17d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2120.573492] env[63279]: DEBUG oslo_concurrency.lockutils [req-e5bbfdf3-cb84-45f1-8624-aae7f4d89f53 req-12459014-cfa1-477c-8e38-d2c5cab0e58b service nova] Lock "4ce17bdb-4bed-4e06-af13-e4097b55e17d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2120.573704] env[63279]: DEBUG nova.compute.manager [req-e5bbfdf3-cb84-45f1-8624-aae7f4d89f53 req-12459014-cfa1-477c-8e38-d2c5cab0e58b service nova] [instance: 4ce17bdb-4bed-4e06-af13-e4097b55e17d] No waiting events found dispatching network-vif-plugged-72b975f4-3f92-4d65-8d8f-119eeb59c2ab {{(pid=63279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 2120.573898] env[63279]: WARNING nova.compute.manager [req-e5bbfdf3-cb84-45f1-8624-aae7f4d89f53 req-12459014-cfa1-477c-8e38-d2c5cab0e58b service nova] [instance: 4ce17bdb-4bed-4e06-af13-e4097b55e17d] Received unexpected event network-vif-plugged-72b975f4-3f92-4d65-8d8f-119eeb59c2ab for instance with vm_state building and task_state spawning. [ 2120.584087] env[63279]: DEBUG oslo_vmware.api [None req-01b37c68-092e-4554-92a7-7422378b5349 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Task: {'id': task-2087543, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.530097} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2120.585615] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-01b37c68-092e-4554-92a7-7422378b5349 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] 668436f9-94e9-48c2-a3d4-3df7bbcf5775/668436f9-94e9-48c2-a3d4-3df7bbcf5775.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2120.585615] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-01b37c68-092e-4554-92a7-7422378b5349 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] [instance: 668436f9-94e9-48c2-a3d4-3df7bbcf5775] Extending root virtual disk to 1048576 {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2120.585615] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-8394c0c2-746a-4fe6-9dc0-0904c6600fb3 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2120.592680] env[63279]: DEBUG oslo_vmware.api [None req-01b37c68-092e-4554-92a7-7422378b5349 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Waiting for the task: (returnval){ [ 2120.592680] env[63279]: value = "task-2087547" [ 2120.592680] env[63279]: _type = "Task" [ 2120.592680] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2120.603290] env[63279]: DEBUG oslo_vmware.api [None req-01b37c68-092e-4554-92a7-7422378b5349 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Task: {'id': task-2087547, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2120.607373] env[63279]: DEBUG oslo_concurrency.lockutils [None req-703b4c29-4773-4b34-915e-6bee4bb47aad tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.727s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2120.607948] env[63279]: DEBUG nova.compute.manager [None req-703b4c29-4773-4b34-915e-6bee4bb47aad tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] [instance: a6758131-030e-4b33-a2c9-8864055a5bec] Start building networks asynchronously for instance. 
{{(pid=63279) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 2120.610943] env[63279]: DEBUG oslo_concurrency.lockutils [None req-c861fd4b-0567-45ed-bffa-9cb9322793d2 tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 36.716s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2120.611098] env[63279]: DEBUG nova.objects.instance [None req-c861fd4b-0567-45ed-bffa-9cb9322793d2 tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] [instance: df963c29-a1c4-4f28-be95-cafe3af4d2fa] Trying to apply a migration context that does not seem to be set for this instance {{(pid=63279) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 2120.655211] env[63279]: DEBUG oslo_concurrency.lockutils [None req-5e3f5540-3a91-4676-9ed7-83f8c35479b1 tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] Lock "ee1b4746-49ac-425c-8219-4d54cb34abe0" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 79.798s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2120.657096] env[63279]: DEBUG nova.network.neutron [None req-d83e173e-3a00-46ae-8eed-94284cfc23f8 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] [instance: 4ce17bdb-4bed-4e06-af13-e4097b55e17d] Successfully updated port: 72b975f4-3f92-4d65-8d8f-119eeb59c2ab {{(pid=63279) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2120.713292] env[63279]: DEBUG oslo_vmware.api [None req-d227dc54-bb84-4c5b-a990-c7df7fd31923 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Task: {'id': task-2087546, 'name': ReconfigVM_Task, 'duration_secs': 0.496905} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2120.713292] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-d227dc54-bb84-4c5b-a990-c7df7fd31923 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] [instance: f6a5d157-d58c-4c7e-b6e8-f2dc7aaebd9b] Reconfigured VM instance instance-00000036 to attach disk [datastore1] f6a5d157-d58c-4c7e-b6e8-f2dc7aaebd9b/f6a5d157-d58c-4c7e-b6e8-f2dc7aaebd9b.vmdk or device None with type thin {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2120.713292] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f38abf8b-dce9-4bdc-b369-c5665c512364 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2120.735804] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eaf34d42-5aaa-4160-837e-f0611f3e1d67 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2120.760079] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee1a6547-2d74-4fa8-9356-4aa99149f68e {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2120.779544] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1489dade-5677-4d97-8673-f9dd200371ca {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2120.787078] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-d227dc54-bb84-4c5b-a990-c7df7fd31923 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] [instance: f6a5d157-d58c-4c7e-b6e8-f2dc7aaebd9b] Powering on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2120.787983] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-bec5ac50-47ab-4e82-b7c4-45cdf3cd2fc8 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2120.793698] env[63279]: DEBUG oslo_vmware.api [None req-d227dc54-bb84-4c5b-a990-c7df7fd31923 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Waiting for the task: (returnval){ [ 2120.793698] env[63279]: value = "task-2087548" [ 2120.793698] env[63279]: _type = "Task" [ 2120.793698] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2120.801717] env[63279]: DEBUG oslo_vmware.api [None req-d227dc54-bb84-4c5b-a990-c7df7fd31923 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Task: {'id': task-2087548, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2121.002108] env[63279]: DEBUG oslo_vmware.api [None req-934ceb37-5239-47c1-8678-9d43df040bef tempest-ServerMetadataTestJSON-1974700823 tempest-ServerMetadataTestJSON-1974700823-project-member] Task: {'id': task-2087545, 'name': ReconfigVM_Task, 'duration_secs': 0.575838} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2121.002380] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-934ceb37-5239-47c1-8678-9d43df040bef tempest-ServerMetadataTestJSON-1974700823 tempest-ServerMetadataTestJSON-1974700823-project-member] [instance: a0697601-46ae-48ce-a3e1-3c4b81fc1f95] Reconfigured VM instance instance-0000004a to attach disk [datastore1] a0697601-46ae-48ce-a3e1-3c4b81fc1f95/a0697601-46ae-48ce-a3e1-3c4b81fc1f95.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2121.003032] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-4924a049-2a93-4fbc-8385-ac5787b8aa84 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2121.009486] env[63279]: DEBUG oslo_vmware.api [None req-934ceb37-5239-47c1-8678-9d43df040bef tempest-ServerMetadataTestJSON-1974700823 tempest-ServerMetadataTestJSON-1974700823-project-member] Waiting for the task: (returnval){ [ 2121.009486] env[63279]: value = "task-2087549" [ 2121.009486] env[63279]: _type = "Task" [ 2121.009486] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2121.017655] env[63279]: DEBUG oslo_vmware.api [None req-934ceb37-5239-47c1-8678-9d43df040bef tempest-ServerMetadataTestJSON-1974700823 tempest-ServerMetadataTestJSON-1974700823-project-member] Task: {'id': task-2087549, 'name': Rename_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2121.103334] env[63279]: DEBUG oslo_vmware.api [None req-01b37c68-092e-4554-92a7-7422378b5349 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Task: {'id': task-2087547, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.080921} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2121.103616] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-01b37c68-092e-4554-92a7-7422378b5349 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] [instance: 668436f9-94e9-48c2-a3d4-3df7bbcf5775] Extended root virtual disk {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2121.104602] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cae8a9f1-ef5f-45b8-90e0-98d4fcc0d860 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2121.119652] env[63279]: DEBUG nova.compute.utils [None req-703b4c29-4773-4b34-915e-6bee4bb47aad tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Using /dev/sd instead of None {{(pid=63279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2121.132250] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-01b37c68-092e-4554-92a7-7422378b5349 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] [instance: 668436f9-94e9-48c2-a3d4-3df7bbcf5775] Reconfiguring VM instance instance-00000049 to attach disk [datastore1] 668436f9-94e9-48c2-a3d4-3df7bbcf5775/668436f9-94e9-48c2-a3d4-3df7bbcf5775.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2121.133682] env[63279]: DEBUG nova.compute.manager [None req-703b4c29-4773-4b34-915e-6bee4bb47aad tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] [instance: a6758131-030e-4b33-a2c9-8864055a5bec] Allocating IP information in the background. {{(pid=63279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 2121.133862] env[63279]: DEBUG nova.network.neutron [None req-703b4c29-4773-4b34-915e-6bee4bb47aad tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] [instance: a6758131-030e-4b33-a2c9-8864055a5bec] allocate_for_instance() {{(pid=63279) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2121.135878] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-96e0fcc3-055d-49e8-a4f8-d6137c3ccaae {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2121.157059] env[63279]: DEBUG oslo_vmware.api [None req-01b37c68-092e-4554-92a7-7422378b5349 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Waiting for the task: (returnval){ [ 2121.157059] env[63279]: value = "task-2087550" [ 2121.157059] env[63279]: _type = "Task" [ 2121.157059] env[63279]: } to complete. 
{{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2121.160641] env[63279]: DEBUG oslo_concurrency.lockutils [None req-d83e173e-3a00-46ae-8eed-94284cfc23f8 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Acquiring lock "refresh_cache-4ce17bdb-4bed-4e06-af13-e4097b55e17d" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2121.160794] env[63279]: DEBUG oslo_concurrency.lockutils [None req-d83e173e-3a00-46ae-8eed-94284cfc23f8 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Acquired lock "refresh_cache-4ce17bdb-4bed-4e06-af13-e4097b55e17d" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2121.160911] env[63279]: DEBUG nova.network.neutron [None req-d83e173e-3a00-46ae-8eed-94284cfc23f8 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] [instance: 4ce17bdb-4bed-4e06-af13-e4097b55e17d] Building network info cache for instance {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2121.168732] env[63279]: DEBUG oslo_vmware.api [None req-01b37c68-092e-4554-92a7-7422378b5349 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Task: {'id': task-2087550, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2121.191203] env[63279]: DEBUG nova.policy [None req-703b4c29-4773-4b34-915e-6bee4bb47aad tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3c15683ebb604c9ba7d18e112476a8a5', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '1fc9b60ae304455097b8be9a276796fa', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63279) authorize /opt/stack/nova/nova/policy.py:192}} [ 2121.304620] env[63279]: DEBUG oslo_vmware.api [None req-d227dc54-bb84-4c5b-a990-c7df7fd31923 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Task: {'id': task-2087548, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2121.449711] env[63279]: DEBUG oslo_concurrency.lockutils [None req-e4649b36-486a-4b0f-ad87-78c041dbfc0c tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] Acquiring lock "ee1b4746-49ac-425c-8219-4d54cb34abe0" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2121.450126] env[63279]: DEBUG oslo_concurrency.lockutils [None req-e4649b36-486a-4b0f-ad87-78c041dbfc0c tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] Lock "ee1b4746-49ac-425c-8219-4d54cb34abe0" acquired by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: waited 0.001s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2121.450501] env[63279]: INFO nova.compute.manager [None req-e4649b36-486a-4b0f-ad87-78c041dbfc0c tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] [instance: ee1b4746-49ac-425c-8219-4d54cb34abe0] Rebooting instance [ 2121.487930] env[63279]: DEBUG nova.network.neutron [None req-703b4c29-4773-4b34-915e-6bee4bb47aad tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] [instance: a6758131-030e-4b33-a2c9-8864055a5bec] Successfully created port: 376a7f06-34a9-4c6e-934f-5470b0a04549 {{(pid=63279) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2121.521346] env[63279]: DEBUG oslo_vmware.api [None req-934ceb37-5239-47c1-8678-9d43df040bef tempest-ServerMetadataTestJSON-1974700823 tempest-ServerMetadataTestJSON-1974700823-project-member] Task: {'id': task-2087549, 'name': Rename_Task, 'duration_secs': 0.322189} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2121.521717] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-934ceb37-5239-47c1-8678-9d43df040bef tempest-ServerMetadataTestJSON-1974700823 tempest-ServerMetadataTestJSON-1974700823-project-member] [instance: a0697601-46ae-48ce-a3e1-3c4b81fc1f95] Powering on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2121.521980] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0c9cf3e4-1b69-490a-8ef7-b0777413f60b {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2121.528609] env[63279]: DEBUG oslo_vmware.api [None req-934ceb37-5239-47c1-8678-9d43df040bef tempest-ServerMetadataTestJSON-1974700823 tempest-ServerMetadataTestJSON-1974700823-project-member] Waiting for the task: (returnval){ [ 2121.528609] env[63279]: value = "task-2087551" [ 2121.528609] env[63279]: _type = "Task" [ 2121.528609] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2121.537404] env[63279]: DEBUG oslo_vmware.api [None req-934ceb37-5239-47c1-8678-9d43df040bef tempest-ServerMetadataTestJSON-1974700823 tempest-ServerMetadataTestJSON-1974700823-project-member] Task: {'id': task-2087551, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2121.636700] env[63279]: DEBUG nova.compute.manager [None req-703b4c29-4773-4b34-915e-6bee4bb47aad tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] [instance: a6758131-030e-4b33-a2c9-8864055a5bec] Start building block device mappings for instance. {{(pid=63279) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 2121.638609] env[63279]: DEBUG oslo_concurrency.lockutils [None req-c861fd4b-0567-45ed-bffa-9cb9322793d2 tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.028s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2121.641033] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 36.046s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2121.676069] env[63279]: DEBUG oslo_vmware.api [None req-01b37c68-092e-4554-92a7-7422378b5349 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Task: {'id': task-2087550, 'name': ReconfigVM_Task, 'duration_secs': 0.51108} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2121.676069] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-01b37c68-092e-4554-92a7-7422378b5349 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] [instance: 668436f9-94e9-48c2-a3d4-3df7bbcf5775] Reconfigured VM instance instance-00000049 to attach disk [datastore1] 668436f9-94e9-48c2-a3d4-3df7bbcf5775/668436f9-94e9-48c2-a3d4-3df7bbcf5775.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2121.676069] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e7513ac1-7e6d-4484-940f-8365eafe5ff1 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2121.684370] env[63279]: DEBUG oslo_vmware.api [None req-01b37c68-092e-4554-92a7-7422378b5349 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Waiting for the task: (returnval){ [ 2121.684370] env[63279]: value = "task-2087552" [ 2121.684370] env[63279]: _type = "Task" [ 2121.684370] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2121.695142] env[63279]: DEBUG oslo_vmware.api [None req-01b37c68-092e-4554-92a7-7422378b5349 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Task: {'id': task-2087552, 'name': Rename_Task} progress is 5%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2121.730213] env[63279]: DEBUG nova.network.neutron [None req-d83e173e-3a00-46ae-8eed-94284cfc23f8 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] [instance: 4ce17bdb-4bed-4e06-af13-e4097b55e17d] Instance cache missing network info. {{(pid=63279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2121.805820] env[63279]: DEBUG oslo_vmware.api [None req-d227dc54-bb84-4c5b-a990-c7df7fd31923 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Task: {'id': task-2087548, 'name': PowerOnVM_Task, 'duration_secs': 0.699743} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2121.806236] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-d227dc54-bb84-4c5b-a990-c7df7fd31923 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] [instance: f6a5d157-d58c-4c7e-b6e8-f2dc7aaebd9b] Powered on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2121.969143] env[63279]: DEBUG nova.network.neutron [None req-d83e173e-3a00-46ae-8eed-94284cfc23f8 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] [instance: 4ce17bdb-4bed-4e06-af13-e4097b55e17d] Updating instance_info_cache with network_info: [{"id": "72b975f4-3f92-4d65-8d8f-119eeb59c2ab", "address": "fa:16:3e:f0:d6:f2", "network": {"id": "c1d7406d-6852-47cd-a4a3-de7373d03ab4", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1990733857-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1fc9b60ae304455097b8be9a276796fa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0549820d-5649-40bc-ad6e-9ae27b384d90", "external-id": "nsx-vlan-transportzone-434", "segmentation_id": 434, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap72b975f4-3f", "ovs_interfaceid": "72b975f4-3f92-4d65-8d8f-119eeb59c2ab", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2121.972639] env[63279]: DEBUG oslo_concurrency.lockutils [None req-e4649b36-486a-4b0f-ad87-78c041dbfc0c tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] Acquiring lock "refresh_cache-ee1b4746-49ac-425c-8219-4d54cb34abe0" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2121.972866] env[63279]: DEBUG oslo_concurrency.lockutils [None req-e4649b36-486a-4b0f-ad87-78c041dbfc0c tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] Acquired lock "refresh_cache-ee1b4746-49ac-425c-8219-4d54cb34abe0" {{(pid=63279) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2121.973174] env[63279]: DEBUG nova.network.neutron [None req-e4649b36-486a-4b0f-ad87-78c041dbfc0c tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] [instance: ee1b4746-49ac-425c-8219-4d54cb34abe0] Building network info cache for instance {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2122.039148] env[63279]: DEBUG oslo_vmware.api [None req-934ceb37-5239-47c1-8678-9d43df040bef tempest-ServerMetadataTestJSON-1974700823 tempest-ServerMetadataTestJSON-1974700823-project-member] Task: {'id': task-2087551, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2122.194289] env[63279]: DEBUG oslo_vmware.api [None req-01b37c68-092e-4554-92a7-7422378b5349 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Task: {'id': task-2087552, 'name': Rename_Task, 'duration_secs': 0.211864} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2122.194641] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-01b37c68-092e-4554-92a7-7422378b5349 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] [instance: 668436f9-94e9-48c2-a3d4-3df7bbcf5775] Powering on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2122.194867] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-baba3d2e-8e4a-454d-9b71-64554d0a05b3 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2122.201739] env[63279]: DEBUG oslo_vmware.api [None req-01b37c68-092e-4554-92a7-7422378b5349 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Waiting for the task: (returnval){ [ 2122.201739] env[63279]: value = "task-2087553" [ 2122.201739] env[63279]: _type = "Task" [ 2122.201739] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2122.211899] env[63279]: DEBUG oslo_vmware.api [None req-01b37c68-092e-4554-92a7-7422378b5349 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Task: {'id': task-2087553, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2122.473484] env[63279]: DEBUG oslo_concurrency.lockutils [None req-d83e173e-3a00-46ae-8eed-94284cfc23f8 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Releasing lock "refresh_cache-4ce17bdb-4bed-4e06-af13-e4097b55e17d" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2122.473484] env[63279]: DEBUG nova.compute.manager [None req-d83e173e-3a00-46ae-8eed-94284cfc23f8 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] [instance: 4ce17bdb-4bed-4e06-af13-e4097b55e17d] Instance network_info: |[{"id": "72b975f4-3f92-4d65-8d8f-119eeb59c2ab", "address": "fa:16:3e:f0:d6:f2", "network": {"id": "c1d7406d-6852-47cd-a4a3-de7373d03ab4", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1990733857-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1fc9b60ae304455097b8be9a276796fa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0549820d-5649-40bc-ad6e-9ae27b384d90", "external-id": "nsx-vlan-transportzone-434", "segmentation_id": 434, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap72b975f4-3f", "ovs_interfaceid": "72b975f4-3f92-4d65-8d8f-119eeb59c2ab", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 2122.473785] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-d83e173e-3a00-46ae-8eed-94284cfc23f8 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] [instance: 4ce17bdb-4bed-4e06-af13-e4097b55e17d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f0:d6:f2', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '0549820d-5649-40bc-ad6e-9ae27b384d90', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '72b975f4-3f92-4d65-8d8f-119eeb59c2ab', 'vif_model': 'vmxnet3'}] {{(pid=63279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2122.487388] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-d83e173e-3a00-46ae-8eed-94284cfc23f8 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Creating folder: Project (1fc9b60ae304455097b8be9a276796fa). Parent ref: group-v427491. 
{{(pid=63279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 2122.491505] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-2fe29389-5810-42cf-a6e0-d10c57e86b98 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2122.503102] env[63279]: INFO nova.virt.vmwareapi.vm_util [None req-d83e173e-3a00-46ae-8eed-94284cfc23f8 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Created folder: Project (1fc9b60ae304455097b8be9a276796fa) in parent group-v427491. [ 2122.503347] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-d83e173e-3a00-46ae-8eed-94284cfc23f8 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Creating folder: Instances. Parent ref: group-v427700. {{(pid=63279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 2122.503660] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-27807d0c-1fb9-4acf-bf3c-73a91496b075 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2122.515789] env[63279]: INFO nova.virt.vmwareapi.vm_util [None req-d83e173e-3a00-46ae-8eed-94284cfc23f8 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Created folder: Instances in parent group-v427700. [ 2122.516051] env[63279]: DEBUG oslo.service.loopingcall [None req-d83e173e-3a00-46ae-8eed-94284cfc23f8 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2122.516288] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4ce17bdb-4bed-4e06-af13-e4097b55e17d] Creating VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2122.516581] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9c076b4b-a29e-4411-a283-c605ea2d7e74 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2122.544240] env[63279]: DEBUG oslo_vmware.api [None req-934ceb37-5239-47c1-8678-9d43df040bef tempest-ServerMetadataTestJSON-1974700823 tempest-ServerMetadataTestJSON-1974700823-project-member] Task: {'id': task-2087551, 'name': PowerOnVM_Task} progress is 100%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2122.546135] env[63279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2122.546135] env[63279]: value = "task-2087556" [ 2122.546135] env[63279]: _type = "Task" [ 2122.546135] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2122.554893] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087556, 'name': CreateVM_Task} progress is 5%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2122.635044] env[63279]: DEBUG nova.compute.manager [req-77bc0d3c-6aba-428b-afbe-86c0f04db8f2 req-d20ccaa0-217d-45ae-9aa5-8f4c845d0b3a service nova] [instance: 4ce17bdb-4bed-4e06-af13-e4097b55e17d] Received event network-changed-72b975f4-3f92-4d65-8d8f-119eeb59c2ab {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2122.635249] env[63279]: DEBUG nova.compute.manager [req-77bc0d3c-6aba-428b-afbe-86c0f04db8f2 req-d20ccaa0-217d-45ae-9aa5-8f4c845d0b3a service nova] [instance: 4ce17bdb-4bed-4e06-af13-e4097b55e17d] Refreshing instance network info cache due to event network-changed-72b975f4-3f92-4d65-8d8f-119eeb59c2ab. {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11655}} [ 2122.635477] env[63279]: DEBUG oslo_concurrency.lockutils [req-77bc0d3c-6aba-428b-afbe-86c0f04db8f2 req-d20ccaa0-217d-45ae-9aa5-8f4c845d0b3a service nova] Acquiring lock "refresh_cache-4ce17bdb-4bed-4e06-af13-e4097b55e17d" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2122.635624] env[63279]: DEBUG oslo_concurrency.lockutils [req-77bc0d3c-6aba-428b-afbe-86c0f04db8f2 req-d20ccaa0-217d-45ae-9aa5-8f4c845d0b3a service nova] Acquired lock "refresh_cache-4ce17bdb-4bed-4e06-af13-e4097b55e17d" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2122.635802] env[63279]: DEBUG nova.network.neutron [req-77bc0d3c-6aba-428b-afbe-86c0f04db8f2 req-d20ccaa0-217d-45ae-9aa5-8f4c845d0b3a service nova] [instance: 4ce17bdb-4bed-4e06-af13-e4097b55e17d] Refreshing network info cache for port 72b975f4-3f92-4d65-8d8f-119eeb59c2ab {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2122.659478] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Applying migration context for instance b981ac83-6c23-4d44-bd28-12da30d746bd as it has an incoming, in-progress migration 44650656-4b9e-403b-9bcc-56f29df93e8b. Migration status is confirming {{(pid=63279) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1016}} [ 2122.663165] env[63279]: INFO nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] [instance: b981ac83-6c23-4d44-bd28-12da30d746bd] Updating resource usage from migration 44650656-4b9e-403b-9bcc-56f29df93e8b [ 2122.667529] env[63279]: DEBUG nova.compute.manager [None req-703b4c29-4773-4b34-915e-6bee4bb47aad tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] [instance: a6758131-030e-4b33-a2c9-8864055a5bec] Start spawning the instance on the hypervisor. {{(pid=63279) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 2122.690475] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Instance 6388f912-ae70-4e8f-b8e4-ceb02e0f8a51 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2122.690618] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Instance 4a9088e0-2992-4b18-8be9-6bc70633369b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2122.690842] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Instance 32dbef6d-d314-4fa6-972a-e7b1f22eb11d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2122.690842] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Instance 5656c853-ac83-47be-83c4-979a9e87ab91 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=63279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2122.690996] env[63279]: WARNING nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Instance c1ac4af5-b01e-4175-844f-7a67b2ef7526 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 2122.691136] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Instance f375b54b-f9de-4529-b752-52c240aed532 actively managed on this compute host and has allocations in placement: {'resources': {'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2122.691253] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Instance eca98392-98be-405b-b799-463ef9ee3dc8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2122.691370] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Instance 0e12ab9b-a701-4e0f-9d96-939090f50494 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 2, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2122.691482] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Instance 5b5f87cb-cf35-418f-b5bd-b953524a285c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2122.691616] env[63279]: WARNING nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Instance cd00cb0e-30e5-4a0c-8612-ea92e5e32edd is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 2122.691753] env[63279]: WARNING nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Instance fd9b1666-8e06-4ed0-9187-05a40e136a1d is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 2122.691967] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Instance 6699de0a-b3f8-4d84-9c9b-d0f6899a606e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2122.691967] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Instance a15141bc-424d-48ca-a6d5-c859a3639a0b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2122.692111] env[63279]: WARNING nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Instance df963c29-a1c4-4f28-be95-cafe3af4d2fa is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 2122.692255] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Instance f6a5d157-d58c-4c7e-b6e8-f2dc7aaebd9b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2122.692368] env[63279]: WARNING nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Instance 4871421f-0015-4973-bb5f-c9042d411c82 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 2122.692494] env[63279]: WARNING nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Instance 246f0945-7290-4cb7-a982-b17cb1573002 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. 
[ 2122.692611] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Instance c8b42e3b-b841-4b79-a4f3-ef62577d4902 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2122.692735] env[63279]: WARNING nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Instance f7f88f1a-a81a-4208-88d7-6a264e642ab1 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 2122.692848] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Instance f2a68d73-49d6-4b38-aff1-c2eb850f2ca6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2122.692955] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Instance fcdd3076-2b53-4850-9730-2f877e2cabfd actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2122.693080] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Instance ee1b4746-49ac-425c-8219-4d54cb34abe0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2122.693212] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Migration 44650656-4b9e-403b-9bcc-56f29df93e8b is active on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1712}} [ 2122.693298] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Instance b981ac83-6c23-4d44-bd28-12da30d746bd actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=63279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2122.693406] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Instance 668436f9-94e9-48c2-a3d4-3df7bbcf5775 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2122.693513] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Instance a0697601-46ae-48ce-a3e1-3c4b81fc1f95 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2122.697158] env[63279]: DEBUG nova.virt.hardware [None req-703b4c29-4773-4b34-915e-6bee4bb47aad tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-13T17:30:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-13T17:30:01Z,direct_url=,disk_format='vmdk',id=30887889-e45b-4f67-8b3c-16216e594a90,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a935e86eee0a4c38adfe0367d2097a61',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-13T17:30:02Z,virtual_size=,visibility=), allow threads: False {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2122.697431] env[63279]: DEBUG nova.virt.hardware [None req-703b4c29-4773-4b34-915e-6bee4bb47aad tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Flavor limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2122.697579] env[63279]: DEBUG nova.virt.hardware [None req-703b4c29-4773-4b34-915e-6bee4bb47aad tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Image limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2122.697773] env[63279]: DEBUG nova.virt.hardware [None req-703b4c29-4773-4b34-915e-6bee4bb47aad tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Flavor pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2122.697921] env[63279]: DEBUG nova.virt.hardware [None req-703b4c29-4773-4b34-915e-6bee4bb47aad tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Image pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2122.698080] env[63279]: DEBUG nova.virt.hardware [None req-703b4c29-4773-4b34-915e-6bee4bb47aad tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2122.698297] env[63279]: DEBUG nova.virt.hardware [None req-703b4c29-4773-4b34-915e-6bee4bb47aad tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 2122.698461] env[63279]: DEBUG nova.virt.hardware [None req-703b4c29-4773-4b34-915e-6bee4bb47aad tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63279) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 2122.698631] env[63279]: DEBUG nova.virt.hardware [None req-703b4c29-4773-4b34-915e-6bee4bb47aad tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Got 1 possible topologies {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2122.698797] env[63279]: DEBUG nova.virt.hardware [None req-703b4c29-4773-4b34-915e-6bee4bb47aad tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2122.698973] env[63279]: DEBUG nova.virt.hardware [None req-703b4c29-4773-4b34-915e-6bee4bb47aad tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2122.700128] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a971006-4471-4836-b359-1df0dcd16950 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2122.718011] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df91d16a-39de-4508-9b9a-e31b221db4bd {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2122.722265] env[63279]: DEBUG oslo_vmware.api [None req-01b37c68-092e-4554-92a7-7422378b5349 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Task: {'id': task-2087553, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2122.802736] env[63279]: DEBUG nova.network.neutron [None req-e4649b36-486a-4b0f-ad87-78c041dbfc0c tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] [instance: ee1b4746-49ac-425c-8219-4d54cb34abe0] Updating instance_info_cache with network_info: [{"id": "a7b4791a-48da-4f2c-a8f0-debeceec3225", "address": "fa:16:3e:6d:22:31", "network": {"id": "9cfd684f-63e8-44aa-8569-0ab02c790458", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-318205966-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4f07bad15ea5419cbecc5840b4e96d01", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dcf5c3f7-4e33-4f21-b323-3673930b789c", "external-id": "nsx-vlan-transportzone-983", "segmentation_id": 983, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa7b4791a-48", "ovs_interfaceid": "a7b4791a-48da-4f2c-a8f0-debeceec3225", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2122.820066] env[63279]: INFO nova.compute.manager [None req-d227dc54-bb84-4c5b-a990-c7df7fd31923 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] [instance: f6a5d157-d58c-4c7e-b6e8-f2dc7aaebd9b] Updating instance to original state: 'active' [ 2123.042392] env[63279]: DEBUG oslo_vmware.api [None req-934ceb37-5239-47c1-8678-9d43df040bef tempest-ServerMetadataTestJSON-1974700823 tempest-ServerMetadataTestJSON-1974700823-project-member] Task: {'id': task-2087551, 'name': PowerOnVM_Task, 'duration_secs': 1.031172} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2123.042735] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-934ceb37-5239-47c1-8678-9d43df040bef tempest-ServerMetadataTestJSON-1974700823 tempest-ServerMetadataTestJSON-1974700823-project-member] [instance: a0697601-46ae-48ce-a3e1-3c4b81fc1f95] Powered on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2123.043100] env[63279]: INFO nova.compute.manager [None req-934ceb37-5239-47c1-8678-9d43df040bef tempest-ServerMetadataTestJSON-1974700823 tempest-ServerMetadataTestJSON-1974700823-project-member] [instance: a0697601-46ae-48ce-a3e1-3c4b81fc1f95] Took 11.40 seconds to spawn the instance on the hypervisor. 
[ 2123.043370] env[63279]: DEBUG nova.compute.manager [None req-934ceb37-5239-47c1-8678-9d43df040bef tempest-ServerMetadataTestJSON-1974700823 tempest-ServerMetadataTestJSON-1974700823-project-member] [instance: a0697601-46ae-48ce-a3e1-3c4b81fc1f95] Checking state {{(pid=63279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2123.044816] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0aacfe0f-8011-4e82-a780-92a00ceae503 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2123.059365] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087556, 'name': CreateVM_Task} progress is 99%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2123.201015] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Instance 2f5e22f6-ba70-4848-965b-eb1553115323 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 2123.201311] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Instance 4ce17bdb-4bed-4e06-af13-e4097b55e17d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2123.201311] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Instance a6758131-030e-4b33-a2c9-8864055a5bec actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2123.215583] env[63279]: DEBUG oslo_vmware.api [None req-01b37c68-092e-4554-92a7-7422378b5349 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Task: {'id': task-2087553, 'name': PowerOnVM_Task, 'duration_secs': 0.605098} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2123.216069] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-01b37c68-092e-4554-92a7-7422378b5349 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] [instance: 668436f9-94e9-48c2-a3d4-3df7bbcf5775] Powered on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2123.216286] env[63279]: INFO nova.compute.manager [None req-01b37c68-092e-4554-92a7-7422378b5349 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] [instance: 668436f9-94e9-48c2-a3d4-3df7bbcf5775] Took 14.15 seconds to spawn the instance on the hypervisor. 
[ 2123.216465] env[63279]: DEBUG nova.compute.manager [None req-01b37c68-092e-4554-92a7-7422378b5349 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] [instance: 668436f9-94e9-48c2-a3d4-3df7bbcf5775] Checking state {{(pid=63279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2123.217248] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f25bda16-c2d7-4159-b49f-f3e34662fa85 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2123.307878] env[63279]: DEBUG oslo_concurrency.lockutils [None req-e4649b36-486a-4b0f-ad87-78c041dbfc0c tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] Releasing lock "refresh_cache-ee1b4746-49ac-425c-8219-4d54cb34abe0" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2123.418344] env[63279]: DEBUG nova.network.neutron [None req-703b4c29-4773-4b34-915e-6bee4bb47aad tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] [instance: a6758131-030e-4b33-a2c9-8864055a5bec] Successfully updated port: 376a7f06-34a9-4c6e-934f-5470b0a04549 {{(pid=63279) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2123.557756] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087556, 'name': CreateVM_Task, 'duration_secs': 0.574648} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2123.558637] env[63279]: DEBUG nova.network.neutron [req-77bc0d3c-6aba-428b-afbe-86c0f04db8f2 req-d20ccaa0-217d-45ae-9aa5-8f4c845d0b3a service nova] [instance: 4ce17bdb-4bed-4e06-af13-e4097b55e17d] Updated VIF entry in instance network info cache for port 72b975f4-3f92-4d65-8d8f-119eeb59c2ab. 
{{(pid=63279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2123.558958] env[63279]: DEBUG nova.network.neutron [req-77bc0d3c-6aba-428b-afbe-86c0f04db8f2 req-d20ccaa0-217d-45ae-9aa5-8f4c845d0b3a service nova] [instance: 4ce17bdb-4bed-4e06-af13-e4097b55e17d] Updating instance_info_cache with network_info: [{"id": "72b975f4-3f92-4d65-8d8f-119eeb59c2ab", "address": "fa:16:3e:f0:d6:f2", "network": {"id": "c1d7406d-6852-47cd-a4a3-de7373d03ab4", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1990733857-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1fc9b60ae304455097b8be9a276796fa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0549820d-5649-40bc-ad6e-9ae27b384d90", "external-id": "nsx-vlan-transportzone-434", "segmentation_id": 434, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap72b975f4-3f", "ovs_interfaceid": "72b975f4-3f92-4d65-8d8f-119eeb59c2ab", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2123.560132] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4ce17bdb-4bed-4e06-af13-e4097b55e17d] Created VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2123.561544] env[63279]: DEBUG oslo_concurrency.lockutils [None req-d83e173e-3a00-46ae-8eed-94284cfc23f8 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2123.561757] env[63279]: DEBUG oslo_concurrency.lockutils [None req-d83e173e-3a00-46ae-8eed-94284cfc23f8 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2123.562227] env[63279]: DEBUG oslo_concurrency.lockutils [None req-d83e173e-3a00-46ae-8eed-94284cfc23f8 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2123.562818] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-94df4344-d6b1-4ef8-a8ab-4e4963a33933 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2123.574132] env[63279]: DEBUG oslo_vmware.api [None req-d83e173e-3a00-46ae-8eed-94284cfc23f8 tempest-ServerRescueNegativeTestJSON-287528819 
tempest-ServerRescueNegativeTestJSON-287528819-project-member] Waiting for the task: (returnval){ [ 2123.574132] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52669d22-03b1-b8af-fd5a-7fb8f87b83d3" [ 2123.574132] env[63279]: _type = "Task" [ 2123.574132] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2123.574432] env[63279]: INFO nova.compute.manager [None req-934ceb37-5239-47c1-8678-9d43df040bef tempest-ServerMetadataTestJSON-1974700823 tempest-ServerMetadataTestJSON-1974700823-project-member] [instance: a0697601-46ae-48ce-a3e1-3c4b81fc1f95] Took 47.15 seconds to build instance. [ 2123.585643] env[63279]: DEBUG oslo_vmware.api [None req-d83e173e-3a00-46ae-8eed-94284cfc23f8 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52669d22-03b1-b8af-fd5a-7fb8f87b83d3, 'name': SearchDatastore_Task, 'duration_secs': 0.009269} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2123.585869] env[63279]: DEBUG oslo_concurrency.lockutils [None req-d83e173e-3a00-46ae-8eed-94284cfc23f8 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2123.586128] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-d83e173e-3a00-46ae-8eed-94284cfc23f8 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] [instance: 4ce17bdb-4bed-4e06-af13-e4097b55e17d] Processing image 30887889-e45b-4f67-8b3c-16216e594a90 {{(pid=63279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2123.586369] env[63279]: DEBUG oslo_concurrency.lockutils [None req-d83e173e-3a00-46ae-8eed-94284cfc23f8 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2123.586752] env[63279]: DEBUG oslo_concurrency.lockutils [None req-d83e173e-3a00-46ae-8eed-94284cfc23f8 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2123.586752] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-d83e173e-3a00-46ae-8eed-94284cfc23f8 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2123.586944] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e677b16c-d600-4772-9748-cc88d6547d4f {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2123.594648] 
env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-d83e173e-3a00-46ae-8eed-94284cfc23f8 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2123.594850] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-d83e173e-3a00-46ae-8eed-94284cfc23f8 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2123.595580] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-212238c0-4fbd-46ec-bdeb-e8ca1d4bf150 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2123.602303] env[63279]: DEBUG oslo_vmware.api [None req-d83e173e-3a00-46ae-8eed-94284cfc23f8 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Waiting for the task: (returnval){ [ 2123.602303] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52eb3777-4f1d-05c3-e0e7-4afce4cfd800" [ 2123.602303] env[63279]: _type = "Task" [ 2123.602303] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2123.610045] env[63279]: DEBUG oslo_vmware.api [None req-d83e173e-3a00-46ae-8eed-94284cfc23f8 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52eb3777-4f1d-05c3-e0e7-4afce4cfd800, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2123.708965] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Instance d6e40dbc-f20e-4164-b460-18de6ea72906 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 2123.737967] env[63279]: INFO nova.compute.manager [None req-01b37c68-092e-4554-92a7-7422378b5349 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] [instance: 668436f9-94e9-48c2-a3d4-3df7bbcf5775] Took 48.12 seconds to build instance. 
[ 2123.813533] env[63279]: DEBUG nova.compute.manager [None req-e4649b36-486a-4b0f-ad87-78c041dbfc0c tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] [instance: ee1b4746-49ac-425c-8219-4d54cb34abe0] Checking state {{(pid=63279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2123.814765] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d2d63e8-516d-4e4a-bf56-c13541469580 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2123.924933] env[63279]: DEBUG oslo_concurrency.lockutils [None req-703b4c29-4773-4b34-915e-6bee4bb47aad tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Acquiring lock "refresh_cache-a6758131-030e-4b33-a2c9-8864055a5bec" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2123.924933] env[63279]: DEBUG oslo_concurrency.lockutils [None req-703b4c29-4773-4b34-915e-6bee4bb47aad tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Acquired lock "refresh_cache-a6758131-030e-4b33-a2c9-8864055a5bec" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2123.925077] env[63279]: DEBUG nova.network.neutron [None req-703b4c29-4773-4b34-915e-6bee4bb47aad tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] [instance: a6758131-030e-4b33-a2c9-8864055a5bec] Building network info cache for instance {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2124.063572] env[63279]: DEBUG oslo_concurrency.lockutils [req-77bc0d3c-6aba-428b-afbe-86c0f04db8f2 req-d20ccaa0-217d-45ae-9aa5-8f4c845d0b3a service nova] Releasing lock "refresh_cache-4ce17bdb-4bed-4e06-af13-e4097b55e17d" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2124.064103] env[63279]: DEBUG nova.compute.manager [req-77bc0d3c-6aba-428b-afbe-86c0f04db8f2 req-d20ccaa0-217d-45ae-9aa5-8f4c845d0b3a service nova] [instance: ee1b4746-49ac-425c-8219-4d54cb34abe0] Received event network-changed-a7b4791a-48da-4f2c-a8f0-debeceec3225 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2124.064455] env[63279]: DEBUG nova.compute.manager [req-77bc0d3c-6aba-428b-afbe-86c0f04db8f2 req-d20ccaa0-217d-45ae-9aa5-8f4c845d0b3a service nova] [instance: ee1b4746-49ac-425c-8219-4d54cb34abe0] Refreshing instance network info cache due to event network-changed-a7b4791a-48da-4f2c-a8f0-debeceec3225. 
{{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11655}} [ 2124.064912] env[63279]: DEBUG oslo_concurrency.lockutils [req-77bc0d3c-6aba-428b-afbe-86c0f04db8f2 req-d20ccaa0-217d-45ae-9aa5-8f4c845d0b3a service nova] Acquiring lock "refresh_cache-ee1b4746-49ac-425c-8219-4d54cb34abe0" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2124.065143] env[63279]: DEBUG oslo_concurrency.lockutils [req-77bc0d3c-6aba-428b-afbe-86c0f04db8f2 req-d20ccaa0-217d-45ae-9aa5-8f4c845d0b3a service nova] Acquired lock "refresh_cache-ee1b4746-49ac-425c-8219-4d54cb34abe0" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2124.065476] env[63279]: DEBUG nova.network.neutron [req-77bc0d3c-6aba-428b-afbe-86c0f04db8f2 req-d20ccaa0-217d-45ae-9aa5-8f4c845d0b3a service nova] [instance: ee1b4746-49ac-425c-8219-4d54cb34abe0] Refreshing network info cache for port a7b4791a-48da-4f2c-a8f0-debeceec3225 {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2124.080165] env[63279]: DEBUG oslo_concurrency.lockutils [None req-934ceb37-5239-47c1-8678-9d43df040bef tempest-ServerMetadataTestJSON-1974700823 tempest-ServerMetadataTestJSON-1974700823-project-member] Lock "a0697601-46ae-48ce-a3e1-3c4b81fc1f95" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 48.660s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2124.116047] env[63279]: DEBUG oslo_vmware.api [None req-d83e173e-3a00-46ae-8eed-94284cfc23f8 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52eb3777-4f1d-05c3-e0e7-4afce4cfd800, 'name': SearchDatastore_Task, 'duration_secs': 0.009075} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2124.116434] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-331ddb6f-b36c-476a-a24a-4f5fb3936443 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2124.127901] env[63279]: DEBUG oslo_vmware.api [None req-d83e173e-3a00-46ae-8eed-94284cfc23f8 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Waiting for the task: (returnval){ [ 2124.127901] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]5212213a-af43-95e3-322c-515c474b7328" [ 2124.127901] env[63279]: _type = "Task" [ 2124.127901] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2124.137611] env[63279]: DEBUG oslo_vmware.api [None req-d83e173e-3a00-46ae-8eed-94284cfc23f8 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]5212213a-af43-95e3-322c-515c474b7328, 'name': SearchDatastore_Task, 'duration_secs': 0.00997} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2124.137835] env[63279]: DEBUG oslo_concurrency.lockutils [None req-d83e173e-3a00-46ae-8eed-94284cfc23f8 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2124.138809] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-d83e173e-3a00-46ae-8eed-94284cfc23f8 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] 4ce17bdb-4bed-4e06-af13-e4097b55e17d/4ce17bdb-4bed-4e06-af13-e4097b55e17d.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2124.138809] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-aac256bf-c4d3-4b13-b25d-9b3ca08ab846 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2124.146867] env[63279]: DEBUG oslo_vmware.api [None req-d83e173e-3a00-46ae-8eed-94284cfc23f8 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Waiting for the task: (returnval){ [ 2124.146867] env[63279]: value = "task-2087557" [ 2124.146867] env[63279]: _type = "Task" [ 2124.146867] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2124.155133] env[63279]: DEBUG oslo_vmware.api [None req-d83e173e-3a00-46ae-8eed-94284cfc23f8 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Task: {'id': task-2087557, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2124.157158] env[63279]: DEBUG oslo_vmware.rw_handles [None req-ffc6fb71-2f93-4848-b384-1a69f6471293 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5280ec89-3902-fd45-d233-4275ba1570cc/disk-0.vmdk. {{(pid=63279) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 2124.158028] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9aebfe01-f2da-4aa2-91b1-10cb6bf00ad2 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2124.167577] env[63279]: DEBUG oslo_vmware.rw_handles [None req-ffc6fb71-2f93-4848-b384-1a69f6471293 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5280ec89-3902-fd45-d233-4275ba1570cc/disk-0.vmdk is in state: ready. 
{{(pid=63279) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 2124.167823] env[63279]: ERROR oslo_vmware.rw_handles [None req-ffc6fb71-2f93-4848-b384-1a69f6471293 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5280ec89-3902-fd45-d233-4275ba1570cc/disk-0.vmdk due to incomplete transfer. [ 2124.168085] env[63279]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-65ea22b1-6f2c-49c6-96a3-90bc0d4a1604 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2124.175860] env[63279]: DEBUG oslo_vmware.rw_handles [None req-ffc6fb71-2f93-4848-b384-1a69f6471293 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5280ec89-3902-fd45-d233-4275ba1570cc/disk-0.vmdk. {{(pid=63279) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 2124.176149] env[63279]: DEBUG nova.virt.vmwareapi.images [None req-ffc6fb71-2f93-4848-b384-1a69f6471293 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] [instance: f2a68d73-49d6-4b38-aff1-c2eb850f2ca6] Uploaded image d2d04a47-fc99-4ff0-adbc-a27616252dd0 to the Glance image server {{(pid=63279) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 2124.178578] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-ffc6fb71-2f93-4848-b384-1a69f6471293 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] [instance: f2a68d73-49d6-4b38-aff1-c2eb850f2ca6] Destroying the VM {{(pid=63279) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 2124.179075] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-ed6f9be8-3a68-4f06-a53a-c0a2333f3761 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2124.185020] env[63279]: DEBUG oslo_vmware.api [None req-ffc6fb71-2f93-4848-b384-1a69f6471293 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Waiting for the task: (returnval){ [ 2124.185020] env[63279]: value = "task-2087558" [ 2124.185020] env[63279]: _type = "Task" [ 2124.185020] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2124.193384] env[63279]: DEBUG oslo_vmware.api [None req-ffc6fb71-2f93-4848-b384-1a69f6471293 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Task: {'id': task-2087558, 'name': Destroy_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2124.214472] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Instance 861e4118-6134-40cf-91cb-865b6ee9f347 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 2124.214823] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Total usable vcpus: 48, total allocated vcpus: 21 {{(pid=63279) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2124.214974] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=4672MB phys_disk=200GB used_disk=21GB total_vcpus=48 used_vcpus=21 pci_stats=[] {{(pid=63279) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2124.240968] env[63279]: DEBUG oslo_concurrency.lockutils [None req-01b37c68-092e-4554-92a7-7422378b5349 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Lock "668436f9-94e9-48c2-a3d4-3df7bbcf5775" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 49.631s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2124.494988] env[63279]: DEBUG nova.network.neutron [None req-703b4c29-4773-4b34-915e-6bee4bb47aad tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] [instance: a6758131-030e-4b33-a2c9-8864055a5bec] Instance cache missing network info. {{(pid=63279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2124.656113] env[63279]: DEBUG oslo_vmware.api [None req-d83e173e-3a00-46ae-8eed-94284cfc23f8 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Task: {'id': task-2087557, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.461585} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2124.656576] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-d83e173e-3a00-46ae-8eed-94284cfc23f8 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] 4ce17bdb-4bed-4e06-af13-e4097b55e17d/4ce17bdb-4bed-4e06-af13-e4097b55e17d.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2124.656655] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-d83e173e-3a00-46ae-8eed-94284cfc23f8 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] [instance: 4ce17bdb-4bed-4e06-af13-e4097b55e17d] Extending root virtual disk to 1048576 {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2124.659030] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-54fd0560-902d-4110-8a59-b261971c415b {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2124.665540] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b4456497-7827-48a8-a6eb-7caa57d3bb09 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Acquiring lock "f6a5d157-d58c-4c7e-b6e8-f2dc7aaebd9b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2124.665777] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b4456497-7827-48a8-a6eb-7caa57d3bb09 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Lock "f6a5d157-d58c-4c7e-b6e8-f2dc7aaebd9b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2124.665986] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b4456497-7827-48a8-a6eb-7caa57d3bb09 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Acquiring lock "f6a5d157-d58c-4c7e-b6e8-f2dc7aaebd9b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2124.666199] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b4456497-7827-48a8-a6eb-7caa57d3bb09 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Lock "f6a5d157-d58c-4c7e-b6e8-f2dc7aaebd9b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2124.666389] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b4456497-7827-48a8-a6eb-7caa57d3bb09 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Lock "f6a5d157-d58c-4c7e-b6e8-f2dc7aaebd9b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63279) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2124.668090] env[63279]: DEBUG oslo_vmware.api [None req-d83e173e-3a00-46ae-8eed-94284cfc23f8 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Waiting for the task: (returnval){ [ 2124.668090] env[63279]: value = "task-2087559" [ 2124.668090] env[63279]: _type = "Task" [ 2124.668090] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2124.668892] env[63279]: INFO nova.compute.manager [None req-b4456497-7827-48a8-a6eb-7caa57d3bb09 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] [instance: f6a5d157-d58c-4c7e-b6e8-f2dc7aaebd9b] Terminating instance [ 2124.683724] env[63279]: DEBUG oslo_vmware.api [None req-d83e173e-3a00-46ae-8eed-94284cfc23f8 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Task: {'id': task-2087559, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2124.697274] env[63279]: DEBUG oslo_vmware.api [None req-ffc6fb71-2f93-4848-b384-1a69f6471293 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Task: {'id': task-2087558, 'name': Destroy_Task} progress is 33%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2124.715886] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0c4d45a-89f8-4ca7-8f2c-98b706966cf3 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2124.725195] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-335262bb-2b88-4b76-9b4d-a6561b0140df {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2124.760231] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62dbce72-0609-4450-acac-cf6b2bf9ee8d {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2124.768034] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a697c2a-326e-49bc-a2ff-741a1ab1b342 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2124.782177] env[63279]: DEBUG nova.compute.provider_tree [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Inventory has not changed in ProviderTree for provider: 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2124.792901] env[63279]: DEBUG nova.network.neutron [None req-703b4c29-4773-4b34-915e-6bee4bb47aad tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] [instance: a6758131-030e-4b33-a2c9-8864055a5bec] Updating instance_info_cache with network_info: [{"id": "376a7f06-34a9-4c6e-934f-5470b0a04549", "address": "fa:16:3e:87:30:6b", "network": {"id": "c1d7406d-6852-47cd-a4a3-de7373d03ab4", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1990733857-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": 
"192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1fc9b60ae304455097b8be9a276796fa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0549820d-5649-40bc-ad6e-9ae27b384d90", "external-id": "nsx-vlan-transportzone-434", "segmentation_id": 434, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap376a7f06-34", "ovs_interfaceid": "376a7f06-34a9-4c6e-934f-5470b0a04549", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2124.832635] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70b72cd5-ac62-486e-8848-1471a6e3a7bd {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2124.840789] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-e4649b36-486a-4b0f-ad87-78c041dbfc0c tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] [instance: ee1b4746-49ac-425c-8219-4d54cb34abe0] Doing hard reboot of VM {{(pid=63279) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1063}} [ 2124.841723] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ResetVM_Task with opID=oslo.vmware-ca15223e-fe6b-4bb5-9e56-614979677e2f {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2124.848402] env[63279]: DEBUG oslo_vmware.api [None req-e4649b36-486a-4b0f-ad87-78c041dbfc0c tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] Waiting for the task: (returnval){ [ 2124.848402] env[63279]: value = "task-2087560" [ 2124.848402] env[63279]: _type = "Task" [ 2124.848402] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2124.859643] env[63279]: DEBUG oslo_vmware.api [None req-e4649b36-486a-4b0f-ad87-78c041dbfc0c tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] Task: {'id': task-2087560, 'name': ResetVM_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2124.925715] env[63279]: DEBUG nova.compute.manager [req-33c10b7a-82fe-40ac-ab3f-a67faae1a288 req-a7866688-04b6-42a5-b42f-568bac260aa9 service nova] [instance: a6758131-030e-4b33-a2c9-8864055a5bec] Received event network-vif-plugged-376a7f06-34a9-4c6e-934f-5470b0a04549 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2124.926045] env[63279]: DEBUG oslo_concurrency.lockutils [req-33c10b7a-82fe-40ac-ab3f-a67faae1a288 req-a7866688-04b6-42a5-b42f-568bac260aa9 service nova] Acquiring lock "a6758131-030e-4b33-a2c9-8864055a5bec-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2124.926360] env[63279]: DEBUG oslo_concurrency.lockutils [req-33c10b7a-82fe-40ac-ab3f-a67faae1a288 req-a7866688-04b6-42a5-b42f-568bac260aa9 service nova] Lock "a6758131-030e-4b33-a2c9-8864055a5bec-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2124.926616] env[63279]: DEBUG oslo_concurrency.lockutils [req-33c10b7a-82fe-40ac-ab3f-a67faae1a288 req-a7866688-04b6-42a5-b42f-568bac260aa9 service nova] Lock "a6758131-030e-4b33-a2c9-8864055a5bec-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2124.926896] env[63279]: DEBUG nova.compute.manager [req-33c10b7a-82fe-40ac-ab3f-a67faae1a288 req-a7866688-04b6-42a5-b42f-568bac260aa9 service nova] [instance: a6758131-030e-4b33-a2c9-8864055a5bec] No waiting events found dispatching network-vif-plugged-376a7f06-34a9-4c6e-934f-5470b0a04549 {{(pid=63279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 2124.927145] env[63279]: WARNING nova.compute.manager [req-33c10b7a-82fe-40ac-ab3f-a67faae1a288 req-a7866688-04b6-42a5-b42f-568bac260aa9 service nova] [instance: a6758131-030e-4b33-a2c9-8864055a5bec] Received unexpected event network-vif-plugged-376a7f06-34a9-4c6e-934f-5470b0a04549 for instance with vm_state building and task_state spawning. [ 2124.927407] env[63279]: DEBUG nova.compute.manager [req-33c10b7a-82fe-40ac-ab3f-a67faae1a288 req-a7866688-04b6-42a5-b42f-568bac260aa9 service nova] [instance: a6758131-030e-4b33-a2c9-8864055a5bec] Received event network-changed-376a7f06-34a9-4c6e-934f-5470b0a04549 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2124.927654] env[63279]: DEBUG nova.compute.manager [req-33c10b7a-82fe-40ac-ab3f-a67faae1a288 req-a7866688-04b6-42a5-b42f-568bac260aa9 service nova] [instance: a6758131-030e-4b33-a2c9-8864055a5bec] Refreshing instance network info cache due to event network-changed-376a7f06-34a9-4c6e-934f-5470b0a04549. 
{{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11655}} [ 2124.927914] env[63279]: DEBUG oslo_concurrency.lockutils [req-33c10b7a-82fe-40ac-ab3f-a67faae1a288 req-a7866688-04b6-42a5-b42f-568bac260aa9 service nova] Acquiring lock "refresh_cache-a6758131-030e-4b33-a2c9-8864055a5bec" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2124.937681] env[63279]: DEBUG nova.network.neutron [req-77bc0d3c-6aba-428b-afbe-86c0f04db8f2 req-d20ccaa0-217d-45ae-9aa5-8f4c845d0b3a service nova] [instance: ee1b4746-49ac-425c-8219-4d54cb34abe0] Updated VIF entry in instance network info cache for port a7b4791a-48da-4f2c-a8f0-debeceec3225. {{(pid=63279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2124.938200] env[63279]: DEBUG nova.network.neutron [req-77bc0d3c-6aba-428b-afbe-86c0f04db8f2 req-d20ccaa0-217d-45ae-9aa5-8f4c845d0b3a service nova] [instance: ee1b4746-49ac-425c-8219-4d54cb34abe0] Updating instance_info_cache with network_info: [{"id": "a7b4791a-48da-4f2c-a8f0-debeceec3225", "address": "fa:16:3e:6d:22:31", "network": {"id": "9cfd684f-63e8-44aa-8569-0ab02c790458", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-318205966-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4f07bad15ea5419cbecc5840b4e96d01", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dcf5c3f7-4e33-4f21-b323-3673930b789c", "external-id": "nsx-vlan-transportzone-983", "segmentation_id": 983, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa7b4791a-48", "ovs_interfaceid": "a7b4791a-48da-4f2c-a8f0-debeceec3225", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2124.992933] env[63279]: DEBUG oslo_concurrency.lockutils [None req-801dfe63-15ee-4acc-bd55-4bd47afbcfa8 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Acquiring lock "668436f9-94e9-48c2-a3d4-3df7bbcf5775" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2124.993153] env[63279]: DEBUG oslo_concurrency.lockutils [None req-801dfe63-15ee-4acc-bd55-4bd47afbcfa8 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Lock "668436f9-94e9-48c2-a3d4-3df7bbcf5775" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2124.993350] env[63279]: DEBUG oslo_concurrency.lockutils [None req-801dfe63-15ee-4acc-bd55-4bd47afbcfa8 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Acquiring lock "668436f9-94e9-48c2-a3d4-3df7bbcf5775-events" 
by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2124.993546] env[63279]: DEBUG oslo_concurrency.lockutils [None req-801dfe63-15ee-4acc-bd55-4bd47afbcfa8 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Lock "668436f9-94e9-48c2-a3d4-3df7bbcf5775-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2124.993732] env[63279]: DEBUG oslo_concurrency.lockutils [None req-801dfe63-15ee-4acc-bd55-4bd47afbcfa8 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Lock "668436f9-94e9-48c2-a3d4-3df7bbcf5775-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2124.996411] env[63279]: INFO nova.compute.manager [None req-801dfe63-15ee-4acc-bd55-4bd47afbcfa8 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] [instance: 668436f9-94e9-48c2-a3d4-3df7bbcf5775] Terminating instance [ 2125.177412] env[63279]: DEBUG nova.compute.manager [None req-b4456497-7827-48a8-a6eb-7caa57d3bb09 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] [instance: f6a5d157-d58c-4c7e-b6e8-f2dc7aaebd9b] Start destroying the instance on the hypervisor. {{(pid=63279) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2125.177714] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-b4456497-7827-48a8-a6eb-7caa57d3bb09 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] [instance: f6a5d157-d58c-4c7e-b6e8-f2dc7aaebd9b] Destroying instance {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2125.181954] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e8448eb-eeb3-4da0-aede-d6cbcdb210cb {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2125.184643] env[63279]: DEBUG oslo_vmware.api [None req-d83e173e-3a00-46ae-8eed-94284cfc23f8 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Task: {'id': task-2087559, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.068783} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2125.184897] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-d83e173e-3a00-46ae-8eed-94284cfc23f8 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] [instance: 4ce17bdb-4bed-4e06-af13-e4097b55e17d] Extended root virtual disk {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2125.186081] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d65faccb-490a-4c86-96ab-ac064c257616 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2125.192887] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-b4456497-7827-48a8-a6eb-7caa57d3bb09 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] [instance: f6a5d157-d58c-4c7e-b6e8-f2dc7aaebd9b] Powering off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2125.195964] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-51a0fb78-6455-4778-9619-c72456895c88 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2125.215737] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-d83e173e-3a00-46ae-8eed-94284cfc23f8 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] [instance: 4ce17bdb-4bed-4e06-af13-e4097b55e17d] Reconfiguring VM instance instance-0000004b to attach disk [datastore1] 4ce17bdb-4bed-4e06-af13-e4097b55e17d/4ce17bdb-4bed-4e06-af13-e4097b55e17d.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2125.220423] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-70acd6ea-1201-4eb8-8d84-4e957f8abad7 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2125.235139] env[63279]: DEBUG oslo_vmware.api [None req-ffc6fb71-2f93-4848-b384-1a69f6471293 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Task: {'id': task-2087558, 'name': Destroy_Task, 'duration_secs': 0.796818} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2125.235463] env[63279]: DEBUG oslo_vmware.api [None req-b4456497-7827-48a8-a6eb-7caa57d3bb09 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Waiting for the task: (returnval){ [ 2125.235463] env[63279]: value = "task-2087561" [ 2125.235463] env[63279]: _type = "Task" [ 2125.235463] env[63279]: } to complete. 
{{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2125.235707] env[63279]: INFO nova.virt.vmwareapi.vm_util [None req-ffc6fb71-2f93-4848-b384-1a69f6471293 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] [instance: f2a68d73-49d6-4b38-aff1-c2eb850f2ca6] Destroyed the VM [ 2125.235950] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-ffc6fb71-2f93-4848-b384-1a69f6471293 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] [instance: f2a68d73-49d6-4b38-aff1-c2eb850f2ca6] Deleting Snapshot of the VM instance {{(pid=63279) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 2125.236675] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-c8b1f2d2-2516-41ba-8ad4-45acad61cc7b {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2125.242535] env[63279]: DEBUG oslo_vmware.api [None req-d83e173e-3a00-46ae-8eed-94284cfc23f8 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Waiting for the task: (returnval){ [ 2125.242535] env[63279]: value = "task-2087562" [ 2125.242535] env[63279]: _type = "Task" [ 2125.242535] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2125.249918] env[63279]: DEBUG oslo_vmware.api [None req-ffc6fb71-2f93-4848-b384-1a69f6471293 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Waiting for the task: (returnval){ [ 2125.249918] env[63279]: value = "task-2087563" [ 2125.249918] env[63279]: _type = "Task" [ 2125.249918] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2125.250146] env[63279]: DEBUG oslo_vmware.api [None req-b4456497-7827-48a8-a6eb-7caa57d3bb09 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Task: {'id': task-2087561, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2125.257065] env[63279]: DEBUG oslo_vmware.api [None req-d83e173e-3a00-46ae-8eed-94284cfc23f8 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Task: {'id': task-2087562, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2125.262206] env[63279]: DEBUG oslo_vmware.api [None req-ffc6fb71-2f93-4848-b384-1a69f6471293 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Task: {'id': task-2087563, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2125.285280] env[63279]: DEBUG nova.scheduler.client.report [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Inventory has not changed for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2125.297176] env[63279]: DEBUG oslo_concurrency.lockutils [None req-703b4c29-4773-4b34-915e-6bee4bb47aad tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Releasing lock "refresh_cache-a6758131-030e-4b33-a2c9-8864055a5bec" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2125.297176] env[63279]: DEBUG nova.compute.manager [None req-703b4c29-4773-4b34-915e-6bee4bb47aad tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] [instance: a6758131-030e-4b33-a2c9-8864055a5bec] Instance network_info: |[{"id": "376a7f06-34a9-4c6e-934f-5470b0a04549", "address": "fa:16:3e:87:30:6b", "network": {"id": "c1d7406d-6852-47cd-a4a3-de7373d03ab4", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1990733857-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1fc9b60ae304455097b8be9a276796fa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0549820d-5649-40bc-ad6e-9ae27b384d90", "external-id": "nsx-vlan-transportzone-434", "segmentation_id": 434, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap376a7f06-34", "ovs_interfaceid": "376a7f06-34a9-4c6e-934f-5470b0a04549", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 2125.297424] env[63279]: DEBUG oslo_concurrency.lockutils [req-33c10b7a-82fe-40ac-ab3f-a67faae1a288 req-a7866688-04b6-42a5-b42f-568bac260aa9 service nova] Acquired lock "refresh_cache-a6758131-030e-4b33-a2c9-8864055a5bec" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2125.297424] env[63279]: DEBUG nova.network.neutron [req-33c10b7a-82fe-40ac-ab3f-a67faae1a288 req-a7866688-04b6-42a5-b42f-568bac260aa9 service nova] [instance: a6758131-030e-4b33-a2c9-8864055a5bec] Refreshing network info cache for port 376a7f06-34a9-4c6e-934f-5470b0a04549 {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2125.299067] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-703b4c29-4773-4b34-915e-6bee4bb47aad tempest-ServerRescueNegativeTestJSON-287528819 
tempest-ServerRescueNegativeTestJSON-287528819-project-member] [instance: a6758131-030e-4b33-a2c9-8864055a5bec] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:87:30:6b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '0549820d-5649-40bc-ad6e-9ae27b384d90', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '376a7f06-34a9-4c6e-934f-5470b0a04549', 'vif_model': 'vmxnet3'}] {{(pid=63279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2125.307468] env[63279]: DEBUG oslo.service.loopingcall [None req-703b4c29-4773-4b34-915e-6bee4bb47aad tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2125.309219] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a6758131-030e-4b33-a2c9-8864055a5bec] Creating VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2125.310219] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9ee93299-ef6a-48e8-a4ee-a337491e4341 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2125.334233] env[63279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2125.334233] env[63279]: value = "task-2087564" [ 2125.334233] env[63279]: _type = "Task" [ 2125.334233] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2125.344725] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087564, 'name': CreateVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2125.358244] env[63279]: DEBUG oslo_vmware.api [None req-e4649b36-486a-4b0f-ad87-78c041dbfc0c tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] Task: {'id': task-2087560, 'name': ResetVM_Task, 'duration_secs': 0.12042} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2125.358244] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-e4649b36-486a-4b0f-ad87-78c041dbfc0c tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] [instance: ee1b4746-49ac-425c-8219-4d54cb34abe0] Did hard reboot of VM {{(pid=63279) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1067}} [ 2125.358244] env[63279]: DEBUG nova.compute.manager [None req-e4649b36-486a-4b0f-ad87-78c041dbfc0c tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] [instance: ee1b4746-49ac-425c-8219-4d54cb34abe0] Checking state {{(pid=63279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2125.358540] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb60a444-3158-4080-9723-174c3e28b0e5 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2125.441110] env[63279]: DEBUG oslo_concurrency.lockutils [req-77bc0d3c-6aba-428b-afbe-86c0f04db8f2 req-d20ccaa0-217d-45ae-9aa5-8f4c845d0b3a service nova] Releasing lock "refresh_cache-ee1b4746-49ac-425c-8219-4d54cb34abe0" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2125.500537] env[63279]: DEBUG nova.compute.manager [None req-801dfe63-15ee-4acc-bd55-4bd47afbcfa8 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] [instance: 668436f9-94e9-48c2-a3d4-3df7bbcf5775] Start destroying the instance on the hypervisor. {{(pid=63279) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2125.500834] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-801dfe63-15ee-4acc-bd55-4bd47afbcfa8 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] [instance: 668436f9-94e9-48c2-a3d4-3df7bbcf5775] Destroying instance {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2125.501894] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca7c4931-64c3-4364-9ed4-e31fab4f7521 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2125.512398] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-801dfe63-15ee-4acc-bd55-4bd47afbcfa8 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] [instance: 668436f9-94e9-48c2-a3d4-3df7bbcf5775] Powering off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2125.512669] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5c59fdb0-1f91-49b5-a68f-0c60cb5daee5 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2125.518234] env[63279]: DEBUG oslo_vmware.api [None req-801dfe63-15ee-4acc-bd55-4bd47afbcfa8 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Waiting for the task: (returnval){ [ 2125.518234] env[63279]: value = "task-2087565" [ 2125.518234] env[63279]: _type = "Task" [ 2125.518234] env[63279]: } to complete. 
{{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2125.526528] env[63279]: DEBUG oslo_vmware.api [None req-801dfe63-15ee-4acc-bd55-4bd47afbcfa8 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Task: {'id': task-2087565, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2125.755236] env[63279]: DEBUG oslo_vmware.api [None req-b4456497-7827-48a8-a6eb-7caa57d3bb09 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Task: {'id': task-2087561, 'name': PowerOffVM_Task, 'duration_secs': 0.295548} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2125.760029] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-b4456497-7827-48a8-a6eb-7caa57d3bb09 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] [instance: f6a5d157-d58c-4c7e-b6e8-f2dc7aaebd9b] Powered off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2125.763023] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-b4456497-7827-48a8-a6eb-7caa57d3bb09 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] [instance: f6a5d157-d58c-4c7e-b6e8-f2dc7aaebd9b] Unregistering the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2125.763023] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1cdd0490-1bc1-4f77-a07e-5d886539f389 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2125.772447] env[63279]: DEBUG oslo_vmware.api [None req-d83e173e-3a00-46ae-8eed-94284cfc23f8 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Task: {'id': task-2087562, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2125.776194] env[63279]: DEBUG oslo_vmware.api [None req-ffc6fb71-2f93-4848-b384-1a69f6471293 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Task: {'id': task-2087563, 'name': RemoveSnapshot_Task} progress is 80%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2125.793094] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=63279) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2125.793094] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 4.153s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2125.793094] env[63279]: DEBUG oslo_concurrency.lockutils [None req-55b3f25a-ae80-4a17-b3bc-af0b2acf3c86 tempest-ServerShowV254Test-1528218800 tempest-ServerShowV254Test-1528218800-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 37.824s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2125.793500] env[63279]: DEBUG nova.objects.instance [None req-55b3f25a-ae80-4a17-b3bc-af0b2acf3c86 tempest-ServerShowV254Test-1528218800 tempest-ServerShowV254Test-1528218800-project-member] [instance: 4871421f-0015-4973-bb5f-c9042d411c82] Trying to apply a migration context that does not seem to be set for this instance {{(pid=63279) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 2125.797791] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2125.798073] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Cleaning up deleted instances {{(pid=63279) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11747}} [ 2125.846140] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087564, 'name': CreateVM_Task} progress is 25%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2125.872363] env[63279]: DEBUG oslo_concurrency.lockutils [None req-e4649b36-486a-4b0f-ad87-78c041dbfc0c tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] Lock "ee1b4746-49ac-425c-8219-4d54cb34abe0" "released" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: held 4.422s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2125.919479] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-b4456497-7827-48a8-a6eb-7caa57d3bb09 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] [instance: f6a5d157-d58c-4c7e-b6e8-f2dc7aaebd9b] Unregistered the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2125.919953] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-b4456497-7827-48a8-a6eb-7caa57d3bb09 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] [instance: f6a5d157-d58c-4c7e-b6e8-f2dc7aaebd9b] Deleting contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2125.920363] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-b4456497-7827-48a8-a6eb-7caa57d3bb09 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Deleting the datastore file [datastore1] f6a5d157-d58c-4c7e-b6e8-f2dc7aaebd9b {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2125.921198] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a1fdb2d4-1b78-45dc-ab9c-6c79e9da7749 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2125.928161] env[63279]: DEBUG oslo_vmware.api [None req-b4456497-7827-48a8-a6eb-7caa57d3bb09 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Waiting for the task: (returnval){ [ 2125.928161] env[63279]: value = "task-2087567" [ 2125.928161] env[63279]: _type = "Task" [ 2125.928161] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2125.939031] env[63279]: DEBUG oslo_vmware.api [None req-b4456497-7827-48a8-a6eb-7caa57d3bb09 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Task: {'id': task-2087567, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2125.977065] env[63279]: DEBUG oslo_concurrency.lockutils [None req-a2668d1a-4c43-4202-b04d-63b7f5aec853 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Acquiring lock "4acfb474-d861-467a-983c-0dd5641e66f3" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2125.977280] env[63279]: DEBUG oslo_concurrency.lockutils [None req-a2668d1a-4c43-4202-b04d-63b7f5aec853 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Lock "4acfb474-d861-467a-983c-0dd5641e66f3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2126.027877] env[63279]: DEBUG oslo_vmware.api [None req-801dfe63-15ee-4acc-bd55-4bd47afbcfa8 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Task: {'id': task-2087565, 'name': PowerOffVM_Task, 'duration_secs': 0.422159} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2126.028283] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-801dfe63-15ee-4acc-bd55-4bd47afbcfa8 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] [instance: 668436f9-94e9-48c2-a3d4-3df7bbcf5775] Powered off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2126.028501] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-801dfe63-15ee-4acc-bd55-4bd47afbcfa8 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] [instance: 668436f9-94e9-48c2-a3d4-3df7bbcf5775] Unregistering the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2126.028840] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8d9c47b4-09ee-4bc2-9810-147d7206d9b9 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2126.128842] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-801dfe63-15ee-4acc-bd55-4bd47afbcfa8 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] [instance: 668436f9-94e9-48c2-a3d4-3df7bbcf5775] Unregistered the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2126.128842] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-801dfe63-15ee-4acc-bd55-4bd47afbcfa8 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] [instance: 668436f9-94e9-48c2-a3d4-3df7bbcf5775] Deleting contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2126.128842] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-801dfe63-15ee-4acc-bd55-4bd47afbcfa8 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Deleting the datastore file [datastore1] 668436f9-94e9-48c2-a3d4-3df7bbcf5775 {{(pid=63279) 
file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2126.128842] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d945da7b-9992-43a6-aafb-f72cdfcfdb26 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2126.134126] env[63279]: DEBUG oslo_vmware.api [None req-801dfe63-15ee-4acc-bd55-4bd47afbcfa8 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Waiting for the task: (returnval){ [ 2126.134126] env[63279]: value = "task-2087569" [ 2126.134126] env[63279]: _type = "Task" [ 2126.134126] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2126.143154] env[63279]: DEBUG oslo_vmware.api [None req-801dfe63-15ee-4acc-bd55-4bd47afbcfa8 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Task: {'id': task-2087569, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2126.153986] env[63279]: DEBUG nova.network.neutron [req-33c10b7a-82fe-40ac-ab3f-a67faae1a288 req-a7866688-04b6-42a5-b42f-568bac260aa9 service nova] [instance: a6758131-030e-4b33-a2c9-8864055a5bec] Updated VIF entry in instance network info cache for port 376a7f06-34a9-4c6e-934f-5470b0a04549. {{(pid=63279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2126.154088] env[63279]: DEBUG nova.network.neutron [req-33c10b7a-82fe-40ac-ab3f-a67faae1a288 req-a7866688-04b6-42a5-b42f-568bac260aa9 service nova] [instance: a6758131-030e-4b33-a2c9-8864055a5bec] Updating instance_info_cache with network_info: [{"id": "376a7f06-34a9-4c6e-934f-5470b0a04549", "address": "fa:16:3e:87:30:6b", "network": {"id": "c1d7406d-6852-47cd-a4a3-de7373d03ab4", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1990733857-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1fc9b60ae304455097b8be9a276796fa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0549820d-5649-40bc-ad6e-9ae27b384d90", "external-id": "nsx-vlan-transportzone-434", "segmentation_id": 434, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap376a7f06-34", "ovs_interfaceid": "376a7f06-34a9-4c6e-934f-5470b0a04549", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2126.255546] env[63279]: DEBUG oslo_vmware.api [None req-d83e173e-3a00-46ae-8eed-94284cfc23f8 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Task: {'id': task-2087562, 'name': ReconfigVM_Task, 'duration_secs': 0.626831} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2126.258563] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-d83e173e-3a00-46ae-8eed-94284cfc23f8 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] [instance: 4ce17bdb-4bed-4e06-af13-e4097b55e17d] Reconfigured VM instance instance-0000004b to attach disk [datastore1] 4ce17bdb-4bed-4e06-af13-e4097b55e17d/4ce17bdb-4bed-4e06-af13-e4097b55e17d.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2126.259306] env[63279]: DEBUG oslo_concurrency.lockutils [None req-e1db163f-06c3-4493-ba10-345607e865db tempest-ServerMetadataTestJSON-1974700823 tempest-ServerMetadataTestJSON-1974700823-project-member] Acquiring lock "a0697601-46ae-48ce-a3e1-3c4b81fc1f95" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2126.259560] env[63279]: DEBUG oslo_concurrency.lockutils [None req-e1db163f-06c3-4493-ba10-345607e865db tempest-ServerMetadataTestJSON-1974700823 tempest-ServerMetadataTestJSON-1974700823-project-member] Lock "a0697601-46ae-48ce-a3e1-3c4b81fc1f95" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2126.259768] env[63279]: DEBUG oslo_concurrency.lockutils [None req-e1db163f-06c3-4493-ba10-345607e865db tempest-ServerMetadataTestJSON-1974700823 tempest-ServerMetadataTestJSON-1974700823-project-member] Acquiring lock "a0697601-46ae-48ce-a3e1-3c4b81fc1f95-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2126.259952] env[63279]: DEBUG oslo_concurrency.lockutils [None req-e1db163f-06c3-4493-ba10-345607e865db tempest-ServerMetadataTestJSON-1974700823 tempest-ServerMetadataTestJSON-1974700823-project-member] Lock "a0697601-46ae-48ce-a3e1-3c4b81fc1f95-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2126.260135] env[63279]: DEBUG oslo_concurrency.lockutils [None req-e1db163f-06c3-4493-ba10-345607e865db tempest-ServerMetadataTestJSON-1974700823 tempest-ServerMetadataTestJSON-1974700823-project-member] Lock "a0697601-46ae-48ce-a3e1-3c4b81fc1f95-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2126.261597] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-55f86da1-84d8-498b-a092-9248ddda8a27 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2126.263704] env[63279]: INFO nova.compute.manager [None req-e1db163f-06c3-4493-ba10-345607e865db tempest-ServerMetadataTestJSON-1974700823 tempest-ServerMetadataTestJSON-1974700823-project-member] [instance: a0697601-46ae-48ce-a3e1-3c4b81fc1f95] Terminating instance [ 2126.271096] env[63279]: DEBUG oslo_vmware.api [None 
req-ffc6fb71-2f93-4848-b384-1a69f6471293 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Task: {'id': task-2087563, 'name': RemoveSnapshot_Task, 'duration_secs': 0.940996} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2126.272287] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-ffc6fb71-2f93-4848-b384-1a69f6471293 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] [instance: f2a68d73-49d6-4b38-aff1-c2eb850f2ca6] Deleted Snapshot of the VM instance {{(pid=63279) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 2126.272522] env[63279]: INFO nova.compute.manager [None req-ffc6fb71-2f93-4848-b384-1a69f6471293 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] [instance: f2a68d73-49d6-4b38-aff1-c2eb850f2ca6] Took 17.64 seconds to snapshot the instance on the hypervisor. [ 2126.275134] env[63279]: DEBUG oslo_vmware.api [None req-d83e173e-3a00-46ae-8eed-94284cfc23f8 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Waiting for the task: (returnval){ [ 2126.275134] env[63279]: value = "task-2087570" [ 2126.275134] env[63279]: _type = "Task" [ 2126.275134] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2126.284898] env[63279]: DEBUG oslo_vmware.api [None req-d83e173e-3a00-46ae-8eed-94284cfc23f8 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Task: {'id': task-2087570, 'name': Rename_Task} progress is 6%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2126.316140] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] There are 47 instances to clean {{(pid=63279) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11756}} [ 2126.316403] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] [instance: 795560b4-ccdc-4012-8130-042dcb94085f] Instance has had 0 of 5 cleanup attempts {{(pid=63279) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11760}} [ 2126.344089] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087564, 'name': CreateVM_Task, 'duration_secs': 0.643343} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2126.344279] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a6758131-030e-4b33-a2c9-8864055a5bec] Created VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2126.344965] env[63279]: DEBUG oslo_concurrency.lockutils [None req-703b4c29-4773-4b34-915e-6bee4bb47aad tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2126.345149] env[63279]: DEBUG oslo_concurrency.lockutils [None req-703b4c29-4773-4b34-915e-6bee4bb47aad tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2126.345462] env[63279]: DEBUG oslo_concurrency.lockutils [None req-703b4c29-4773-4b34-915e-6bee4bb47aad tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2126.345715] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-01f03fdb-e07c-4fe8-a23f-02a459c0b830 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2126.350392] env[63279]: DEBUG oslo_vmware.api [None req-703b4c29-4773-4b34-915e-6bee4bb47aad tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Waiting for the task: (returnval){ [ 2126.350392] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52390b1a-bc7a-b133-af47-6c85f48ff372" [ 2126.350392] env[63279]: _type = "Task" [ 2126.350392] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2126.358188] env[63279]: DEBUG oslo_vmware.api [None req-703b4c29-4773-4b34-915e-6bee4bb47aad tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52390b1a-bc7a-b133-af47-6c85f48ff372, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2126.437213] env[63279]: DEBUG oslo_vmware.api [None req-b4456497-7827-48a8-a6eb-7caa57d3bb09 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Task: {'id': task-2087567, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.151129} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2126.437438] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-b4456497-7827-48a8-a6eb-7caa57d3bb09 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Deleted the datastore file {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2126.437669] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-b4456497-7827-48a8-a6eb-7caa57d3bb09 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] [instance: f6a5d157-d58c-4c7e-b6e8-f2dc7aaebd9b] Deleted contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2126.437811] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-b4456497-7827-48a8-a6eb-7caa57d3bb09 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] [instance: f6a5d157-d58c-4c7e-b6e8-f2dc7aaebd9b] Instance destroyed {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2126.437988] env[63279]: INFO nova.compute.manager [None req-b4456497-7827-48a8-a6eb-7caa57d3bb09 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] [instance: f6a5d157-d58c-4c7e-b6e8-f2dc7aaebd9b] Took 1.26 seconds to destroy the instance on the hypervisor. [ 2126.438243] env[63279]: DEBUG oslo.service.loopingcall [None req-b4456497-7827-48a8-a6eb-7caa57d3bb09 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2126.438439] env[63279]: DEBUG nova.compute.manager [-] [instance: f6a5d157-d58c-4c7e-b6e8-f2dc7aaebd9b] Deallocating network for instance {{(pid=63279) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2126.438540] env[63279]: DEBUG nova.network.neutron [-] [instance: f6a5d157-d58c-4c7e-b6e8-f2dc7aaebd9b] deallocate_for_instance() {{(pid=63279) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2126.479890] env[63279]: DEBUG nova.compute.manager [None req-a2668d1a-4c43-4202-b04d-63b7f5aec853 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] [instance: 4acfb474-d861-467a-983c-0dd5641e66f3] Starting instance... {{(pid=63279) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 2126.527598] env[63279]: DEBUG nova.compute.manager [req-e02e82b3-79cd-42eb-8fcf-d1f72b00f08a req-6b4fadfd-bbbd-4e14-97fc-68d6a019c139 service nova] [instance: ee1b4746-49ac-425c-8219-4d54cb34abe0] Received event network-changed-a7b4791a-48da-4f2c-a8f0-debeceec3225 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2126.527778] env[63279]: DEBUG nova.compute.manager [req-e02e82b3-79cd-42eb-8fcf-d1f72b00f08a req-6b4fadfd-bbbd-4e14-97fc-68d6a019c139 service nova] [instance: ee1b4746-49ac-425c-8219-4d54cb34abe0] Refreshing instance network info cache due to event network-changed-a7b4791a-48da-4f2c-a8f0-debeceec3225. 
{{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11655}} [ 2126.527975] env[63279]: DEBUG oslo_concurrency.lockutils [req-e02e82b3-79cd-42eb-8fcf-d1f72b00f08a req-6b4fadfd-bbbd-4e14-97fc-68d6a019c139 service nova] Acquiring lock "refresh_cache-ee1b4746-49ac-425c-8219-4d54cb34abe0" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2126.528107] env[63279]: DEBUG oslo_concurrency.lockutils [req-e02e82b3-79cd-42eb-8fcf-d1f72b00f08a req-6b4fadfd-bbbd-4e14-97fc-68d6a019c139 service nova] Acquired lock "refresh_cache-ee1b4746-49ac-425c-8219-4d54cb34abe0" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2126.528685] env[63279]: DEBUG nova.network.neutron [req-e02e82b3-79cd-42eb-8fcf-d1f72b00f08a req-6b4fadfd-bbbd-4e14-97fc-68d6a019c139 service nova] [instance: ee1b4746-49ac-425c-8219-4d54cb34abe0] Refreshing network info cache for port a7b4791a-48da-4f2c-a8f0-debeceec3225 {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2126.646942] env[63279]: DEBUG oslo_vmware.api [None req-801dfe63-15ee-4acc-bd55-4bd47afbcfa8 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Task: {'id': task-2087569, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.156422} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2126.647245] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-801dfe63-15ee-4acc-bd55-4bd47afbcfa8 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Deleted the datastore file {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2126.647507] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-801dfe63-15ee-4acc-bd55-4bd47afbcfa8 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] [instance: 668436f9-94e9-48c2-a3d4-3df7bbcf5775] Deleted contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2126.647716] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-801dfe63-15ee-4acc-bd55-4bd47afbcfa8 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] [instance: 668436f9-94e9-48c2-a3d4-3df7bbcf5775] Instance destroyed {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2126.647891] env[63279]: INFO nova.compute.manager [None req-801dfe63-15ee-4acc-bd55-4bd47afbcfa8 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] [instance: 668436f9-94e9-48c2-a3d4-3df7bbcf5775] Took 1.15 seconds to destroy the instance on the hypervisor. [ 2126.648256] env[63279]: DEBUG oslo.service.loopingcall [None req-801dfe63-15ee-4acc-bd55-4bd47afbcfa8 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2126.648701] env[63279]: DEBUG nova.compute.manager [-] [instance: 668436f9-94e9-48c2-a3d4-3df7bbcf5775] Deallocating network for instance {{(pid=63279) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2126.648802] env[63279]: DEBUG nova.network.neutron [-] [instance: 668436f9-94e9-48c2-a3d4-3df7bbcf5775] deallocate_for_instance() {{(pid=63279) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2126.657513] env[63279]: DEBUG oslo_concurrency.lockutils [req-33c10b7a-82fe-40ac-ab3f-a67faae1a288 req-a7866688-04b6-42a5-b42f-568bac260aa9 service nova] Releasing lock "refresh_cache-a6758131-030e-4b33-a2c9-8864055a5bec" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2126.768156] env[63279]: DEBUG nova.compute.manager [None req-e1db163f-06c3-4493-ba10-345607e865db tempest-ServerMetadataTestJSON-1974700823 tempest-ServerMetadataTestJSON-1974700823-project-member] [instance: a0697601-46ae-48ce-a3e1-3c4b81fc1f95] Start destroying the instance on the hypervisor. {{(pid=63279) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2126.768156] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-e1db163f-06c3-4493-ba10-345607e865db tempest-ServerMetadataTestJSON-1974700823 tempest-ServerMetadataTestJSON-1974700823-project-member] [instance: a0697601-46ae-48ce-a3e1-3c4b81fc1f95] Destroying instance {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2126.772109] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e73874d5-f751-48f6-af3a-3c44897a4fe7 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2126.787438] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-e1db163f-06c3-4493-ba10-345607e865db tempest-ServerMetadataTestJSON-1974700823 tempest-ServerMetadataTestJSON-1974700823-project-member] [instance: a0697601-46ae-48ce-a3e1-3c4b81fc1f95] Powering off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2126.790586] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-73c682c8-abb4-4a0b-824b-cd309431622e {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2126.792285] env[63279]: DEBUG oslo_vmware.api [None req-d83e173e-3a00-46ae-8eed-94284cfc23f8 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Task: {'id': task-2087570, 'name': Rename_Task, 'duration_secs': 0.128808} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2126.792469] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-d83e173e-3a00-46ae-8eed-94284cfc23f8 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] [instance: 4ce17bdb-4bed-4e06-af13-e4097b55e17d] Powering on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2126.793046] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c6aeb508-c3e1-42c3-ba07-92f52b271143 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2126.797356] env[63279]: DEBUG oslo_vmware.api [None req-e1db163f-06c3-4493-ba10-345607e865db tempest-ServerMetadataTestJSON-1974700823 tempest-ServerMetadataTestJSON-1974700823-project-member] Waiting for the task: (returnval){ [ 2126.797356] env[63279]: value = "task-2087571" [ 2126.797356] env[63279]: _type = "Task" [ 2126.797356] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2126.802106] env[63279]: DEBUG oslo_vmware.api [None req-d83e173e-3a00-46ae-8eed-94284cfc23f8 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Waiting for the task: (returnval){ [ 2126.802106] env[63279]: value = "task-2087572" [ 2126.802106] env[63279]: _type = "Task" [ 2126.802106] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2126.811146] env[63279]: DEBUG oslo_vmware.api [None req-e1db163f-06c3-4493-ba10-345607e865db tempest-ServerMetadataTestJSON-1974700823 tempest-ServerMetadataTestJSON-1974700823-project-member] Task: {'id': task-2087571, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2126.814630] env[63279]: DEBUG oslo_vmware.api [None req-d83e173e-3a00-46ae-8eed-94284cfc23f8 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Task: {'id': task-2087572, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2126.820294] env[63279]: DEBUG oslo_concurrency.lockutils [None req-55b3f25a-ae80-4a17-b3bc-af0b2acf3c86 tempest-ServerShowV254Test-1528218800 tempest-ServerShowV254Test-1528218800-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.027s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2126.821466] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] [instance: 7db0c32d-36a4-4452-bb07-06de0c93ab50] Instance has had 0 of 5 cleanup attempts {{(pid=63279) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11760}} [ 2126.823543] env[63279]: DEBUG oslo_concurrency.lockutils [None req-f4f94911-762d-4d53-97a7-f3731ce38593 tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 37.817s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2126.823543] env[63279]: DEBUG oslo_concurrency.lockutils [None req-f4f94911-762d-4d53-97a7-f3731ce38593 tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2126.829383] env[63279]: DEBUG oslo_concurrency.lockutils [None req-115ccca1-b77d-4cbf-8091-6f2f334a342f tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 34.525s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2126.829593] env[63279]: DEBUG oslo_concurrency.lockutils [None req-115ccca1-b77d-4cbf-8091-6f2f334a342f tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2126.831943] env[63279]: DEBUG oslo_concurrency.lockutils [None req-70205c4c-5bd3-44be-b3d5-46d8934028a1 tempest-ServerAddressesTestJSON-1242830333 tempest-ServerAddressesTestJSON-1242830333-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 33.612s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2126.831943] env[63279]: DEBUG oslo_concurrency.lockutils [None req-70205c4c-5bd3-44be-b3d5-46d8934028a1 tempest-ServerAddressesTestJSON-1242830333 tempest-ServerAddressesTestJSON-1242830333-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2126.833529] env[63279]: DEBUG oslo_concurrency.lockutils [None req-1cc9ff1f-1d62-43ca-8885-080f467e6ec3 tempest-ServerShowV254Test-1528218800 
tempest-ServerShowV254Test-1528218800-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 32.517s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2126.833644] env[63279]: DEBUG oslo_concurrency.lockutils [None req-1cc9ff1f-1d62-43ca-8885-080f467e6ec3 tempest-ServerShowV254Test-1528218800 tempest-ServerShowV254Test-1528218800-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2126.835252] env[63279]: DEBUG oslo_concurrency.lockutils [None req-88e1e34d-1334-4ce4-a237-c76a589e2359 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 30.952s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2126.838992] env[63279]: INFO nova.compute.claims [None req-88e1e34d-1334-4ce4-a237-c76a589e2359 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: d6e40dbc-f20e-4164-b460-18de6ea72906] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2126.862397] env[63279]: DEBUG oslo_vmware.api [None req-703b4c29-4773-4b34-915e-6bee4bb47aad tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52390b1a-bc7a-b133-af47-6c85f48ff372, 'name': SearchDatastore_Task, 'duration_secs': 0.009137} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2126.862853] env[63279]: DEBUG oslo_concurrency.lockutils [None req-703b4c29-4773-4b34-915e-6bee4bb47aad tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2126.864029] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-703b4c29-4773-4b34-915e-6bee4bb47aad tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] [instance: a6758131-030e-4b33-a2c9-8864055a5bec] Processing image 30887889-e45b-4f67-8b3c-16216e594a90 {{(pid=63279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2126.864029] env[63279]: DEBUG oslo_concurrency.lockutils [None req-703b4c29-4773-4b34-915e-6bee4bb47aad tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2126.864029] env[63279]: DEBUG oslo_concurrency.lockutils [None req-703b4c29-4773-4b34-915e-6bee4bb47aad tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2126.864029] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-703b4c29-4773-4b34-915e-6bee4bb47aad tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2126.864338] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f2579612-71ee-4dd1-8ab4-a54442f051ad {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2126.868699] env[63279]: INFO nova.scheduler.client.report [None req-f4f94911-762d-4d53-97a7-f3731ce38593 tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] Deleted allocations for instance df963c29-a1c4-4f28-be95-cafe3af4d2fa [ 2126.878446] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-703b4c29-4773-4b34-915e-6bee4bb47aad tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2126.878446] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-703b4c29-4773-4b34-915e-6bee4bb47aad tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2126.880759] env[63279]: INFO nova.scheduler.client.report [None req-70205c4c-5bd3-44be-b3d5-46d8934028a1 tempest-ServerAddressesTestJSON-1242830333 tempest-ServerAddressesTestJSON-1242830333-project-member] Deleted allocations for instance 246f0945-7290-4cb7-a982-b17cb1573002 [ 2126.889495] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bf2c961c-dd3e-48f1-a363-147607bb6237 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2126.897335] env[63279]: DEBUG oslo_vmware.api [None req-703b4c29-4773-4b34-915e-6bee4bb47aad tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Waiting for the task: (returnval){ [ 2126.897335] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]524df0e0-3d18-d402-6f73-b81368e8cb08" [ 2126.897335] env[63279]: _type = "Task" [ 2126.897335] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2126.902117] env[63279]: INFO nova.scheduler.client.report [None req-115ccca1-b77d-4cbf-8091-6f2f334a342f tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Deleted allocations for instance c1ac4af5-b01e-4175-844f-7a67b2ef7526 [ 2126.912976] env[63279]: DEBUG oslo_vmware.api [None req-703b4c29-4773-4b34-915e-6bee4bb47aad tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]524df0e0-3d18-d402-6f73-b81368e8cb08, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2126.914117] env[63279]: INFO nova.scheduler.client.report [None req-1cc9ff1f-1d62-43ca-8885-080f467e6ec3 tempest-ServerShowV254Test-1528218800 tempest-ServerShowV254Test-1528218800-project-member] Deleted allocations for instance 4871421f-0015-4973-bb5f-c9042d411c82 [ 2127.002789] env[63279]: DEBUG oslo_concurrency.lockutils [None req-a2668d1a-4c43-4202-b04d-63b7f5aec853 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2127.021070] env[63279]: DEBUG nova.compute.manager [req-e7d6b695-8c2f-4b38-9dd9-289f6e4d4114 req-fa38ec14-ca05-4f95-b67f-8dd0daf0d60e service nova] [instance: f6a5d157-d58c-4c7e-b6e8-f2dc7aaebd9b] Received event network-vif-deleted-cbbfde33-a0b6-4403-8a1e-d688a0a7147b {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2127.021329] env[63279]: INFO nova.compute.manager [req-e7d6b695-8c2f-4b38-9dd9-289f6e4d4114 req-fa38ec14-ca05-4f95-b67f-8dd0daf0d60e service nova] [instance: f6a5d157-d58c-4c7e-b6e8-f2dc7aaebd9b] Neutron deleted interface cbbfde33-a0b6-4403-8a1e-d688a0a7147b; detaching it from the instance and deleting it from the info cache [ 2127.021540] env[63279]: DEBUG nova.network.neutron [req-e7d6b695-8c2f-4b38-9dd9-289f6e4d4114 req-fa38ec14-ca05-4f95-b67f-8dd0daf0d60e service nova] [instance: f6a5d157-d58c-4c7e-b6e8-f2dc7aaebd9b] Updating instance_info_cache with network_info: [] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2127.133517] env[63279]: DEBUG oslo_concurrency.lockutils [None req-82e40318-dbac-4e6b-8658-00dbc2129519 tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] Acquiring lock "ee1b4746-49ac-425c-8219-4d54cb34abe0" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2127.134517] env[63279]: DEBUG oslo_concurrency.lockutils [None req-82e40318-dbac-4e6b-8658-00dbc2129519 tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] Lock "ee1b4746-49ac-425c-8219-4d54cb34abe0" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2127.134517] env[63279]: DEBUG oslo_concurrency.lockutils [None req-82e40318-dbac-4e6b-8658-00dbc2129519 tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] Acquiring lock "ee1b4746-49ac-425c-8219-4d54cb34abe0-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2127.134517] env[63279]: DEBUG oslo_concurrency.lockutils [None req-82e40318-dbac-4e6b-8658-00dbc2129519 tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] Lock "ee1b4746-49ac-425c-8219-4d54cb34abe0-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63279) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2127.134517] env[63279]: DEBUG oslo_concurrency.lockutils [None req-82e40318-dbac-4e6b-8658-00dbc2129519 tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] Lock "ee1b4746-49ac-425c-8219-4d54cb34abe0-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2127.138205] env[63279]: INFO nova.compute.manager [None req-82e40318-dbac-4e6b-8658-00dbc2129519 tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] [instance: ee1b4746-49ac-425c-8219-4d54cb34abe0] Terminating instance [ 2127.313036] env[63279]: DEBUG nova.network.neutron [req-e02e82b3-79cd-42eb-8fcf-d1f72b00f08a req-6b4fadfd-bbbd-4e14-97fc-68d6a019c139 service nova] [instance: ee1b4746-49ac-425c-8219-4d54cb34abe0] Updated VIF entry in instance network info cache for port a7b4791a-48da-4f2c-a8f0-debeceec3225. {{(pid=63279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2127.313422] env[63279]: DEBUG nova.network.neutron [req-e02e82b3-79cd-42eb-8fcf-d1f72b00f08a req-6b4fadfd-bbbd-4e14-97fc-68d6a019c139 service nova] [instance: ee1b4746-49ac-425c-8219-4d54cb34abe0] Updating instance_info_cache with network_info: [{"id": "a7b4791a-48da-4f2c-a8f0-debeceec3225", "address": "fa:16:3e:6d:22:31", "network": {"id": "9cfd684f-63e8-44aa-8569-0ab02c790458", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-318205966-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4f07bad15ea5419cbecc5840b4e96d01", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dcf5c3f7-4e33-4f21-b323-3673930b789c", "external-id": "nsx-vlan-transportzone-983", "segmentation_id": 983, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa7b4791a-48", "ovs_interfaceid": "a7b4791a-48da-4f2c-a8f0-debeceec3225", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2127.320181] env[63279]: DEBUG oslo_vmware.api [None req-e1db163f-06c3-4493-ba10-345607e865db tempest-ServerMetadataTestJSON-1974700823 tempest-ServerMetadataTestJSON-1974700823-project-member] Task: {'id': task-2087571, 'name': PowerOffVM_Task, 'duration_secs': 0.212863} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2127.320181] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-e1db163f-06c3-4493-ba10-345607e865db tempest-ServerMetadataTestJSON-1974700823 tempest-ServerMetadataTestJSON-1974700823-project-member] [instance: a0697601-46ae-48ce-a3e1-3c4b81fc1f95] Powered off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2127.320181] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-e1db163f-06c3-4493-ba10-345607e865db tempest-ServerMetadataTestJSON-1974700823 tempest-ServerMetadataTestJSON-1974700823-project-member] [instance: a0697601-46ae-48ce-a3e1-3c4b81fc1f95] Unregistering the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2127.320181] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c9cf214c-5c6b-4439-aa46-bc89c879e79e {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2127.324435] env[63279]: DEBUG oslo_vmware.api [None req-d83e173e-3a00-46ae-8eed-94284cfc23f8 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Task: {'id': task-2087572, 'name': PowerOnVM_Task, 'duration_secs': 0.462915} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2127.325544] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-d83e173e-3a00-46ae-8eed-94284cfc23f8 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] [instance: 4ce17bdb-4bed-4e06-af13-e4097b55e17d] Powered on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2127.325747] env[63279]: INFO nova.compute.manager [None req-d83e173e-3a00-46ae-8eed-94284cfc23f8 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] [instance: 4ce17bdb-4bed-4e06-af13-e4097b55e17d] Took 7.40 seconds to spawn the instance on the hypervisor. 
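Annotation: the "Invoking <method>_Task", "Waiting for the task: (returnval){...}", "progress is N%" and "completed successfully" records above all come from oslo.vmware's asynchronous task handling: a vSphere *_Task method is invoked through the API session, and wait_for_task() polls it until it finishes. A minimal sketch of that call/poll pattern follows; the vCenter host, credentials and managed-object reference are placeholders and are not taken from this log.

    # Sketch only: the invoke/poll pattern behind the task records above.
    from oslo_vmware import api, vim_util

    # Placeholder connection details; Nova builds its session from nova.conf.
    session = api.VMwareAPISession(
        'vcenter.example.org', 'user', 'password',
        api_retry_count=10, task_poll_interval=0.5)

    # Hypothetical managed-object reference for a VM.
    vm_ref = vim_util.get_moref('vm-12345', 'VirtualMachine')

    # Start the asynchronous vCenter task and block until it completes;
    # wait_for_task() is the polling loop that emits the periodic
    # "progress is N%" and final "completed successfully" debug lines.
    task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
    session.wait_for_task(task)
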
[ 2127.325942] env[63279]: DEBUG nova.compute.manager [None req-d83e173e-3a00-46ae-8eed-94284cfc23f8 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] [instance: 4ce17bdb-4bed-4e06-af13-e4097b55e17d] Checking state {{(pid=63279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2127.328027] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df6d1dd6-b347-4139-be05-afd043b27123 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2127.343785] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] [instance: ff9701ed-d545-44b4-911a-c4d809d0a771] Instance has had 0 of 5 cleanup attempts {{(pid=63279) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11760}} [ 2127.349018] env[63279]: DEBUG nova.network.neutron [-] [instance: f6a5d157-d58c-4c7e-b6e8-f2dc7aaebd9b] Updating instance_info_cache with network_info: [] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2127.398077] env[63279]: DEBUG oslo_concurrency.lockutils [None req-f4f94911-762d-4d53-97a7-f3731ce38593 tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] Lock "df963c29-a1c4-4f28-be95-cafe3af4d2fa" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 42.670s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2127.399423] env[63279]: DEBUG oslo_concurrency.lockutils [None req-70205c4c-5bd3-44be-b3d5-46d8934028a1 tempest-ServerAddressesTestJSON-1242830333 tempest-ServerAddressesTestJSON-1242830333-project-member] Lock "246f0945-7290-4cb7-a982-b17cb1573002" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 38.372s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2127.413872] env[63279]: DEBUG oslo_vmware.api [None req-703b4c29-4773-4b34-915e-6bee4bb47aad tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]524df0e0-3d18-d402-6f73-b81368e8cb08, 'name': SearchDatastore_Task, 'duration_secs': 0.013176} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2127.415142] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d69d8024-035b-4224-810d-1bc16bb5d645 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2127.420225] env[63279]: DEBUG oslo_concurrency.lockutils [None req-115ccca1-b77d-4cbf-8091-6f2f334a342f tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Lock "c1ac4af5-b01e-4175-844f-7a67b2ef7526" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 39.042s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2127.426316] env[63279]: DEBUG oslo_vmware.api [None req-703b4c29-4773-4b34-915e-6bee4bb47aad tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Waiting for the task: (returnval){ [ 2127.426316] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52accfc5-7bc7-3d2b-098a-0f1e7fd321b5" [ 2127.426316] env[63279]: _type = "Task" [ 2127.426316] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2127.429130] env[63279]: DEBUG oslo_concurrency.lockutils [None req-1cc9ff1f-1d62-43ca-8885-080f467e6ec3 tempest-ServerShowV254Test-1528218800 tempest-ServerShowV254Test-1528218800-project-member] Lock "4871421f-0015-4973-bb5f-c9042d411c82" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 38.368s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2127.436214] env[63279]: DEBUG oslo_vmware.api [None req-703b4c29-4773-4b34-915e-6bee4bb47aad tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52accfc5-7bc7-3d2b-098a-0f1e7fd321b5, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2127.516241] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-e1db163f-06c3-4493-ba10-345607e865db tempest-ServerMetadataTestJSON-1974700823 tempest-ServerMetadataTestJSON-1974700823-project-member] [instance: a0697601-46ae-48ce-a3e1-3c4b81fc1f95] Unregistered the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2127.516560] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-e1db163f-06c3-4493-ba10-345607e865db tempest-ServerMetadataTestJSON-1974700823 tempest-ServerMetadataTestJSON-1974700823-project-member] [instance: a0697601-46ae-48ce-a3e1-3c4b81fc1f95] Deleting contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2127.517260] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-e1db163f-06c3-4493-ba10-345607e865db tempest-ServerMetadataTestJSON-1974700823 tempest-ServerMetadataTestJSON-1974700823-project-member] Deleting the datastore file [datastore1] a0697601-46ae-48ce-a3e1-3c4b81fc1f95 {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2127.517260] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-be9feec1-1930-4467-85fa-a9c39be16d19 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2127.525047] env[63279]: DEBUG oslo_vmware.api [None req-e1db163f-06c3-4493-ba10-345607e865db tempest-ServerMetadataTestJSON-1974700823 tempest-ServerMetadataTestJSON-1974700823-project-member] Waiting for the task: (returnval){ [ 2127.525047] env[63279]: value = "task-2087574" [ 2127.525047] env[63279]: _type = "Task" [ 2127.525047] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2127.525047] env[63279]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-11ced372-d5e1-405e-9ec9-a5b81a54a443 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2127.535241] env[63279]: DEBUG oslo_vmware.api [None req-e1db163f-06c3-4493-ba10-345607e865db tempest-ServerMetadataTestJSON-1974700823 tempest-ServerMetadataTestJSON-1974700823-project-member] Task: {'id': task-2087574, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2127.536420] env[63279]: DEBUG nova.network.neutron [-] [instance: 668436f9-94e9-48c2-a3d4-3df7bbcf5775] Updating instance_info_cache with network_info: [] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2127.540646] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82afee18-ade8-4bbf-b0b4-77ef2f005d40 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2127.552278] env[63279]: INFO nova.compute.manager [-] [instance: 668436f9-94e9-48c2-a3d4-3df7bbcf5775] Took 0.90 seconds to deallocate network for instance. 
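Annotation: the "Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return" records above are logged by an oslo.service looping call that retries network deallocation until it succeeds or gives up. The snippet below is an illustrative retry loop in that style, not Nova's actual implementation; the deallocate() helper and the retry budget are hypothetical.

    # Illustrative oslo.service looping-call retry; not Nova's real code.
    from oslo_service import loopingcall

    def deallocate():
        """Hypothetical stand-in for the per-instance network teardown."""

    def deallocate_with_retries(max_attempts=3):
        attempts = {'count': 0}

        def _try_once():
            attempts['count'] += 1
            try:
                deallocate()
            except Exception:
                if attempts['count'] >= max_attempts:
                    raise
                return                             # retry on the next interval
            raise loopingcall.LoopingCallDone()    # success: stop looping

        timer = loopingcall.FixedIntervalLoopingCall(_try_once)
        # start().wait() blocks until LoopingCallDone is raised; the
        # looping-call machinery logs the "Waiting for function ... to
        # return" line seen in the records above while this wait is pending.
        timer.start(interval=1).wait()
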
[ 2127.583441] env[63279]: DEBUG nova.compute.manager [req-e7d6b695-8c2f-4b38-9dd9-289f6e4d4114 req-fa38ec14-ca05-4f95-b67f-8dd0daf0d60e service nova] [instance: f6a5d157-d58c-4c7e-b6e8-f2dc7aaebd9b] Detach interface failed, port_id=cbbfde33-a0b6-4403-8a1e-d688a0a7147b, reason: Instance f6a5d157-d58c-4c7e-b6e8-f2dc7aaebd9b could not be found. {{(pid=63279) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11484}} [ 2127.642158] env[63279]: DEBUG nova.compute.manager [None req-82e40318-dbac-4e6b-8658-00dbc2129519 tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] [instance: ee1b4746-49ac-425c-8219-4d54cb34abe0] Start destroying the instance on the hypervisor. {{(pid=63279) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2127.642441] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-82e40318-dbac-4e6b-8658-00dbc2129519 tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] [instance: ee1b4746-49ac-425c-8219-4d54cb34abe0] Destroying instance {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2127.643769] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-727c878d-8f39-4d1a-827e-fdea9184edd9 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2127.654733] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-82e40318-dbac-4e6b-8658-00dbc2129519 tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] [instance: ee1b4746-49ac-425c-8219-4d54cb34abe0] Powering off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2127.655016] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-abd1aa42-d97b-48c2-b152-2f53775defdb {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2127.661993] env[63279]: DEBUG oslo_vmware.api [None req-82e40318-dbac-4e6b-8658-00dbc2129519 tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] Waiting for the task: (returnval){ [ 2127.661993] env[63279]: value = "task-2087575" [ 2127.661993] env[63279]: _type = "Task" [ 2127.661993] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2127.670480] env[63279]: DEBUG oslo_vmware.api [None req-82e40318-dbac-4e6b-8658-00dbc2129519 tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] Task: {'id': task-2087575, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2127.820855] env[63279]: DEBUG oslo_concurrency.lockutils [req-e02e82b3-79cd-42eb-8fcf-d1f72b00f08a req-6b4fadfd-bbbd-4e14-97fc-68d6a019c139 service nova] Releasing lock "refresh_cache-ee1b4746-49ac-425c-8219-4d54cb34abe0" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2127.844142] env[63279]: INFO nova.compute.manager [None req-d83e173e-3a00-46ae-8eed-94284cfc23f8 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] [instance: 4ce17bdb-4bed-4e06-af13-e4097b55e17d] Took 44.99 seconds to build instance. 
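Annotation: the interleaved "Acquiring lock ...", "acquired ... :: waited Ns" and ""released" ... :: held Ns" records above come from oslo.concurrency's lockutils, which Nova uses both as a decorator (for example around resource-tracker updates) and as a context manager keyed on an instance UUID. A minimal sketch of both forms, with illustrative names rather than Nova's actual functions:

    # Sketch of the two oslo.concurrency locking forms seen in the log.
    from oslo_concurrency import lockutils

    @lockutils.synchronized('compute_resources')
    def update_usage():
        """Runs with the in-process 'compute_resources' lock held; lockutils
        logs how long the caller waited for and then held the lock."""

    def locked_instance_op(instance_uuid):
        # Serialize work on a single instance by locking on its UUID,
        # the same pattern as the per-instance locks in the records above.
        with lockutils.lock(instance_uuid):
            pass
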
[ 2127.851490] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] [instance: 64e92bfc-c0d0-4918-9ba2-45ffedbf7e39] Instance has had 0 of 5 cleanup attempts {{(pid=63279) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11760}} [ 2127.852188] env[63279]: INFO nova.compute.manager [-] [instance: f6a5d157-d58c-4c7e-b6e8-f2dc7aaebd9b] Took 1.41 seconds to deallocate network for instance. [ 2127.942321] env[63279]: DEBUG oslo_vmware.api [None req-703b4c29-4773-4b34-915e-6bee4bb47aad tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52accfc5-7bc7-3d2b-098a-0f1e7fd321b5, 'name': SearchDatastore_Task, 'duration_secs': 0.00954} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2127.945326] env[63279]: DEBUG oslo_concurrency.lockutils [None req-703b4c29-4773-4b34-915e-6bee4bb47aad tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2127.945777] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-703b4c29-4773-4b34-915e-6bee4bb47aad tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] a6758131-030e-4b33-a2c9-8864055a5bec/a6758131-030e-4b33-a2c9-8864055a5bec.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2127.946696] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b6fe2cdc-a093-498f-a171-66a3f5459e68 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2127.956021] env[63279]: DEBUG oslo_vmware.api [None req-703b4c29-4773-4b34-915e-6bee4bb47aad tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Waiting for the task: (returnval){ [ 2127.956021] env[63279]: value = "task-2087576" [ 2127.956021] env[63279]: _type = "Task" [ 2127.956021] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2127.980624] env[63279]: DEBUG oslo_vmware.api [None req-703b4c29-4773-4b34-915e-6bee4bb47aad tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Task: {'id': task-2087576, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2128.044548] env[63279]: DEBUG oslo_vmware.api [None req-e1db163f-06c3-4493-ba10-345607e865db tempest-ServerMetadataTestJSON-1974700823 tempest-ServerMetadataTestJSON-1974700823-project-member] Task: {'id': task-2087574, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.182935} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2128.044548] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-e1db163f-06c3-4493-ba10-345607e865db tempest-ServerMetadataTestJSON-1974700823 tempest-ServerMetadataTestJSON-1974700823-project-member] Deleted the datastore file {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2128.044548] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-e1db163f-06c3-4493-ba10-345607e865db tempest-ServerMetadataTestJSON-1974700823 tempest-ServerMetadataTestJSON-1974700823-project-member] [instance: a0697601-46ae-48ce-a3e1-3c4b81fc1f95] Deleted contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2128.044548] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-e1db163f-06c3-4493-ba10-345607e865db tempest-ServerMetadataTestJSON-1974700823 tempest-ServerMetadataTestJSON-1974700823-project-member] [instance: a0697601-46ae-48ce-a3e1-3c4b81fc1f95] Instance destroyed {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2128.044548] env[63279]: INFO nova.compute.manager [None req-e1db163f-06c3-4493-ba10-345607e865db tempest-ServerMetadataTestJSON-1974700823 tempest-ServerMetadataTestJSON-1974700823-project-member] [instance: a0697601-46ae-48ce-a3e1-3c4b81fc1f95] Took 1.27 seconds to destroy the instance on the hypervisor. [ 2128.044923] env[63279]: DEBUG oslo.service.loopingcall [None req-e1db163f-06c3-4493-ba10-345607e865db tempest-ServerMetadataTestJSON-1974700823 tempest-ServerMetadataTestJSON-1974700823-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2128.044923] env[63279]: DEBUG nova.compute.manager [-] [instance: a0697601-46ae-48ce-a3e1-3c4b81fc1f95] Deallocating network for instance {{(pid=63279) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2128.044923] env[63279]: DEBUG nova.network.neutron [-] [instance: a0697601-46ae-48ce-a3e1-3c4b81fc1f95] deallocate_for_instance() {{(pid=63279) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2128.061031] env[63279]: DEBUG oslo_concurrency.lockutils [None req-801dfe63-15ee-4acc-bd55-4bd47afbcfa8 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2128.136484] env[63279]: DEBUG oslo_concurrency.lockutils [None req-308f424e-23d3-4823-9772-097e3a23627d tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] Acquiring lock "6699de0a-b3f8-4d84-9c9b-d0f6899a606e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2128.136760] env[63279]: DEBUG oslo_concurrency.lockutils [None req-308f424e-23d3-4823-9772-097e3a23627d tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] Lock "6699de0a-b3f8-4d84-9c9b-d0f6899a606e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2128.137027] env[63279]: DEBUG oslo_concurrency.lockutils [None req-308f424e-23d3-4823-9772-097e3a23627d tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] Acquiring lock "6699de0a-b3f8-4d84-9c9b-d0f6899a606e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2128.137197] env[63279]: DEBUG oslo_concurrency.lockutils [None req-308f424e-23d3-4823-9772-097e3a23627d tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] Lock "6699de0a-b3f8-4d84-9c9b-d0f6899a606e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2128.137452] env[63279]: DEBUG oslo_concurrency.lockutils [None req-308f424e-23d3-4823-9772-097e3a23627d tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] Lock "6699de0a-b3f8-4d84-9c9b-d0f6899a606e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2128.143404] env[63279]: INFO nova.compute.manager [None req-308f424e-23d3-4823-9772-097e3a23627d tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] [instance: 6699de0a-b3f8-4d84-9c9b-d0f6899a606e] Terminating instance [ 2128.174205] env[63279]: DEBUG oslo_vmware.api [None 
req-82e40318-dbac-4e6b-8658-00dbc2129519 tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] Task: {'id': task-2087575, 'name': PowerOffVM_Task, 'duration_secs': 0.362635} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2128.175459] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-82e40318-dbac-4e6b-8658-00dbc2129519 tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] [instance: ee1b4746-49ac-425c-8219-4d54cb34abe0] Powered off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2128.175459] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-82e40318-dbac-4e6b-8658-00dbc2129519 tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] [instance: ee1b4746-49ac-425c-8219-4d54cb34abe0] Unregistering the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2128.175459] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b1432dc3-69e8-4f14-9328-ca8b01442b96 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2128.350334] env[63279]: DEBUG oslo_concurrency.lockutils [None req-d83e173e-3a00-46ae-8eed-94284cfc23f8 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Lock "4ce17bdb-4bed-4e06-af13-e4097b55e17d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 46.508s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2128.354469] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] [instance: 8ccb4293-927a-45ba-82e9-9f1b4d5985cc] Instance has had 0 of 5 cleanup attempts {{(pid=63279) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11760}} [ 2128.365236] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b4456497-7827-48a8-a6eb-7caa57d3bb09 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2128.386027] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-82e40318-dbac-4e6b-8658-00dbc2129519 tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] [instance: ee1b4746-49ac-425c-8219-4d54cb34abe0] Unregistered the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2128.386027] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-82e40318-dbac-4e6b-8658-00dbc2129519 tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] [instance: ee1b4746-49ac-425c-8219-4d54cb34abe0] Deleting contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2128.386027] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-82e40318-dbac-4e6b-8658-00dbc2129519 tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] Deleting the datastore file [datastore1] ee1b4746-49ac-425c-8219-4d54cb34abe0 {{(pid=63279) 
file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2128.386027] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-fb46cbd2-8f05-4c1a-9b21-caef497f5c57 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2128.399153] env[63279]: DEBUG oslo_vmware.api [None req-82e40318-dbac-4e6b-8658-00dbc2129519 tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] Waiting for the task: (returnval){ [ 2128.399153] env[63279]: value = "task-2087578" [ 2128.399153] env[63279]: _type = "Task" [ 2128.399153] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2128.407057] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f390f75-4f88-46d9-ada9-6a3320a80f24 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2128.414931] env[63279]: DEBUG oslo_vmware.api [None req-82e40318-dbac-4e6b-8658-00dbc2129519 tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] Task: {'id': task-2087578, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2128.421676] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9eb9339c-2211-48bd-b9e2-e3e1878109e8 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2128.457432] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b262cee6-1fb8-460f-b199-22884464898f {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2128.460819] env[63279]: DEBUG oslo_concurrency.lockutils [None req-403f6708-872b-40c5-9d44-2368cd15b0f1 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Acquiring lock "4a9088e0-2992-4b18-8be9-6bc70633369b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2128.461071] env[63279]: DEBUG oslo_concurrency.lockutils [None req-403f6708-872b-40c5-9d44-2368cd15b0f1 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Lock "4a9088e0-2992-4b18-8be9-6bc70633369b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2128.461285] env[63279]: DEBUG oslo_concurrency.lockutils [None req-403f6708-872b-40c5-9d44-2368cd15b0f1 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Acquiring lock "4a9088e0-2992-4b18-8be9-6bc70633369b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2128.461499] env[63279]: DEBUG oslo_concurrency.lockutils [None req-403f6708-872b-40c5-9d44-2368cd15b0f1 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] 
Lock "4a9088e0-2992-4b18-8be9-6bc70633369b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2128.461682] env[63279]: DEBUG oslo_concurrency.lockutils [None req-403f6708-872b-40c5-9d44-2368cd15b0f1 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Lock "4a9088e0-2992-4b18-8be9-6bc70633369b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2128.466200] env[63279]: INFO nova.compute.manager [None req-403f6708-872b-40c5-9d44-2368cd15b0f1 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] [instance: 4a9088e0-2992-4b18-8be9-6bc70633369b] Terminating instance [ 2128.474444] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a175abe-d283-4ffa-8a20-61c10c92cec0 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2128.486072] env[63279]: DEBUG oslo_vmware.api [None req-703b4c29-4773-4b34-915e-6bee4bb47aad tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Task: {'id': task-2087576, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.48731} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2128.495521] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-703b4c29-4773-4b34-915e-6bee4bb47aad tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] a6758131-030e-4b33-a2c9-8864055a5bec/a6758131-030e-4b33-a2c9-8864055a5bec.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2128.495814] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-703b4c29-4773-4b34-915e-6bee4bb47aad tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] [instance: a6758131-030e-4b33-a2c9-8864055a5bec] Extending root virtual disk to 1048576 {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2128.497311] env[63279]: DEBUG nova.compute.provider_tree [None req-88e1e34d-1334-4ce4-a237-c76a589e2359 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Inventory has not changed in ProviderTree for provider: 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2128.498689] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-93f7d7ba-2104-449d-938b-9a87c0a6e3c7 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2128.506425] env[63279]: DEBUG oslo_vmware.api [None req-703b4c29-4773-4b34-915e-6bee4bb47aad tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Waiting for the task: (returnval){ [ 2128.506425] env[63279]: value = 
"task-2087579" [ 2128.506425] env[63279]: _type = "Task" [ 2128.506425] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2128.516013] env[63279]: DEBUG oslo_vmware.api [None req-703b4c29-4773-4b34-915e-6bee4bb47aad tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Task: {'id': task-2087579, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2128.647228] env[63279]: DEBUG oslo_concurrency.lockutils [None req-308f424e-23d3-4823-9772-097e3a23627d tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] Acquiring lock "refresh_cache-6699de0a-b3f8-4d84-9c9b-d0f6899a606e" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2128.648910] env[63279]: DEBUG oslo_concurrency.lockutils [None req-308f424e-23d3-4823-9772-097e3a23627d tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] Acquired lock "refresh_cache-6699de0a-b3f8-4d84-9c9b-d0f6899a606e" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2128.648910] env[63279]: DEBUG nova.network.neutron [None req-308f424e-23d3-4823-9772-097e3a23627d tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] [instance: 6699de0a-b3f8-4d84-9c9b-d0f6899a606e] Building network info cache for instance {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2128.843306] env[63279]: DEBUG nova.network.neutron [-] [instance: a0697601-46ae-48ce-a3e1-3c4b81fc1f95] Updating instance_info_cache with network_info: [] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2128.856934] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] [instance: 58392790-b297-4894-8d81-e5cbda69872b] Instance has had 0 of 5 cleanup attempts {{(pid=63279) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11760}} [ 2128.907773] env[63279]: DEBUG oslo_vmware.api [None req-82e40318-dbac-4e6b-8658-00dbc2129519 tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] Task: {'id': task-2087578, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.167814} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2128.908060] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-82e40318-dbac-4e6b-8658-00dbc2129519 tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] Deleted the datastore file {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2128.908256] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-82e40318-dbac-4e6b-8658-00dbc2129519 tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] [instance: ee1b4746-49ac-425c-8219-4d54cb34abe0] Deleted contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2128.908441] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-82e40318-dbac-4e6b-8658-00dbc2129519 tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] [instance: ee1b4746-49ac-425c-8219-4d54cb34abe0] Instance destroyed {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2128.908685] env[63279]: INFO nova.compute.manager [None req-82e40318-dbac-4e6b-8658-00dbc2129519 tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] [instance: ee1b4746-49ac-425c-8219-4d54cb34abe0] Took 1.27 seconds to destroy the instance on the hypervisor. [ 2128.908860] env[63279]: DEBUG oslo.service.loopingcall [None req-82e40318-dbac-4e6b-8658-00dbc2129519 tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2128.909065] env[63279]: DEBUG nova.compute.manager [-] [instance: ee1b4746-49ac-425c-8219-4d54cb34abe0] Deallocating network for instance {{(pid=63279) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2128.909164] env[63279]: DEBUG nova.network.neutron [-] [instance: ee1b4746-49ac-425c-8219-4d54cb34abe0] deallocate_for_instance() {{(pid=63279) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2128.982960] env[63279]: DEBUG nova.compute.manager [None req-403f6708-872b-40c5-9d44-2368cd15b0f1 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] [instance: 4a9088e0-2992-4b18-8be9-6bc70633369b] Start destroying the instance on the hypervisor. 
{{(pid=63279) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2128.983084] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-403f6708-872b-40c5-9d44-2368cd15b0f1 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] [instance: 4a9088e0-2992-4b18-8be9-6bc70633369b] Destroying instance {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2128.983953] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fedfddbb-baa8-4564-a758-d0a141a34ea0 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2128.992282] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-403f6708-872b-40c5-9d44-2368cd15b0f1 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] [instance: 4a9088e0-2992-4b18-8be9-6bc70633369b] Powering off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2128.994449] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d482129c-0619-40bf-b84e-062717ca0f8a {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2129.001673] env[63279]: DEBUG oslo_vmware.api [None req-403f6708-872b-40c5-9d44-2368cd15b0f1 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Waiting for the task: (returnval){ [ 2129.001673] env[63279]: value = "task-2087580" [ 2129.001673] env[63279]: _type = "Task" [ 2129.001673] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2129.006229] env[63279]: DEBUG nova.scheduler.client.report [None req-88e1e34d-1334-4ce4-a237-c76a589e2359 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Inventory has not changed for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2129.017101] env[63279]: DEBUG oslo_vmware.api [None req-403f6708-872b-40c5-9d44-2368cd15b0f1 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Task: {'id': task-2087580, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2129.018796] env[63279]: DEBUG oslo_vmware.api [None req-703b4c29-4773-4b34-915e-6bee4bb47aad tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Task: {'id': task-2087579, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.070031} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2129.019064] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-703b4c29-4773-4b34-915e-6bee4bb47aad tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] [instance: a6758131-030e-4b33-a2c9-8864055a5bec] Extended root virtual disk {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2129.019939] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a842fde-943b-4798-8e32-20bc3b4d7844 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2129.044195] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-703b4c29-4773-4b34-915e-6bee4bb47aad tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] [instance: a6758131-030e-4b33-a2c9-8864055a5bec] Reconfiguring VM instance instance-0000004c to attach disk [datastore1] a6758131-030e-4b33-a2c9-8864055a5bec/a6758131-030e-4b33-a2c9-8864055a5bec.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2129.045192] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6ad6200f-d6aa-4d43-8553-68817e30f9d2 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2129.066384] env[63279]: DEBUG oslo_vmware.api [None req-703b4c29-4773-4b34-915e-6bee4bb47aad tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Waiting for the task: (returnval){ [ 2129.066384] env[63279]: value = "task-2087581" [ 2129.066384] env[63279]: _type = "Task" [ 2129.066384] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2129.078433] env[63279]: DEBUG oslo_vmware.api [None req-703b4c29-4773-4b34-915e-6bee4bb47aad tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Task: {'id': task-2087581, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2129.084616] env[63279]: DEBUG nova.compute.manager [req-8431688e-f513-4a53-9dfc-307ee049f3eb req-27089a3c-a196-4bfe-848e-2efe15ccff73 service nova] [instance: 668436f9-94e9-48c2-a3d4-3df7bbcf5775] Received event network-vif-deleted-fa1d70e2-1628-429b-a1b6-a29d406767dd {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2129.084696] env[63279]: DEBUG nova.compute.manager [req-8431688e-f513-4a53-9dfc-307ee049f3eb req-27089a3c-a196-4bfe-848e-2efe15ccff73 service nova] [instance: a0697601-46ae-48ce-a3e1-3c4b81fc1f95] Received event network-vif-deleted-d7d74026-baa1-4909-9860-a7d5ebe08169 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2129.192187] env[63279]: DEBUG nova.network.neutron [None req-308f424e-23d3-4823-9772-097e3a23627d tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] [instance: 6699de0a-b3f8-4d84-9c9b-d0f6899a606e] Instance cache missing network info. 
{{(pid=63279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2129.346350] env[63279]: INFO nova.compute.manager [-] [instance: a0697601-46ae-48ce-a3e1-3c4b81fc1f95] Took 1.30 seconds to deallocate network for instance. [ 2129.359841] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] [instance: fb124cfa-24b4-4712-b8cc-c87df5d6231b] Instance has had 0 of 5 cleanup attempts {{(pid=63279) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11760}} [ 2129.511126] env[63279]: DEBUG oslo_vmware.api [None req-403f6708-872b-40c5-9d44-2368cd15b0f1 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Task: {'id': task-2087580, 'name': PowerOffVM_Task, 'duration_secs': 0.186104} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2129.511877] env[63279]: DEBUG oslo_concurrency.lockutils [None req-88e1e34d-1334-4ce4-a237-c76a589e2359 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.677s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2129.512448] env[63279]: DEBUG nova.compute.manager [None req-88e1e34d-1334-4ce4-a237-c76a589e2359 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: d6e40dbc-f20e-4164-b460-18de6ea72906] Start building networks asynchronously for instance. {{(pid=63279) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 2129.519242] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-403f6708-872b-40c5-9d44-2368cd15b0f1 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] [instance: 4a9088e0-2992-4b18-8be9-6bc70633369b] Powered off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2129.519242] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-403f6708-872b-40c5-9d44-2368cd15b0f1 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] [instance: 4a9088e0-2992-4b18-8be9-6bc70633369b] Unregistering the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2129.519242] env[63279]: DEBUG oslo_concurrency.lockutils [None req-4110a38f-8529-4f8e-8573-c35840515d98 tempest-ServersTestBootFromVolume-1861685268 tempest-ServersTestBootFromVolume-1861685268-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 22.927s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2129.519242] env[63279]: INFO nova.compute.claims [None req-4110a38f-8529-4f8e-8573-c35840515d98 tempest-ServersTestBootFromVolume-1861685268 tempest-ServersTestBootFromVolume-1861685268-project-member] [instance: 861e4118-6134-40cf-91cb-865b6ee9f347] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2129.522754] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9f0dd8d3-57a1-40e6-8e23-538376688eb0 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2129.550763] env[63279]: DEBUG nova.network.neutron [None 
req-308f424e-23d3-4823-9772-097e3a23627d tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] [instance: 6699de0a-b3f8-4d84-9c9b-d0f6899a606e] Updating instance_info_cache with network_info: [] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2129.576510] env[63279]: DEBUG oslo_vmware.api [None req-703b4c29-4773-4b34-915e-6bee4bb47aad tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Task: {'id': task-2087581, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2129.854013] env[63279]: DEBUG oslo_concurrency.lockutils [None req-e1db163f-06c3-4493-ba10-345607e865db tempest-ServerMetadataTestJSON-1974700823 tempest-ServerMetadataTestJSON-1974700823-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2129.865789] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] [instance: 1b2ca21b-feea-4fc1-9ddc-99f144e4241a] Instance has had 0 of 5 cleanup attempts {{(pid=63279) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11760}} [ 2130.017770] env[63279]: DEBUG nova.compute.utils [None req-88e1e34d-1334-4ce4-a237-c76a589e2359 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Using /dev/sd instead of None {{(pid=63279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2130.019662] env[63279]: DEBUG nova.compute.manager [None req-88e1e34d-1334-4ce4-a237-c76a589e2359 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: d6e40dbc-f20e-4164-b460-18de6ea72906] Allocating IP information in the background. {{(pid=63279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 2130.019662] env[63279]: DEBUG nova.network.neutron [None req-88e1e34d-1334-4ce4-a237-c76a589e2359 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: d6e40dbc-f20e-4164-b460-18de6ea72906] allocate_for_instance() {{(pid=63279) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2130.053832] env[63279]: DEBUG oslo_concurrency.lockutils [None req-308f424e-23d3-4823-9772-097e3a23627d tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] Releasing lock "refresh_cache-6699de0a-b3f8-4d84-9c9b-d0f6899a606e" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2130.054320] env[63279]: DEBUG nova.compute.manager [None req-308f424e-23d3-4823-9772-097e3a23627d tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] [instance: 6699de0a-b3f8-4d84-9c9b-d0f6899a606e] Start destroying the instance on the hypervisor. 
{{(pid=63279) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2130.054580] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-308f424e-23d3-4823-9772-097e3a23627d tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] [instance: 6699de0a-b3f8-4d84-9c9b-d0f6899a606e] Destroying instance {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2130.055537] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b41c9bc6-6460-4eff-9544-a949c41983a8 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2130.060469] env[63279]: DEBUG nova.policy [None req-88e1e34d-1334-4ce4-a237-c76a589e2359 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '655d692da88947b89104e1f14f7d71f5', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a5a719a21fe248c49d0d0151d218866b', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63279) authorize /opt/stack/nova/nova/policy.py:192}} [ 2130.067381] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-308f424e-23d3-4823-9772-097e3a23627d tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] [instance: 6699de0a-b3f8-4d84-9c9b-d0f6899a606e] Powering off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2130.070591] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5a0e4c53-3e63-4eed-844d-f626f3a9d340 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2130.077503] env[63279]: DEBUG oslo_vmware.api [None req-703b4c29-4773-4b34-915e-6bee4bb47aad tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Task: {'id': task-2087581, 'name': ReconfigVM_Task, 'duration_secs': 0.796327} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2130.078847] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-703b4c29-4773-4b34-915e-6bee4bb47aad tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] [instance: a6758131-030e-4b33-a2c9-8864055a5bec] Reconfigured VM instance instance-0000004c to attach disk [datastore1] a6758131-030e-4b33-a2c9-8864055a5bec/a6758131-030e-4b33-a2c9-8864055a5bec.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2130.079548] env[63279]: DEBUG oslo_vmware.api [None req-308f424e-23d3-4823-9772-097e3a23627d tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] Waiting for the task: (returnval){ [ 2130.079548] env[63279]: value = "task-2087583" [ 2130.079548] env[63279]: _type = "Task" [ 2130.079548] env[63279]: } to complete. 
{{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2130.079793] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-2c9bf5fe-b783-4193-ba83-3998c5c5f530 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2130.089471] env[63279]: DEBUG oslo_vmware.api [None req-308f424e-23d3-4823-9772-097e3a23627d tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] Task: {'id': task-2087583, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2130.090845] env[63279]: DEBUG oslo_vmware.api [None req-703b4c29-4773-4b34-915e-6bee4bb47aad tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Waiting for the task: (returnval){ [ 2130.090845] env[63279]: value = "task-2087584" [ 2130.090845] env[63279]: _type = "Task" [ 2130.090845] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2130.098611] env[63279]: DEBUG oslo_vmware.api [None req-703b4c29-4773-4b34-915e-6bee4bb47aad tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Task: {'id': task-2087584, 'name': Rename_Task} progress is 5%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2130.103411] env[63279]: DEBUG nova.network.neutron [-] [instance: ee1b4746-49ac-425c-8219-4d54cb34abe0] Updating instance_info_cache with network_info: [] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2130.356408] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-403f6708-872b-40c5-9d44-2368cd15b0f1 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] [instance: 4a9088e0-2992-4b18-8be9-6bc70633369b] Unregistered the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2130.356487] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-403f6708-872b-40c5-9d44-2368cd15b0f1 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] [instance: 4a9088e0-2992-4b18-8be9-6bc70633369b] Deleting contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2130.357382] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-403f6708-872b-40c5-9d44-2368cd15b0f1 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Deleting the datastore file [datastore1] 4a9088e0-2992-4b18-8be9-6bc70633369b {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2130.357382] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-adec6cfb-cfa8-49e0-9c7c-1dce562fdb54 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2130.364196] env[63279]: DEBUG oslo_vmware.api [None req-403f6708-872b-40c5-9d44-2368cd15b0f1 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Waiting for the task: (returnval){ [ 2130.364196] env[63279]: value = "task-2087585" [ 2130.364196] env[63279]: _type = "Task" [ 2130.364196] env[63279]: } to 
complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2130.369404] env[63279]: DEBUG nova.network.neutron [None req-88e1e34d-1334-4ce4-a237-c76a589e2359 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: d6e40dbc-f20e-4164-b460-18de6ea72906] Successfully created port: 374ef31e-8b37-4d61-8e90-7ac4e4279a08 {{(pid=63279) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2130.371322] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] [instance: 32789822-cb54-43e7-beae-b5ed3002f4ad] Instance has had 0 of 5 cleanup attempts {{(pid=63279) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11760}} [ 2130.378445] env[63279]: DEBUG oslo_vmware.api [None req-403f6708-872b-40c5-9d44-2368cd15b0f1 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Task: {'id': task-2087585, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2130.523090] env[63279]: DEBUG nova.compute.manager [None req-88e1e34d-1334-4ce4-a237-c76a589e2359 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: d6e40dbc-f20e-4164-b460-18de6ea72906] Start building block device mappings for instance. {{(pid=63279) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 2130.595181] env[63279]: DEBUG oslo_vmware.api [None req-308f424e-23d3-4823-9772-097e3a23627d tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] Task: {'id': task-2087583, 'name': PowerOffVM_Task, 'duration_secs': 0.278187} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2130.595607] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-308f424e-23d3-4823-9772-097e3a23627d tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] [instance: 6699de0a-b3f8-4d84-9c9b-d0f6899a606e] Powered off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2130.595785] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-308f424e-23d3-4823-9772-097e3a23627d tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] [instance: 6699de0a-b3f8-4d84-9c9b-d0f6899a606e] Unregistering the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2130.596054] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0838ea56-90f7-4e06-a8ba-588a2b0e1837 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2130.602790] env[63279]: DEBUG oslo_vmware.api [None req-703b4c29-4773-4b34-915e-6bee4bb47aad tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Task: {'id': task-2087584, 'name': Rename_Task, 'duration_secs': 0.229295} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2130.604826] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-703b4c29-4773-4b34-915e-6bee4bb47aad tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] [instance: a6758131-030e-4b33-a2c9-8864055a5bec] Powering on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2130.605454] env[63279]: INFO nova.compute.manager [-] [instance: ee1b4746-49ac-425c-8219-4d54cb34abe0] Took 1.70 seconds to deallocate network for instance. [ 2130.605718] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e2135130-cca0-4abf-9533-a1f7be976f1e {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2130.613881] env[63279]: DEBUG oslo_vmware.api [None req-703b4c29-4773-4b34-915e-6bee4bb47aad tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Waiting for the task: (returnval){ [ 2130.613881] env[63279]: value = "task-2087587" [ 2130.613881] env[63279]: _type = "Task" [ 2130.613881] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2130.628582] env[63279]: DEBUG oslo_vmware.api [None req-703b4c29-4773-4b34-915e-6bee4bb47aad tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Task: {'id': task-2087587, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2130.630035] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-308f424e-23d3-4823-9772-097e3a23627d tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] [instance: 6699de0a-b3f8-4d84-9c9b-d0f6899a606e] Unregistered the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2130.630254] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-308f424e-23d3-4823-9772-097e3a23627d tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] [instance: 6699de0a-b3f8-4d84-9c9b-d0f6899a606e] Deleting contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2130.630440] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-308f424e-23d3-4823-9772-097e3a23627d tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] Deleting the datastore file [datastore1] 6699de0a-b3f8-4d84-9c9b-d0f6899a606e {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2130.630686] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-440e316d-0455-413d-879a-bf0853986d12 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2130.636739] env[63279]: DEBUG oslo_vmware.api [None req-308f424e-23d3-4823-9772-097e3a23627d tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] Waiting for the task: (returnval){ [ 2130.636739] env[63279]: value = "task-2087588" [ 2130.636739] env[63279]: _type = "Task" [ 2130.636739] env[63279]: } to complete. 
{{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2130.649998] env[63279]: DEBUG oslo_vmware.api [None req-308f424e-23d3-4823-9772-097e3a23627d tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] Task: {'id': task-2087588, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2130.876021] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] [instance: 79032b2a-74f7-4c6d-8f71-f848fe372ba2] Instance has had 0 of 5 cleanup attempts {{(pid=63279) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11760}} [ 2130.876873] env[63279]: DEBUG oslo_vmware.api [None req-403f6708-872b-40c5-9d44-2368cd15b0f1 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Task: {'id': task-2087585, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.185939} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2130.877475] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-403f6708-872b-40c5-9d44-2368cd15b0f1 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Deleted the datastore file {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2130.877475] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-403f6708-872b-40c5-9d44-2368cd15b0f1 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] [instance: 4a9088e0-2992-4b18-8be9-6bc70633369b] Deleted contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2130.877609] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-403f6708-872b-40c5-9d44-2368cd15b0f1 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] [instance: 4a9088e0-2992-4b18-8be9-6bc70633369b] Instance destroyed {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2130.877687] env[63279]: INFO nova.compute.manager [None req-403f6708-872b-40c5-9d44-2368cd15b0f1 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] [instance: 4a9088e0-2992-4b18-8be9-6bc70633369b] Took 1.89 seconds to destroy the instance on the hypervisor. [ 2130.877919] env[63279]: DEBUG oslo.service.loopingcall [None req-403f6708-872b-40c5-9d44-2368cd15b0f1 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2130.878304] env[63279]: DEBUG nova.compute.manager [-] [instance: 4a9088e0-2992-4b18-8be9-6bc70633369b] Deallocating network for instance {{(pid=63279) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2130.878395] env[63279]: DEBUG nova.network.neutron [-] [instance: 4a9088e0-2992-4b18-8be9-6bc70633369b] deallocate_for_instance() {{(pid=63279) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2130.921793] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2816b047-3b7c-4a6e-bb31-4a1164918a2b {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2130.929546] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34a72dec-1302-48c5-9365-313d4bfb634d {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2130.964292] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f23ec2e1-a1d7-4170-a6b6-c047509916de {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2130.972208] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da9a1490-8f81-45c3-8a9e-1e2ebbb2dc8b {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2130.988866] env[63279]: DEBUG nova.compute.provider_tree [None req-4110a38f-8529-4f8e-8573-c35840515d98 tempest-ServersTestBootFromVolume-1861685268 tempest-ServersTestBootFromVolume-1861685268-project-member] Inventory has not changed in ProviderTree for provider: 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2131.114875] env[63279]: DEBUG nova.compute.manager [req-273cfdd3-8463-40ca-9563-bcf9d53fff13 req-281f0fa3-0f73-495a-8d29-879e5e1c5349 service nova] [instance: ee1b4746-49ac-425c-8219-4d54cb34abe0] Received event network-vif-deleted-a7b4791a-48da-4f2c-a8f0-debeceec3225 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2131.115504] env[63279]: DEBUG oslo_concurrency.lockutils [None req-82e40318-dbac-4e6b-8658-00dbc2129519 tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2131.127219] env[63279]: DEBUG oslo_vmware.api [None req-703b4c29-4773-4b34-915e-6bee4bb47aad tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Task: {'id': task-2087587, 'name': PowerOnVM_Task} progress is 76%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2131.148290] env[63279]: DEBUG oslo_vmware.api [None req-308f424e-23d3-4823-9772-097e3a23627d tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] Task: {'id': task-2087588, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.126025} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2131.148290] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-308f424e-23d3-4823-9772-097e3a23627d tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] Deleted the datastore file {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2131.148290] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-308f424e-23d3-4823-9772-097e3a23627d tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] [instance: 6699de0a-b3f8-4d84-9c9b-d0f6899a606e] Deleted contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2131.148290] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-308f424e-23d3-4823-9772-097e3a23627d tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] [instance: 6699de0a-b3f8-4d84-9c9b-d0f6899a606e] Instance destroyed {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2131.148290] env[63279]: INFO nova.compute.manager [None req-308f424e-23d3-4823-9772-097e3a23627d tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] [instance: 6699de0a-b3f8-4d84-9c9b-d0f6899a606e] Took 1.09 seconds to destroy the instance on the hypervisor. [ 2131.148669] env[63279]: DEBUG oslo.service.loopingcall [None req-308f424e-23d3-4823-9772-097e3a23627d tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2131.148669] env[63279]: DEBUG nova.compute.manager [-] [instance: 6699de0a-b3f8-4d84-9c9b-d0f6899a606e] Deallocating network for instance {{(pid=63279) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2131.148669] env[63279]: DEBUG nova.network.neutron [-] [instance: 6699de0a-b3f8-4d84-9c9b-d0f6899a606e] deallocate_for_instance() {{(pid=63279) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2131.161561] env[63279]: DEBUG nova.network.neutron [-] [instance: 6699de0a-b3f8-4d84-9c9b-d0f6899a606e] Instance cache missing network info. 
{{(pid=63279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2131.379178] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] [instance: ad435281-55a0-418a-8400-5c461a5c15ef] Instance has had 0 of 5 cleanup attempts {{(pid=63279) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11760}} [ 2131.492708] env[63279]: DEBUG nova.scheduler.client.report [None req-4110a38f-8529-4f8e-8573-c35840515d98 tempest-ServersTestBootFromVolume-1861685268 tempest-ServersTestBootFromVolume-1861685268-project-member] Inventory has not changed for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2131.537318] env[63279]: DEBUG nova.compute.manager [None req-88e1e34d-1334-4ce4-a237-c76a589e2359 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: d6e40dbc-f20e-4164-b460-18de6ea72906] Start spawning the instance on the hypervisor. {{(pid=63279) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 2131.564426] env[63279]: DEBUG nova.virt.hardware [None req-88e1e34d-1334-4ce4-a237-c76a589e2359 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-13T17:30:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-13T17:30:01Z,direct_url=,disk_format='vmdk',id=30887889-e45b-4f67-8b3c-16216e594a90,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a935e86eee0a4c38adfe0367d2097a61',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-13T17:30:02Z,virtual_size=,visibility=), allow threads: False {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2131.564823] env[63279]: DEBUG nova.virt.hardware [None req-88e1e34d-1334-4ce4-a237-c76a589e2359 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Flavor limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2131.565013] env[63279]: DEBUG nova.virt.hardware [None req-88e1e34d-1334-4ce4-a237-c76a589e2359 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Image limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2131.565214] env[63279]: DEBUG nova.virt.hardware [None req-88e1e34d-1334-4ce4-a237-c76a589e2359 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Flavor pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2131.565379] env[63279]: DEBUG 
nova.virt.hardware [None req-88e1e34d-1334-4ce4-a237-c76a589e2359 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Image pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2131.565569] env[63279]: DEBUG nova.virt.hardware [None req-88e1e34d-1334-4ce4-a237-c76a589e2359 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2131.565812] env[63279]: DEBUG nova.virt.hardware [None req-88e1e34d-1334-4ce4-a237-c76a589e2359 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 2131.566014] env[63279]: DEBUG nova.virt.hardware [None req-88e1e34d-1334-4ce4-a237-c76a589e2359 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 2131.566240] env[63279]: DEBUG nova.virt.hardware [None req-88e1e34d-1334-4ce4-a237-c76a589e2359 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Got 1 possible topologies {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2131.566445] env[63279]: DEBUG nova.virt.hardware [None req-88e1e34d-1334-4ce4-a237-c76a589e2359 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2131.566666] env[63279]: DEBUG nova.virt.hardware [None req-88e1e34d-1334-4ce4-a237-c76a589e2359 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2131.567606] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e3dd1f7-23d1-4363-bb2a-cd87e5da8033 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2131.576884] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b91353d-53b8-4388-9126-f86949134095 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2131.625554] env[63279]: DEBUG oslo_vmware.api [None req-703b4c29-4773-4b34-915e-6bee4bb47aad tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Task: {'id': task-2087587, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2131.664066] env[63279]: DEBUG nova.network.neutron [-] [instance: 6699de0a-b3f8-4d84-9c9b-d0f6899a606e] Updating instance_info_cache with network_info: [] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2131.695504] env[63279]: DEBUG nova.network.neutron [-] [instance: 4a9088e0-2992-4b18-8be9-6bc70633369b] Updating instance_info_cache with network_info: [] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2131.884021] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] [instance: ba2d6111-d93d-4216-b641-864b542ea253] Instance has had 0 of 5 cleanup attempts {{(pid=63279) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11760}} [ 2131.969167] env[63279]: DEBUG nova.network.neutron [None req-88e1e34d-1334-4ce4-a237-c76a589e2359 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: d6e40dbc-f20e-4164-b460-18de6ea72906] Successfully updated port: 374ef31e-8b37-4d61-8e90-7ac4e4279a08 {{(pid=63279) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2131.997886] env[63279]: DEBUG oslo_concurrency.lockutils [None req-4110a38f-8529-4f8e-8573-c35840515d98 tempest-ServersTestBootFromVolume-1861685268 tempest-ServersTestBootFromVolume-1861685268-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.482s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2131.998298] env[63279]: DEBUG nova.compute.manager [None req-4110a38f-8529-4f8e-8573-c35840515d98 tempest-ServersTestBootFromVolume-1861685268 tempest-ServersTestBootFromVolume-1861685268-project-member] [instance: 861e4118-6134-40cf-91cb-865b6ee9f347] Start building networks asynchronously for instance. {{(pid=63279) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 2132.001039] env[63279]: DEBUG oslo_concurrency.lockutils [None req-9c2043fa-c480-4907-bbe4-d20e0c0c8ddf tempest-ServerShowV257Test-1874161788 tempest-ServerShowV257Test-1874161788-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 21.025s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2132.001226] env[63279]: DEBUG nova.objects.instance [None req-9c2043fa-c480-4907-bbe4-d20e0c0c8ddf tempest-ServerShowV257Test-1874161788 tempest-ServerShowV257Test-1874161788-project-member] [instance: f7f88f1a-a81a-4208-88d7-6a264e642ab1] Trying to apply a migration context that does not seem to be set for this instance {{(pid=63279) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 2132.127872] env[63279]: DEBUG oslo_vmware.api [None req-703b4c29-4773-4b34-915e-6bee4bb47aad tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Task: {'id': task-2087587, 'name': PowerOnVM_Task, 'duration_secs': 1.019512} completed successfully. 
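The PowerOnVM_Task entries above (progress polling, then "completed successfully" with a duration) follow the standard oslo.vmware pattern: invoke the SOAP method through the session, then block on the returned task. A minimal sketch, assuming a reachable vCenter; the endpoint, credentials and the 'vm-123' managed object ID are placeholders, not values taken from this log:

```python
from oslo_vmware import api, vim_util

# Placeholder endpoint and credentials; the session logs in on construction.
session = api.VMwareAPISession('vcenter.example.org', 'user', 'secret',
                               api_retry_count=10, task_poll_interval=0.5)

vm_ref = vim_util.get_moref('vm-123', 'VirtualMachine')          # placeholder moref
task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
task_info = session.wait_for_task(task)   # polls until success/error, as in the log
print(task_info.state)
```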
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2132.128172] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-703b4c29-4773-4b34-915e-6bee4bb47aad tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] [instance: a6758131-030e-4b33-a2c9-8864055a5bec] Powered on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2132.128435] env[63279]: INFO nova.compute.manager [None req-703b4c29-4773-4b34-915e-6bee4bb47aad tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] [instance: a6758131-030e-4b33-a2c9-8864055a5bec] Took 9.46 seconds to spawn the instance on the hypervisor. [ 2132.128987] env[63279]: DEBUG nova.compute.manager [None req-703b4c29-4773-4b34-915e-6bee4bb47aad tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] [instance: a6758131-030e-4b33-a2c9-8864055a5bec] Checking state {{(pid=63279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2132.129467] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2322bb63-3206-40dd-94b2-488ee5cc741b {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2132.167456] env[63279]: INFO nova.compute.manager [-] [instance: 6699de0a-b3f8-4d84-9c9b-d0f6899a606e] Took 1.02 seconds to deallocate network for instance. [ 2132.197622] env[63279]: INFO nova.compute.manager [-] [instance: 4a9088e0-2992-4b18-8be9-6bc70633369b] Took 1.32 seconds to deallocate network for instance. [ 2132.387921] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] [instance: 3f888d81-e73f-4486-bb64-849c873449bf] Instance has had 0 of 5 cleanup attempts {{(pid=63279) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11760}} [ 2132.472637] env[63279]: DEBUG oslo_concurrency.lockutils [None req-88e1e34d-1334-4ce4-a237-c76a589e2359 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Acquiring lock "refresh_cache-d6e40dbc-f20e-4164-b460-18de6ea72906" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2132.472859] env[63279]: DEBUG oslo_concurrency.lockutils [None req-88e1e34d-1334-4ce4-a237-c76a589e2359 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Acquired lock "refresh_cache-d6e40dbc-f20e-4164-b460-18de6ea72906" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2132.472968] env[63279]: DEBUG nova.network.neutron [None req-88e1e34d-1334-4ce4-a237-c76a589e2359 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: d6e40dbc-f20e-4164-b460-18de6ea72906] Building network info cache for instance {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2132.509131] env[63279]: DEBUG nova.compute.utils [None req-4110a38f-8529-4f8e-8573-c35840515d98 tempest-ServersTestBootFromVolume-1861685268 tempest-ServersTestBootFromVolume-1861685268-project-member] Using /dev/sd instead of None {{(pid=63279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2132.510616] env[63279]: DEBUG nova.compute.manager [None 
req-4110a38f-8529-4f8e-8573-c35840515d98 tempest-ServersTestBootFromVolume-1861685268 tempest-ServersTestBootFromVolume-1861685268-project-member] [instance: 861e4118-6134-40cf-91cb-865b6ee9f347] Allocating IP information in the background. {{(pid=63279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 2132.510827] env[63279]: DEBUG nova.network.neutron [None req-4110a38f-8529-4f8e-8573-c35840515d98 tempest-ServersTestBootFromVolume-1861685268 tempest-ServersTestBootFromVolume-1861685268-project-member] [instance: 861e4118-6134-40cf-91cb-865b6ee9f347] allocate_for_instance() {{(pid=63279) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2132.552807] env[63279]: DEBUG nova.policy [None req-4110a38f-8529-4f8e-8573-c35840515d98 tempest-ServersTestBootFromVolume-1861685268 tempest-ServersTestBootFromVolume-1861685268-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6a33208dae664ba0baeb2ec7c6206add', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9fd54445958a43f9a20e86e0fedc4735', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63279) authorize /opt/stack/nova/nova/policy.py:192}} [ 2132.647064] env[63279]: INFO nova.compute.manager [None req-703b4c29-4773-4b34-915e-6bee4bb47aad tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] [instance: a6758131-030e-4b33-a2c9-8864055a5bec] Took 49.19 seconds to build instance. [ 2132.677033] env[63279]: DEBUG oslo_concurrency.lockutils [None req-308f424e-23d3-4823-9772-097e3a23627d tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2132.704331] env[63279]: DEBUG oslo_concurrency.lockutils [None req-403f6708-872b-40c5-9d44-2368cd15b0f1 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2132.860636] env[63279]: DEBUG nova.network.neutron [None req-4110a38f-8529-4f8e-8573-c35840515d98 tempest-ServersTestBootFromVolume-1861685268 tempest-ServersTestBootFromVolume-1861685268-project-member] [instance: 861e4118-6134-40cf-91cb-865b6ee9f347] Successfully created port: 4361494d-9864-49bc-a792-be9c831d7486 {{(pid=63279) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2132.891478] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] [instance: df410051-d551-4a90-81f7-5630f5521a10] Instance has had 0 of 5 cleanup attempts {{(pid=63279) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11760}} [ 2133.012320] env[63279]: DEBUG oslo_concurrency.lockutils [None req-9c2043fa-c480-4907-bbe4-d20e0c0c8ddf tempest-ServerShowV257Test-1874161788 tempest-ServerShowV257Test-1874161788-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.011s 
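The repeated 'Lock "compute_resources" acquired by ... :: waited Ns' and '"released" ... :: held Ns' lines come from oslo.concurrency, which logs wait and hold times around every named critical section. A sketch of the decorator pattern that produces them, assuming Nova's usual "nova-" prefix; the wrapped function here is illustrative, not Nova's resource tracker code:

```python
from oslo_concurrency import lockutils

# The DEBUG wait/held messages in this log are emitted by lockutils' wrapper
# around the decorated function.
synchronized = lockutils.synchronized_with_prefix('nova-')

@synchronized('compute_resources')
def update_usage(instance_uuid, vcpus, memory_mb):
    # Illustrative critical section: only one thread may touch the accounting.
    print('claim %s: %d vCPU, %d MiB' % (instance_uuid, vcpus, memory_mb))

update_usage('861e4118-6134-40cf-91cb-865b6ee9f347', 1, 192)
```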
{{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2133.013404] env[63279]: DEBUG oslo_concurrency.lockutils [None req-2641ecb3-368c-4954-9833-7470f0184c80 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 19.293s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2133.013649] env[63279]: DEBUG nova.objects.instance [None req-2641ecb3-368c-4954-9833-7470f0184c80 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Lazy-loading 'pci_requests' on Instance uuid 2f5e22f6-ba70-4848-965b-eb1553115323 {{(pid=63279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2133.015085] env[63279]: DEBUG nova.compute.manager [None req-4110a38f-8529-4f8e-8573-c35840515d98 tempest-ServersTestBootFromVolume-1861685268 tempest-ServersTestBootFromVolume-1861685268-project-member] [instance: 861e4118-6134-40cf-91cb-865b6ee9f347] Start building block device mappings for instance. {{(pid=63279) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 2133.020831] env[63279]: INFO nova.compute.manager [None req-f4048d48-5cdd-4c96-b4ef-85f231e5f9dc tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] [instance: a6758131-030e-4b33-a2c9-8864055a5bec] Rescuing [ 2133.021070] env[63279]: DEBUG oslo_concurrency.lockutils [None req-f4048d48-5cdd-4c96-b4ef-85f231e5f9dc tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Acquiring lock "refresh_cache-a6758131-030e-4b33-a2c9-8864055a5bec" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2133.021240] env[63279]: DEBUG oslo_concurrency.lockutils [None req-f4048d48-5cdd-4c96-b4ef-85f231e5f9dc tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Acquired lock "refresh_cache-a6758131-030e-4b33-a2c9-8864055a5bec" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2133.021407] env[63279]: DEBUG nova.network.neutron [None req-f4048d48-5cdd-4c96-b4ef-85f231e5f9dc tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] [instance: a6758131-030e-4b33-a2c9-8864055a5bec] Building network info cache for instance {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2133.036198] env[63279]: DEBUG nova.network.neutron [None req-88e1e34d-1334-4ce4-a237-c76a589e2359 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: d6e40dbc-f20e-4164-b460-18de6ea72906] Instance cache missing network info. 
{{(pid=63279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2133.141553] env[63279]: DEBUG nova.compute.manager [req-adf176f4-4c24-428f-bb1c-659ea4060aec req-9ef03be0-577e-4c52-8a06-0755593f12eb service nova] [instance: 4a9088e0-2992-4b18-8be9-6bc70633369b] Received event network-vif-deleted-63a3f09b-45d0-46eb-a197-186f1ec415d6 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2133.141762] env[63279]: DEBUG nova.compute.manager [req-adf176f4-4c24-428f-bb1c-659ea4060aec req-9ef03be0-577e-4c52-8a06-0755593f12eb service nova] [instance: d6e40dbc-f20e-4164-b460-18de6ea72906] Received event network-vif-plugged-374ef31e-8b37-4d61-8e90-7ac4e4279a08 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2133.141954] env[63279]: DEBUG oslo_concurrency.lockutils [req-adf176f4-4c24-428f-bb1c-659ea4060aec req-9ef03be0-577e-4c52-8a06-0755593f12eb service nova] Acquiring lock "d6e40dbc-f20e-4164-b460-18de6ea72906-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2133.142416] env[63279]: DEBUG oslo_concurrency.lockutils [req-adf176f4-4c24-428f-bb1c-659ea4060aec req-9ef03be0-577e-4c52-8a06-0755593f12eb service nova] Lock "d6e40dbc-f20e-4164-b460-18de6ea72906-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2133.142599] env[63279]: DEBUG oslo_concurrency.lockutils [req-adf176f4-4c24-428f-bb1c-659ea4060aec req-9ef03be0-577e-4c52-8a06-0755593f12eb service nova] Lock "d6e40dbc-f20e-4164-b460-18de6ea72906-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2133.142768] env[63279]: DEBUG nova.compute.manager [req-adf176f4-4c24-428f-bb1c-659ea4060aec req-9ef03be0-577e-4c52-8a06-0755593f12eb service nova] [instance: d6e40dbc-f20e-4164-b460-18de6ea72906] No waiting events found dispatching network-vif-plugged-374ef31e-8b37-4d61-8e90-7ac4e4279a08 {{(pid=63279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 2133.142933] env[63279]: WARNING nova.compute.manager [req-adf176f4-4c24-428f-bb1c-659ea4060aec req-9ef03be0-577e-4c52-8a06-0755593f12eb service nova] [instance: d6e40dbc-f20e-4164-b460-18de6ea72906] Received unexpected event network-vif-plugged-374ef31e-8b37-4d61-8e90-7ac4e4279a08 for instance with vm_state building and task_state spawning. [ 2133.143110] env[63279]: DEBUG nova.compute.manager [req-adf176f4-4c24-428f-bb1c-659ea4060aec req-9ef03be0-577e-4c52-8a06-0755593f12eb service nova] [instance: d6e40dbc-f20e-4164-b460-18de6ea72906] Received event network-changed-374ef31e-8b37-4d61-8e90-7ac4e4279a08 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2133.143266] env[63279]: DEBUG nova.compute.manager [req-adf176f4-4c24-428f-bb1c-659ea4060aec req-9ef03be0-577e-4c52-8a06-0755593f12eb service nova] [instance: d6e40dbc-f20e-4164-b460-18de6ea72906] Refreshing instance network info cache due to event network-changed-374ef31e-8b37-4d61-8e90-7ac4e4279a08. 
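The req-adf176f4 block above shows Neutron's network-vif-plugged/network-changed notifications arriving as external instance events while the instance is still spawning; because no waiter was registered for that port yet, the event is logged as unexpected. A deliberately simplified illustration of that register-then-dispatch mechanism (not Nova's actual InstanceEvents implementation):

```python
import threading

class InstanceEventWaiters:
    """Toy version of the per-instance event registry the log refers to."""

    def __init__(self):
        self._waiters = {}                      # (instance_uuid, event) -> Event

    def prepare(self, instance_uuid, event):
        waiter = threading.Event()
        self._waiters[(instance_uuid, event)] = waiter
        return waiter

    def pop_event(self, instance_uuid, event):
        waiter = self._waiters.pop((instance_uuid, event), None)
        if waiter is None:
            print('WARNING: received unexpected event %s' % event)   # the log's case
        else:
            waiter.set()

events = InstanceEventWaiters()
uuid = 'd6e40dbc-f20e-4164-b460-18de6ea72906'
vif_plugged = 'network-vif-plugged-374ef31e-8b37-4d61-8e90-7ac4e4279a08'
events.pop_event(uuid, vif_plugged)             # nothing registered -> "unexpected"
waiter = events.prepare(uuid, vif_plugged)      # spawn path registers, then waits
events.pop_event(uuid, vif_plugged)             # external event signals the waiter
print('vif plugged:', waiter.wait(timeout=1))   # True
```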
{{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11655}} [ 2133.143438] env[63279]: DEBUG oslo_concurrency.lockutils [req-adf176f4-4c24-428f-bb1c-659ea4060aec req-9ef03be0-577e-4c52-8a06-0755593f12eb service nova] Acquiring lock "refresh_cache-d6e40dbc-f20e-4164-b460-18de6ea72906" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2133.147537] env[63279]: DEBUG oslo_concurrency.lockutils [None req-703b4c29-4773-4b34-915e-6bee4bb47aad tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Lock "a6758131-030e-4b33-a2c9-8864055a5bec" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 50.700s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2133.309131] env[63279]: DEBUG nova.network.neutron [None req-88e1e34d-1334-4ce4-a237-c76a589e2359 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: d6e40dbc-f20e-4164-b460-18de6ea72906] Updating instance_info_cache with network_info: [{"id": "374ef31e-8b37-4d61-8e90-7ac4e4279a08", "address": "fa:16:3e:0b:c3:96", "network": {"id": "e2728625-1c28-407c-946b-97923d57b409", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1735124510-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a5a719a21fe248c49d0d0151d218866b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a9abd00f-2cea-40f8-9804-a56b6431192d", "external-id": "nsx-vlan-transportzone-639", "segmentation_id": 639, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap374ef31e-8b", "ovs_interfaceid": "374ef31e-8b37-4d61-8e90-7ac4e4279a08", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2133.396755] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] [instance: 044335c7-ce3b-4b4a-b1dc-8b9acec538b4] Instance has had 0 of 5 cleanup attempts {{(pid=63279) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11760}} [ 2133.519765] env[63279]: DEBUG nova.objects.instance [None req-2641ecb3-368c-4954-9833-7470f0184c80 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Lazy-loading 'numa_topology' on Instance uuid 2f5e22f6-ba70-4848-965b-eb1553115323 {{(pid=63279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2133.522224] env[63279]: INFO nova.virt.block_device [None req-4110a38f-8529-4f8e-8573-c35840515d98 tempest-ServersTestBootFromVolume-1861685268 tempest-ServersTestBootFromVolume-1861685268-project-member] [instance: 861e4118-6134-40cf-91cb-865b6ee9f347] Booting with volume 62b49f21-7323-46c2-809d-db035fa9fb8c at /dev/sda [ 2133.557113] env[63279]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with 
opID=oslo.vmware-3a47352f-af91-40bc-acb0-18168686a1f8 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2133.566222] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-953e8b69-a04c-4a69-abf3-c73e2958f2bb {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2133.606469] env[63279]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c8a6d04e-9e02-45cf-8444-f871c88a0796 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2133.615686] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-621291c1-9bf4-4738-b646-c43cf55d4166 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2133.650222] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-167fe99f-e817-46eb-b811-f5dd8cd920af {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2133.657055] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-044826f0-cac5-45bc-96c0-0fbbfae49d81 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2133.675123] env[63279]: DEBUG nova.virt.block_device [None req-4110a38f-8529-4f8e-8573-c35840515d98 tempest-ServersTestBootFromVolume-1861685268 tempest-ServersTestBootFromVolume-1861685268-project-member] [instance: 861e4118-6134-40cf-91cb-865b6ee9f347] Updating existing volume attachment record: a8d67b59-523f-4969-8067-30c90b14a176 {{(pid=63279) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 2133.764361] env[63279]: DEBUG nova.network.neutron [None req-f4048d48-5cdd-4c96-b4ef-85f231e5f9dc tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] [instance: a6758131-030e-4b33-a2c9-8864055a5bec] Updating instance_info_cache with network_info: [{"id": "376a7f06-34a9-4c6e-934f-5470b0a04549", "address": "fa:16:3e:87:30:6b", "network": {"id": "c1d7406d-6852-47cd-a4a3-de7373d03ab4", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1990733857-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1fc9b60ae304455097b8be9a276796fa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0549820d-5649-40bc-ad6e-9ae27b384d90", "external-id": "nsx-vlan-transportzone-434", "segmentation_id": 434, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap376a7f06-34", "ovs_interfaceid": "376a7f06-34a9-4c6e-934f-5470b0a04549", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2133.813993] env[63279]: DEBUG 
oslo_concurrency.lockutils [None req-88e1e34d-1334-4ce4-a237-c76a589e2359 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Releasing lock "refresh_cache-d6e40dbc-f20e-4164-b460-18de6ea72906" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2133.814293] env[63279]: DEBUG nova.compute.manager [None req-88e1e34d-1334-4ce4-a237-c76a589e2359 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: d6e40dbc-f20e-4164-b460-18de6ea72906] Instance network_info: |[{"id": "374ef31e-8b37-4d61-8e90-7ac4e4279a08", "address": "fa:16:3e:0b:c3:96", "network": {"id": "e2728625-1c28-407c-946b-97923d57b409", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1735124510-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a5a719a21fe248c49d0d0151d218866b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a9abd00f-2cea-40f8-9804-a56b6431192d", "external-id": "nsx-vlan-transportzone-639", "segmentation_id": 639, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap374ef31e-8b", "ovs_interfaceid": "374ef31e-8b37-4d61-8e90-7ac4e4279a08", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 2133.814588] env[63279]: DEBUG oslo_concurrency.lockutils [req-adf176f4-4c24-428f-bb1c-659ea4060aec req-9ef03be0-577e-4c52-8a06-0755593f12eb service nova] Acquired lock "refresh_cache-d6e40dbc-f20e-4164-b460-18de6ea72906" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2133.814771] env[63279]: DEBUG nova.network.neutron [req-adf176f4-4c24-428f-bb1c-659ea4060aec req-9ef03be0-577e-4c52-8a06-0755593f12eb service nova] [instance: d6e40dbc-f20e-4164-b460-18de6ea72906] Refreshing network info cache for port 374ef31e-8b37-4d61-8e90-7ac4e4279a08 {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2133.815883] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-88e1e34d-1334-4ce4-a237-c76a589e2359 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: d6e40dbc-f20e-4164-b460-18de6ea72906] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:0b:c3:96', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a9abd00f-2cea-40f8-9804-a56b6431192d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '374ef31e-8b37-4d61-8e90-7ac4e4279a08', 'vif_model': 'vmxnet3'}] {{(pid=63279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2133.823770] env[63279]: DEBUG oslo.service.loopingcall [None req-88e1e34d-1334-4ce4-a237-c76a589e2359 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2133.826724] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d6e40dbc-f20e-4164-b460-18de6ea72906] Creating VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2133.827185] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5c266724-18d2-4e73-b3df-c21a2fedc255 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2133.847259] env[63279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2133.847259] env[63279]: value = "task-2087589" [ 2133.847259] env[63279]: _type = "Task" [ 2133.847259] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2133.855152] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087589, 'name': CreateVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2133.898114] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] [instance: c287072d-0ce9-4075-8895-0f64326ac303] Instance has had 0 of 5 cleanup attempts {{(pid=63279) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11760}} [ 2134.027904] env[63279]: INFO nova.compute.claims [None req-2641ecb3-368c-4954-9833-7470f0184c80 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] [instance: 2f5e22f6-ba70-4848-965b-eb1553115323] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2134.124598] env[63279]: DEBUG nova.network.neutron [req-adf176f4-4c24-428f-bb1c-659ea4060aec req-9ef03be0-577e-4c52-8a06-0755593f12eb service nova] [instance: d6e40dbc-f20e-4164-b460-18de6ea72906] Updated VIF entry in instance network info cache for port 374ef31e-8b37-4d61-8e90-7ac4e4279a08. 
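The instance_info_cache entries above are lists of VIF dicts (port ID, MAC, nested network/subnets/ips). A small sketch showing where the fixed IP sits inside one of those entries, using a trimmed copy of the d6e40dbc VIF from this log:

```python
# Trimmed-down VIF entry with the fields of interest, as cached for the instance.
vif = {
    "id": "374ef31e-8b37-4d61-8e90-7ac4e4279a08",
    "address": "fa:16:3e:0b:c3:96",
    "type": "ovs",
    "devname": "tap374ef31e-8b",
    "network": {
        "id": "e2728625-1c28-407c-946b-97923d57b409",
        "bridge": "br-int",
        "subnets": [{
            "cidr": "192.168.128.0/28",
            "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4},
            "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4}],
        }],
    },
}

def fixed_ips(vif):
    """Return every fixed IP address carried by a cached VIF entry."""
    return [ip["address"]
            for subnet in vif["network"]["subnets"]
            for ip in subnet["ips"]
            if ip["type"] == "fixed"]

print(vif["address"], fixed_ips(vif))   # fa:16:3e:0b:c3:96 ['192.168.128.5']
```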
{{(pid=63279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2134.125114] env[63279]: DEBUG nova.network.neutron [req-adf176f4-4c24-428f-bb1c-659ea4060aec req-9ef03be0-577e-4c52-8a06-0755593f12eb service nova] [instance: d6e40dbc-f20e-4164-b460-18de6ea72906] Updating instance_info_cache with network_info: [{"id": "374ef31e-8b37-4d61-8e90-7ac4e4279a08", "address": "fa:16:3e:0b:c3:96", "network": {"id": "e2728625-1c28-407c-946b-97923d57b409", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1735124510-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a5a719a21fe248c49d0d0151d218866b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a9abd00f-2cea-40f8-9804-a56b6431192d", "external-id": "nsx-vlan-transportzone-639", "segmentation_id": 639, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap374ef31e-8b", "ovs_interfaceid": "374ef31e-8b37-4d61-8e90-7ac4e4279a08", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2134.267260] env[63279]: DEBUG oslo_concurrency.lockutils [None req-f4048d48-5cdd-4c96-b4ef-85f231e5f9dc tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Releasing lock "refresh_cache-a6758131-030e-4b33-a2c9-8864055a5bec" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2134.359359] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087589, 'name': CreateVM_Task} progress is 99%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2134.401512] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] [instance: cf1b70af-335d-404b-bb4f-fe082dd6f450] Instance has had 0 of 5 cleanup attempts {{(pid=63279) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11760}} [ 2134.620888] env[63279]: DEBUG nova.network.neutron [None req-4110a38f-8529-4f8e-8573-c35840515d98 tempest-ServersTestBootFromVolume-1861685268 tempest-ServersTestBootFromVolume-1861685268-project-member] [instance: 861e4118-6134-40cf-91cb-865b6ee9f347] Successfully updated port: 4361494d-9864-49bc-a792-be9c831d7486 {{(pid=63279) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2134.628871] env[63279]: DEBUG oslo_concurrency.lockutils [req-adf176f4-4c24-428f-bb1c-659ea4060aec req-9ef03be0-577e-4c52-8a06-0755593f12eb service nova] Releasing lock "refresh_cache-d6e40dbc-f20e-4164-b460-18de6ea72906" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2134.857281] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087589, 'name': CreateVM_Task, 'duration_secs': 0.551027} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2134.857482] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d6e40dbc-f20e-4164-b460-18de6ea72906] Created VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2134.858204] env[63279]: DEBUG oslo_concurrency.lockutils [None req-88e1e34d-1334-4ce4-a237-c76a589e2359 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2134.858369] env[63279]: DEBUG oslo_concurrency.lockutils [None req-88e1e34d-1334-4ce4-a237-c76a589e2359 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2134.858697] env[63279]: DEBUG oslo_concurrency.lockutils [None req-88e1e34d-1334-4ce4-a237-c76a589e2359 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2134.858950] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-122acc4e-3aad-467d-b5d8-bef18c711d2f {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2134.864359] env[63279]: DEBUG oslo_vmware.api [None req-88e1e34d-1334-4ce4-a237-c76a589e2359 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Waiting for the task: (returnval){ [ 2134.864359] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]522beb88-a21c-31cc-f32a-a1c8cc67d529" [ 2134.864359] env[63279]: _type = "Task" [ 2134.864359] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2134.873176] env[63279]: DEBUG oslo_vmware.api [None req-88e1e34d-1334-4ce4-a237-c76a589e2359 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]522beb88-a21c-31cc-f32a-a1c8cc67d529, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2134.906341] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] [instance: e04f06de-da6a-4562-a50a-ff16bf3a006e] Instance has had 0 of 5 cleanup attempts {{(pid=63279) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11760}} [ 2135.122713] env[63279]: DEBUG oslo_concurrency.lockutils [None req-4110a38f-8529-4f8e-8573-c35840515d98 tempest-ServersTestBootFromVolume-1861685268 tempest-ServersTestBootFromVolume-1861685268-project-member] Acquiring lock "refresh_cache-861e4118-6134-40cf-91cb-865b6ee9f347" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2135.122911] env[63279]: DEBUG oslo_concurrency.lockutils [None req-4110a38f-8529-4f8e-8573-c35840515d98 tempest-ServersTestBootFromVolume-1861685268 tempest-ServersTestBootFromVolume-1861685268-project-member] Acquired lock "refresh_cache-861e4118-6134-40cf-91cb-865b6ee9f347" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2135.123048] env[63279]: DEBUG nova.network.neutron [None req-4110a38f-8529-4f8e-8573-c35840515d98 tempest-ServersTestBootFromVolume-1861685268 tempest-ServersTestBootFromVolume-1861685268-project-member] [instance: 861e4118-6134-40cf-91cb-865b6ee9f347] Building network info cache for instance {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2135.170137] env[63279]: DEBUG nova.compute.manager [req-0b9c76c7-771f-4c9c-9745-2b05faf0fcc1 req-da12bc63-22dd-460f-8ca5-dc0f2a735173 service nova] [instance: 861e4118-6134-40cf-91cb-865b6ee9f347] Received event network-vif-plugged-4361494d-9864-49bc-a792-be9c831d7486 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2135.170897] env[63279]: DEBUG oslo_concurrency.lockutils [req-0b9c76c7-771f-4c9c-9745-2b05faf0fcc1 req-da12bc63-22dd-460f-8ca5-dc0f2a735173 service nova] Acquiring lock "861e4118-6134-40cf-91cb-865b6ee9f347-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2135.171152] env[63279]: DEBUG oslo_concurrency.lockutils [req-0b9c76c7-771f-4c9c-9745-2b05faf0fcc1 req-da12bc63-22dd-460f-8ca5-dc0f2a735173 service nova] Lock "861e4118-6134-40cf-91cb-865b6ee9f347-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2135.171338] env[63279]: DEBUG oslo_concurrency.lockutils [req-0b9c76c7-771f-4c9c-9745-2b05faf0fcc1 req-da12bc63-22dd-460f-8ca5-dc0f2a735173 service nova] Lock "861e4118-6134-40cf-91cb-865b6ee9f347-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2135.171518] env[63279]: DEBUG nova.compute.manager [req-0b9c76c7-771f-4c9c-9745-2b05faf0fcc1 req-da12bc63-22dd-460f-8ca5-dc0f2a735173 service nova] [instance: 861e4118-6134-40cf-91cb-865b6ee9f347] No waiting events found dispatching network-vif-plugged-4361494d-9864-49bc-a792-be9c831d7486 {{(pid=63279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 2135.171719] env[63279]: WARNING nova.compute.manager 
[req-0b9c76c7-771f-4c9c-9745-2b05faf0fcc1 req-da12bc63-22dd-460f-8ca5-dc0f2a735173 service nova] [instance: 861e4118-6134-40cf-91cb-865b6ee9f347] Received unexpected event network-vif-plugged-4361494d-9864-49bc-a792-be9c831d7486 for instance with vm_state building and task_state block_device_mapping. [ 2135.171887] env[63279]: DEBUG nova.compute.manager [req-0b9c76c7-771f-4c9c-9745-2b05faf0fcc1 req-da12bc63-22dd-460f-8ca5-dc0f2a735173 service nova] [instance: 861e4118-6134-40cf-91cb-865b6ee9f347] Received event network-changed-4361494d-9864-49bc-a792-be9c831d7486 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2135.172436] env[63279]: DEBUG nova.compute.manager [req-0b9c76c7-771f-4c9c-9745-2b05faf0fcc1 req-da12bc63-22dd-460f-8ca5-dc0f2a735173 service nova] [instance: 861e4118-6134-40cf-91cb-865b6ee9f347] Refreshing instance network info cache due to event network-changed-4361494d-9864-49bc-a792-be9c831d7486. {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11655}} [ 2135.172436] env[63279]: DEBUG oslo_concurrency.lockutils [req-0b9c76c7-771f-4c9c-9745-2b05faf0fcc1 req-da12bc63-22dd-460f-8ca5-dc0f2a735173 service nova] Acquiring lock "refresh_cache-861e4118-6134-40cf-91cb-865b6ee9f347" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2135.380270] env[63279]: DEBUG oslo_vmware.api [None req-88e1e34d-1334-4ce4-a237-c76a589e2359 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]522beb88-a21c-31cc-f32a-a1c8cc67d529, 'name': SearchDatastore_Task, 'duration_secs': 0.009864} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2135.380605] env[63279]: DEBUG oslo_concurrency.lockutils [None req-88e1e34d-1334-4ce4-a237-c76a589e2359 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2135.380851] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-88e1e34d-1334-4ce4-a237-c76a589e2359 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: d6e40dbc-f20e-4164-b460-18de6ea72906] Processing image 30887889-e45b-4f67-8b3c-16216e594a90 {{(pid=63279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2135.381106] env[63279]: DEBUG oslo_concurrency.lockutils [None req-88e1e34d-1334-4ce4-a237-c76a589e2359 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2135.381263] env[63279]: DEBUG oslo_concurrency.lockutils [None req-88e1e34d-1334-4ce4-a237-c76a589e2359 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2135.381594] 
env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-88e1e34d-1334-4ce4-a237-c76a589e2359 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2135.381748] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-cbd1b134-a91d-4d20-9d5d-0b85e96a2b82 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2135.390421] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-88e1e34d-1334-4ce4-a237-c76a589e2359 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2135.390640] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-88e1e34d-1334-4ce4-a237-c76a589e2359 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2135.391573] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4e729ee3-55a6-4ffe-bfb0-f135ce79d12b {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2135.395065] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f8592ea-0024-4200-9778-4138d3e89cb4 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2135.401134] env[63279]: DEBUG oslo_vmware.api [None req-88e1e34d-1334-4ce4-a237-c76a589e2359 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Waiting for the task: (returnval){ [ 2135.401134] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]5249002e-d2d1-b486-6ba4-a021ee477fbb" [ 2135.401134] env[63279]: _type = "Task" [ 2135.401134] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2135.407034] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7601c9a-ceca-4eda-836d-4c615fed1204 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2135.410324] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] [instance: 81103d53-99fe-4d1a-816f-7685c59c80ee] Instance has had 0 of 5 cleanup attempts {{(pid=63279) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11760}} [ 2135.418180] env[63279]: DEBUG oslo_vmware.api [None req-88e1e34d-1334-4ce4-a237-c76a589e2359 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]5249002e-d2d1-b486-6ba4-a021ee477fbb, 'name': SearchDatastore_Task, 'duration_secs': 0.010044} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2135.443909] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fb6b9fa4-9746-4596-8fe4-fc0df89813d1 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2135.447044] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e0d0fbf-4798-4461-b86a-4ed46042c579 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2135.453645] env[63279]: DEBUG oslo_vmware.api [None req-88e1e34d-1334-4ce4-a237-c76a589e2359 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Waiting for the task: (returnval){ [ 2135.453645] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52f5d9b1-0258-83c7-b76a-6db93af1cf1f" [ 2135.453645] env[63279]: _type = "Task" [ 2135.453645] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2135.459434] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08d6c56b-b9b8-4c85-bc9f-492f3476dc08 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2135.470482] env[63279]: DEBUG oslo_vmware.api [None req-88e1e34d-1334-4ce4-a237-c76a589e2359 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52f5d9b1-0258-83c7-b76a-6db93af1cf1f, 'name': SearchDatastore_Task, 'duration_secs': 0.008914} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2135.480182] env[63279]: DEBUG oslo_concurrency.lockutils [None req-88e1e34d-1334-4ce4-a237-c76a589e2359 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2135.480480] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-88e1e34d-1334-4ce4-a237-c76a589e2359 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] d6e40dbc-f20e-4164-b460-18de6ea72906/d6e40dbc-f20e-4164-b460-18de6ea72906.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2135.481263] env[63279]: DEBUG nova.compute.provider_tree [None req-2641ecb3-368c-4954-9833-7470f0184c80 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Inventory has not changed in ProviderTree for provider: 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2135.482484] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c60f53d2-4ba2-43aa-bc7e-73d7e40dce15 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2135.490164] env[63279]: DEBUG oslo_vmware.api [None req-88e1e34d-1334-4ce4-a237-c76a589e2359 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Waiting for the task: (returnval){ [ 2135.490164] env[63279]: value = "task-2087590" [ 2135.490164] env[63279]: _type = "Task" [ 2135.490164] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2135.497976] env[63279]: DEBUG oslo_vmware.api [None req-88e1e34d-1334-4ce4-a237-c76a589e2359 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Task: {'id': task-2087590, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2135.658012] env[63279]: DEBUG nova.network.neutron [None req-4110a38f-8529-4f8e-8573-c35840515d98 tempest-ServersTestBootFromVolume-1861685268 tempest-ServersTestBootFromVolume-1861685268-project-member] [instance: 861e4118-6134-40cf-91cb-865b6ee9f347] Instance cache missing network info. {{(pid=63279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2135.768086] env[63279]: DEBUG nova.compute.manager [None req-4110a38f-8529-4f8e-8573-c35840515d98 tempest-ServersTestBootFromVolume-1861685268 tempest-ServersTestBootFromVolume-1861685268-project-member] [instance: 861e4118-6134-40cf-91cb-865b6ee9f347] Start spawning the instance on the hypervisor. 
{{(pid=63279) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 2135.769029] env[63279]: DEBUG nova.virt.hardware [None req-4110a38f-8529-4f8e-8573-c35840515d98 tempest-ServersTestBootFromVolume-1861685268 tempest-ServersTestBootFromVolume-1861685268-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-13T17:30:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=0,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=1073741824,status='active',tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2135.769029] env[63279]: DEBUG nova.virt.hardware [None req-4110a38f-8529-4f8e-8573-c35840515d98 tempest-ServersTestBootFromVolume-1861685268 tempest-ServersTestBootFromVolume-1861685268-project-member] Flavor limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2135.769325] env[63279]: DEBUG nova.virt.hardware [None req-4110a38f-8529-4f8e-8573-c35840515d98 tempest-ServersTestBootFromVolume-1861685268 tempest-ServersTestBootFromVolume-1861685268-project-member] Image limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2135.769402] env[63279]: DEBUG nova.virt.hardware [None req-4110a38f-8529-4f8e-8573-c35840515d98 tempest-ServersTestBootFromVolume-1861685268 tempest-ServersTestBootFromVolume-1861685268-project-member] Flavor pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2135.769573] env[63279]: DEBUG nova.virt.hardware [None req-4110a38f-8529-4f8e-8573-c35840515d98 tempest-ServersTestBootFromVolume-1861685268 tempest-ServersTestBootFromVolume-1861685268-project-member] Image pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2135.769733] env[63279]: DEBUG nova.virt.hardware [None req-4110a38f-8529-4f8e-8573-c35840515d98 tempest-ServersTestBootFromVolume-1861685268 tempest-ServersTestBootFromVolume-1861685268-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2135.770074] env[63279]: DEBUG nova.virt.hardware [None req-4110a38f-8529-4f8e-8573-c35840515d98 tempest-ServersTestBootFromVolume-1861685268 tempest-ServersTestBootFromVolume-1861685268-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 2135.770171] env[63279]: DEBUG nova.virt.hardware [None req-4110a38f-8529-4f8e-8573-c35840515d98 tempest-ServersTestBootFromVolume-1861685268 tempest-ServersTestBootFromVolume-1861685268-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 2135.770395] env[63279]: DEBUG nova.virt.hardware [None req-4110a38f-8529-4f8e-8573-c35840515d98 tempest-ServersTestBootFromVolume-1861685268 
tempest-ServersTestBootFromVolume-1861685268-project-member] Got 1 possible topologies {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2135.770564] env[63279]: DEBUG nova.virt.hardware [None req-4110a38f-8529-4f8e-8573-c35840515d98 tempest-ServersTestBootFromVolume-1861685268 tempest-ServersTestBootFromVolume-1861685268-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2135.770725] env[63279]: DEBUG nova.virt.hardware [None req-4110a38f-8529-4f8e-8573-c35840515d98 tempest-ServersTestBootFromVolume-1861685268 tempest-ServersTestBootFromVolume-1861685268-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2135.771742] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13e96c21-8cd2-47fa-a993-aa478d5823a0 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2135.783558] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-994dc105-4b9c-404f-bc0f-e622153cf09e {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2135.806483] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-f4048d48-5cdd-4c96-b4ef-85f231e5f9dc tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] [instance: a6758131-030e-4b33-a2c9-8864055a5bec] Powering off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2135.806849] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ce80018e-fcba-453e-ba3a-591162b69a7e {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2135.814198] env[63279]: DEBUG oslo_vmware.api [None req-f4048d48-5cdd-4c96-b4ef-85f231e5f9dc tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Waiting for the task: (returnval){ [ 2135.814198] env[63279]: value = "task-2087591" [ 2135.814198] env[63279]: _type = "Task" [ 2135.814198] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2135.822749] env[63279]: DEBUG oslo_vmware.api [None req-f4048d48-5cdd-4c96-b4ef-85f231e5f9dc tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Task: {'id': task-2087591, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2135.832959] env[63279]: DEBUG nova.network.neutron [None req-4110a38f-8529-4f8e-8573-c35840515d98 tempest-ServersTestBootFromVolume-1861685268 tempest-ServersTestBootFromVolume-1861685268-project-member] [instance: 861e4118-6134-40cf-91cb-865b6ee9f347] Updating instance_info_cache with network_info: [{"id": "4361494d-9864-49bc-a792-be9c831d7486", "address": "fa:16:3e:cb:0d:58", "network": {"id": "5cd88f94-203f-4bdb-9876-889955e0a894", "bridge": "br-int", "label": "tempest-ServersTestBootFromVolume-1136286558-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9fd54445958a43f9a20e86e0fedc4735", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ac563aa7-6d7c-4bd5-9241-7b3e11b8f22d", "external-id": "nsx-vlan-transportzone-730", "segmentation_id": 730, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4361494d-98", "ovs_interfaceid": "4361494d-9864-49bc-a792-be9c831d7486", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2135.914469] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] [instance: de543869-8ab1-40ed-8f6d-dc506c257843] Instance has had 0 of 5 cleanup attempts {{(pid=63279) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11760}} [ 2135.985649] env[63279]: DEBUG nova.scheduler.client.report [None req-2641ecb3-368c-4954-9833-7470f0184c80 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Inventory has not changed for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2136.000491] env[63279]: DEBUG oslo_vmware.api [None req-88e1e34d-1334-4ce4-a237-c76a589e2359 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Task: {'id': task-2087590, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.485617} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2136.000827] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-88e1e34d-1334-4ce4-a237-c76a589e2359 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] d6e40dbc-f20e-4164-b460-18de6ea72906/d6e40dbc-f20e-4164-b460-18de6ea72906.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2136.001060] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-88e1e34d-1334-4ce4-a237-c76a589e2359 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: d6e40dbc-f20e-4164-b460-18de6ea72906] Extending root virtual disk to 1048576 {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2136.001319] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d820fd27-b783-402c-8935-9737ec630955 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2136.009846] env[63279]: DEBUG oslo_vmware.api [None req-88e1e34d-1334-4ce4-a237-c76a589e2359 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Waiting for the task: (returnval){ [ 2136.009846] env[63279]: value = "task-2087592" [ 2136.009846] env[63279]: _type = "Task" [ 2136.009846] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2136.017916] env[63279]: DEBUG oslo_vmware.api [None req-88e1e34d-1334-4ce4-a237-c76a589e2359 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Task: {'id': task-2087592, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2136.324565] env[63279]: DEBUG oslo_vmware.api [None req-f4048d48-5cdd-4c96-b4ef-85f231e5f9dc tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Task: {'id': task-2087591, 'name': PowerOffVM_Task, 'duration_secs': 0.214652} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2136.324850] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-f4048d48-5cdd-4c96-b4ef-85f231e5f9dc tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] [instance: a6758131-030e-4b33-a2c9-8864055a5bec] Powered off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2136.325640] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4e4a798-cdbf-49cc-97fa-6c961875246f {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2136.344936] env[63279]: DEBUG oslo_concurrency.lockutils [None req-4110a38f-8529-4f8e-8573-c35840515d98 tempest-ServersTestBootFromVolume-1861685268 tempest-ServersTestBootFromVolume-1861685268-project-member] Releasing lock "refresh_cache-861e4118-6134-40cf-91cb-865b6ee9f347" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2136.345240] env[63279]: DEBUG nova.compute.manager [None req-4110a38f-8529-4f8e-8573-c35840515d98 tempest-ServersTestBootFromVolume-1861685268 tempest-ServersTestBootFromVolume-1861685268-project-member] [instance: 861e4118-6134-40cf-91cb-865b6ee9f347] Instance network_info: |[{"id": "4361494d-9864-49bc-a792-be9c831d7486", "address": "fa:16:3e:cb:0d:58", "network": {"id": "5cd88f94-203f-4bdb-9876-889955e0a894", "bridge": "br-int", "label": "tempest-ServersTestBootFromVolume-1136286558-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9fd54445958a43f9a20e86e0fedc4735", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ac563aa7-6d7c-4bd5-9241-7b3e11b8f22d", "external-id": "nsx-vlan-transportzone-730", "segmentation_id": 730, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4361494d-98", "ovs_interfaceid": "4361494d-9864-49bc-a792-be9c831d7486", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 2136.345643] env[63279]: DEBUG oslo_concurrency.lockutils [req-0b9c76c7-771f-4c9c-9745-2b05faf0fcc1 req-da12bc63-22dd-460f-8ca5-dc0f2a735173 service nova] Acquired lock "refresh_cache-861e4118-6134-40cf-91cb-865b6ee9f347" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2136.345829] env[63279]: DEBUG nova.network.neutron [req-0b9c76c7-771f-4c9c-9745-2b05faf0fcc1 req-da12bc63-22dd-460f-8ca5-dc0f2a735173 service nova] [instance: 861e4118-6134-40cf-91cb-865b6ee9f347] Refreshing network info cache for port 4361494d-9864-49bc-a792-be9c831d7486 {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2136.347057] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-4110a38f-8529-4f8e-8573-c35840515d98 tempest-ServersTestBootFromVolume-1861685268 
tempest-ServersTestBootFromVolume-1861685268-project-member] [instance: 861e4118-6134-40cf-91cb-865b6ee9f347] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:cb:0d:58', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ac563aa7-6d7c-4bd5-9241-7b3e11b8f22d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '4361494d-9864-49bc-a792-be9c831d7486', 'vif_model': 'vmxnet3'}] {{(pid=63279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2136.354818] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-4110a38f-8529-4f8e-8573-c35840515d98 tempest-ServersTestBootFromVolume-1861685268 tempest-ServersTestBootFromVolume-1861685268-project-member] Creating folder: Project (9fd54445958a43f9a20e86e0fedc4735). Parent ref: group-v427491. {{(pid=63279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 2136.355481] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba836add-c07a-400f-9e80-01f1e8bd8101 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2136.359115] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d59e5944-971c-475b-a073-2ecc698e3124 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2136.372922] env[63279]: WARNING suds.client [-] Web service reported a SOAP processing fault using an unexpected HTTP status code 200. Reporting as an internal server error. [ 2136.373096] env[63279]: DEBUG oslo_vmware.api [-] Fault list: [DuplicateName] {{(pid=63279) _invoke_api /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:337}} [ 2136.373749] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-4110a38f-8529-4f8e-8573-c35840515d98 tempest-ServersTestBootFromVolume-1861685268 tempest-ServersTestBootFromVolume-1861685268-project-member] Folder already exists: Project (9fd54445958a43f9a20e86e0fedc4735). Parent ref: group-v427491. {{(pid=63279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1609}} [ 2136.373749] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-4110a38f-8529-4f8e-8573-c35840515d98 tempest-ServersTestBootFromVolume-1861685268 tempest-ServersTestBootFromVolume-1861685268-project-member] Creating folder: Instances. Parent ref: group-v427678. {{(pid=63279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 2136.374012] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-aed2dee7-ad2a-4d3b-b5f0-e92d8fd535a2 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2136.384632] env[63279]: INFO nova.virt.vmwareapi.vm_util [None req-4110a38f-8529-4f8e-8573-c35840515d98 tempest-ServersTestBootFromVolume-1861685268 tempest-ServersTestBootFromVolume-1861685268-project-member] Created folder: Instances in parent group-v427678. [ 2136.384950] env[63279]: DEBUG oslo.service.loopingcall [None req-4110a38f-8529-4f8e-8573-c35840515d98 tempest-ServersTestBootFromVolume-1861685268 tempest-ServersTestBootFromVolume-1861685268-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2136.385068] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 861e4118-6134-40cf-91cb-865b6ee9f347] Creating VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2136.385274] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-cae6ded0-b829-438a-a2a2-9f2fa6290c01 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2136.402416] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-f4048d48-5cdd-4c96-b4ef-85f231e5f9dc tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] [instance: a6758131-030e-4b33-a2c9-8864055a5bec] Powering off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2136.403013] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-457ac355-2b99-480f-aed1-2117b5e63d29 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2136.409953] env[63279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2136.409953] env[63279]: value = "task-2087596" [ 2136.409953] env[63279]: _type = "Task" [ 2136.409953] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2136.411044] env[63279]: DEBUG oslo_vmware.api [None req-f4048d48-5cdd-4c96-b4ef-85f231e5f9dc tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Waiting for the task: (returnval){ [ 2136.411044] env[63279]: value = "task-2087595" [ 2136.411044] env[63279]: _type = "Task" [ 2136.411044] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2136.417590] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] [instance: 977fa519-2db3-4ee5-981d-c46820a8c72e] Instance has had 0 of 5 cleanup attempts {{(pid=63279) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11760}} [ 2136.425342] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087596, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2136.425914] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-f4048d48-5cdd-4c96-b4ef-85f231e5f9dc tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] [instance: a6758131-030e-4b33-a2c9-8864055a5bec] VM already powered off {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 2136.426134] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-f4048d48-5cdd-4c96-b4ef-85f231e5f9dc tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] [instance: a6758131-030e-4b33-a2c9-8864055a5bec] Processing image 30887889-e45b-4f67-8b3c-16216e594a90 {{(pid=63279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2136.426394] env[63279]: DEBUG oslo_concurrency.lockutils [None req-f4048d48-5cdd-4c96-b4ef-85f231e5f9dc tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2136.426578] env[63279]: DEBUG oslo_concurrency.lockutils [None req-f4048d48-5cdd-4c96-b4ef-85f231e5f9dc tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2136.426736] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-f4048d48-5cdd-4c96-b4ef-85f231e5f9dc tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2136.426997] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-eef20642-33e8-4e5f-bf31-a58a85f78f00 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2136.436633] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-f4048d48-5cdd-4c96-b4ef-85f231e5f9dc tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2136.436828] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-f4048d48-5cdd-4c96-b4ef-85f231e5f9dc tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2136.437626] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ccb19b53-ccc8-416e-8992-030432d614bc {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2136.444031] env[63279]: DEBUG oslo_vmware.api [None req-f4048d48-5cdd-4c96-b4ef-85f231e5f9dc tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Waiting for the task: (returnval){ [ 2136.444031] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]5228dfd0-6cb3-24bc-afa8-19b1ff0331e9" [ 2136.444031] env[63279]: _type = "Task" [ 2136.444031] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2136.452664] env[63279]: DEBUG oslo_vmware.api [None req-f4048d48-5cdd-4c96-b4ef-85f231e5f9dc tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]5228dfd0-6cb3-24bc-afa8-19b1ff0331e9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2136.491712] env[63279]: DEBUG oslo_concurrency.lockutils [None req-2641ecb3-368c-4954-9833-7470f0184c80 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.478s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2136.494055] env[63279]: DEBUG oslo_concurrency.lockutils [None req-cac71d6b-40f6-4627-b031-84d6a90fb5d5 tempest-ServerShowV257Test-1874161788 tempest-ServerShowV257Test-1874161788-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 20.288s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2136.494253] env[63279]: DEBUG oslo_concurrency.lockutils [None req-cac71d6b-40f6-4627-b031-84d6a90fb5d5 tempest-ServerShowV257Test-1874161788 tempest-ServerShowV257Test-1874161788-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2136.496032] env[63279]: DEBUG oslo_concurrency.lockutils [None req-04f7e4ed-d7ff-4e9a-9fc5-7e015957e936 tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 20.193s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2136.496230] env[63279]: DEBUG oslo_concurrency.lockutils [None req-04f7e4ed-d7ff-4e9a-9fc5-7e015957e936 tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2136.497836] env[63279]: DEBUG 
oslo_concurrency.lockutils [None req-87c40409-e3eb-4bc2-bdc5-ee97999f97b7 tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 20.118s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2136.498034] env[63279]: DEBUG oslo_concurrency.lockutils [None req-87c40409-e3eb-4bc2-bdc5-ee97999f97b7 tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2136.499625] env[63279]: DEBUG oslo_concurrency.lockutils [None req-bb47e242-d054-4951-a4e2-4a2b181effb2 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 19.564s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2136.522190] env[63279]: DEBUG oslo_vmware.api [None req-88e1e34d-1334-4ce4-a237-c76a589e2359 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Task: {'id': task-2087592, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.064025} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2136.523383] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-88e1e34d-1334-4ce4-a237-c76a589e2359 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: d6e40dbc-f20e-4164-b460-18de6ea72906] Extended root virtual disk {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2136.524205] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-edeb6a6b-f09a-43b0-9e5e-b7f42487b37a {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2136.529636] env[63279]: INFO nova.scheduler.client.report [None req-cac71d6b-40f6-4627-b031-84d6a90fb5d5 tempest-ServerShowV257Test-1874161788 tempest-ServerShowV257Test-1874161788-project-member] Deleted allocations for instance f7f88f1a-a81a-4208-88d7-6a264e642ab1 [ 2136.532861] env[63279]: INFO nova.scheduler.client.report [None req-04f7e4ed-d7ff-4e9a-9fc5-7e015957e936 tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] Deleted allocations for instance cd00cb0e-30e5-4a0c-8612-ea92e5e32edd [ 2136.560133] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-88e1e34d-1334-4ce4-a237-c76a589e2359 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: d6e40dbc-f20e-4164-b460-18de6ea72906] Reconfiguring VM instance instance-0000004d to attach disk [datastore1] d6e40dbc-f20e-4164-b460-18de6ea72906/d6e40dbc-f20e-4164-b460-18de6ea72906.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2136.561188] env[63279]: INFO nova.scheduler.client.report [None req-87c40409-e3eb-4bc2-bdc5-ee97999f97b7 
tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] Deleted allocations for instance fd9b1666-8e06-4ed0-9187-05a40e136a1d [ 2136.563065] env[63279]: INFO nova.network.neutron [None req-2641ecb3-368c-4954-9833-7470f0184c80 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] [instance: 2f5e22f6-ba70-4848-965b-eb1553115323] Updating port 8383ed07-21e7-43e0-82a2-0afc179ca995 with attributes {'binding:host_id': 'cpu-1', 'device_owner': 'compute:nova'} [ 2136.565264] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e86e51b3-e478-47f2-8eff-5ab1bb14b172 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2136.590565] env[63279]: DEBUG oslo_vmware.api [None req-88e1e34d-1334-4ce4-a237-c76a589e2359 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Waiting for the task: (returnval){ [ 2136.590565] env[63279]: value = "task-2087597" [ 2136.590565] env[63279]: _type = "Task" [ 2136.590565] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2136.599392] env[63279]: DEBUG oslo_vmware.api [None req-88e1e34d-1334-4ce4-a237-c76a589e2359 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Task: {'id': task-2087597, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2136.920199] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087596, 'name': CreateVM_Task} progress is 99%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2136.927677] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] [instance: acf95fad-316c-4605-9008-24d4d7c05892] Instance has had 0 of 5 cleanup attempts {{(pid=63279) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11760}} [ 2136.953926] env[63279]: DEBUG oslo_vmware.api [None req-f4048d48-5cdd-4c96-b4ef-85f231e5f9dc tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]5228dfd0-6cb3-24bc-afa8-19b1ff0331e9, 'name': SearchDatastore_Task, 'duration_secs': 0.008934} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2136.955299] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-92f7e661-6008-4e21-9d2c-d309303fe4d6 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2136.961916] env[63279]: DEBUG oslo_vmware.api [None req-f4048d48-5cdd-4c96-b4ef-85f231e5f9dc tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Waiting for the task: (returnval){ [ 2136.961916] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]528020c6-eecb-d880-0384-56455f214d11" [ 2136.961916] env[63279]: _type = "Task" [ 2136.961916] env[63279]: } to complete. 
{{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2136.970742] env[63279]: DEBUG oslo_vmware.api [None req-f4048d48-5cdd-4c96-b4ef-85f231e5f9dc tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]528020c6-eecb-d880-0384-56455f214d11, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2137.054718] env[63279]: DEBUG oslo_concurrency.lockutils [None req-04f7e4ed-d7ff-4e9a-9fc5-7e015957e936 tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] Lock "cd00cb0e-30e5-4a0c-8612-ea92e5e32edd" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 30.898s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2137.065926] env[63279]: DEBUG oslo_concurrency.lockutils [None req-cac71d6b-40f6-4627-b031-84d6a90fb5d5 tempest-ServerShowV257Test-1874161788 tempest-ServerShowV257Test-1874161788-project-member] Lock "f7f88f1a-a81a-4208-88d7-6a264e642ab1" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 25.111s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2137.090019] env[63279]: DEBUG oslo_concurrency.lockutils [None req-87c40409-e3eb-4bc2-bdc5-ee97999f97b7 tempest-ListServersNegativeTestJSON-168309111 tempest-ListServersNegativeTestJSON-168309111-project-member] Lock "fd9b1666-8e06-4ed0-9187-05a40e136a1d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 30.853s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2137.101507] env[63279]: DEBUG oslo_vmware.api [None req-88e1e34d-1334-4ce4-a237-c76a589e2359 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Task: {'id': task-2087597, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2137.125184] env[63279]: DEBUG nova.network.neutron [req-0b9c76c7-771f-4c9c-9745-2b05faf0fcc1 req-da12bc63-22dd-460f-8ca5-dc0f2a735173 service nova] [instance: 861e4118-6134-40cf-91cb-865b6ee9f347] Updated VIF entry in instance network info cache for port 4361494d-9864-49bc-a792-be9c831d7486. 
{{(pid=63279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2137.125510] env[63279]: DEBUG nova.network.neutron [req-0b9c76c7-771f-4c9c-9745-2b05faf0fcc1 req-da12bc63-22dd-460f-8ca5-dc0f2a735173 service nova] [instance: 861e4118-6134-40cf-91cb-865b6ee9f347] Updating instance_info_cache with network_info: [{"id": "4361494d-9864-49bc-a792-be9c831d7486", "address": "fa:16:3e:cb:0d:58", "network": {"id": "5cd88f94-203f-4bdb-9876-889955e0a894", "bridge": "br-int", "label": "tempest-ServersTestBootFromVolume-1136286558-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9fd54445958a43f9a20e86e0fedc4735", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ac563aa7-6d7c-4bd5-9241-7b3e11b8f22d", "external-id": "nsx-vlan-transportzone-730", "segmentation_id": 730, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4361494d-98", "ovs_interfaceid": "4361494d-9864-49bc-a792-be9c831d7486", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2137.424757] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b56d7b10-90bb-4c01-b50e-400f97d42b70 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2137.431018] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087596, 'name': CreateVM_Task, 'duration_secs': 0.511539} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2137.431018] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 861e4118-6134-40cf-91cb-865b6ee9f347] Created VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2137.431018] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-4110a38f-8529-4f8e-8573-c35840515d98 tempest-ServersTestBootFromVolume-1861685268 tempest-ServersTestBootFromVolume-1861685268-project-member] [instance: 861e4118-6134-40cf-91cb-865b6ee9f347] Block device information present: {'root_device_name': '/dev/sda', 'image': [], 'ephemerals': [], 'block_device_mapping': [{'disk_bus': None, 'guest_format': None, 'mount_device': '/dev/sda', 'delete_on_termination': True, 'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-427687', 'volume_id': '62b49f21-7323-46c2-809d-db035fa9fb8c', 'name': 'volume-62b49f21-7323-46c2-809d-db035fa9fb8c', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '861e4118-6134-40cf-91cb-865b6ee9f347', 'attached_at': '', 'detached_at': '', 'volume_id': '62b49f21-7323-46c2-809d-db035fa9fb8c', 'serial': '62b49f21-7323-46c2-809d-db035fa9fb8c'}, 'boot_index': 0, 'device_type': None, 'attachment_id': 'a8d67b59-523f-4969-8067-30c90b14a176', 'volume_type': None}], 'swap': None} {{(pid=63279) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 2137.431304] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-4110a38f-8529-4f8e-8573-c35840515d98 tempest-ServersTestBootFromVolume-1861685268 tempest-ServersTestBootFromVolume-1861685268-project-member] [instance: 861e4118-6134-40cf-91cb-865b6ee9f347] Root volume attach. 
Driver type: vmdk {{(pid=63279) attach_root_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:661}} [ 2137.431304] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d396e5dd-7f9e-428f-97b6-0131cdc59696 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2137.435543] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] [instance: 5bb445d3-1b12-4a1b-ad2a-cbc929b13aee] Instance has had 0 of 5 cleanup attempts {{(pid=63279) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11760}} [ 2137.439788] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dad59ded-4135-4843-ab35-f1726e88227c {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2137.447227] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7cd350af-7a20-4e11-a6f6-897ca0429e5f {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2137.483893] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-abb44d09-77b6-451a-9b04-ae60108a68d3 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2137.489269] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95454992-49d5-489f-a73f-c94551e61539 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2137.497481] env[63279]: DEBUG oslo_vmware.api [None req-f4048d48-5cdd-4c96-b4ef-85f231e5f9dc tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]528020c6-eecb-d880-0384-56455f214d11, 'name': SearchDatastore_Task, 'duration_secs': 0.009056} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2137.498067] env[63279]: DEBUG oslo_concurrency.lockutils [None req-f4048d48-5cdd-4c96-b4ef-85f231e5f9dc tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2137.498485] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-f4048d48-5cdd-4c96-b4ef-85f231e5f9dc tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Copying virtual disk from [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] a6758131-030e-4b33-a2c9-8864055a5bec/30887889-e45b-4f67-8b3c-16216e594a90-rescue.vmdk. 
{{(pid=63279) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 2137.500023] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b833195-2fd3-476b-81cb-09d101335d1c {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2137.505403] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3689a85d-a784-437c-9f68-414d657af5f5 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2137.507888] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.RelocateVM_Task with opID=oslo.vmware-3a1251f8-4c5f-4229-ac11-f6352830faca {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2137.518334] env[63279]: DEBUG nova.compute.provider_tree [None req-bb47e242-d054-4951-a4e2-4a2b181effb2 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Inventory has not changed in ProviderTree for provider: 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2137.522590] env[63279]: DEBUG oslo_vmware.api [None req-f4048d48-5cdd-4c96-b4ef-85f231e5f9dc tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Waiting for the task: (returnval){ [ 2137.522590] env[63279]: value = "task-2087598" [ 2137.522590] env[63279]: _type = "Task" [ 2137.522590] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2137.526014] env[63279]: DEBUG oslo_vmware.api [None req-4110a38f-8529-4f8e-8573-c35840515d98 tempest-ServersTestBootFromVolume-1861685268 tempest-ServersTestBootFromVolume-1861685268-project-member] Waiting for the task: (returnval){ [ 2137.526014] env[63279]: value = "task-2087599" [ 2137.526014] env[63279]: _type = "Task" [ 2137.526014] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2137.532814] env[63279]: DEBUG oslo_vmware.api [None req-f4048d48-5cdd-4c96-b4ef-85f231e5f9dc tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Task: {'id': task-2087598, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2137.540654] env[63279]: DEBUG oslo_vmware.api [None req-4110a38f-8529-4f8e-8573-c35840515d98 tempest-ServersTestBootFromVolume-1861685268 tempest-ServersTestBootFromVolume-1861685268-project-member] Task: {'id': task-2087599, 'name': RelocateVM_Task} progress is 5%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2137.601344] env[63279]: DEBUG oslo_vmware.api [None req-88e1e34d-1334-4ce4-a237-c76a589e2359 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Task: {'id': task-2087597, 'name': ReconfigVM_Task, 'duration_secs': 0.62791} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2137.601738] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-88e1e34d-1334-4ce4-a237-c76a589e2359 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: d6e40dbc-f20e-4164-b460-18de6ea72906] Reconfigured VM instance instance-0000004d to attach disk [datastore1] d6e40dbc-f20e-4164-b460-18de6ea72906/d6e40dbc-f20e-4164-b460-18de6ea72906.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2137.602318] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f7aaeb26-b878-435b-b732-133ee2ab2d9f {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2137.610106] env[63279]: DEBUG oslo_vmware.api [None req-88e1e34d-1334-4ce4-a237-c76a589e2359 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Waiting for the task: (returnval){ [ 2137.610106] env[63279]: value = "task-2087600" [ 2137.610106] env[63279]: _type = "Task" [ 2137.610106] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2137.619074] env[63279]: DEBUG oslo_vmware.api [None req-88e1e34d-1334-4ce4-a237-c76a589e2359 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Task: {'id': task-2087600, 'name': Rename_Task} progress is 5%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2137.627727] env[63279]: DEBUG oslo_concurrency.lockutils [req-0b9c76c7-771f-4c9c-9745-2b05faf0fcc1 req-da12bc63-22dd-460f-8ca5-dc0f2a735173 service nova] Releasing lock "refresh_cache-861e4118-6134-40cf-91cb-865b6ee9f347" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2137.944906] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] [instance: 5572bb1c-b378-4531-8459-10c2a2b7afdf] Instance has had 0 of 5 cleanup attempts {{(pid=63279) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11760}} [ 2138.026079] env[63279]: DEBUG nova.scheduler.client.report [None req-bb47e242-d054-4951-a4e2-4a2b181effb2 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Inventory has not changed for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2138.045181] env[63279]: DEBUG oslo_vmware.api [None req-f4048d48-5cdd-4c96-b4ef-85f231e5f9dc tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Task: {'id': task-2087598, 'name': CopyVirtualDisk_Task} progress is 25%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2138.050686] env[63279]: DEBUG oslo_vmware.api [None req-4110a38f-8529-4f8e-8573-c35840515d98 tempest-ServersTestBootFromVolume-1861685268 tempest-ServersTestBootFromVolume-1861685268-project-member] Task: {'id': task-2087599, 'name': RelocateVM_Task, 'duration_secs': 0.446741} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2138.051044] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-4110a38f-8529-4f8e-8573-c35840515d98 tempest-ServersTestBootFromVolume-1861685268 tempest-ServersTestBootFromVolume-1861685268-project-member] [instance: 861e4118-6134-40cf-91cb-865b6ee9f347] Volume attach. Driver type: vmdk {{(pid=63279) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 2138.051253] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-4110a38f-8529-4f8e-8573-c35840515d98 tempest-ServersTestBootFromVolume-1861685268 tempest-ServersTestBootFromVolume-1861685268-project-member] [instance: 861e4118-6134-40cf-91cb-865b6ee9f347] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-427687', 'volume_id': '62b49f21-7323-46c2-809d-db035fa9fb8c', 'name': 'volume-62b49f21-7323-46c2-809d-db035fa9fb8c', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '861e4118-6134-40cf-91cb-865b6ee9f347', 'attached_at': '', 'detached_at': '', 'volume_id': '62b49f21-7323-46c2-809d-db035fa9fb8c', 'serial': '62b49f21-7323-46c2-809d-db035fa9fb8c'} {{(pid=63279) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 2138.052328] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-774d11dd-c6d8-4de4-b93e-cb143bde580c {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2138.074996] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b049279a-d9bf-4b3d-aaad-33b98bc9e4af {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2138.100702] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-4110a38f-8529-4f8e-8573-c35840515d98 tempest-ServersTestBootFromVolume-1861685268 tempest-ServersTestBootFromVolume-1861685268-project-member] [instance: 861e4118-6134-40cf-91cb-865b6ee9f347] Reconfiguring VM instance instance-0000004e to attach disk [datastore1] volume-62b49f21-7323-46c2-809d-db035fa9fb8c/volume-62b49f21-7323-46c2-809d-db035fa9fb8c.vmdk or device None with type thin {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2138.102175] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-14b0fe95-db92-4de9-8966-12d0d8f0dc8d {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2138.119739] env[63279]: DEBUG nova.compute.manager [req-4015bbdf-ea54-45dc-8982-65cfd26afb3d req-c9f7449f-57b9-49d9-9d5e-94cc8a084f60 service nova] [instance: 2f5e22f6-ba70-4848-965b-eb1553115323] Received event network-vif-plugged-8383ed07-21e7-43e0-82a2-0afc179ca995 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2138.119970] env[63279]: DEBUG oslo_concurrency.lockutils 
[req-4015bbdf-ea54-45dc-8982-65cfd26afb3d req-c9f7449f-57b9-49d9-9d5e-94cc8a084f60 service nova] Acquiring lock "2f5e22f6-ba70-4848-965b-eb1553115323-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2138.120288] env[63279]: DEBUG oslo_concurrency.lockutils [req-4015bbdf-ea54-45dc-8982-65cfd26afb3d req-c9f7449f-57b9-49d9-9d5e-94cc8a084f60 service nova] Lock "2f5e22f6-ba70-4848-965b-eb1553115323-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2138.120383] env[63279]: DEBUG oslo_concurrency.lockutils [req-4015bbdf-ea54-45dc-8982-65cfd26afb3d req-c9f7449f-57b9-49d9-9d5e-94cc8a084f60 service nova] Lock "2f5e22f6-ba70-4848-965b-eb1553115323-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2138.120633] env[63279]: DEBUG nova.compute.manager [req-4015bbdf-ea54-45dc-8982-65cfd26afb3d req-c9f7449f-57b9-49d9-9d5e-94cc8a084f60 service nova] [instance: 2f5e22f6-ba70-4848-965b-eb1553115323] No waiting events found dispatching network-vif-plugged-8383ed07-21e7-43e0-82a2-0afc179ca995 {{(pid=63279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 2138.120789] env[63279]: WARNING nova.compute.manager [req-4015bbdf-ea54-45dc-8982-65cfd26afb3d req-c9f7449f-57b9-49d9-9d5e-94cc8a084f60 service nova] [instance: 2f5e22f6-ba70-4848-965b-eb1553115323] Received unexpected event network-vif-plugged-8383ed07-21e7-43e0-82a2-0afc179ca995 for instance with vm_state shelved_offloaded and task_state spawning. [ 2138.133111] env[63279]: DEBUG oslo_vmware.api [None req-88e1e34d-1334-4ce4-a237-c76a589e2359 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Task: {'id': task-2087600, 'name': Rename_Task, 'duration_secs': 0.173707} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2138.133386] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-88e1e34d-1334-4ce4-a237-c76a589e2359 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: d6e40dbc-f20e-4164-b460-18de6ea72906] Powering on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2138.133725] env[63279]: DEBUG oslo_vmware.api [None req-4110a38f-8529-4f8e-8573-c35840515d98 tempest-ServersTestBootFromVolume-1861685268 tempest-ServersTestBootFromVolume-1861685268-project-member] Waiting for the task: (returnval){ [ 2138.133725] env[63279]: value = "task-2087601" [ 2138.133725] env[63279]: _type = "Task" [ 2138.133725] env[63279]: } to complete. 
{{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2138.133942] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2f7da445-b3f0-486b-8605-085139b85d5a {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2138.145140] env[63279]: DEBUG oslo_vmware.api [None req-4110a38f-8529-4f8e-8573-c35840515d98 tempest-ServersTestBootFromVolume-1861685268 tempest-ServersTestBootFromVolume-1861685268-project-member] Task: {'id': task-2087601, 'name': ReconfigVM_Task} progress is 10%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2138.146182] env[63279]: DEBUG oslo_vmware.api [None req-88e1e34d-1334-4ce4-a237-c76a589e2359 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Waiting for the task: (returnval){ [ 2138.146182] env[63279]: value = "task-2087602" [ 2138.146182] env[63279]: _type = "Task" [ 2138.146182] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2138.158540] env[63279]: DEBUG oslo_vmware.api [None req-88e1e34d-1334-4ce4-a237-c76a589e2359 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Task: {'id': task-2087602, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2138.242718] env[63279]: DEBUG oslo_concurrency.lockutils [None req-2641ecb3-368c-4954-9833-7470f0184c80 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Acquiring lock "refresh_cache-2f5e22f6-ba70-4848-965b-eb1553115323" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2138.242993] env[63279]: DEBUG oslo_concurrency.lockutils [None req-2641ecb3-368c-4954-9833-7470f0184c80 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Acquired lock "refresh_cache-2f5e22f6-ba70-4848-965b-eb1553115323" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2138.244515] env[63279]: DEBUG nova.network.neutron [None req-2641ecb3-368c-4954-9833-7470f0184c80 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] [instance: 2f5e22f6-ba70-4848-965b-eb1553115323] Building network info cache for instance {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2138.452591] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] [instance: f20e8eb2-847b-4988-abf9-ed5f9f65c25c] Instance has had 0 of 5 cleanup attempts {{(pid=63279) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11760}} [ 2138.549490] env[63279]: DEBUG oslo_vmware.api [None req-f4048d48-5cdd-4c96-b4ef-85f231e5f9dc tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Task: {'id': task-2087598, 'name': CopyVirtualDisk_Task} progress is 100%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2138.646807] env[63279]: DEBUG oslo_vmware.api [None req-4110a38f-8529-4f8e-8573-c35840515d98 tempest-ServersTestBootFromVolume-1861685268 tempest-ServersTestBootFromVolume-1861685268-project-member] Task: {'id': task-2087601, 'name': ReconfigVM_Task, 'duration_secs': 0.298693} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2138.651109] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-4110a38f-8529-4f8e-8573-c35840515d98 tempest-ServersTestBootFromVolume-1861685268 tempest-ServersTestBootFromVolume-1861685268-project-member] [instance: 861e4118-6134-40cf-91cb-865b6ee9f347] Reconfigured VM instance instance-0000004e to attach disk [datastore1] volume-62b49f21-7323-46c2-809d-db035fa9fb8c/volume-62b49f21-7323-46c2-809d-db035fa9fb8c.vmdk or device None with type thin {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2138.655334] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-91714077-943e-4285-90b6-66183c56740e {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2138.673411] env[63279]: DEBUG oslo_vmware.api [None req-88e1e34d-1334-4ce4-a237-c76a589e2359 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Task: {'id': task-2087602, 'name': PowerOnVM_Task, 'duration_secs': 0.447073} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2138.675058] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-88e1e34d-1334-4ce4-a237-c76a589e2359 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: d6e40dbc-f20e-4164-b460-18de6ea72906] Powered on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2138.675058] env[63279]: INFO nova.compute.manager [None req-88e1e34d-1334-4ce4-a237-c76a589e2359 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: d6e40dbc-f20e-4164-b460-18de6ea72906] Took 7.14 seconds to spawn the instance on the hypervisor. [ 2138.675058] env[63279]: DEBUG nova.compute.manager [None req-88e1e34d-1334-4ce4-a237-c76a589e2359 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: d6e40dbc-f20e-4164-b460-18de6ea72906] Checking state {{(pid=63279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2138.675388] env[63279]: DEBUG oslo_vmware.api [None req-4110a38f-8529-4f8e-8573-c35840515d98 tempest-ServersTestBootFromVolume-1861685268 tempest-ServersTestBootFromVolume-1861685268-project-member] Waiting for the task: (returnval){ [ 2138.675388] env[63279]: value = "task-2087603" [ 2138.675388] env[63279]: _type = "Task" [ 2138.675388] env[63279]: } to complete. 
{{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2138.678058] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab01af22-5635-4f0c-a06a-40eacfa39f24 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2138.692039] env[63279]: DEBUG oslo_vmware.api [None req-4110a38f-8529-4f8e-8573-c35840515d98 tempest-ServersTestBootFromVolume-1861685268 tempest-ServersTestBootFromVolume-1861685268-project-member] Task: {'id': task-2087603, 'name': ReconfigVM_Task} progress is 10%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2138.954601] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] [instance: a8107fa5-9e8b-41dd-9679-8e106a3496a5] Instance has had 0 of 5 cleanup attempts {{(pid=63279) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11760}} [ 2139.045029] env[63279]: DEBUG oslo_concurrency.lockutils [None req-bb47e242-d054-4951-a4e2-4a2b181effb2 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.545s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2139.047882] env[63279]: DEBUG oslo_vmware.api [None req-f4048d48-5cdd-4c96-b4ef-85f231e5f9dc tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Task: {'id': task-2087598, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2139.048350] env[63279]: DEBUG oslo_concurrency.lockutils [None req-a2668d1a-4c43-4202-b04d-63b7f5aec853 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 12.046s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2139.050086] env[63279]: INFO nova.compute.claims [None req-a2668d1a-4c43-4202-b04d-63b7f5aec853 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] [instance: 4acfb474-d861-467a-983c-0dd5641e66f3] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2139.115876] env[63279]: DEBUG nova.network.neutron [None req-2641ecb3-368c-4954-9833-7470f0184c80 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] [instance: 2f5e22f6-ba70-4848-965b-eb1553115323] Updating instance_info_cache with network_info: [{"id": "8383ed07-21e7-43e0-82a2-0afc179ca995", "address": "fa:16:3e:b6:12:ab", "network": {"id": "a7b83e75-3b16-41db-9395-90dead128e80", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-756195345-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0fd8bdc7d8664db698236f56d82adcf0", "mtu": 8950, "physical_network": "default", 
"tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "53955f0e-c162-4cef-8bd5-335b369c36b6", "external-id": "nsx-vlan-transportzone-623", "segmentation_id": 623, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8383ed07-21", "ovs_interfaceid": "8383ed07-21e7-43e0-82a2-0afc179ca995", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2139.189749] env[63279]: DEBUG oslo_vmware.api [None req-4110a38f-8529-4f8e-8573-c35840515d98 tempest-ServersTestBootFromVolume-1861685268 tempest-ServersTestBootFromVolume-1861685268-project-member] Task: {'id': task-2087603, 'name': ReconfigVM_Task, 'duration_secs': 0.132414} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2139.190126] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-4110a38f-8529-4f8e-8573-c35840515d98 tempest-ServersTestBootFromVolume-1861685268 tempest-ServersTestBootFromVolume-1861685268-project-member] [instance: 861e4118-6134-40cf-91cb-865b6ee9f347] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-427687', 'volume_id': '62b49f21-7323-46c2-809d-db035fa9fb8c', 'name': 'volume-62b49f21-7323-46c2-809d-db035fa9fb8c', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '861e4118-6134-40cf-91cb-865b6ee9f347', 'attached_at': '', 'detached_at': '', 'volume_id': '62b49f21-7323-46c2-809d-db035fa9fb8c', 'serial': '62b49f21-7323-46c2-809d-db035fa9fb8c'} {{(pid=63279) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 2139.190697] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b4289aa1-97aa-4d93-bfea-7bea9f872b12 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2139.203192] env[63279]: INFO nova.compute.manager [None req-88e1e34d-1334-4ce4-a237-c76a589e2359 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: d6e40dbc-f20e-4164-b460-18de6ea72906] Took 43.33 seconds to build instance. [ 2139.206535] env[63279]: DEBUG oslo_vmware.api [None req-4110a38f-8529-4f8e-8573-c35840515d98 tempest-ServersTestBootFromVolume-1861685268 tempest-ServersTestBootFromVolume-1861685268-project-member] Waiting for the task: (returnval){ [ 2139.206535] env[63279]: value = "task-2087604" [ 2139.206535] env[63279]: _type = "Task" [ 2139.206535] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2139.219209] env[63279]: DEBUG oslo_vmware.api [None req-4110a38f-8529-4f8e-8573-c35840515d98 tempest-ServersTestBootFromVolume-1861685268 tempest-ServersTestBootFromVolume-1861685268-project-member] Task: {'id': task-2087604, 'name': Rename_Task} progress is 5%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2139.459503] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] [instance: ff2f355a-9687-4491-b243-6133e4b7b866] Instance has had 0 of 5 cleanup attempts {{(pid=63279) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11760}} [ 2139.543840] env[63279]: DEBUG oslo_vmware.api [None req-f4048d48-5cdd-4c96-b4ef-85f231e5f9dc tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Task: {'id': task-2087598, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.686446} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2139.544122] env[63279]: INFO nova.virt.vmwareapi.ds_util [None req-f4048d48-5cdd-4c96-b4ef-85f231e5f9dc tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Copied virtual disk from [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] a6758131-030e-4b33-a2c9-8864055a5bec/30887889-e45b-4f67-8b3c-16216e594a90-rescue.vmdk. [ 2139.544907] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39e43760-6c8d-4dad-b7b5-28383013cb99 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2139.572900] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-f4048d48-5cdd-4c96-b4ef-85f231e5f9dc tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] [instance: a6758131-030e-4b33-a2c9-8864055a5bec] Reconfiguring VM instance instance-0000004c to attach disk [datastore1] a6758131-030e-4b33-a2c9-8864055a5bec/30887889-e45b-4f67-8b3c-16216e594a90-rescue.vmdk or device None with type thin {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2139.575498] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-db82ff33-3d5e-408e-bad3-b39ada8df9bb {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2139.594571] env[63279]: DEBUG oslo_vmware.api [None req-f4048d48-5cdd-4c96-b4ef-85f231e5f9dc tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Waiting for the task: (returnval){ [ 2139.594571] env[63279]: value = "task-2087605" [ 2139.594571] env[63279]: _type = "Task" [ 2139.594571] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2139.603199] env[63279]: DEBUG oslo_vmware.api [None req-f4048d48-5cdd-4c96-b4ef-85f231e5f9dc tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Task: {'id': task-2087605, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2139.618336] env[63279]: DEBUG oslo_concurrency.lockutils [None req-2641ecb3-368c-4954-9833-7470f0184c80 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Releasing lock "refresh_cache-2f5e22f6-ba70-4848-965b-eb1553115323" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2139.647591] env[63279]: DEBUG nova.virt.hardware [None req-2641ecb3-368c-4954-9833-7470f0184c80 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-13T17:30:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='90630de4bd87d672a8e33ffc56d77ff0',container_format='bare',created_at=2025-01-13T17:56:02Z,direct_url=,disk_format='vmdk',id=9d99d2f6-606a-4ba2-a64b-746ca6b6b68e,min_disk=1,min_ram=0,name='tempest-ServersNegativeTestJSON-server-336657912-shelved',owner='0fd8bdc7d8664db698236f56d82adcf0',properties=ImageMetaProps,protected=,size=31670272,status='active',tags=,updated_at=2025-01-13T17:56:20Z,virtual_size=,visibility=), allow threads: False {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2139.647819] env[63279]: DEBUG nova.virt.hardware [None req-2641ecb3-368c-4954-9833-7470f0184c80 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Flavor limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2139.647981] env[63279]: DEBUG nova.virt.hardware [None req-2641ecb3-368c-4954-9833-7470f0184c80 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Image limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2139.648191] env[63279]: DEBUG nova.virt.hardware [None req-2641ecb3-368c-4954-9833-7470f0184c80 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Flavor pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2139.648344] env[63279]: DEBUG nova.virt.hardware [None req-2641ecb3-368c-4954-9833-7470f0184c80 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Image pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2139.648541] env[63279]: DEBUG nova.virt.hardware [None req-2641ecb3-368c-4954-9833-7470f0184c80 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2139.648697] env[63279]: DEBUG nova.virt.hardware [None req-2641ecb3-368c-4954-9833-7470f0184c80 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63279) 
_get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 2139.648943] env[63279]: DEBUG nova.virt.hardware [None req-2641ecb3-368c-4954-9833-7470f0184c80 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 2139.649054] env[63279]: DEBUG nova.virt.hardware [None req-2641ecb3-368c-4954-9833-7470f0184c80 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Got 1 possible topologies {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2139.649649] env[63279]: DEBUG nova.virt.hardware [None req-2641ecb3-368c-4954-9833-7470f0184c80 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2139.649649] env[63279]: DEBUG nova.virt.hardware [None req-2641ecb3-368c-4954-9833-7470f0184c80 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2139.650436] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bcdc27e0-1b11-47e9-a279-4508d8683673 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2139.658430] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bcb5f011-aeb7-40d7-b15a-6e90b72a2797 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2139.674224] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-2641ecb3-368c-4954-9833-7470f0184c80 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] [instance: 2f5e22f6-ba70-4848-965b-eb1553115323] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b6:12:ab', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '53955f0e-c162-4cef-8bd5-335b369c36b6', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '8383ed07-21e7-43e0-82a2-0afc179ca995', 'vif_model': 'vmxnet3'}] {{(pid=63279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2139.681896] env[63279]: DEBUG oslo.service.loopingcall [None req-2641ecb3-368c-4954-9833-7470f0184c80 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2139.682208] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2f5e22f6-ba70-4848-965b-eb1553115323] Creating VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2139.682426] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1276bfd7-b840-45f4-9fe9-b8a176171fd5 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2139.702959] env[63279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2139.702959] env[63279]: value = "task-2087606" [ 2139.702959] env[63279]: _type = "Task" [ 2139.702959] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2139.708591] env[63279]: DEBUG oslo_concurrency.lockutils [None req-88e1e34d-1334-4ce4-a237-c76a589e2359 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Lock "d6e40dbc-f20e-4164-b460-18de6ea72906" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 44.847s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2139.714511] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087606, 'name': CreateVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2139.719400] env[63279]: DEBUG oslo_vmware.api [None req-4110a38f-8529-4f8e-8573-c35840515d98 tempest-ServersTestBootFromVolume-1861685268 tempest-ServersTestBootFromVolume-1861685268-project-member] Task: {'id': task-2087604, 'name': Rename_Task, 'duration_secs': 0.128591} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2139.719656] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-4110a38f-8529-4f8e-8573-c35840515d98 tempest-ServersTestBootFromVolume-1861685268 tempest-ServersTestBootFromVolume-1861685268-project-member] [instance: 861e4118-6134-40cf-91cb-865b6ee9f347] Powering on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2139.719907] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8325b209-b629-418b-b477-76042f8ec116 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2139.726749] env[63279]: DEBUG oslo_vmware.api [None req-4110a38f-8529-4f8e-8573-c35840515d98 tempest-ServersTestBootFromVolume-1861685268 tempest-ServersTestBootFromVolume-1861685268-project-member] Waiting for the task: (returnval){ [ 2139.726749] env[63279]: value = "task-2087607" [ 2139.726749] env[63279]: _type = "Task" [ 2139.726749] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2139.734994] env[63279]: DEBUG oslo_vmware.api [None req-4110a38f-8529-4f8e-8573-c35840515d98 tempest-ServersTestBootFromVolume-1861685268 tempest-ServersTestBootFromVolume-1861685268-project-member] Task: {'id': task-2087607, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2139.778580] env[63279]: INFO nova.scheduler.client.report [None req-bb47e242-d054-4951-a4e2-4a2b181effb2 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Deleted allocation for migration 44650656-4b9e-403b-9bcc-56f29df93e8b [ 2139.966242] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] [instance: ecedded1-7169-49a4-8a9e-2fe4086db986] Instance has had 0 of 5 cleanup attempts {{(pid=63279) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11760}} [ 2140.111073] env[63279]: DEBUG oslo_vmware.api [None req-f4048d48-5cdd-4c96-b4ef-85f231e5f9dc tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Task: {'id': task-2087605, 'name': ReconfigVM_Task, 'duration_secs': 0.510179} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2140.111373] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-f4048d48-5cdd-4c96-b4ef-85f231e5f9dc tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] [instance: a6758131-030e-4b33-a2c9-8864055a5bec] Reconfigured VM instance instance-0000004c to attach disk [datastore1] a6758131-030e-4b33-a2c9-8864055a5bec/30887889-e45b-4f67-8b3c-16216e594a90-rescue.vmdk or device None with type thin {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2140.112335] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8aca697d-4e0c-4502-be12-6157eb5e3a33 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2140.147821] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-101b14c1-391e-4253-afc3-2fbe921200c5 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2140.165447] env[63279]: DEBUG oslo_vmware.api [None req-f4048d48-5cdd-4c96-b4ef-85f231e5f9dc tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Waiting for the task: (returnval){ [ 2140.165447] env[63279]: value = "task-2087608" [ 2140.165447] env[63279]: _type = "Task" [ 2140.165447] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2140.174071] env[63279]: DEBUG oslo_vmware.api [None req-f4048d48-5cdd-4c96-b4ef-85f231e5f9dc tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Task: {'id': task-2087608, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2140.222300] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087606, 'name': CreateVM_Task, 'duration_secs': 0.419416} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2140.226576] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2f5e22f6-ba70-4848-965b-eb1553115323] Created VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2140.228283] env[63279]: DEBUG oslo_concurrency.lockutils [None req-2641ecb3-368c-4954-9833-7470f0184c80 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/9d99d2f6-606a-4ba2-a64b-746ca6b6b68e" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2140.228334] env[63279]: DEBUG oslo_concurrency.lockutils [None req-2641ecb3-368c-4954-9833-7470f0184c80 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Acquired lock "[datastore1] devstack-image-cache_base/9d99d2f6-606a-4ba2-a64b-746ca6b6b68e" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2140.228825] env[63279]: DEBUG oslo_concurrency.lockutils [None req-2641ecb3-368c-4954-9833-7470f0184c80 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/9d99d2f6-606a-4ba2-a64b-746ca6b6b68e" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2140.240512] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d868c4ea-79ff-46aa-b574-a713626cc826 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2140.250545] env[63279]: DEBUG oslo_vmware.api [None req-4110a38f-8529-4f8e-8573-c35840515d98 tempest-ServersTestBootFromVolume-1861685268 tempest-ServersTestBootFromVolume-1861685268-project-member] Task: {'id': task-2087607, 'name': PowerOnVM_Task} progress is 89%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2140.252584] env[63279]: DEBUG oslo_vmware.api [None req-2641ecb3-368c-4954-9833-7470f0184c80 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Waiting for the task: (returnval){ [ 2140.252584] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]5286a301-1c17-4c8f-7e26-7da21589e4a9" [ 2140.252584] env[63279]: _type = "Task" [ 2140.252584] env[63279]: } to complete. 
{{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2140.271152] env[63279]: DEBUG oslo_concurrency.lockutils [None req-2641ecb3-368c-4954-9833-7470f0184c80 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Releasing lock "[datastore1] devstack-image-cache_base/9d99d2f6-606a-4ba2-a64b-746ca6b6b68e" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2140.271529] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-2641ecb3-368c-4954-9833-7470f0184c80 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] [instance: 2f5e22f6-ba70-4848-965b-eb1553115323] Processing image 9d99d2f6-606a-4ba2-a64b-746ca6b6b68e {{(pid=63279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2140.271903] env[63279]: DEBUG oslo_concurrency.lockutils [None req-2641ecb3-368c-4954-9833-7470f0184c80 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/9d99d2f6-606a-4ba2-a64b-746ca6b6b68e/9d99d2f6-606a-4ba2-a64b-746ca6b6b68e.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2140.272327] env[63279]: DEBUG oslo_concurrency.lockutils [None req-2641ecb3-368c-4954-9833-7470f0184c80 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Acquired lock "[datastore1] devstack-image-cache_base/9d99d2f6-606a-4ba2-a64b-746ca6b6b68e/9d99d2f6-606a-4ba2-a64b-746ca6b6b68e.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2140.272579] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-2641ecb3-368c-4954-9833-7470f0184c80 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2140.274854] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c3fd1cd9-0522-43a4-a81e-600143d4a616 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2140.284244] env[63279]: DEBUG oslo_concurrency.lockutils [None req-bb47e242-d054-4951-a4e2-4a2b181effb2 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Lock "b981ac83-6c23-4d44-bd28-12da30d746bd" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 26.355s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2140.285457] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-2641ecb3-368c-4954-9833-7470f0184c80 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2140.285642] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-2641ecb3-368c-4954-9833-7470f0184c80 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2140.291018] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-600e5ba3-ccb8-4b0f-9dfd-86f05c8abc67 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2140.302397] env[63279]: DEBUG oslo_vmware.api [None req-2641ecb3-368c-4954-9833-7470f0184c80 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Waiting for the task: (returnval){ [ 2140.302397] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]527cac14-b604-2ad6-48bd-3340d76f580a" [ 2140.302397] env[63279]: _type = "Task" [ 2140.302397] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2140.315763] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-2641ecb3-368c-4954-9833-7470f0184c80 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] [instance: 2f5e22f6-ba70-4848-965b-eb1553115323] Preparing fetch location {{(pid=63279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2140.316179] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-2641ecb3-368c-4954-9833-7470f0184c80 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] [instance: 2f5e22f6-ba70-4848-965b-eb1553115323] Fetch image to [datastore1] OSTACK_IMG_6f99a7f4-17f1-4e02-91fa-575cc9adf1ac/OSTACK_IMG_6f99a7f4-17f1-4e02-91fa-575cc9adf1ac.vmdk {{(pid=63279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2140.316495] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-2641ecb3-368c-4954-9833-7470f0184c80 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] [instance: 2f5e22f6-ba70-4848-965b-eb1553115323] Downloading stream optimized image 9d99d2f6-606a-4ba2-a64b-746ca6b6b68e to [datastore1] OSTACK_IMG_6f99a7f4-17f1-4e02-91fa-575cc9adf1ac/OSTACK_IMG_6f99a7f4-17f1-4e02-91fa-575cc9adf1ac.vmdk on the data store datastore1 as vApp {{(pid=63279) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 2140.316778] env[63279]: DEBUG nova.virt.vmwareapi.images [None req-2641ecb3-368c-4954-9833-7470f0184c80 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] [instance: 2f5e22f6-ba70-4848-965b-eb1553115323] Downloading image file data 9d99d2f6-606a-4ba2-a64b-746ca6b6b68e to the ESX as VM named 'OSTACK_IMG_6f99a7f4-17f1-4e02-91fa-575cc9adf1ac' {{(pid=63279) fetch_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 2140.414405] env[63279]: DEBUG oslo_vmware.rw_handles [None req-2641ecb3-368c-4954-9833-7470f0184c80 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 2140.414405] env[63279]: value = "resgroup-9" [ 2140.414405] env[63279]: _type = "ResourcePool" [ 2140.414405] env[63279]: }. 
{{(pid=63279) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 2140.414728] env[63279]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-a6decd11-a8a7-4036-be21-5ea775c7059e {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2140.445881] env[63279]: DEBUG oslo_vmware.rw_handles [None req-2641ecb3-368c-4954-9833-7470f0184c80 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Lease: (returnval){ [ 2140.445881] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52f52bc9-15ae-0d77-f3a1-d29811434ef0" [ 2140.445881] env[63279]: _type = "HttpNfcLease" [ 2140.445881] env[63279]: } obtained for vApp import into resource pool (val){ [ 2140.445881] env[63279]: value = "resgroup-9" [ 2140.445881] env[63279]: _type = "ResourcePool" [ 2140.445881] env[63279]: }. {{(pid=63279) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 2140.445881] env[63279]: DEBUG oslo_vmware.api [None req-2641ecb3-368c-4954-9833-7470f0184c80 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Waiting for the lease: (returnval){ [ 2140.445881] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52f52bc9-15ae-0d77-f3a1-d29811434ef0" [ 2140.445881] env[63279]: _type = "HttpNfcLease" [ 2140.445881] env[63279]: } to be ready. {{(pid=63279) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 2140.458522] env[63279]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 2140.458522] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52f52bc9-15ae-0d77-f3a1-d29811434ef0" [ 2140.458522] env[63279]: _type = "HttpNfcLease" [ 2140.458522] env[63279]: } is initializing. {{(pid=63279) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 2140.469320] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] [instance: 5d4be656-defe-4332-b97e-e88b107ca4a1] Instance has had 0 of 5 cleanup attempts {{(pid=63279) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11760}} [ 2140.477274] env[63279]: DEBUG nova.compute.manager [req-a574894d-6cae-4109-b1a9-0b8e8061be09 req-9c457ca9-04ed-4cf0-8693-7cbbb828cd41 service nova] [instance: 2f5e22f6-ba70-4848-965b-eb1553115323] Received event network-changed-8383ed07-21e7-43e0-82a2-0afc179ca995 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2140.477274] env[63279]: DEBUG nova.compute.manager [req-a574894d-6cae-4109-b1a9-0b8e8061be09 req-9c457ca9-04ed-4cf0-8693-7cbbb828cd41 service nova] [instance: 2f5e22f6-ba70-4848-965b-eb1553115323] Refreshing instance network info cache due to event network-changed-8383ed07-21e7-43e0-82a2-0afc179ca995. 
{{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11655}} [ 2140.477274] env[63279]: DEBUG oslo_concurrency.lockutils [req-a574894d-6cae-4109-b1a9-0b8e8061be09 req-9c457ca9-04ed-4cf0-8693-7cbbb828cd41 service nova] Acquiring lock "refresh_cache-2f5e22f6-ba70-4848-965b-eb1553115323" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2140.477274] env[63279]: DEBUG oslo_concurrency.lockutils [req-a574894d-6cae-4109-b1a9-0b8e8061be09 req-9c457ca9-04ed-4cf0-8693-7cbbb828cd41 service nova] Acquired lock "refresh_cache-2f5e22f6-ba70-4848-965b-eb1553115323" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2140.477274] env[63279]: DEBUG nova.network.neutron [req-a574894d-6cae-4109-b1a9-0b8e8061be09 req-9c457ca9-04ed-4cf0-8693-7cbbb828cd41 service nova] [instance: 2f5e22f6-ba70-4848-965b-eb1553115323] Refreshing network info cache for port 8383ed07-21e7-43e0-82a2-0afc179ca995 {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2140.562754] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4a11732-b96a-4b16-a79b-df2677338108 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2140.571876] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-892fba94-612f-407e-8c80-07f7a5cdbb70 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2140.603434] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc73a15b-a421-440b-9a1e-f0cd7e4213f4 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2140.611476] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0c0a6f6-807a-4380-973b-8f7fc5d6ef55 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2140.627266] env[63279]: DEBUG nova.compute.provider_tree [None req-a2668d1a-4c43-4202-b04d-63b7f5aec853 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Inventory has not changed in ProviderTree for provider: 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2140.675128] env[63279]: DEBUG oslo_vmware.api [None req-f4048d48-5cdd-4c96-b4ef-85f231e5f9dc tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Task: {'id': task-2087608, 'name': ReconfigVM_Task, 'duration_secs': 0.309119} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2140.675409] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-f4048d48-5cdd-4c96-b4ef-85f231e5f9dc tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] [instance: a6758131-030e-4b33-a2c9-8864055a5bec] Powering on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2140.675689] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-7a8db803-e51c-4e67-8849-07eca7a57456 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2140.681659] env[63279]: DEBUG oslo_vmware.api [None req-f4048d48-5cdd-4c96-b4ef-85f231e5f9dc tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Waiting for the task: (returnval){ [ 2140.681659] env[63279]: value = "task-2087610" [ 2140.681659] env[63279]: _type = "Task" [ 2140.681659] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2140.690196] env[63279]: DEBUG oslo_vmware.api [None req-f4048d48-5cdd-4c96-b4ef-85f231e5f9dc tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Task: {'id': task-2087610, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2140.736294] env[63279]: DEBUG oslo_vmware.api [None req-4110a38f-8529-4f8e-8573-c35840515d98 tempest-ServersTestBootFromVolume-1861685268 tempest-ServersTestBootFromVolume-1861685268-project-member] Task: {'id': task-2087607, 'name': PowerOnVM_Task, 'duration_secs': 0.583925} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2140.736608] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-4110a38f-8529-4f8e-8573-c35840515d98 tempest-ServersTestBootFromVolume-1861685268 tempest-ServersTestBootFromVolume-1861685268-project-member] [instance: 861e4118-6134-40cf-91cb-865b6ee9f347] Powered on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2140.736829] env[63279]: INFO nova.compute.manager [None req-4110a38f-8529-4f8e-8573-c35840515d98 tempest-ServersTestBootFromVolume-1861685268 tempest-ServersTestBootFromVolume-1861685268-project-member] [instance: 861e4118-6134-40cf-91cb-865b6ee9f347] Took 4.97 seconds to spawn the instance on the hypervisor. 
[ 2140.737022] env[63279]: DEBUG nova.compute.manager [None req-4110a38f-8529-4f8e-8573-c35840515d98 tempest-ServersTestBootFromVolume-1861685268 tempest-ServersTestBootFromVolume-1861685268-project-member] [instance: 861e4118-6134-40cf-91cb-865b6ee9f347] Checking state {{(pid=63279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2140.737902] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ea2096f-6292-4a5f-bb23-a9f25ff3e2a6 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2140.954908] env[63279]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 2140.954908] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52f52bc9-15ae-0d77-f3a1-d29811434ef0" [ 2140.954908] env[63279]: _type = "HttpNfcLease" [ 2140.954908] env[63279]: } is initializing. {{(pid=63279) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 2140.972661] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] [instance: 357f08c9-4de9-4b84-8384-6bf130872f40] Instance has had 0 of 5 cleanup attempts {{(pid=63279) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11760}} [ 2140.987352] env[63279]: DEBUG oslo_concurrency.lockutils [None req-1b4c0c4c-9a01-4ca3-8f97-810c4c0e8a75 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Acquiring lock "d6e40dbc-f20e-4164-b460-18de6ea72906" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2140.987830] env[63279]: DEBUG oslo_concurrency.lockutils [None req-1b4c0c4c-9a01-4ca3-8f97-810c4c0e8a75 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Lock "d6e40dbc-f20e-4164-b460-18de6ea72906" acquired by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2140.988130] env[63279]: INFO nova.compute.manager [None req-1b4c0c4c-9a01-4ca3-8f97-810c4c0e8a75 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: d6e40dbc-f20e-4164-b460-18de6ea72906] Shelving [ 2141.131979] env[63279]: DEBUG nova.scheduler.client.report [None req-a2668d1a-4c43-4202-b04d-63b7f5aec853 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Inventory has not changed for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2141.193643] env[63279]: DEBUG oslo_vmware.api [None req-f4048d48-5cdd-4c96-b4ef-85f231e5f9dc tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Task: {'id': task-2087610, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2141.265040] env[63279]: INFO nova.compute.manager [None req-4110a38f-8529-4f8e-8573-c35840515d98 tempest-ServersTestBootFromVolume-1861685268 tempest-ServersTestBootFromVolume-1861685268-project-member] [instance: 861e4118-6134-40cf-91cb-865b6ee9f347] Took 34.70 seconds to build instance. [ 2141.458512] env[63279]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 2141.458512] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52f52bc9-15ae-0d77-f3a1-d29811434ef0" [ 2141.458512] env[63279]: _type = "HttpNfcLease" [ 2141.458512] env[63279]: } is initializing. {{(pid=63279) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 2141.476237] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] [instance: 32e84715-0345-4171-abb7-c034a501347e] Instance has had 0 of 5 cleanup attempts {{(pid=63279) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11760}} [ 2141.636722] env[63279]: DEBUG oslo_concurrency.lockutils [None req-a2668d1a-4c43-4202-b04d-63b7f5aec853 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.588s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2141.637462] env[63279]: DEBUG nova.compute.manager [None req-a2668d1a-4c43-4202-b04d-63b7f5aec853 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] [instance: 4acfb474-d861-467a-983c-0dd5641e66f3] Start building networks asynchronously for instance. {{(pid=63279) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 2141.646697] env[63279]: DEBUG nova.network.neutron [req-a574894d-6cae-4109-b1a9-0b8e8061be09 req-9c457ca9-04ed-4cf0-8693-7cbbb828cd41 service nova] [instance: 2f5e22f6-ba70-4848-965b-eb1553115323] Updated VIF entry in instance network info cache for port 8383ed07-21e7-43e0-82a2-0afc179ca995. 
{{(pid=63279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2141.647035] env[63279]: DEBUG nova.network.neutron [req-a574894d-6cae-4109-b1a9-0b8e8061be09 req-9c457ca9-04ed-4cf0-8693-7cbbb828cd41 service nova] [instance: 2f5e22f6-ba70-4848-965b-eb1553115323] Updating instance_info_cache with network_info: [{"id": "8383ed07-21e7-43e0-82a2-0afc179ca995", "address": "fa:16:3e:b6:12:ab", "network": {"id": "a7b83e75-3b16-41db-9395-90dead128e80", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-756195345-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0fd8bdc7d8664db698236f56d82adcf0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "53955f0e-c162-4cef-8bd5-335b369c36b6", "external-id": "nsx-vlan-transportzone-623", "segmentation_id": 623, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8383ed07-21", "ovs_interfaceid": "8383ed07-21e7-43e0-82a2-0afc179ca995", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2141.648116] env[63279]: DEBUG oslo_concurrency.lockutils [None req-801dfe63-15ee-4acc-bd55-4bd47afbcfa8 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 13.587s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2141.648334] env[63279]: DEBUG nova.objects.instance [None req-801dfe63-15ee-4acc-bd55-4bd47afbcfa8 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Lazy-loading 'resources' on Instance uuid 668436f9-94e9-48c2-a3d4-3df7bbcf5775 {{(pid=63279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2141.694208] env[63279]: DEBUG oslo_vmware.api [None req-f4048d48-5cdd-4c96-b4ef-85f231e5f9dc tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Task: {'id': task-2087610, 'name': PowerOnVM_Task, 'duration_secs': 0.773672} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2141.694493] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-f4048d48-5cdd-4c96-b4ef-85f231e5f9dc tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] [instance: a6758131-030e-4b33-a2c9-8864055a5bec] Powered on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2141.697417] env[63279]: DEBUG nova.compute.manager [None req-f4048d48-5cdd-4c96-b4ef-85f231e5f9dc tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] [instance: a6758131-030e-4b33-a2c9-8864055a5bec] Checking state {{(pid=63279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2141.698294] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59dec173-5677-4c46-9cf1-6b904d26bfc4 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2141.768069] env[63279]: DEBUG oslo_concurrency.lockutils [None req-4110a38f-8529-4f8e-8573-c35840515d98 tempest-ServersTestBootFromVolume-1861685268 tempest-ServersTestBootFromVolume-1861685268-project-member] Lock "861e4118-6134-40cf-91cb-865b6ee9f347" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 36.213s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2141.962211] env[63279]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 2141.962211] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52f52bc9-15ae-0d77-f3a1-d29811434ef0" [ 2141.962211] env[63279]: _type = "HttpNfcLease" [ 2141.962211] env[63279]: } is ready. {{(pid=63279) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 2141.962830] env[63279]: DEBUG oslo_vmware.rw_handles [None req-2641ecb3-368c-4954-9833-7470f0184c80 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 2141.962830] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52f52bc9-15ae-0d77-f3a1-d29811434ef0" [ 2141.962830] env[63279]: _type = "HttpNfcLease" [ 2141.962830] env[63279]: }. {{(pid=63279) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 2141.963919] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bce7f9a2-2512-475a-a670-bad199724c88 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2141.974099] env[63279]: DEBUG oslo_vmware.rw_handles [None req-2641ecb3-368c-4954-9833-7470f0184c80 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52768f89-0a64-36c5-4b90-ed21fb0cb443/disk-0.vmdk from lease info. 
{{(pid=63279) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 2141.974473] env[63279]: DEBUG oslo_vmware.rw_handles [None req-2641ecb3-368c-4954-9833-7470f0184c80 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Creating HTTP connection to write to file with size = 31670272 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52768f89-0a64-36c5-4b90-ed21fb0cb443/disk-0.vmdk. {{(pid=63279) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2142.036699] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] [instance: 00fb1bc8-2f70-4fa3-a6a2-54fd38ba89c7] Instance has had 0 of 5 cleanup attempts {{(pid=63279) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11760}} [ 2142.039326] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-1b4c0c4c-9a01-4ca3-8f97-810c4c0e8a75 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: d6e40dbc-f20e-4164-b460-18de6ea72906] Powering off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2142.041252] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5987fc45-d65e-4f86-b53e-e7c91cca844d {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2142.048227] env[63279]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-1b87a84a-80e0-476f-bf93-db471837bf8a {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2142.054318] env[63279]: DEBUG oslo_vmware.api [None req-1b4c0c4c-9a01-4ca3-8f97-810c4c0e8a75 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Waiting for the task: (returnval){ [ 2142.054318] env[63279]: value = "task-2087611" [ 2142.054318] env[63279]: _type = "Task" [ 2142.054318] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2142.072789] env[63279]: DEBUG oslo_vmware.api [None req-1b4c0c4c-9a01-4ca3-8f97-810c4c0e8a75 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Task: {'id': task-2087611, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2142.153273] env[63279]: DEBUG oslo_concurrency.lockutils [req-a574894d-6cae-4109-b1a9-0b8e8061be09 req-9c457ca9-04ed-4cf0-8693-7cbbb828cd41 service nova] Releasing lock "refresh_cache-2f5e22f6-ba70-4848-965b-eb1553115323" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2142.153273] env[63279]: DEBUG nova.compute.utils [None req-a2668d1a-4c43-4202-b04d-63b7f5aec853 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Using /dev/sd instead of None {{(pid=63279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2142.157530] env[63279]: DEBUG nova.compute.manager [None req-a2668d1a-4c43-4202-b04d-63b7f5aec853 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] [instance: 4acfb474-d861-467a-983c-0dd5641e66f3] Allocating IP information in the background. 
{{(pid=63279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 2142.157742] env[63279]: DEBUG nova.network.neutron [None req-a2668d1a-4c43-4202-b04d-63b7f5aec853 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] [instance: 4acfb474-d861-467a-983c-0dd5641e66f3] allocate_for_instance() {{(pid=63279) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2142.218114] env[63279]: DEBUG nova.policy [None req-a2668d1a-4c43-4202-b04d-63b7f5aec853 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '17fbc0127a5944ac933232873f282980', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '77ee2145dda94e2b85eeb7379ed98e26', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63279) authorize /opt/stack/nova/nova/policy.py:192}} [ 2142.280265] env[63279]: DEBUG oslo_concurrency.lockutils [None req-ad336584-5e82-4338-9e96-d69e82f0c758 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Acquiring lock "b981ac83-6c23-4d44-bd28-12da30d746bd" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2142.281705] env[63279]: DEBUG oslo_concurrency.lockutils [None req-ad336584-5e82-4338-9e96-d69e82f0c758 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Lock "b981ac83-6c23-4d44-bd28-12da30d746bd" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2142.281705] env[63279]: DEBUG oslo_concurrency.lockutils [None req-ad336584-5e82-4338-9e96-d69e82f0c758 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Acquiring lock "b981ac83-6c23-4d44-bd28-12da30d746bd-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2142.281705] env[63279]: DEBUG oslo_concurrency.lockutils [None req-ad336584-5e82-4338-9e96-d69e82f0c758 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Lock "b981ac83-6c23-4d44-bd28-12da30d746bd-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2142.281961] env[63279]: DEBUG oslo_concurrency.lockutils [None req-ad336584-5e82-4338-9e96-d69e82f0c758 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Lock "b981ac83-6c23-4d44-bd28-12da30d746bd-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2142.287844] env[63279]: INFO nova.compute.manager [None 
req-ad336584-5e82-4338-9e96-d69e82f0c758 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: b981ac83-6c23-4d44-bd28-12da30d746bd] Terminating instance [ 2142.542304] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] [instance: 41654a82-af5e-463e-80f9-86ba13a5ad2e] Instance has had 0 of 5 cleanup attempts {{(pid=63279) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11760}} [ 2142.572667] env[63279]: DEBUG oslo_vmware.api [None req-1b4c0c4c-9a01-4ca3-8f97-810c4c0e8a75 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Task: {'id': task-2087611, 'name': PowerOffVM_Task, 'duration_secs': 0.254974} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2142.579104] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-1b4c0c4c-9a01-4ca3-8f97-810c4c0e8a75 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: d6e40dbc-f20e-4164-b460-18de6ea72906] Powered off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2142.580949] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4cbab5f9-34a2-4883-b4b1-6bae1b2f65ff {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2142.609903] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7aacabb6-f4fc-4f79-9e0b-3fbd15658021 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2142.661441] env[63279]: DEBUG nova.compute.manager [None req-a2668d1a-4c43-4202-b04d-63b7f5aec853 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] [instance: 4acfb474-d861-467a-983c-0dd5641e66f3] Start building block device mappings for instance. 
{{(pid=63279) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 2142.705861] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af4965a0-b36d-484c-a637-009d2b3ec623 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2142.712379] env[63279]: DEBUG nova.network.neutron [None req-a2668d1a-4c43-4202-b04d-63b7f5aec853 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] [instance: 4acfb474-d861-467a-983c-0dd5641e66f3] Successfully created port: 87fce8ba-c254-4a0f-b9c6-e1db73a6ae03 {{(pid=63279) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2142.726753] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de459db6-97e3-473c-8041-4ead36840cb2 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2142.773627] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80d29649-9760-49b7-bfd8-ee4e29f12080 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2142.777718] env[63279]: DEBUG nova.compute.manager [req-69941031-8285-4e38-bac0-d054b7ea3216 req-ad2e37df-5ddc-4874-97d4-85a7ee52b6e4 service nova] [instance: 861e4118-6134-40cf-91cb-865b6ee9f347] Received event network-changed-4361494d-9864-49bc-a792-be9c831d7486 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2142.778812] env[63279]: DEBUG nova.compute.manager [req-69941031-8285-4e38-bac0-d054b7ea3216 req-ad2e37df-5ddc-4874-97d4-85a7ee52b6e4 service nova] [instance: 861e4118-6134-40cf-91cb-865b6ee9f347] Refreshing instance network info cache due to event network-changed-4361494d-9864-49bc-a792-be9c831d7486. 
{{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11655}} [ 2142.778812] env[63279]: DEBUG oslo_concurrency.lockutils [req-69941031-8285-4e38-bac0-d054b7ea3216 req-ad2e37df-5ddc-4874-97d4-85a7ee52b6e4 service nova] Acquiring lock "refresh_cache-861e4118-6134-40cf-91cb-865b6ee9f347" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2142.778812] env[63279]: DEBUG oslo_concurrency.lockutils [req-69941031-8285-4e38-bac0-d054b7ea3216 req-ad2e37df-5ddc-4874-97d4-85a7ee52b6e4 service nova] Acquired lock "refresh_cache-861e4118-6134-40cf-91cb-865b6ee9f347" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2142.778812] env[63279]: DEBUG nova.network.neutron [req-69941031-8285-4e38-bac0-d054b7ea3216 req-ad2e37df-5ddc-4874-97d4-85a7ee52b6e4 service nova] [instance: 861e4118-6134-40cf-91cb-865b6ee9f347] Refreshing network info cache for port 4361494d-9864-49bc-a792-be9c831d7486 {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2142.789943] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2451d19d-4216-4d84-956c-9b680a3f1767 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2142.797167] env[63279]: DEBUG nova.compute.manager [None req-ad336584-5e82-4338-9e96-d69e82f0c758 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: b981ac83-6c23-4d44-bd28-12da30d746bd] Start destroying the instance on the hypervisor. {{(pid=63279) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2142.797464] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-ad336584-5e82-4338-9e96-d69e82f0c758 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: b981ac83-6c23-4d44-bd28-12da30d746bd] Destroying instance {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2142.799334] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9dff0d67-8264-436d-a756-fcfc8c468101 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2142.814671] env[63279]: DEBUG nova.compute.provider_tree [None req-801dfe63-15ee-4acc-bd55-4bd47afbcfa8 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Inventory has not changed in ProviderTree for provider: 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2142.822080] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-ad336584-5e82-4338-9e96-d69e82f0c758 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: b981ac83-6c23-4d44-bd28-12da30d746bd] Powering off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2142.822702] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c32630b4-1a71-419b-bea9-5ca5b5ae9345 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2142.830812] env[63279]: DEBUG oslo_vmware.api [None req-ad336584-5e82-4338-9e96-d69e82f0c758 tempest-ServerDiskConfigTestJSON-726033015 
tempest-ServerDiskConfigTestJSON-726033015-project-member] Waiting for the task: (returnval){ [ 2142.830812] env[63279]: value = "task-2087612" [ 2142.830812] env[63279]: _type = "Task" [ 2142.830812] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2142.840645] env[63279]: DEBUG oslo_vmware.api [None req-ad336584-5e82-4338-9e96-d69e82f0c758 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Task: {'id': task-2087612, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2143.048426] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] [instance: 1a55008a-4d8c-403d-a8f4-966aa7346f4c] Instance has had 0 of 5 cleanup attempts {{(pid=63279) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11760}} [ 2143.071298] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b6feae97-fa7d-41e8-b8d5-106af4509856 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Acquiring lock "736ab3ac-b9e0-4f9e-885b-765ca7a92ed0" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2143.071949] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b6feae97-fa7d-41e8-b8d5-106af4509856 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Lock "736ab3ac-b9e0-4f9e-885b-765ca7a92ed0" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2143.131528] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-1b4c0c4c-9a01-4ca3-8f97-810c4c0e8a75 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: d6e40dbc-f20e-4164-b460-18de6ea72906] Creating Snapshot of the VM instance {{(pid=63279) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 2143.132175] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-e5f65e1e-9774-4e57-954c-02261432f664 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2143.143507] env[63279]: DEBUG oslo_vmware.api [None req-1b4c0c4c-9a01-4ca3-8f97-810c4c0e8a75 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Waiting for the task: (returnval){ [ 2143.143507] env[63279]: value = "task-2087613" [ 2143.143507] env[63279]: _type = "Task" [ 2143.143507] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2143.160609] env[63279]: DEBUG oslo_vmware.api [None req-1b4c0c4c-9a01-4ca3-8f97-810c4c0e8a75 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Task: {'id': task-2087613, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2143.325052] env[63279]: DEBUG nova.scheduler.client.report [None req-801dfe63-15ee-4acc-bd55-4bd47afbcfa8 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Inventory has not changed for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2143.348055] env[63279]: DEBUG oslo_vmware.rw_handles [None req-2641ecb3-368c-4954-9833-7470f0184c80 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Completed reading data from the image iterator. {{(pid=63279) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2143.348637] env[63279]: DEBUG oslo_vmware.rw_handles [None req-2641ecb3-368c-4954-9833-7470f0184c80 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52768f89-0a64-36c5-4b90-ed21fb0cb443/disk-0.vmdk. {{(pid=63279) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 2143.355349] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27af233f-6d5f-4d8f-bd59-b969bd8a88b8 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2143.370830] env[63279]: DEBUG oslo_vmware.api [None req-ad336584-5e82-4338-9e96-d69e82f0c758 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Task: {'id': task-2087612, 'name': PowerOffVM_Task, 'duration_secs': 0.224121} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2143.372029] env[63279]: DEBUG oslo_vmware.rw_handles [None req-2641ecb3-368c-4954-9833-7470f0184c80 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52768f89-0a64-36c5-4b90-ed21fb0cb443/disk-0.vmdk is in state: ready. {{(pid=63279) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 2143.372029] env[63279]: DEBUG oslo_vmware.rw_handles [None req-2641ecb3-368c-4954-9833-7470f0184c80 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Releasing lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52768f89-0a64-36c5-4b90-ed21fb0cb443/disk-0.vmdk. 
{{(pid=63279) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 2143.372029] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-ad336584-5e82-4338-9e96-d69e82f0c758 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: b981ac83-6c23-4d44-bd28-12da30d746bd] Powered off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2143.372228] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-ad336584-5e82-4338-9e96-d69e82f0c758 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: b981ac83-6c23-4d44-bd28-12da30d746bd] Unregistering the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2143.372558] env[63279]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-b5c4050a-6291-431b-9b76-f382b5714f3e {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2143.375395] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d17ee41f-1948-4714-98f8-1962c326d71d {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2143.555040] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] [instance: 5d4909ea-396c-45ba-9ff5-acb8576150b3] Instance has had 0 of 5 cleanup attempts {{(pid=63279) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11760}} [ 2143.574820] env[63279]: DEBUG nova.compute.manager [None req-b6feae97-fa7d-41e8-b8d5-106af4509856 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: 736ab3ac-b9e0-4f9e-885b-765ca7a92ed0] Starting instance... {{(pid=63279) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 2143.592941] env[63279]: DEBUG oslo_vmware.rw_handles [None req-2641ecb3-368c-4954-9833-7470f0184c80 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Closed VMDK write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52768f89-0a64-36c5-4b90-ed21fb0cb443/disk-0.vmdk. 
{{(pid=63279) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 2143.592941] env[63279]: INFO nova.virt.vmwareapi.images [None req-2641ecb3-368c-4954-9833-7470f0184c80 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] [instance: 2f5e22f6-ba70-4848-965b-eb1553115323] Downloaded image file data 9d99d2f6-606a-4ba2-a64b-746ca6b6b68e [ 2143.592941] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b0c4a04-e4bf-4948-b5e2-3e7e8c2a3851 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2143.619973] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d8ebc5c0-c207-4b99-abb5-234833e38a47 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2143.645569] env[63279]: INFO nova.virt.vmwareapi.images [None req-2641ecb3-368c-4954-9833-7470f0184c80 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] [instance: 2f5e22f6-ba70-4848-965b-eb1553115323] The imported VM was unregistered [ 2143.649540] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-2641ecb3-368c-4954-9833-7470f0184c80 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] [instance: 2f5e22f6-ba70-4848-965b-eb1553115323] Caching image {{(pid=63279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2143.649540] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-2641ecb3-368c-4954-9833-7470f0184c80 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Creating directory with path [datastore1] devstack-image-cache_base/9d99d2f6-606a-4ba2-a64b-746ca6b6b68e {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2143.649540] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8f514c3a-9788-4ced-a400-a63f56942a5f {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2143.661879] env[63279]: DEBUG oslo_vmware.api [None req-1b4c0c4c-9a01-4ca3-8f97-810c4c0e8a75 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Task: {'id': task-2087613, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2143.665952] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-2641ecb3-368c-4954-9833-7470f0184c80 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Created directory with path [datastore1] devstack-image-cache_base/9d99d2f6-606a-4ba2-a64b-746ca6b6b68e {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2143.665952] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-2641ecb3-368c-4954-9833-7470f0184c80 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Moving virtual disk from [datastore1] OSTACK_IMG_6f99a7f4-17f1-4e02-91fa-575cc9adf1ac/OSTACK_IMG_6f99a7f4-17f1-4e02-91fa-575cc9adf1ac.vmdk to [datastore1] devstack-image-cache_base/9d99d2f6-606a-4ba2-a64b-746ca6b6b68e/9d99d2f6-606a-4ba2-a64b-746ca6b6b68e.vmdk. 
{{(pid=63279) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 2143.665952] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-3e6789fe-608e-4dd2-a94a-e5219d40666e {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2143.672095] env[63279]: DEBUG oslo_vmware.api [None req-2641ecb3-368c-4954-9833-7470f0184c80 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Waiting for the task: (returnval){ [ 2143.672095] env[63279]: value = "task-2087616" [ 2143.672095] env[63279]: _type = "Task" [ 2143.672095] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2143.676701] env[63279]: DEBUG nova.compute.manager [None req-a2668d1a-4c43-4202-b04d-63b7f5aec853 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] [instance: 4acfb474-d861-467a-983c-0dd5641e66f3] Start spawning the instance on the hypervisor. {{(pid=63279) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 2143.690154] env[63279]: DEBUG oslo_vmware.api [None req-2641ecb3-368c-4954-9833-7470f0184c80 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Task: {'id': task-2087616, 'name': MoveVirtualDisk_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2143.707259] env[63279]: DEBUG nova.virt.hardware [None req-a2668d1a-4c43-4202-b04d-63b7f5aec853 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-13T17:30:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='a5032d107f94f41b52ab4da0809b235e',container_format='bare',created_at=2025-01-13T17:56:56Z,direct_url=,disk_format='vmdk',id=d2d04a47-fc99-4ff0-adbc-a27616252dd0,min_disk=1,min_ram=0,name='tempest-test-snap-1225801073',owner='77ee2145dda94e2b85eeb7379ed98e26',properties=ImageMetaProps,protected=,size=21334016,status='active',tags=,updated_at=2025-01-13T17:57:13Z,virtual_size=,visibility=), allow threads: False {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2143.707575] env[63279]: DEBUG nova.virt.hardware [None req-a2668d1a-4c43-4202-b04d-63b7f5aec853 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Flavor limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2143.708193] env[63279]: DEBUG nova.virt.hardware [None req-a2668d1a-4c43-4202-b04d-63b7f5aec853 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Image limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2143.708193] env[63279]: DEBUG nova.virt.hardware [None req-a2668d1a-4c43-4202-b04d-63b7f5aec853 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Flavor pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2143.708193] env[63279]: DEBUG nova.virt.hardware [None 
req-a2668d1a-4c43-4202-b04d-63b7f5aec853 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Image pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2143.708366] env[63279]: DEBUG nova.virt.hardware [None req-a2668d1a-4c43-4202-b04d-63b7f5aec853 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2143.708579] env[63279]: DEBUG nova.virt.hardware [None req-a2668d1a-4c43-4202-b04d-63b7f5aec853 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 2143.708753] env[63279]: DEBUG nova.virt.hardware [None req-a2668d1a-4c43-4202-b04d-63b7f5aec853 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 2143.708970] env[63279]: DEBUG nova.virt.hardware [None req-a2668d1a-4c43-4202-b04d-63b7f5aec853 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Got 1 possible topologies {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2143.709151] env[63279]: DEBUG nova.virt.hardware [None req-a2668d1a-4c43-4202-b04d-63b7f5aec853 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2143.709333] env[63279]: DEBUG nova.virt.hardware [None req-a2668d1a-4c43-4202-b04d-63b7f5aec853 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2143.710316] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b82c5e64-a263-4748-858e-186c3e7a6798 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2143.718284] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf58b2e8-963d-468b-ab4d-d63ac7caa2c2 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2143.724283] env[63279]: DEBUG nova.network.neutron [req-69941031-8285-4e38-bac0-d054b7ea3216 req-ad2e37df-5ddc-4874-97d4-85a7ee52b6e4 service nova] [instance: 861e4118-6134-40cf-91cb-865b6ee9f347] Updated VIF entry in instance network info cache for port 4361494d-9864-49bc-a792-be9c831d7486. 
{{(pid=63279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2143.724668] env[63279]: DEBUG nova.network.neutron [req-69941031-8285-4e38-bac0-d054b7ea3216 req-ad2e37df-5ddc-4874-97d4-85a7ee52b6e4 service nova] [instance: 861e4118-6134-40cf-91cb-865b6ee9f347] Updating instance_info_cache with network_info: [{"id": "4361494d-9864-49bc-a792-be9c831d7486", "address": "fa:16:3e:cb:0d:58", "network": {"id": "5cd88f94-203f-4bdb-9876-889955e0a894", "bridge": "br-int", "label": "tempest-ServersTestBootFromVolume-1136286558-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.198", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9fd54445958a43f9a20e86e0fedc4735", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ac563aa7-6d7c-4bd5-9241-7b3e11b8f22d", "external-id": "nsx-vlan-transportzone-730", "segmentation_id": 730, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4361494d-98", "ovs_interfaceid": "4361494d-9864-49bc-a792-be9c831d7486", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2143.836375] env[63279]: DEBUG oslo_concurrency.lockutils [None req-801dfe63-15ee-4acc-bd55-4bd47afbcfa8 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.188s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2143.839426] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b4456497-7827-48a8-a6eb-7caa57d3bb09 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 15.475s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2143.839710] env[63279]: DEBUG nova.objects.instance [None req-b4456497-7827-48a8-a6eb-7caa57d3bb09 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Lazy-loading 'resources' on Instance uuid f6a5d157-d58c-4c7e-b6e8-f2dc7aaebd9b {{(pid=63279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2143.878114] env[63279]: INFO nova.scheduler.client.report [None req-801dfe63-15ee-4acc-bd55-4bd47afbcfa8 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Deleted allocations for instance 668436f9-94e9-48c2-a3d4-3df7bbcf5775 [ 2144.059645] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] [instance: 3d4db43f-5784-46e1-9710-f6becec011e2] Instance has had 0 of 5 cleanup attempts {{(pid=63279) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11760}} [ 2144.096505] env[63279]: DEBUG oslo_concurrency.lockutils [None 
req-b6feae97-fa7d-41e8-b8d5-106af4509856 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2144.161784] env[63279]: DEBUG oslo_vmware.api [None req-1b4c0c4c-9a01-4ca3-8f97-810c4c0e8a75 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Task: {'id': task-2087613, 'name': CreateSnapshot_Task, 'duration_secs': 0.607121} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2144.162099] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-1b4c0c4c-9a01-4ca3-8f97-810c4c0e8a75 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: d6e40dbc-f20e-4164-b460-18de6ea72906] Created Snapshot of the VM instance {{(pid=63279) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 2144.163046] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20b64439-1ebe-4592-a4f4-ddf0c447caa0 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2144.188257] env[63279]: DEBUG oslo_vmware.api [None req-2641ecb3-368c-4954-9833-7470f0184c80 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Task: {'id': task-2087616, 'name': MoveVirtualDisk_Task} progress is 21%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2144.228139] env[63279]: DEBUG oslo_concurrency.lockutils [req-69941031-8285-4e38-bac0-d054b7ea3216 req-ad2e37df-5ddc-4874-97d4-85a7ee52b6e4 service nova] Releasing lock "refresh_cache-861e4118-6134-40cf-91cb-865b6ee9f347" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2144.395041] env[63279]: DEBUG oslo_concurrency.lockutils [None req-801dfe63-15ee-4acc-bd55-4bd47afbcfa8 tempest-ImagesOneServerNegativeTestJSON-631623048 tempest-ImagesOneServerNegativeTestJSON-631623048-project-member] Lock "668436f9-94e9-48c2-a3d4-3df7bbcf5775" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 19.402s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2144.564482] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] [instance: c05e9aaa-e251-480c-94d6-56c29bb6282d] Instance has had 0 of 5 cleanup attempts {{(pid=63279) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11760}} [ 2144.691063] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-1b4c0c4c-9a01-4ca3-8f97-810c4c0e8a75 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: d6e40dbc-f20e-4164-b460-18de6ea72906] Creating linked-clone VM from snapshot {{(pid=63279) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 2144.698589] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-6fcbda99-9e23-4013-b3ce-1692a321219d {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2144.704034] env[63279]: DEBUG 
oslo_vmware.api [None req-2641ecb3-368c-4954-9833-7470f0184c80 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Task: {'id': task-2087616, 'name': MoveVirtualDisk_Task} progress is 43%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2144.713435] env[63279]: DEBUG oslo_vmware.api [None req-1b4c0c4c-9a01-4ca3-8f97-810c4c0e8a75 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Waiting for the task: (returnval){ [ 2144.713435] env[63279]: value = "task-2087617" [ 2144.713435] env[63279]: _type = "Task" [ 2144.713435] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2144.730169] env[63279]: DEBUG oslo_vmware.api [None req-1b4c0c4c-9a01-4ca3-8f97-810c4c0e8a75 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Task: {'id': task-2087617, 'name': CloneVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2144.784164] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a827ca8-321b-49fa-963a-d65d8d198bf2 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2144.794121] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-999362a7-d1bb-4c1e-8c5f-1162f445e042 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2144.836653] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49383292-56a2-4f1f-a653-5cd0f71f6402 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2144.856035] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1b2eedf-19b0-45cf-a063-68761df70aac {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2144.879547] env[63279]: DEBUG nova.compute.provider_tree [None req-b4456497-7827-48a8-a6eb-7caa57d3bb09 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Inventory has not changed in ProviderTree for provider: 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2145.071153] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] [instance: fcc5a636-554f-424e-a604-a8e7bd7cf574] Instance has had 0 of 5 cleanup attempts {{(pid=63279) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11760}} [ 2145.190412] env[63279]: DEBUG oslo_vmware.api [None req-2641ecb3-368c-4954-9833-7470f0184c80 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Task: {'id': task-2087616, 'name': MoveVirtualDisk_Task} progress is 63%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2145.227025] env[63279]: DEBUG oslo_vmware.api [None req-1b4c0c4c-9a01-4ca3-8f97-810c4c0e8a75 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Task: {'id': task-2087617, 'name': CloneVM_Task} progress is 94%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2145.355024] env[63279]: INFO nova.compute.manager [None req-2fd1437a-62f2-4367-9ac0-3c3fbe7d56ab tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] [instance: 4ce17bdb-4bed-4e06-af13-e4097b55e17d] Rescuing [ 2145.355294] env[63279]: DEBUG oslo_concurrency.lockutils [None req-2fd1437a-62f2-4367-9ac0-3c3fbe7d56ab tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Acquiring lock "refresh_cache-4ce17bdb-4bed-4e06-af13-e4097b55e17d" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2145.355456] env[63279]: DEBUG oslo_concurrency.lockutils [None req-2fd1437a-62f2-4367-9ac0-3c3fbe7d56ab tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Acquired lock "refresh_cache-4ce17bdb-4bed-4e06-af13-e4097b55e17d" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2145.355634] env[63279]: DEBUG nova.network.neutron [None req-2fd1437a-62f2-4367-9ac0-3c3fbe7d56ab tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] [instance: 4ce17bdb-4bed-4e06-af13-e4097b55e17d] Building network info cache for instance {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2145.382915] env[63279]: DEBUG nova.scheduler.client.report [None req-b4456497-7827-48a8-a6eb-7caa57d3bb09 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Inventory has not changed for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2145.577269] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] [instance: abd63285-ee3c-4546-b86d-6d4388765d94] Instance has had 0 of 5 cleanup attempts {{(pid=63279) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11760}} [ 2145.697059] env[63279]: DEBUG oslo_vmware.api [None req-2641ecb3-368c-4954-9833-7470f0184c80 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Task: {'id': task-2087616, 'name': MoveVirtualDisk_Task} progress is 85%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2145.697059] env[63279]: DEBUG oslo_concurrency.lockutils [None req-718a590a-91c9-4ba7-b80f-c0d0ab309a1d tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Acquiring lock "686a7ce2-2d07-411e-91d6-0471c55c3728" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2145.697059] env[63279]: DEBUG oslo_concurrency.lockutils [None req-718a590a-91c9-4ba7-b80f-c0d0ab309a1d tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Lock "686a7ce2-2d07-411e-91d6-0471c55c3728" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2145.729448] env[63279]: DEBUG oslo_vmware.api [None req-1b4c0c4c-9a01-4ca3-8f97-810c4c0e8a75 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Task: {'id': task-2087617, 'name': CloneVM_Task} progress is 94%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2145.888066] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b4456497-7827-48a8-a6eb-7caa57d3bb09 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.048s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2145.890756] env[63279]: DEBUG oslo_concurrency.lockutils [None req-e1db163f-06c3-4493-ba10-345607e865db tempest-ServerMetadataTestJSON-1974700823 tempest-ServerMetadataTestJSON-1974700823-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 16.037s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2145.891222] env[63279]: DEBUG nova.objects.instance [None req-e1db163f-06c3-4493-ba10-345607e865db tempest-ServerMetadataTestJSON-1974700823 tempest-ServerMetadataTestJSON-1974700823-project-member] Lazy-loading 'resources' on Instance uuid a0697601-46ae-48ce-a3e1-3c4b81fc1f95 {{(pid=63279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2145.913119] env[63279]: INFO nova.scheduler.client.report [None req-b4456497-7827-48a8-a6eb-7caa57d3bb09 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Deleted allocations for instance f6a5d157-d58c-4c7e-b6e8-f2dc7aaebd9b [ 2146.080818] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] [instance: 7cd673bb-3795-4c6f-a1e0-a8ea0da0a35f] Instance has had 0 of 5 cleanup attempts {{(pid=63279) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11760}} [ 2146.130247] env[63279]: DEBUG nova.network.neutron [None req-2fd1437a-62f2-4367-9ac0-3c3fbe7d56ab tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] [instance: 4ce17bdb-4bed-4e06-af13-e4097b55e17d] Updating instance_info_cache with network_info: [{"id": "72b975f4-3f92-4d65-8d8f-119eeb59c2ab", "address": 
"fa:16:3e:f0:d6:f2", "network": {"id": "c1d7406d-6852-47cd-a4a3-de7373d03ab4", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1990733857-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1fc9b60ae304455097b8be9a276796fa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0549820d-5649-40bc-ad6e-9ae27b384d90", "external-id": "nsx-vlan-transportzone-434", "segmentation_id": 434, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap72b975f4-3f", "ovs_interfaceid": "72b975f4-3f92-4d65-8d8f-119eeb59c2ab", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2146.192763] env[63279]: DEBUG oslo_vmware.api [None req-2641ecb3-368c-4954-9833-7470f0184c80 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Task: {'id': task-2087616, 'name': MoveVirtualDisk_Task, 'duration_secs': 2.39927} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2146.192763] env[63279]: INFO nova.virt.vmwareapi.ds_util [None req-2641ecb3-368c-4954-9833-7470f0184c80 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Moved virtual disk from [datastore1] OSTACK_IMG_6f99a7f4-17f1-4e02-91fa-575cc9adf1ac/OSTACK_IMG_6f99a7f4-17f1-4e02-91fa-575cc9adf1ac.vmdk to [datastore1] devstack-image-cache_base/9d99d2f6-606a-4ba2-a64b-746ca6b6b68e/9d99d2f6-606a-4ba2-a64b-746ca6b6b68e.vmdk. [ 2146.192763] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-2641ecb3-368c-4954-9833-7470f0184c80 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] [instance: 2f5e22f6-ba70-4848-965b-eb1553115323] Cleaning up location [datastore1] OSTACK_IMG_6f99a7f4-17f1-4e02-91fa-575cc9adf1ac {{(pid=63279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 2146.192763] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-2641ecb3-368c-4954-9833-7470f0184c80 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Deleting the datastore file [datastore1] OSTACK_IMG_6f99a7f4-17f1-4e02-91fa-575cc9adf1ac {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2146.192763] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-05a79936-bd6b-4368-9d24-f5dd74aeb339 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2146.199286] env[63279]: DEBUG oslo_vmware.api [None req-2641ecb3-368c-4954-9833-7470f0184c80 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Waiting for the task: (returnval){ [ 2146.199286] env[63279]: value = "task-2087618" [ 2146.199286] env[63279]: _type = "Task" [ 2146.199286] env[63279]: } to complete. 
{{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2146.200988] env[63279]: DEBUG nova.compute.manager [None req-718a590a-91c9-4ba7-b80f-c0d0ab309a1d tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 686a7ce2-2d07-411e-91d6-0471c55c3728] Starting instance... {{(pid=63279) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 2146.214218] env[63279]: DEBUG oslo_vmware.api [None req-2641ecb3-368c-4954-9833-7470f0184c80 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Task: {'id': task-2087618, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2146.226757] env[63279]: DEBUG oslo_vmware.api [None req-1b4c0c4c-9a01-4ca3-8f97-810c4c0e8a75 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Task: {'id': task-2087617, 'name': CloneVM_Task} progress is 94%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2146.419666] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b4456497-7827-48a8-a6eb-7caa57d3bb09 tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Lock "f6a5d157-d58c-4c7e-b6e8-f2dc7aaebd9b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 21.754s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2146.584122] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] [instance: 24bce28c-fc43-4f17-9800-4d980f6729bc] Instance has had 0 of 5 cleanup attempts {{(pid=63279) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11760}} [ 2146.636679] env[63279]: DEBUG oslo_concurrency.lockutils [None req-2fd1437a-62f2-4367-9ac0-3c3fbe7d56ab tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Releasing lock "refresh_cache-4ce17bdb-4bed-4e06-af13-e4097b55e17d" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2146.707976] env[63279]: DEBUG oslo_vmware.api [None req-2641ecb3-368c-4954-9833-7470f0184c80 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Task: {'id': task-2087618, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.033563} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2146.709107] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-2641ecb3-368c-4954-9833-7470f0184c80 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Deleted the datastore file {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2146.709361] env[63279]: DEBUG oslo_concurrency.lockutils [None req-2641ecb3-368c-4954-9833-7470f0184c80 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Releasing lock "[datastore1] devstack-image-cache_base/9d99d2f6-606a-4ba2-a64b-746ca6b6b68e/9d99d2f6-606a-4ba2-a64b-746ca6b6b68e.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2146.709562] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-2641ecb3-368c-4954-9833-7470f0184c80 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/9d99d2f6-606a-4ba2-a64b-746ca6b6b68e/9d99d2f6-606a-4ba2-a64b-746ca6b6b68e.vmdk to [datastore1] 2f5e22f6-ba70-4848-965b-eb1553115323/2f5e22f6-ba70-4848-965b-eb1553115323.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2146.713427] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f31f001-d4ed-44d2-a7f5-4cadedb843e3 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2146.717036] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9212ad21-eff2-4106-a2ee-c50616247f15 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2146.725101] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-752ed0a4-2536-4820-b1eb-1aaeb5919aa5 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2146.732496] env[63279]: DEBUG oslo_vmware.api [None req-1b4c0c4c-9a01-4ca3-8f97-810c4c0e8a75 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Task: {'id': task-2087617, 'name': CloneVM_Task} progress is 94%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2146.732782] env[63279]: DEBUG oslo_vmware.api [None req-2641ecb3-368c-4954-9833-7470f0184c80 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Waiting for the task: (returnval){ [ 2146.732782] env[63279]: value = "task-2087619" [ 2146.732782] env[63279]: _type = "Task" [ 2146.732782] env[63279]: } to complete. 
{{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2146.733649] env[63279]: DEBUG oslo_concurrency.lockutils [None req-718a590a-91c9-4ba7-b80f-c0d0ab309a1d tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2146.763242] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6530efcc-f32c-4085-9155-1c0adaba8513 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2146.768751] env[63279]: DEBUG oslo_vmware.api [None req-2641ecb3-368c-4954-9833-7470f0184c80 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Task: {'id': task-2087619, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2146.773451] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1e1cb5d-20cd-4251-9a34-dbf5064d7e93 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2146.786509] env[63279]: DEBUG nova.compute.provider_tree [None req-e1db163f-06c3-4493-ba10-345607e865db tempest-ServerMetadataTestJSON-1974700823 tempest-ServerMetadataTestJSON-1974700823-project-member] Inventory has not changed in ProviderTree for provider: 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2147.089021] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] [instance: 21c2bba1-5482-496a-9e2a-f123a94ed48a] Instance has had 0 of 5 cleanup attempts {{(pid=63279) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11760}} [ 2147.232488] env[63279]: DEBUG oslo_vmware.api [None req-1b4c0c4c-9a01-4ca3-8f97-810c4c0e8a75 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Task: {'id': task-2087617, 'name': CloneVM_Task} progress is 94%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2147.245293] env[63279]: DEBUG oslo_vmware.api [None req-2641ecb3-368c-4954-9833-7470f0184c80 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Task: {'id': task-2087619, 'name': CopyVirtualDisk_Task} progress is 21%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2147.290324] env[63279]: DEBUG nova.scheduler.client.report [None req-e1db163f-06c3-4493-ba10-345607e865db tempest-ServerMetadataTestJSON-1974700823 tempest-ServerMetadataTestJSON-1974700823-project-member] Inventory has not changed for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2147.595727] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] [instance: a301d225-684d-4f88-bc9b-7e02b8115b9d] Instance has had 0 of 5 cleanup attempts {{(pid=63279) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11760}} [ 2147.718686] env[63279]: DEBUG oslo_concurrency.lockutils [None req-76da4d21-f26f-47ac-ad08-4cf713f445bd tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Acquiring lock "5656c853-ac83-47be-83c4-979a9e87ab91" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2147.718994] env[63279]: DEBUG oslo_concurrency.lockutils [None req-76da4d21-f26f-47ac-ad08-4cf713f445bd tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Lock "5656c853-ac83-47be-83c4-979a9e87ab91" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2147.719234] env[63279]: DEBUG oslo_concurrency.lockutils [None req-76da4d21-f26f-47ac-ad08-4cf713f445bd tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Acquiring lock "5656c853-ac83-47be-83c4-979a9e87ab91-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2147.719557] env[63279]: DEBUG oslo_concurrency.lockutils [None req-76da4d21-f26f-47ac-ad08-4cf713f445bd tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Lock "5656c853-ac83-47be-83c4-979a9e87ab91-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2147.719748] env[63279]: DEBUG oslo_concurrency.lockutils [None req-76da4d21-f26f-47ac-ad08-4cf713f445bd tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Lock "5656c853-ac83-47be-83c4-979a9e87ab91-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2147.722109] env[63279]: INFO nova.compute.manager [None req-76da4d21-f26f-47ac-ad08-4cf713f445bd 
tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] [instance: 5656c853-ac83-47be-83c4-979a9e87ab91] Terminating instance [ 2147.738151] env[63279]: DEBUG oslo_vmware.api [None req-1b4c0c4c-9a01-4ca3-8f97-810c4c0e8a75 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Task: {'id': task-2087617, 'name': CloneVM_Task} progress is 94%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2147.751331] env[63279]: DEBUG oslo_vmware.api [None req-2641ecb3-368c-4954-9833-7470f0184c80 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Task: {'id': task-2087619, 'name': CopyVirtualDisk_Task} progress is 43%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2147.796265] env[63279]: DEBUG oslo_concurrency.lockutils [None req-e1db163f-06c3-4493-ba10-345607e865db tempest-ServerMetadataTestJSON-1974700823 tempest-ServerMetadataTestJSON-1974700823-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.905s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2147.798900] env[63279]: DEBUG oslo_concurrency.lockutils [None req-82e40318-dbac-4e6b-8658-00dbc2129519 tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 16.683s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2147.799205] env[63279]: DEBUG nova.objects.instance [None req-82e40318-dbac-4e6b-8658-00dbc2129519 tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] Lazy-loading 'resources' on Instance uuid ee1b4746-49ac-425c-8219-4d54cb34abe0 {{(pid=63279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2147.822300] env[63279]: INFO nova.scheduler.client.report [None req-e1db163f-06c3-4493-ba10-345607e865db tempest-ServerMetadataTestJSON-1974700823 tempest-ServerMetadataTestJSON-1974700823-project-member] Deleted allocations for instance a0697601-46ae-48ce-a3e1-3c4b81fc1f95 [ 2148.098491] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] [instance: 8c712d0d-55c2-4a14-b759-9441594211e1] Instance has had 0 of 5 cleanup attempts {{(pid=63279) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11760}} [ 2148.171156] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-2fd1437a-62f2-4367-9ac0-3c3fbe7d56ab tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] [instance: 4ce17bdb-4bed-4e06-af13-e4097b55e17d] Powering off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2148.171518] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-8454cd60-685b-4a28-9ea8-1c632804e1b9 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2148.182502] env[63279]: DEBUG oslo_vmware.api [None req-2fd1437a-62f2-4367-9ac0-3c3fbe7d56ab tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Waiting for the task: (returnval){ [ 
2148.182502] env[63279]: value = "task-2087620" [ 2148.182502] env[63279]: _type = "Task" [ 2148.182502] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2148.195873] env[63279]: DEBUG oslo_vmware.api [None req-2fd1437a-62f2-4367-9ac0-3c3fbe7d56ab tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Task: {'id': task-2087620, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2148.231274] env[63279]: DEBUG nova.compute.manager [None req-76da4d21-f26f-47ac-ad08-4cf713f445bd tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] [instance: 5656c853-ac83-47be-83c4-979a9e87ab91] Start destroying the instance on the hypervisor. {{(pid=63279) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2148.231549] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-76da4d21-f26f-47ac-ad08-4cf713f445bd tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] [instance: 5656c853-ac83-47be-83c4-979a9e87ab91] Destroying instance {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2148.237151] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-263e8f9c-4cd4-4465-87ea-6f91058d42e1 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2148.240621] env[63279]: DEBUG oslo_vmware.api [None req-1b4c0c4c-9a01-4ca3-8f97-810c4c0e8a75 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Task: {'id': task-2087617, 'name': CloneVM_Task} progress is 94%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2148.251328] env[63279]: DEBUG oslo_vmware.api [None req-2641ecb3-368c-4954-9833-7470f0184c80 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Task: {'id': task-2087619, 'name': CopyVirtualDisk_Task} progress is 63%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2148.253893] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-76da4d21-f26f-47ac-ad08-4cf713f445bd tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] [instance: 5656c853-ac83-47be-83c4-979a9e87ab91] Powering off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2148.254634] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f9f1252d-1547-4759-a938-e51f8f6c2d45 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2148.263288] env[63279]: DEBUG oslo_vmware.api [None req-76da4d21-f26f-47ac-ad08-4cf713f445bd tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Waiting for the task: (returnval){ [ 2148.263288] env[63279]: value = "task-2087621" [ 2148.263288] env[63279]: _type = "Task" [ 2148.263288] env[63279]: } to complete. 
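The "Waiting for the task ... to complete" messages followed by repeated "progress is N%" polls and a final "completed successfully" entry with duration_secs reflect a poll-until-done loop around each vCenter task. Below is a generic standard-library sketch of that pattern; fetch_task_state is a hypothetical callable standing in for whatever queries the backend, and the code does not mirror the actual oslo.vmware wait_for_task implementation.

    # Generic poll-until-done sketch (illustrative only; not oslo.vmware code).
    # `fetch_task_state` is a hypothetical callable returning (state, progress).
    import logging
    import time

    LOG = logging.getLogger(__name__)


    def wait_for_task(task_id, fetch_task_state, interval=0.5, timeout=600.0):
        """Poll a task until it succeeds, fails, or the timeout expires."""
        LOG.debug("Waiting for the task %s to complete.", task_id)
        start = time.monotonic()
        while True:
            state, progress = fetch_task_state(task_id)
            if state == "success":
                duration = time.monotonic() - start
                LOG.debug("Task %s completed successfully in %.6fs.", task_id, duration)
                return duration
            if state == "error":
                raise RuntimeError("task %s failed" % task_id)
            LOG.debug("Task %s progress is %d%%.", task_id, progress)
            if time.monotonic() - start > timeout:
                raise TimeoutError("task %s did not finish in %.0fs" % (task_id, timeout))
            time.sleep(interval)


    if __name__ == "__main__":
        logging.basicConfig(level=logging.DEBUG)
        # Fake backend: reports 0%, 50%, then success.
        states = iter([("running", 0), ("running", 50), ("success", 100)])
        wait_for_task("task-example", lambda _tid: next(states), interval=0.01)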
{{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2148.275028] env[63279]: DEBUG oslo_vmware.api [None req-76da4d21-f26f-47ac-ad08-4cf713f445bd tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Task: {'id': task-2087621, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2148.332057] env[63279]: DEBUG oslo_concurrency.lockutils [None req-e1db163f-06c3-4493-ba10-345607e865db tempest-ServerMetadataTestJSON-1974700823 tempest-ServerMetadataTestJSON-1974700823-project-member] Lock "a0697601-46ae-48ce-a3e1-3c4b81fc1f95" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 22.072s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2148.602434] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] [instance: ac1d0e8f-446a-4a6d-a916-08f52426396d] Instance has had 0 of 5 cleanup attempts {{(pid=63279) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11760}} [ 2148.675883] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c464daae-cfb3-4a66-8968-41ec0667205e {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2148.689787] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce442c16-42a5-4a70-8ab3-81a8aa35ba3e {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2148.696453] env[63279]: DEBUG oslo_vmware.api [None req-2fd1437a-62f2-4367-9ac0-3c3fbe7d56ab tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Task: {'id': task-2087620, 'name': PowerOffVM_Task, 'duration_secs': 0.388439} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2148.697218] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-2fd1437a-62f2-4367-9ac0-3c3fbe7d56ab tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] [instance: 4ce17bdb-4bed-4e06-af13-e4097b55e17d] Powered off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2148.698068] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f59575e-59ac-4c98-b0c2-fe2b162cb153 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2148.733453] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d362965f-9069-4210-83aa-bda89f2b502e {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2148.755526] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b742f760-31a6-4db0-8e2e-4d9e1326b90d {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2148.765563] env[63279]: DEBUG oslo_vmware.api [None req-1b4c0c4c-9a01-4ca3-8f97-810c4c0e8a75 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Task: {'id': task-2087617, 'name': CloneVM_Task} progress is 94%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2148.766141] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6145a951-900e-4b5d-90ff-18af059f213c {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2148.782025] env[63279]: DEBUG oslo_vmware.api [None req-2641ecb3-368c-4954-9833-7470f0184c80 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Task: {'id': task-2087619, 'name': CopyVirtualDisk_Task} progress is 85%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2148.795519] env[63279]: DEBUG nova.compute.provider_tree [None req-82e40318-dbac-4e6b-8658-00dbc2129519 tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] Inventory has not changed in ProviderTree for provider: 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2148.800163] env[63279]: DEBUG oslo_vmware.api [None req-76da4d21-f26f-47ac-ad08-4cf713f445bd tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Task: {'id': task-2087621, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2148.818860] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-2fd1437a-62f2-4367-9ac0-3c3fbe7d56ab tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] [instance: 4ce17bdb-4bed-4e06-af13-e4097b55e17d] Powering off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2148.819177] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-715876ea-9d80-4025-a74b-024f356c6dd2 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2148.831814] env[63279]: DEBUG oslo_vmware.api [None req-2fd1437a-62f2-4367-9ac0-3c3fbe7d56ab tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Waiting for the task: (returnval){ [ 2148.831814] env[63279]: value = "task-2087622" [ 2148.831814] env[63279]: _type = "Task" [ 2148.831814] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2148.846762] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-2fd1437a-62f2-4367-9ac0-3c3fbe7d56ab tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] [instance: 4ce17bdb-4bed-4e06-af13-e4097b55e17d] VM already powered off {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 2148.846980] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-2fd1437a-62f2-4367-9ac0-3c3fbe7d56ab tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] [instance: 4ce17bdb-4bed-4e06-af13-e4097b55e17d] Processing image 30887889-e45b-4f67-8b3c-16216e594a90 {{(pid=63279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2148.847253] env[63279]: DEBUG oslo_concurrency.lockutils [None req-2fd1437a-62f2-4367-9ac0-3c3fbe7d56ab tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2148.847470] env[63279]: DEBUG oslo_concurrency.lockutils [None req-2fd1437a-62f2-4367-9ac0-3c3fbe7d56ab tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2148.847647] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-2fd1437a-62f2-4367-9ac0-3c3fbe7d56ab tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2148.847916] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f7525702-79eb-4c89-b7ab-ff67e9185917 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2148.867983] 
env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-2fd1437a-62f2-4367-9ac0-3c3fbe7d56ab tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2148.868206] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-2fd1437a-62f2-4367-9ac0-3c3fbe7d56ab tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2148.868982] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f43ae3cc-f558-4103-9972-5a9ad565b760 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2148.873064] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-ad336584-5e82-4338-9e96-d69e82f0c758 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: b981ac83-6c23-4d44-bd28-12da30d746bd] Unregistered the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2148.873194] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-ad336584-5e82-4338-9e96-d69e82f0c758 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: b981ac83-6c23-4d44-bd28-12da30d746bd] Deleting contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2148.873836] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-ad336584-5e82-4338-9e96-d69e82f0c758 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Deleting the datastore file [datastore1] b981ac83-6c23-4d44-bd28-12da30d746bd {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2148.873968] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c5727ea7-0c09-4daf-9545-c85a9f65456c {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2148.882813] env[63279]: DEBUG oslo_vmware.api [None req-ad336584-5e82-4338-9e96-d69e82f0c758 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Waiting for the task: (returnval){ [ 2148.882813] env[63279]: value = "task-2087623" [ 2148.882813] env[63279]: _type = "Task" [ 2148.882813] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2148.883220] env[63279]: DEBUG oslo_vmware.api [None req-2fd1437a-62f2-4367-9ac0-3c3fbe7d56ab tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Waiting for the task: (returnval){ [ 2148.883220] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52fbe660-057e-3457-97cf-9abeec4c562e" [ 2148.883220] env[63279]: _type = "Task" [ 2148.883220] env[63279]: } to complete. 
{{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2148.896514] env[63279]: DEBUG oslo_vmware.api [None req-2fd1437a-62f2-4367-9ac0-3c3fbe7d56ab tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52fbe660-057e-3457-97cf-9abeec4c562e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2148.899849] env[63279]: DEBUG oslo_vmware.api [None req-ad336584-5e82-4338-9e96-d69e82f0c758 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Task: {'id': task-2087623, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2149.107522] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] [instance: 4063d5e0-1144-40fa-8ed8-efda16730617] Instance has had 0 of 5 cleanup attempts {{(pid=63279) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11760}} [ 2149.239085] env[63279]: DEBUG oslo_vmware.api [None req-1b4c0c4c-9a01-4ca3-8f97-810c4c0e8a75 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Task: {'id': task-2087617, 'name': CloneVM_Task} progress is 94%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2149.258775] env[63279]: DEBUG oslo_vmware.api [None req-2641ecb3-368c-4954-9833-7470f0184c80 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Task: {'id': task-2087619, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.36466} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2149.259052] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-2641ecb3-368c-4954-9833-7470f0184c80 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/9d99d2f6-606a-4ba2-a64b-746ca6b6b68e/9d99d2f6-606a-4ba2-a64b-746ca6b6b68e.vmdk to [datastore1] 2f5e22f6-ba70-4848-965b-eb1553115323/2f5e22f6-ba70-4848-965b-eb1553115323.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2149.260060] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-891910d6-d3a9-4d5b-801b-26e7a865490f {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2149.286100] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-2641ecb3-368c-4954-9833-7470f0184c80 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] [instance: 2f5e22f6-ba70-4848-965b-eb1553115323] Reconfiguring VM instance instance-0000001a to attach disk [datastore1] 2f5e22f6-ba70-4848-965b-eb1553115323/2f5e22f6-ba70-4848-965b-eb1553115323.vmdk or device None with type streamOptimized {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2149.287219] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a96d47ae-36d0-4dc7-a9c3-394ce0cd0791 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2149.306347] env[63279]: DEBUG nova.scheduler.client.report [None req-82e40318-dbac-4e6b-8658-00dbc2129519 tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] Inventory has not changed for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2149.309839] env[63279]: DEBUG oslo_vmware.api [None req-76da4d21-f26f-47ac-ad08-4cf713f445bd tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Task: {'id': task-2087621, 'name': PowerOffVM_Task, 'duration_secs': 0.890715} completed successfully. 
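The scheduler report lines above compare the locally computed inventory (VCPU, MEMORY_MB, DISK_GB, each with total, reserved, min_unit, max_unit, step_size, allocation_ratio) against what was last reported and skip the Placement update when nothing differs. The snippet below only illustrates that kind of comparison, reusing the inventory shape printed in the log; the function name and skip logic are assumptions, not Nova's report-client code.

    # Illustrative "has the inventory changed?" check (not Nova's report client).
    # The inventory dict shape matches the one printed in the log above.
    import logging

    LOG = logging.getLogger(__name__)


    def inventory_changed(previous, current):
        """Return True when the inventory to report differs from the last one sent."""
        return previous != current


    last_reported = {
        "VCPU": {"total": 48, "reserved": 0, "min_unit": 1, "max_unit": 16,
                 "step_size": 1, "allocation_ratio": 4.0},
        "MEMORY_MB": {"total": 196590, "reserved": 512, "min_unit": 1,
                      "max_unit": 65530, "step_size": 1, "allocation_ratio": 1.0},
        "DISK_GB": {"total": 400, "reserved": 0, "min_unit": 1, "max_unit": 169,
                    "step_size": 1, "allocation_ratio": 1.0},
    }

    if __name__ == "__main__":
        logging.basicConfig(level=logging.DEBUG)
        current = {k: dict(v) for k, v in last_reported.items()}
        if not inventory_changed(last_reported, current):
            LOG.debug("Inventory has not changed for provider %s based on "
                      "inventory data: %s",
                      "0ba7c625-a0fc-4d3c-b804-196d00f00137", current)
        # else: an update would be sent to Placement for this provider.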
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2149.313023] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-76da4d21-f26f-47ac-ad08-4cf713f445bd tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] [instance: 5656c853-ac83-47be-83c4-979a9e87ab91] Powered off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2149.313023] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-76da4d21-f26f-47ac-ad08-4cf713f445bd tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] [instance: 5656c853-ac83-47be-83c4-979a9e87ab91] Unregistering the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2149.313023] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-bf7defda-e3ab-4f88-858d-671bf9f1d206 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2149.315947] env[63279]: DEBUG oslo_vmware.api [None req-2641ecb3-368c-4954-9833-7470f0184c80 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Waiting for the task: (returnval){ [ 2149.315947] env[63279]: value = "task-2087624" [ 2149.315947] env[63279]: _type = "Task" [ 2149.315947] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2149.326606] env[63279]: DEBUG oslo_vmware.api [None req-2641ecb3-368c-4954-9833-7470f0184c80 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Task: {'id': task-2087624, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2149.397261] env[63279]: DEBUG oslo_vmware.api [None req-2fd1437a-62f2-4367-9ac0-3c3fbe7d56ab tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52fbe660-057e-3457-97cf-9abeec4c562e, 'name': SearchDatastore_Task, 'duration_secs': 0.093058} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2149.404358] env[63279]: DEBUG oslo_vmware.api [None req-ad336584-5e82-4338-9e96-d69e82f0c758 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Task: {'id': task-2087623, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.375144} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2149.405029] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-94f75f59-475e-4165-95ee-ed45f72fe641 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2149.407752] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-ad336584-5e82-4338-9e96-d69e82f0c758 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Deleted the datastore file {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2149.407961] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-ad336584-5e82-4338-9e96-d69e82f0c758 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: b981ac83-6c23-4d44-bd28-12da30d746bd] Deleted contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2149.408275] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-ad336584-5e82-4338-9e96-d69e82f0c758 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: b981ac83-6c23-4d44-bd28-12da30d746bd] Instance destroyed {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2149.408473] env[63279]: INFO nova.compute.manager [None req-ad336584-5e82-4338-9e96-d69e82f0c758 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: b981ac83-6c23-4d44-bd28-12da30d746bd] Took 6.61 seconds to destroy the instance on the hypervisor. [ 2149.408726] env[63279]: DEBUG oslo.service.loopingcall [None req-ad336584-5e82-4338-9e96-d69e82f0c758 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
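The looping-call message above ("Waiting for function ... _deallocate_network_with_retries to return") indicates that network deallocation is wrapped in a bounded retry loop. The sketch below shows such a wrapper in plain standard-library form; the attempt count, interval, and the deallocate callable are assumptions for illustration and do not reproduce oslo.service's looping-call machinery.

    # Minimal bounded-retry sketch (illustrative; not oslo.service loopingcall).
    # `deallocate` stands in for the real network-deallocation callable.
    import logging
    import time

    LOG = logging.getLogger(__name__)


    def deallocate_with_retries(deallocate, attempts=3, interval=1.0):
        """Call `deallocate()` until it succeeds or the attempts are exhausted."""
        for attempt in range(1, attempts + 1):
            try:
                deallocate()
                LOG.debug("Deallocation succeeded on attempt %d", attempt)
                return
            except Exception:
                LOG.warning("Deallocation attempt %d of %d failed",
                            attempt, attempts, exc_info=True)
                if attempt == attempts:
                    raise
                time.sleep(interval)


    if __name__ == "__main__":
        logging.basicConfig(level=logging.DEBUG)
        calls = {"n": 0}

        def flaky():
            calls["n"] += 1
            if calls["n"] < 2:
                raise RuntimeError("transient failure")

        deallocate_with_retries(flaky, attempts=3, interval=0.01)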
{{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2149.409381] env[63279]: DEBUG nova.compute.manager [-] [instance: b981ac83-6c23-4d44-bd28-12da30d746bd] Deallocating network for instance {{(pid=63279) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2149.409567] env[63279]: DEBUG nova.network.neutron [-] [instance: b981ac83-6c23-4d44-bd28-12da30d746bd] deallocate_for_instance() {{(pid=63279) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2149.413468] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-76da4d21-f26f-47ac-ad08-4cf713f445bd tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] [instance: 5656c853-ac83-47be-83c4-979a9e87ab91] Unregistered the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2149.413673] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-76da4d21-f26f-47ac-ad08-4cf713f445bd tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] [instance: 5656c853-ac83-47be-83c4-979a9e87ab91] Deleting contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2149.413895] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-76da4d21-f26f-47ac-ad08-4cf713f445bd tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Deleting the datastore file [datastore1] 5656c853-ac83-47be-83c4-979a9e87ab91 {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2149.414173] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3ce2495b-bba4-449b-80c3-0a52fee7f8fd {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2149.419014] env[63279]: DEBUG oslo_vmware.api [None req-2fd1437a-62f2-4367-9ac0-3c3fbe7d56ab tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Waiting for the task: (returnval){ [ 2149.419014] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]523d5d89-1cd8-cc3d-a3e9-db7b8ff5de6f" [ 2149.419014] env[63279]: _type = "Task" [ 2149.419014] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2149.422191] env[63279]: DEBUG oslo_vmware.api [None req-76da4d21-f26f-47ac-ad08-4cf713f445bd tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Waiting for the task: (returnval){ [ 2149.422191] env[63279]: value = "task-2087626" [ 2149.422191] env[63279]: _type = "Task" [ 2149.422191] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2149.428049] env[63279]: DEBUG oslo_vmware.api [None req-2fd1437a-62f2-4367-9ac0-3c3fbe7d56ab tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]523d5d89-1cd8-cc3d-a3e9-db7b8ff5de6f, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2149.433087] env[63279]: DEBUG oslo_vmware.api [None req-76da4d21-f26f-47ac-ad08-4cf713f445bd tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Task: {'id': task-2087626, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2149.611317] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] [instance: 0224e4ea-c13c-4abd-9626-6038c0bbe4e9] Instance has had 0 of 5 cleanup attempts {{(pid=63279) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11760}} [ 2149.739278] env[63279]: DEBUG oslo_vmware.api [None req-1b4c0c4c-9a01-4ca3-8f97-810c4c0e8a75 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Task: {'id': task-2087617, 'name': CloneVM_Task} progress is 95%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2149.812350] env[63279]: DEBUG oslo_concurrency.lockutils [None req-82e40318-dbac-4e6b-8658-00dbc2129519 tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.013s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2149.814682] env[63279]: DEBUG oslo_concurrency.lockutils [None req-308f424e-23d3-4823-9772-097e3a23627d tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 17.138s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2149.814917] env[63279]: DEBUG nova.objects.instance [None req-308f424e-23d3-4823-9772-097e3a23627d tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] Lazy-loading 'resources' on Instance uuid 6699de0a-b3f8-4d84-9c9b-d0f6899a606e {{(pid=63279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2149.830099] env[63279]: DEBUG oslo_vmware.api [None req-2641ecb3-368c-4954-9833-7470f0184c80 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Task: {'id': task-2087624, 'name': ReconfigVM_Task, 'duration_secs': 0.298139} completed successfully. 
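The "Lazy-loading 'resources' on Instance uuid ..." lines mean the field was not fetched together with the object and is loaded on first access. The snippet below is a generic __getattr__-based illustration of that lazy-load pattern; the class and loader are hypothetical and unrelated to Nova's versioned-object implementation.

    # Generic lazy-attribute illustration (hypothetical class, not nova.objects).
    import logging

    LOG = logging.getLogger(__name__)


    class LazyInstance:
        """Loads missing fields on first access instead of up front."""

        def __init__(self, uuid, loader):
            self.uuid = uuid
            self._loader = loader          # callable: (uuid, field) -> value

        def __getattr__(self, name):
            # Only invoked when normal attribute lookup fails.
            LOG.debug("Lazy-loading '%s' on Instance uuid %s", name, self.uuid)
            value = self._loader(self.uuid, name)
            setattr(self, name, value)     # cache so the next access is direct
            return value


    if __name__ == "__main__":
        logging.basicConfig(level=logging.DEBUG)
        inst = LazyInstance("ee1b4746-49ac-425c-8219-4d54cb34abe0",
                            lambda uuid, field: {"resources": []}[field])
        print(inst.resources)   # triggers the lazy load
        print(inst.resources)   # served from the cached attribute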
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2149.830099] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-2641ecb3-368c-4954-9833-7470f0184c80 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] [instance: 2f5e22f6-ba70-4848-965b-eb1553115323] Reconfigured VM instance instance-0000001a to attach disk [datastore1] 2f5e22f6-ba70-4848-965b-eb1553115323/2f5e22f6-ba70-4848-965b-eb1553115323.vmdk or device None with type streamOptimized {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2149.830099] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-88a85cb2-3cac-40b6-a796-148554cf4a7f {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2149.839792] env[63279]: DEBUG oslo_vmware.api [None req-2641ecb3-368c-4954-9833-7470f0184c80 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Waiting for the task: (returnval){ [ 2149.839792] env[63279]: value = "task-2087627" [ 2149.839792] env[63279]: _type = "Task" [ 2149.839792] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2149.851240] env[63279]: INFO nova.scheduler.client.report [None req-82e40318-dbac-4e6b-8658-00dbc2129519 tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] Deleted allocations for instance ee1b4746-49ac-425c-8219-4d54cb34abe0 [ 2149.866639] env[63279]: DEBUG oslo_vmware.api [None req-2641ecb3-368c-4954-9833-7470f0184c80 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Task: {'id': task-2087627, 'name': Rename_Task} progress is 14%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2149.931484] env[63279]: DEBUG oslo_vmware.api [None req-2fd1437a-62f2-4367-9ac0-3c3fbe7d56ab tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]523d5d89-1cd8-cc3d-a3e9-db7b8ff5de6f, 'name': SearchDatastore_Task, 'duration_secs': 0.019611} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2149.935368] env[63279]: DEBUG oslo_concurrency.lockutils [None req-2fd1437a-62f2-4367-9ac0-3c3fbe7d56ab tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2149.935860] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-2fd1437a-62f2-4367-9ac0-3c3fbe7d56ab tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Copying virtual disk from [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] 4ce17bdb-4bed-4e06-af13-e4097b55e17d/30887889-e45b-4f67-8b3c-16216e594a90-rescue.vmdk. 
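The copy above goes from the shared image cache, [datastore1] devstack-image-cache_base/<image-id>/<image-id>.vmdk, into the instance directory as <instance-uuid>/<image-id>-rescue.vmdk, and the cache path itself is what gets locked while the copy is prepared. The helpers below merely reproduce that path layout as string formatting, matching the paths visible in the log; they are illustrative and not the nova ds_util API.

    # Path-layout helpers matching the datastore paths seen in the log
    # (string formatting only; not nova.virt.vmwareapi.ds_util).

    def cached_image_path(datastore, image_id, cache_dir="devstack-image-cache_base"):
        """Cached base image, e.g. '[datastore1] <cache_dir>/<id>/<id>.vmdk'."""
        return "[%s] %s/%s/%s.vmdk" % (datastore, cache_dir, image_id, image_id)


    def rescue_disk_path(datastore, instance_uuid, image_id):
        """Per-instance rescue disk copied from the cached base image."""
        return "[%s] %s/%s-rescue.vmdk" % (datastore, instance_uuid, image_id)


    if __name__ == "__main__":
        image = "30887889-e45b-4f67-8b3c-16216e594a90"
        instance = "4ce17bdb-4bed-4e06-af13-e4097b55e17d"
        print(cached_image_path("datastore1", image))
        print(rescue_disk_path("datastore1", instance, image))
        # Output matches the source and destination of the CopyVirtualDisk task above.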
{{(pid=63279) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 2149.936438] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2237e44d-1a98-4a6e-980f-7528bef8c7a7 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2149.945548] env[63279]: DEBUG oslo_vmware.api [None req-76da4d21-f26f-47ac-ad08-4cf713f445bd tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Task: {'id': task-2087626, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2149.946840] env[63279]: DEBUG oslo_vmware.api [None req-2fd1437a-62f2-4367-9ac0-3c3fbe7d56ab tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Waiting for the task: (returnval){ [ 2149.946840] env[63279]: value = "task-2087628" [ 2149.946840] env[63279]: _type = "Task" [ 2149.946840] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2149.957510] env[63279]: DEBUG oslo_vmware.api [None req-2fd1437a-62f2-4367-9ac0-3c3fbe7d56ab tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Task: {'id': task-2087628, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2150.116396] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2150.116396] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Cleaning up deleted instances with incomplete migration {{(pid=63279) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11785}} [ 2150.243953] env[63279]: DEBUG oslo_vmware.api [None req-1b4c0c4c-9a01-4ca3-8f97-810c4c0e8a75 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Task: {'id': task-2087617, 'name': CloneVM_Task} progress is 100%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2150.277694] env[63279]: DEBUG nova.compute.manager [req-6c9e8dc5-24d7-4495-8b83-2264cbfdea7a req-ea30fac3-9432-4314-9138-0f78bb3350a0 service nova] [instance: b981ac83-6c23-4d44-bd28-12da30d746bd] Received event network-vif-deleted-61682992-df73-44ba-b302-ba8e00c82f95 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2150.277911] env[63279]: INFO nova.compute.manager [req-6c9e8dc5-24d7-4495-8b83-2264cbfdea7a req-ea30fac3-9432-4314-9138-0f78bb3350a0 service nova] [instance: b981ac83-6c23-4d44-bd28-12da30d746bd] Neutron deleted interface 61682992-df73-44ba-b302-ba8e00c82f95; detaching it from the instance and deleting it from the info cache [ 2150.278103] env[63279]: DEBUG nova.network.neutron [req-6c9e8dc5-24d7-4495-8b83-2264cbfdea7a req-ea30fac3-9432-4314-9138-0f78bb3350a0 service nova] [instance: b981ac83-6c23-4d44-bd28-12da30d746bd] Updating instance_info_cache with network_info: [] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2150.360037] env[63279]: DEBUG oslo_vmware.api [None req-2641ecb3-368c-4954-9833-7470f0184c80 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Task: {'id': task-2087627, 'name': Rename_Task, 'duration_secs': 0.303314} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2150.360037] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-2641ecb3-368c-4954-9833-7470f0184c80 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] [instance: 2f5e22f6-ba70-4848-965b-eb1553115323] Powering on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2150.360037] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-64b7a8d4-846f-4184-a905-75017156d988 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2150.370542] env[63279]: DEBUG oslo_concurrency.lockutils [None req-82e40318-dbac-4e6b-8658-00dbc2129519 tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] Lock "ee1b4746-49ac-425c-8219-4d54cb34abe0" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 23.236s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2150.375316] env[63279]: DEBUG oslo_vmware.api [None req-2641ecb3-368c-4954-9833-7470f0184c80 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Waiting for the task: (returnval){ [ 2150.375316] env[63279]: value = "task-2087629" [ 2150.375316] env[63279]: _type = "Task" [ 2150.375316] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2150.389373] env[63279]: DEBUG oslo_vmware.api [None req-2641ecb3-368c-4954-9833-7470f0184c80 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Task: {'id': task-2087629, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2150.465144] env[63279]: DEBUG oslo_vmware.api [None req-76da4d21-f26f-47ac-ad08-4cf713f445bd tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Task: {'id': task-2087626, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.57635} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2150.466978] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-76da4d21-f26f-47ac-ad08-4cf713f445bd tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Deleted the datastore file {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2150.467414] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-76da4d21-f26f-47ac-ad08-4cf713f445bd tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] [instance: 5656c853-ac83-47be-83c4-979a9e87ab91] Deleted contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2150.467688] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-76da4d21-f26f-47ac-ad08-4cf713f445bd tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] [instance: 5656c853-ac83-47be-83c4-979a9e87ab91] Instance destroyed {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2150.468489] env[63279]: INFO nova.compute.manager [None req-76da4d21-f26f-47ac-ad08-4cf713f445bd tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] [instance: 5656c853-ac83-47be-83c4-979a9e87ab91] Took 2.24 seconds to destroy the instance on the hypervisor. [ 2150.468582] env[63279]: DEBUG oslo.service.loopingcall [None req-76da4d21-f26f-47ac-ad08-4cf713f445bd tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2150.475565] env[63279]: DEBUG nova.compute.manager [-] [instance: 5656c853-ac83-47be-83c4-979a9e87ab91] Deallocating network for instance {{(pid=63279) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2150.475774] env[63279]: DEBUG nova.network.neutron [-] [instance: 5656c853-ac83-47be-83c4-979a9e87ab91] deallocate_for_instance() {{(pid=63279) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2150.479653] env[63279]: DEBUG oslo_vmware.api [None req-2fd1437a-62f2-4367-9ac0-3c3fbe7d56ab tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Task: {'id': task-2087628, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2150.619043] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2150.737496] env[63279]: DEBUG nova.network.neutron [None req-a2668d1a-4c43-4202-b04d-63b7f5aec853 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] [instance: 4acfb474-d861-467a-983c-0dd5641e66f3] Successfully updated port: 87fce8ba-c254-4a0f-b9c6-e1db73a6ae03 {{(pid=63279) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2150.752056] env[63279]: DEBUG oslo_vmware.api [None req-1b4c0c4c-9a01-4ca3-8f97-810c4c0e8a75 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Task: {'id': task-2087617, 'name': CloneVM_Task, 'duration_secs': 5.557123} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2150.752056] env[63279]: INFO nova.virt.vmwareapi.vmops [None req-1b4c0c4c-9a01-4ca3-8f97-810c4c0e8a75 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: d6e40dbc-f20e-4164-b460-18de6ea72906] Created linked-clone VM from snapshot [ 2150.752056] env[63279]: DEBUG nova.network.neutron [-] [instance: b981ac83-6c23-4d44-bd28-12da30d746bd] Updating instance_info_cache with network_info: [] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2150.752569] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6169473-7d81-4f2f-967a-65814efebf24 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2150.757536] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f48e4e2-5a53-4a3b-b439-9e0607647462 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2150.769762] env[63279]: DEBUG nova.virt.vmwareapi.images [None req-1b4c0c4c-9a01-4ca3-8f97-810c4c0e8a75 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: d6e40dbc-f20e-4164-b460-18de6ea72906] Uploading image a718bd62-8e3b-4c6b-8cc7-172dc4b359d0 {{(pid=63279) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 2150.773546] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e770c5c-e329-45f2-acc6-304b94bb19b5 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2150.810738] env[63279]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f246ef21-71f5-4ea5-9972-4482f92e8c9b {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2150.813084] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ec6c0db-50ce-44fb-a884-8885ac63c549 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2150.822375] env[63279]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cbc0c359-e8e9-417d-ad25-b4e13b5ff5ea {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2150.827919] env[63279]: DEBUG oslo_vmware.rw_handles [None req-1b4c0c4c-9a01-4ca3-8f97-810c4c0e8a75 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 2150.827919] env[63279]: value = "vm-427710" [ 2150.827919] env[63279]: _type = "VirtualMachine" [ 2150.827919] env[63279]: }. {{(pid=63279) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 2150.833377] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eef02982-089f-403c-acb9-2c8555ab8811 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2150.841950] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-75fe050c-9f01-4726-a604-4588eedb479d {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2150.853548] env[63279]: DEBUG nova.compute.provider_tree [None req-308f424e-23d3-4823-9772-097e3a23627d tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] Inventory has not changed in ProviderTree for provider: 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2150.858547] env[63279]: DEBUG oslo_vmware.rw_handles [None req-1b4c0c4c-9a01-4ca3-8f97-810c4c0e8a75 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Lease: (returnval){ [ 2150.858547] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]527291a1-e44c-7f65-c4d1-e262cd5f5b61" [ 2150.858547] env[63279]: _type = "HttpNfcLease" [ 2150.858547] env[63279]: } obtained for exporting VM: (result){ [ 2150.858547] env[63279]: value = "vm-427710" [ 2150.858547] env[63279]: _type = "VirtualMachine" [ 2150.858547] env[63279]: }. {{(pid=63279) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 2150.858547] env[63279]: DEBUG oslo_vmware.api [None req-1b4c0c4c-9a01-4ca3-8f97-810c4c0e8a75 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Waiting for the lease: (returnval){ [ 2150.858547] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]527291a1-e44c-7f65-c4d1-e262cd5f5b61" [ 2150.858547] env[63279]: _type = "HttpNfcLease" [ 2150.858547] env[63279]: } to be ready. {{(pid=63279) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 2150.872597] env[63279]: DEBUG nova.compute.manager [req-6c9e8dc5-24d7-4495-8b83-2264cbfdea7a req-ea30fac3-9432-4314-9138-0f78bb3350a0 service nova] [instance: b981ac83-6c23-4d44-bd28-12da30d746bd] Detach interface failed, port_id=61682992-df73-44ba-b302-ba8e00c82f95, reason: Instance b981ac83-6c23-4d44-bd28-12da30d746bd could not be found. 
{{(pid=63279) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11484}} [ 2150.883139] env[63279]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 2150.883139] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]527291a1-e44c-7f65-c4d1-e262cd5f5b61" [ 2150.883139] env[63279]: _type = "HttpNfcLease" [ 2150.883139] env[63279]: } is ready. {{(pid=63279) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 2150.883912] env[63279]: DEBUG oslo_vmware.rw_handles [None req-1b4c0c4c-9a01-4ca3-8f97-810c4c0e8a75 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 2150.883912] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]527291a1-e44c-7f65-c4d1-e262cd5f5b61" [ 2150.883912] env[63279]: _type = "HttpNfcLease" [ 2150.883912] env[63279]: }. {{(pid=63279) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 2150.885235] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f259fc4-f4b3-4a09-89db-1c90d85400ca {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2151.629363] env[63279]: DEBUG oslo_concurrency.lockutils [None req-a2668d1a-4c43-4202-b04d-63b7f5aec853 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Acquiring lock "refresh_cache-4acfb474-d861-467a-983c-0dd5641e66f3" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2151.629697] env[63279]: DEBUG oslo_concurrency.lockutils [None req-a2668d1a-4c43-4202-b04d-63b7f5aec853 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Acquired lock "refresh_cache-4acfb474-d861-467a-983c-0dd5641e66f3" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2151.629697] env[63279]: DEBUG nova.network.neutron [None req-a2668d1a-4c43-4202-b04d-63b7f5aec853 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] [instance: 4acfb474-d861-467a-983c-0dd5641e66f3] Building network info cache for instance {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2151.631140] env[63279]: INFO nova.compute.manager [-] [instance: b981ac83-6c23-4d44-bd28-12da30d746bd] Took 2.22 seconds to deallocate network for instance. 
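Several INFO lines in this section report elapsed wall-clock time, for example "Took 6.61 seconds to destroy the instance on the hypervisor" and "Took 2.22 seconds to deallocate network for instance". Such figures can be produced by a monotonic-clock timer wrapped around the operation; the helper below is an illustrative sketch of that idea, not Nova's actual timing code.

    # Illustrative elapsed-time logging helper (not Nova's implementation).
    import contextlib
    import logging
    import time

    LOG = logging.getLogger(__name__)


    @contextlib.contextmanager
    def report_elapsed(action):
        """Log 'Took X.XX seconds to <action>.' when the block finishes."""
        start = time.monotonic()
        try:
            yield
        finally:
            LOG.info("Took %.2f seconds to %s.", time.monotonic() - start, action)


    if __name__ == "__main__":
        logging.basicConfig(level=logging.INFO)
        with report_elapsed("deallocate network for instance"):
            time.sleep(0.05)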
[ 2151.632356] env[63279]: DEBUG nova.scheduler.client.report [None req-308f424e-23d3-4823-9772-097e3a23627d tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] Inventory has not changed for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2151.636031] env[63279]: DEBUG oslo_vmware.api [None req-2641ecb3-368c-4954-9833-7470f0184c80 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Task: {'id': task-2087629, 'name': PowerOnVM_Task} progress is 100%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2151.636272] env[63279]: WARNING oslo_vmware.common.loopingcall [None req-2641ecb3-368c-4954-9833-7470f0184c80 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] task run outlasted interval by 0.258192 sec [ 2151.653781] env[63279]: DEBUG oslo_vmware.rw_handles [None req-1b4c0c4c-9a01-4ca3-8f97-810c4c0e8a75 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/521ff1c1-eb08-13e0-998f-ea5cbff36e1b/disk-0.vmdk from lease info. {{(pid=63279) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 2151.654118] env[63279]: DEBUG oslo_vmware.rw_handles [None req-1b4c0c4c-9a01-4ca3-8f97-810c4c0e8a75 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/521ff1c1-eb08-13e0-998f-ea5cbff36e1b/disk-0.vmdk for reading. {{(pid=63279) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 2151.661255] env[63279]: DEBUG oslo_vmware.api [None req-2fd1437a-62f2-4367-9ac0-3c3fbe7d56ab tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Task: {'id': task-2087628, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.5729} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2151.661255] env[63279]: INFO nova.virt.vmwareapi.ds_util [None req-2fd1437a-62f2-4367-9ac0-3c3fbe7d56ab tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Copied virtual disk from [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] 4ce17bdb-4bed-4e06-af13-e4097b55e17d/30887889-e45b-4f67-8b3c-16216e594a90-rescue.vmdk. 
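The export path above creates an HttpNfcLease for the linked-clone VM, waits for the lease to become ready, takes the disk-0.vmdk URL from the lease info, and opens that URL for reading so the image can be streamed out. The snippet below sketches only the final step, a chunked HTTPS read, with the standard library; the URL, chunk size, and destination are placeholders, and this is not the oslo.vmware rw_handles implementation, which additionally keeps the lease alive, reports progress, and handles vCenter authentication.

    # Chunked download sketch for an exported disk URL (illustrative only).
    # Real code must hold the HttpNfcLease and update its progress while reading.
    import shutil
    import urllib.request


    def stream_disk(url, destination_path, chunk_size=1024 * 1024):
        """Copy the disk served at `url` to a local file in fixed-size chunks."""
        with urllib.request.urlopen(url) as response, \
                open(destination_path, "wb") as out:
            shutil.copyfileobj(response, out, length=chunk_size)


    # Example (placeholder target; a real lease URL is only valid while the
    # lease is held, as in the log lines above):
    # stream_disk("https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/.../disk-0.vmdk",
    #             "/tmp/disk-0.vmdk")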
[ 2151.719065] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b9da8f9-70bd-46f6-bc50-7dcf398d56ad {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2151.726302] env[63279]: DEBUG oslo_vmware.api [None req-2641ecb3-368c-4954-9833-7470f0184c80 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Task: {'id': task-2087629, 'name': PowerOnVM_Task, 'duration_secs': 0.533886} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2151.726302] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-2641ecb3-368c-4954-9833-7470f0184c80 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] [instance: 2f5e22f6-ba70-4848-965b-eb1553115323] Powered on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2151.751222] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-2fd1437a-62f2-4367-9ac0-3c3fbe7d56ab tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] [instance: 4ce17bdb-4bed-4e06-af13-e4097b55e17d] Reconfiguring VM instance instance-0000004b to attach disk [datastore1] 4ce17bdb-4bed-4e06-af13-e4097b55e17d/30887889-e45b-4f67-8b3c-16216e594a90-rescue.vmdk or device None with type thin {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2151.751586] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2bd6797a-031a-4049-8191-9aa85a3d9142 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2151.770453] env[63279]: DEBUG oslo_vmware.api [None req-2fd1437a-62f2-4367-9ac0-3c3fbe7d56ab tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Waiting for the task: (returnval){ [ 2151.770453] env[63279]: value = "task-2087631" [ 2151.770453] env[63279]: _type = "Task" [ 2151.770453] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2151.779617] env[63279]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-875a0cb6-fd66-45f7-b0ea-52318aff877d {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2151.781244] env[63279]: DEBUG oslo_vmware.api [None req-2fd1437a-62f2-4367-9ac0-3c3fbe7d56ab tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Task: {'id': task-2087631, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2151.839612] env[63279]: DEBUG nova.compute.manager [None req-2641ecb3-368c-4954-9833-7470f0184c80 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] [instance: 2f5e22f6-ba70-4848-965b-eb1553115323] Checking state {{(pid=63279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2151.840620] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a59b264-6a17-4ad5-ac72-5ea9dbee0e6d {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2152.129144] env[63279]: DEBUG nova.network.neutron [-] [instance: 5656c853-ac83-47be-83c4-979a9e87ab91] Updating instance_info_cache with network_info: [] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2152.146833] env[63279]: DEBUG oslo_concurrency.lockutils [None req-ad336584-5e82-4338-9e96-d69e82f0c758 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2152.147887] env[63279]: DEBUG oslo_concurrency.lockutils [None req-308f424e-23d3-4823-9772-097e3a23627d tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.333s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2152.150592] env[63279]: DEBUG oslo_concurrency.lockutils [None req-403f6708-872b-40c5-9d44-2368cd15b0f1 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 19.446s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2152.150944] env[63279]: DEBUG nova.objects.instance [None req-403f6708-872b-40c5-9d44-2368cd15b0f1 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Lazy-loading 'resources' on Instance uuid 4a9088e0-2992-4b18-8be9-6bc70633369b {{(pid=63279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2152.178953] env[63279]: DEBUG nova.network.neutron [None req-a2668d1a-4c43-4202-b04d-63b7f5aec853 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] [instance: 4acfb474-d861-467a-983c-0dd5641e66f3] Instance cache missing network info. {{(pid=63279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2152.181988] env[63279]: INFO nova.scheduler.client.report [None req-308f424e-23d3-4823-9772-097e3a23627d tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] Deleted allocations for instance 6699de0a-b3f8-4d84-9c9b-d0f6899a606e [ 2152.283620] env[63279]: DEBUG oslo_vmware.api [None req-2fd1437a-62f2-4367-9ac0-3c3fbe7d56ab tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Task: {'id': task-2087631, 'name': ReconfigVM_Task, 'duration_secs': 0.397133} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2152.284270] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-2fd1437a-62f2-4367-9ac0-3c3fbe7d56ab tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] [instance: 4ce17bdb-4bed-4e06-af13-e4097b55e17d] Reconfigured VM instance instance-0000004b to attach disk [datastore1] 4ce17bdb-4bed-4e06-af13-e4097b55e17d/30887889-e45b-4f67-8b3c-16216e594a90-rescue.vmdk or device None with type thin {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2152.286646] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0309a6a7-f140-4306-8948-a123890a3611 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2152.317028] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-93aec003-b6df-48f6-905f-302cafc44241 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2152.332912] env[63279]: DEBUG oslo_vmware.api [None req-2fd1437a-62f2-4367-9ac0-3c3fbe7d56ab tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Waiting for the task: (returnval){ [ 2152.332912] env[63279]: value = "task-2087632" [ 2152.332912] env[63279]: _type = "Task" [ 2152.332912] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2152.341621] env[63279]: DEBUG oslo_vmware.api [None req-2fd1437a-62f2-4367-9ac0-3c3fbe7d56ab tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Task: {'id': task-2087632, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2152.360480] env[63279]: DEBUG oslo_concurrency.lockutils [None req-2641ecb3-368c-4954-9833-7470f0184c80 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Lock "2f5e22f6-ba70-4848-965b-eb1553115323" "released" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: held 39.673s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2152.441396] env[63279]: DEBUG nova.network.neutron [None req-a2668d1a-4c43-4202-b04d-63b7f5aec853 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] [instance: 4acfb474-d861-467a-983c-0dd5641e66f3] Updating instance_info_cache with network_info: [{"id": "87fce8ba-c254-4a0f-b9c6-e1db73a6ae03", "address": "fa:16:3e:6e:aa:aa", "network": {"id": "6d7c4c21-3389-4688-b373-329b2328f275", "bridge": "br-int", "label": "tempest-ImagesTestJSON-311425692-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "77ee2145dda94e2b85eeb7379ed98e26", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0df968ae-c1ef-4009-a0f4-6f2e799c2fda", "external-id": "nsx-vlan-transportzone-864", "segmentation_id": 864, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap87fce8ba-c2", "ovs_interfaceid": "87fce8ba-c254-4a0f-b9c6-e1db73a6ae03", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2152.452556] env[63279]: DEBUG oslo_concurrency.lockutils [None req-88931e76-94a7-4e04-9054-f6a3f97882cc tempest-ServersV294TestFqdnHostnames-110312040 tempest-ServersV294TestFqdnHostnames-110312040-project-member] Acquiring lock "32dbef6d-d314-4fa6-972a-e7b1f22eb11d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2152.452626] env[63279]: DEBUG oslo_concurrency.lockutils [None req-88931e76-94a7-4e04-9054-f6a3f97882cc tempest-ServersV294TestFqdnHostnames-110312040 tempest-ServersV294TestFqdnHostnames-110312040-project-member] Lock "32dbef6d-d314-4fa6-972a-e7b1f22eb11d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2152.453275] env[63279]: DEBUG oslo_concurrency.lockutils [None req-88931e76-94a7-4e04-9054-f6a3f97882cc tempest-ServersV294TestFqdnHostnames-110312040 tempest-ServersV294TestFqdnHostnames-110312040-project-member] Acquiring lock "32dbef6d-d314-4fa6-972a-e7b1f22eb11d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2152.453816] env[63279]: DEBUG 
oslo_concurrency.lockutils [None req-88931e76-94a7-4e04-9054-f6a3f97882cc tempest-ServersV294TestFqdnHostnames-110312040 tempest-ServersV294TestFqdnHostnames-110312040-project-member] Lock "32dbef6d-d314-4fa6-972a-e7b1f22eb11d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.001s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2152.454047] env[63279]: DEBUG oslo_concurrency.lockutils [None req-88931e76-94a7-4e04-9054-f6a3f97882cc tempest-ServersV294TestFqdnHostnames-110312040 tempest-ServersV294TestFqdnHostnames-110312040-project-member] Lock "32dbef6d-d314-4fa6-972a-e7b1f22eb11d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2152.459569] env[63279]: INFO nova.compute.manager [None req-88931e76-94a7-4e04-9054-f6a3f97882cc tempest-ServersV294TestFqdnHostnames-110312040 tempest-ServersV294TestFqdnHostnames-110312040-project-member] [instance: 32dbef6d-d314-4fa6-972a-e7b1f22eb11d] Terminating instance [ 2152.502915] env[63279]: DEBUG nova.compute.manager [req-f1d8180c-fd91-439b-bc9e-b501ba632362 req-d5c074ba-ccb4-4768-aa35-3d832531b08d service nova] [instance: 4acfb474-d861-467a-983c-0dd5641e66f3] Received event network-vif-plugged-87fce8ba-c254-4a0f-b9c6-e1db73a6ae03 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2152.504229] env[63279]: DEBUG oslo_concurrency.lockutils [req-f1d8180c-fd91-439b-bc9e-b501ba632362 req-d5c074ba-ccb4-4768-aa35-3d832531b08d service nova] Acquiring lock "4acfb474-d861-467a-983c-0dd5641e66f3-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2152.504229] env[63279]: DEBUG oslo_concurrency.lockutils [req-f1d8180c-fd91-439b-bc9e-b501ba632362 req-d5c074ba-ccb4-4768-aa35-3d832531b08d service nova] Lock "4acfb474-d861-467a-983c-0dd5641e66f3-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2152.504229] env[63279]: DEBUG oslo_concurrency.lockutils [req-f1d8180c-fd91-439b-bc9e-b501ba632362 req-d5c074ba-ccb4-4768-aa35-3d832531b08d service nova] Lock "4acfb474-d861-467a-983c-0dd5641e66f3-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2152.504443] env[63279]: DEBUG nova.compute.manager [req-f1d8180c-fd91-439b-bc9e-b501ba632362 req-d5c074ba-ccb4-4768-aa35-3d832531b08d service nova] [instance: 4acfb474-d861-467a-983c-0dd5641e66f3] No waiting events found dispatching network-vif-plugged-87fce8ba-c254-4a0f-b9c6-e1db73a6ae03 {{(pid=63279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 2152.504926] env[63279]: WARNING nova.compute.manager [req-f1d8180c-fd91-439b-bc9e-b501ba632362 req-d5c074ba-ccb4-4768-aa35-3d832531b08d service nova] [instance: 4acfb474-d861-467a-983c-0dd5641e66f3] Received unexpected event network-vif-plugged-87fce8ba-c254-4a0f-b9c6-e1db73a6ae03 for instance with vm_state building and task_state spawning. 
[ 2152.505311] env[63279]: DEBUG nova.compute.manager [req-f1d8180c-fd91-439b-bc9e-b501ba632362 req-d5c074ba-ccb4-4768-aa35-3d832531b08d service nova] [instance: 4acfb474-d861-467a-983c-0dd5641e66f3] Received event network-changed-87fce8ba-c254-4a0f-b9c6-e1db73a6ae03 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2152.505738] env[63279]: DEBUG nova.compute.manager [req-f1d8180c-fd91-439b-bc9e-b501ba632362 req-d5c074ba-ccb4-4768-aa35-3d832531b08d service nova] [instance: 4acfb474-d861-467a-983c-0dd5641e66f3] Refreshing instance network info cache due to event network-changed-87fce8ba-c254-4a0f-b9c6-e1db73a6ae03. {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11655}} [ 2152.506023] env[63279]: DEBUG oslo_concurrency.lockutils [req-f1d8180c-fd91-439b-bc9e-b501ba632362 req-d5c074ba-ccb4-4768-aa35-3d832531b08d service nova] Acquiring lock "refresh_cache-4acfb474-d861-467a-983c-0dd5641e66f3" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2152.633625] env[63279]: INFO nova.compute.manager [-] [instance: 5656c853-ac83-47be-83c4-979a9e87ab91] Took 2.16 seconds to deallocate network for instance. [ 2152.696601] env[63279]: DEBUG oslo_concurrency.lockutils [None req-308f424e-23d3-4823-9772-097e3a23627d tempest-ServerShowV247Test-509101738 tempest-ServerShowV247Test-509101738-project-member] Lock "6699de0a-b3f8-4d84-9c9b-d0f6899a606e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 24.559s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2152.844936] env[63279]: DEBUG oslo_vmware.api [None req-2fd1437a-62f2-4367-9ac0-3c3fbe7d56ab tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Task: {'id': task-2087632, 'name': ReconfigVM_Task, 'duration_secs': 0.214725} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2152.845253] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-2fd1437a-62f2-4367-9ac0-3c3fbe7d56ab tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] [instance: 4ce17bdb-4bed-4e06-af13-e4097b55e17d] Powering on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2152.845516] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a65f5b05-d1a7-43b1-8532-57219d97bcef {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2152.853683] env[63279]: DEBUG oslo_vmware.api [None req-2fd1437a-62f2-4367-9ac0-3c3fbe7d56ab tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Waiting for the task: (returnval){ [ 2152.853683] env[63279]: value = "task-2087633" [ 2152.853683] env[63279]: _type = "Task" [ 2152.853683] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2152.863318] env[63279]: DEBUG oslo_vmware.api [None req-2fd1437a-62f2-4367-9ac0-3c3fbe7d56ab tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Task: {'id': task-2087633, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2152.946828] env[63279]: DEBUG oslo_concurrency.lockutils [None req-a2668d1a-4c43-4202-b04d-63b7f5aec853 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Releasing lock "refresh_cache-4acfb474-d861-467a-983c-0dd5641e66f3" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2152.946828] env[63279]: DEBUG nova.compute.manager [None req-a2668d1a-4c43-4202-b04d-63b7f5aec853 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] [instance: 4acfb474-d861-467a-983c-0dd5641e66f3] Instance network_info: |[{"id": "87fce8ba-c254-4a0f-b9c6-e1db73a6ae03", "address": "fa:16:3e:6e:aa:aa", "network": {"id": "6d7c4c21-3389-4688-b373-329b2328f275", "bridge": "br-int", "label": "tempest-ImagesTestJSON-311425692-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "77ee2145dda94e2b85eeb7379ed98e26", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0df968ae-c1ef-4009-a0f4-6f2e799c2fda", "external-id": "nsx-vlan-transportzone-864", "segmentation_id": 864, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap87fce8ba-c2", "ovs_interfaceid": "87fce8ba-c254-4a0f-b9c6-e1db73a6ae03", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 2152.946984] env[63279]: DEBUG oslo_concurrency.lockutils [req-f1d8180c-fd91-439b-bc9e-b501ba632362 req-d5c074ba-ccb4-4768-aa35-3d832531b08d service nova] Acquired lock "refresh_cache-4acfb474-d861-467a-983c-0dd5641e66f3" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2152.946984] env[63279]: DEBUG nova.network.neutron [req-f1d8180c-fd91-439b-bc9e-b501ba632362 req-d5c074ba-ccb4-4768-aa35-3d832531b08d service nova] [instance: 4acfb474-d861-467a-983c-0dd5641e66f3] Refreshing network info cache for port 87fce8ba-c254-4a0f-b9c6-e1db73a6ae03 {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2152.951412] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-a2668d1a-4c43-4202-b04d-63b7f5aec853 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] [instance: 4acfb474-d861-467a-983c-0dd5641e66f3] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:6e:aa:aa', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '0df968ae-c1ef-4009-a0f4-6f2e799c2fda', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '87fce8ba-c254-4a0f-b9c6-e1db73a6ae03', 'vif_model': 'vmxnet3'}] {{(pid=63279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2152.961776] env[63279]: DEBUG oslo.service.loopingcall [None req-a2668d1a-4c43-4202-b04d-63b7f5aec853 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm 
to return. {{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2152.967294] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4acfb474-d861-467a-983c-0dd5641e66f3] Creating VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2152.967525] env[63279]: DEBUG oslo_concurrency.lockutils [None req-1e17e85f-930c-4f26-80d6-8e5a59771f74 tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] Acquiring lock "5b5f87cb-cf35-418f-b5bd-b953524a285c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2152.967868] env[63279]: DEBUG oslo_concurrency.lockutils [None req-1e17e85f-930c-4f26-80d6-8e5a59771f74 tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] Lock "5b5f87cb-cf35-418f-b5bd-b953524a285c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2152.968764] env[63279]: DEBUG oslo_concurrency.lockutils [None req-1e17e85f-930c-4f26-80d6-8e5a59771f74 tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] Acquiring lock "5b5f87cb-cf35-418f-b5bd-b953524a285c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2152.968764] env[63279]: DEBUG oslo_concurrency.lockutils [None req-1e17e85f-930c-4f26-80d6-8e5a59771f74 tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] Lock "5b5f87cb-cf35-418f-b5bd-b953524a285c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2152.968764] env[63279]: DEBUG oslo_concurrency.lockutils [None req-1e17e85f-930c-4f26-80d6-8e5a59771f74 tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] Lock "5b5f87cb-cf35-418f-b5bd-b953524a285c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2152.970641] env[63279]: DEBUG nova.compute.manager [None req-88931e76-94a7-4e04-9054-f6a3f97882cc tempest-ServersV294TestFqdnHostnames-110312040 tempest-ServersV294TestFqdnHostnames-110312040-project-member] [instance: 32dbef6d-d314-4fa6-972a-e7b1f22eb11d] Start destroying the instance on the hypervisor. 
{{(pid=63279) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2152.970878] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-88931e76-94a7-4e04-9054-f6a3f97882cc tempest-ServersV294TestFqdnHostnames-110312040 tempest-ServersV294TestFqdnHostnames-110312040-project-member] [instance: 32dbef6d-d314-4fa6-972a-e7b1f22eb11d] Destroying instance {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2152.971555] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b2e04e0c-db51-40c7-b588-a43fea4f2707 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2152.989244] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3034ead9-6dd4-4571-8afb-86ebc2a357c1 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2152.995507] env[63279]: INFO nova.compute.manager [None req-1e17e85f-930c-4f26-80d6-8e5a59771f74 tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] [instance: 5b5f87cb-cf35-418f-b5bd-b953524a285c] Terminating instance [ 2153.011674] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-88931e76-94a7-4e04-9054-f6a3f97882cc tempest-ServersV294TestFqdnHostnames-110312040 tempest-ServersV294TestFqdnHostnames-110312040-project-member] [instance: 32dbef6d-d314-4fa6-972a-e7b1f22eb11d] Powering off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2153.013012] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-cac16a12-3b28-487f-a0a8-41cf89a03de3 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2153.014851] env[63279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2153.014851] env[63279]: value = "task-2087634" [ 2153.014851] env[63279]: _type = "Task" [ 2153.014851] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2153.024410] env[63279]: DEBUG oslo_vmware.api [None req-88931e76-94a7-4e04-9054-f6a3f97882cc tempest-ServersV294TestFqdnHostnames-110312040 tempest-ServersV294TestFqdnHostnames-110312040-project-member] Waiting for the task: (returnval){ [ 2153.024410] env[63279]: value = "task-2087635" [ 2153.024410] env[63279]: _type = "Task" [ 2153.024410] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2153.031501] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087634, 'name': CreateVM_Task} progress is 6%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2153.036628] env[63279]: DEBUG oslo_vmware.api [None req-88931e76-94a7-4e04-9054-f6a3f97882cc tempest-ServersV294TestFqdnHostnames-110312040 tempest-ServersV294TestFqdnHostnames-110312040-project-member] Task: {'id': task-2087635, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2153.067026] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ecbc2ad1-50f0-45c4-b1a3-f273ac7061ec {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2153.076421] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0563a5ea-4911-40ed-a877-a7400278b11d {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2153.108023] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5155d8ca-b5ee-4efb-9aa4-068951a3bc70 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2153.114645] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73cfee58-6995-48fb-85f3-4c3929512d46 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2153.129365] env[63279]: DEBUG nova.compute.provider_tree [None req-403f6708-872b-40c5-9d44-2368cd15b0f1 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Inventory has not changed in ProviderTree for provider: 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2153.141398] env[63279]: DEBUG oslo_concurrency.lockutils [None req-76da4d21-f26f-47ac-ad08-4cf713f445bd tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2153.365529] env[63279]: DEBUG oslo_vmware.api [None req-2fd1437a-62f2-4367-9ac0-3c3fbe7d56ab tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Task: {'id': task-2087633, 'name': PowerOnVM_Task} progress is 89%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2153.508152] env[63279]: DEBUG nova.compute.manager [None req-1e17e85f-930c-4f26-80d6-8e5a59771f74 tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] [instance: 5b5f87cb-cf35-418f-b5bd-b953524a285c] Start destroying the instance on the hypervisor. 
{{(pid=63279) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2153.508411] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-1e17e85f-930c-4f26-80d6-8e5a59771f74 tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] [instance: 5b5f87cb-cf35-418f-b5bd-b953524a285c] Destroying instance {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2153.508820] env[63279]: DEBUG oslo_concurrency.lockutils [None req-45c03385-d84a-4fe1-8f36-6d1fe6e38b41 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Acquiring lock "interface-eca98392-98be-405b-b799-463ef9ee3dc8-None" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2153.509053] env[63279]: DEBUG oslo_concurrency.lockutils [None req-45c03385-d84a-4fe1-8f36-6d1fe6e38b41 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Lock "interface-eca98392-98be-405b-b799-463ef9ee3dc8-None" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2153.509381] env[63279]: DEBUG nova.objects.instance [None req-45c03385-d84a-4fe1-8f36-6d1fe6e38b41 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Lazy-loading 'flavor' on Instance uuid eca98392-98be-405b-b799-463ef9ee3dc8 {{(pid=63279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2153.512036] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-103c264a-321d-4058-84c9-cae34aa0ae23 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2153.524235] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-1e17e85f-930c-4f26-80d6-8e5a59771f74 tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] [instance: 5b5f87cb-cf35-418f-b5bd-b953524a285c] Powering off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2153.527451] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-dc7e8e24-aa63-42f5-8671-724450e22159 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2153.529692] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087634, 'name': CreateVM_Task} progress is 99%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2153.541424] env[63279]: DEBUG oslo_vmware.api [None req-88931e76-94a7-4e04-9054-f6a3f97882cc tempest-ServersV294TestFqdnHostnames-110312040 tempest-ServersV294TestFqdnHostnames-110312040-project-member] Task: {'id': task-2087635, 'name': PowerOffVM_Task, 'duration_secs': 0.270329} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2153.543095] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-88931e76-94a7-4e04-9054-f6a3f97882cc tempest-ServersV294TestFqdnHostnames-110312040 tempest-ServersV294TestFqdnHostnames-110312040-project-member] [instance: 32dbef6d-d314-4fa6-972a-e7b1f22eb11d] Powered off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2153.543223] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-88931e76-94a7-4e04-9054-f6a3f97882cc tempest-ServersV294TestFqdnHostnames-110312040 tempest-ServersV294TestFqdnHostnames-110312040-project-member] [instance: 32dbef6d-d314-4fa6-972a-e7b1f22eb11d] Unregistering the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2153.543524] env[63279]: DEBUG oslo_vmware.api [None req-1e17e85f-930c-4f26-80d6-8e5a59771f74 tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] Waiting for the task: (returnval){ [ 2153.543524] env[63279]: value = "task-2087636" [ 2153.543524] env[63279]: _type = "Task" [ 2153.543524] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2153.543772] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f6caaebb-cbb9-421e-8ec7-4b84d4d7f568 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2153.558472] env[63279]: DEBUG oslo_vmware.api [None req-1e17e85f-930c-4f26-80d6-8e5a59771f74 tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] Task: {'id': task-2087636, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2153.632590] env[63279]: DEBUG nova.scheduler.client.report [None req-403f6708-872b-40c5-9d44-2368cd15b0f1 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Inventory has not changed for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2153.709744] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-88931e76-94a7-4e04-9054-f6a3f97882cc tempest-ServersV294TestFqdnHostnames-110312040 tempest-ServersV294TestFqdnHostnames-110312040-project-member] [instance: 32dbef6d-d314-4fa6-972a-e7b1f22eb11d] Unregistered the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2153.709744] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-88931e76-94a7-4e04-9054-f6a3f97882cc tempest-ServersV294TestFqdnHostnames-110312040 tempest-ServersV294TestFqdnHostnames-110312040-project-member] [instance: 32dbef6d-d314-4fa6-972a-e7b1f22eb11d] Deleting contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2153.709744] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-88931e76-94a7-4e04-9054-f6a3f97882cc tempest-ServersV294TestFqdnHostnames-110312040 tempest-ServersV294TestFqdnHostnames-110312040-project-member] Deleting the datastore file [datastore1] 32dbef6d-d314-4fa6-972a-e7b1f22eb11d {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2153.710124] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4030f022-23f0-4956-9b4d-25b9f3c9ca14 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2153.716784] env[63279]: DEBUG oslo_vmware.api [None req-88931e76-94a7-4e04-9054-f6a3f97882cc tempest-ServersV294TestFqdnHostnames-110312040 tempest-ServersV294TestFqdnHostnames-110312040-project-member] Waiting for the task: (returnval){ [ 2153.716784] env[63279]: value = "task-2087638" [ 2153.716784] env[63279]: _type = "Task" [ 2153.716784] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2153.726418] env[63279]: DEBUG nova.network.neutron [req-f1d8180c-fd91-439b-bc9e-b501ba632362 req-d5c074ba-ccb4-4768-aa35-3d832531b08d service nova] [instance: 4acfb474-d861-467a-983c-0dd5641e66f3] Updated VIF entry in instance network info cache for port 87fce8ba-c254-4a0f-b9c6-e1db73a6ae03. 
{{(pid=63279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2153.727076] env[63279]: DEBUG nova.network.neutron [req-f1d8180c-fd91-439b-bc9e-b501ba632362 req-d5c074ba-ccb4-4768-aa35-3d832531b08d service nova] [instance: 4acfb474-d861-467a-983c-0dd5641e66f3] Updating instance_info_cache with network_info: [{"id": "87fce8ba-c254-4a0f-b9c6-e1db73a6ae03", "address": "fa:16:3e:6e:aa:aa", "network": {"id": "6d7c4c21-3389-4688-b373-329b2328f275", "bridge": "br-int", "label": "tempest-ImagesTestJSON-311425692-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "77ee2145dda94e2b85eeb7379ed98e26", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0df968ae-c1ef-4009-a0f4-6f2e799c2fda", "external-id": "nsx-vlan-transportzone-864", "segmentation_id": 864, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap87fce8ba-c2", "ovs_interfaceid": "87fce8ba-c254-4a0f-b9c6-e1db73a6ae03", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2153.732349] env[63279]: DEBUG oslo_vmware.api [None req-88931e76-94a7-4e04-9054-f6a3f97882cc tempest-ServersV294TestFqdnHostnames-110312040 tempest-ServersV294TestFqdnHostnames-110312040-project-member] Task: {'id': task-2087638, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2153.864310] env[63279]: DEBUG oslo_vmware.api [None req-2fd1437a-62f2-4367-9ac0-3c3fbe7d56ab tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Task: {'id': task-2087633, 'name': PowerOnVM_Task, 'duration_secs': 0.659919} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2153.864660] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-2fd1437a-62f2-4367-9ac0-3c3fbe7d56ab tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] [instance: 4ce17bdb-4bed-4e06-af13-e4097b55e17d] Powered on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2153.867844] env[63279]: DEBUG nova.compute.manager [None req-2fd1437a-62f2-4367-9ac0-3c3fbe7d56ab tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] [instance: 4ce17bdb-4bed-4e06-af13-e4097b55e17d] Checking state {{(pid=63279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2153.868848] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6428372f-80e9-49a4-b136-7cb5b0107b99 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2154.036926] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087634, 'name': CreateVM_Task, 'duration_secs': 0.624764} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2154.037358] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4acfb474-d861-467a-983c-0dd5641e66f3] Created VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2154.038268] env[63279]: DEBUG oslo_concurrency.lockutils [None req-a2668d1a-4c43-4202-b04d-63b7f5aec853 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d2d04a47-fc99-4ff0-adbc-a27616252dd0" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2154.038829] env[63279]: DEBUG oslo_concurrency.lockutils [None req-a2668d1a-4c43-4202-b04d-63b7f5aec853 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d2d04a47-fc99-4ff0-adbc-a27616252dd0" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2154.039105] env[63279]: DEBUG oslo_concurrency.lockutils [None req-a2668d1a-4c43-4202-b04d-63b7f5aec853 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d2d04a47-fc99-4ff0-adbc-a27616252dd0" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2154.039512] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-99bc1eac-9680-4b23-9154-38508fa79c88 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2154.047249] env[63279]: DEBUG oslo_vmware.api [None req-a2668d1a-4c43-4202-b04d-63b7f5aec853 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Waiting for the task: (returnval){ [ 2154.047249] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]520b48a0-bde6-8e07-39f8-172ca0ac7cdc" [ 2154.047249] env[63279]: _type = "Task" [ 2154.047249] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2154.063764] env[63279]: DEBUG oslo_vmware.api [None req-1e17e85f-930c-4f26-80d6-8e5a59771f74 tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] Task: {'id': task-2087636, 'name': PowerOffVM_Task, 'duration_secs': 0.270231} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2154.069374] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-1e17e85f-930c-4f26-80d6-8e5a59771f74 tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] [instance: 5b5f87cb-cf35-418f-b5bd-b953524a285c] Powered off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2154.069540] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-1e17e85f-930c-4f26-80d6-8e5a59771f74 tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] [instance: 5b5f87cb-cf35-418f-b5bd-b953524a285c] Unregistering the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2154.069954] env[63279]: DEBUG oslo_vmware.api [None req-a2668d1a-4c43-4202-b04d-63b7f5aec853 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]520b48a0-bde6-8e07-39f8-172ca0ac7cdc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2154.070304] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-09fc79c3-ae9f-4883-83bd-8477d345b275 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2154.136686] env[63279]: DEBUG nova.objects.instance [None req-45c03385-d84a-4fe1-8f36-6d1fe6e38b41 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Lazy-loading 'pci_requests' on Instance uuid eca98392-98be-405b-b799-463ef9ee3dc8 {{(pid=63279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2154.138689] env[63279]: DEBUG oslo_concurrency.lockutils [None req-403f6708-872b-40c5-9d44-2368cd15b0f1 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.988s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2154.141799] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b6feae97-fa7d-41e8-b8d5-106af4509856 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 10.045s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2154.145544] env[63279]: INFO nova.compute.claims [None req-b6feae97-fa7d-41e8-b8d5-106af4509856 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: 736ab3ac-b9e0-4f9e-885b-765ca7a92ed0] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2154.164906] env[63279]: INFO nova.scheduler.client.report [None req-403f6708-872b-40c5-9d44-2368cd15b0f1 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Deleted allocations for instance 4a9088e0-2992-4b18-8be9-6bc70633369b [ 2154.227315] env[63279]: DEBUG oslo_vmware.api [None req-88931e76-94a7-4e04-9054-f6a3f97882cc tempest-ServersV294TestFqdnHostnames-110312040 tempest-ServersV294TestFqdnHostnames-110312040-project-member] Task: {'id': task-2087638, 
'name': DeleteDatastoreFile_Task, 'duration_secs': 0.489566} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2154.227772] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-88931e76-94a7-4e04-9054-f6a3f97882cc tempest-ServersV294TestFqdnHostnames-110312040 tempest-ServersV294TestFqdnHostnames-110312040-project-member] Deleted the datastore file {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2154.227890] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-88931e76-94a7-4e04-9054-f6a3f97882cc tempest-ServersV294TestFqdnHostnames-110312040 tempest-ServersV294TestFqdnHostnames-110312040-project-member] [instance: 32dbef6d-d314-4fa6-972a-e7b1f22eb11d] Deleted contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2154.228017] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-88931e76-94a7-4e04-9054-f6a3f97882cc tempest-ServersV294TestFqdnHostnames-110312040 tempest-ServersV294TestFqdnHostnames-110312040-project-member] [instance: 32dbef6d-d314-4fa6-972a-e7b1f22eb11d] Instance destroyed {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2154.228199] env[63279]: INFO nova.compute.manager [None req-88931e76-94a7-4e04-9054-f6a3f97882cc tempest-ServersV294TestFqdnHostnames-110312040 tempest-ServersV294TestFqdnHostnames-110312040-project-member] [instance: 32dbef6d-d314-4fa6-972a-e7b1f22eb11d] Took 1.26 seconds to destroy the instance on the hypervisor. [ 2154.228583] env[63279]: DEBUG oslo.service.loopingcall [None req-88931e76-94a7-4e04-9054-f6a3f97882cc tempest-ServersV294TestFqdnHostnames-110312040 tempest-ServersV294TestFqdnHostnames-110312040-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2154.228861] env[63279]: DEBUG nova.compute.manager [-] [instance: 32dbef6d-d314-4fa6-972a-e7b1f22eb11d] Deallocating network for instance {{(pid=63279) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2154.228861] env[63279]: DEBUG nova.network.neutron [-] [instance: 32dbef6d-d314-4fa6-972a-e7b1f22eb11d] deallocate_for_instance() {{(pid=63279) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2154.233682] env[63279]: DEBUG oslo_concurrency.lockutils [req-f1d8180c-fd91-439b-bc9e-b501ba632362 req-d5c074ba-ccb4-4768-aa35-3d832531b08d service nova] Releasing lock "refresh_cache-4acfb474-d861-467a-983c-0dd5641e66f3" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2154.233926] env[63279]: DEBUG nova.compute.manager [req-f1d8180c-fd91-439b-bc9e-b501ba632362 req-d5c074ba-ccb4-4768-aa35-3d832531b08d service nova] [instance: 5656c853-ac83-47be-83c4-979a9e87ab91] Received event network-vif-deleted-2cc4a33a-bd88-4aec-a588-7c821bebf971 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2154.317525] env[63279]: DEBUG oslo_concurrency.lockutils [None req-082a3afc-69fa-402f-b3d2-e85c1a1a4788 tempest-ServersTestMultiNic-465204663 tempest-ServersTestMultiNic-465204663-project-member] Acquiring lock "75f5b49f-14e7-4a8e-a0cb-b955edc13dd5" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2154.318094] env[63279]: DEBUG oslo_concurrency.lockutils [None req-082a3afc-69fa-402f-b3d2-e85c1a1a4788 tempest-ServersTestMultiNic-465204663 tempest-ServersTestMultiNic-465204663-project-member] Lock "75f5b49f-14e7-4a8e-a0cb-b955edc13dd5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2154.575122] env[63279]: DEBUG oslo_concurrency.lockutils [None req-a2668d1a-4c43-4202-b04d-63b7f5aec853 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d2d04a47-fc99-4ff0-adbc-a27616252dd0" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2154.576321] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-a2668d1a-4c43-4202-b04d-63b7f5aec853 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] [instance: 4acfb474-d861-467a-983c-0dd5641e66f3] Processing image d2d04a47-fc99-4ff0-adbc-a27616252dd0 {{(pid=63279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2154.576321] env[63279]: DEBUG oslo_concurrency.lockutils [None req-a2668d1a-4c43-4202-b04d-63b7f5aec853 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d2d04a47-fc99-4ff0-adbc-a27616252dd0/d2d04a47-fc99-4ff0-adbc-a27616252dd0.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2154.576452] env[63279]: DEBUG oslo_concurrency.lockutils [None req-a2668d1a-4c43-4202-b04d-63b7f5aec853 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] 
Acquired lock "[datastore1] devstack-image-cache_base/d2d04a47-fc99-4ff0-adbc-a27616252dd0/d2d04a47-fc99-4ff0-adbc-a27616252dd0.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2154.576781] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-a2668d1a-4c43-4202-b04d-63b7f5aec853 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2154.577334] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-82e17dea-9ce8-4756-bb78-6be47bf5165c {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2154.592823] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-a2668d1a-4c43-4202-b04d-63b7f5aec853 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2154.592823] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-a2668d1a-4c43-4202-b04d-63b7f5aec853 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2154.592823] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-92f80bc7-e0e4-4e69-8f60-17b40ffb686d {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2154.601018] env[63279]: DEBUG oslo_vmware.api [None req-a2668d1a-4c43-4202-b04d-63b7f5aec853 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Waiting for the task: (returnval){ [ 2154.601018] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52d061d9-3816-0a8f-cf9e-0dc070acc456" [ 2154.601018] env[63279]: _type = "Task" [ 2154.601018] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2154.609020] env[63279]: DEBUG oslo_vmware.api [None req-a2668d1a-4c43-4202-b04d-63b7f5aec853 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52d061d9-3816-0a8f-cf9e-0dc070acc456, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2154.638874] env[63279]: DEBUG nova.objects.base [None req-45c03385-d84a-4fe1-8f36-6d1fe6e38b41 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Object Instance lazy-loaded attributes: flavor,pci_requests {{(pid=63279) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 2154.639122] env[63279]: DEBUG nova.network.neutron [None req-45c03385-d84a-4fe1-8f36-6d1fe6e38b41 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] [instance: eca98392-98be-405b-b799-463ef9ee3dc8] allocate_for_instance() {{(pid=63279) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2154.674320] env[63279]: DEBUG oslo_concurrency.lockutils [None req-403f6708-872b-40c5-9d44-2368cd15b0f1 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Lock "4a9088e0-2992-4b18-8be9-6bc70633369b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 26.212s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2154.735639] env[63279]: DEBUG nova.policy [None req-45c03385-d84a-4fe1-8f36-6d1fe6e38b41 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6343b207f7294f5fa2a8111940083fb0', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b5b21bc5072e4945a19a782dd9561709', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63279) authorize /opt/stack/nova/nova/policy.py:192}} [ 2154.820897] env[63279]: DEBUG nova.compute.manager [None req-082a3afc-69fa-402f-b3d2-e85c1a1a4788 tempest-ServersTestMultiNic-465204663 tempest-ServersTestMultiNic-465204663-project-member] [instance: 75f5b49f-14e7-4a8e-a0cb-b955edc13dd5] Starting instance... 
{{(pid=63279) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 2155.112498] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-a2668d1a-4c43-4202-b04d-63b7f5aec853 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] [instance: 4acfb474-d861-467a-983c-0dd5641e66f3] Preparing fetch location {{(pid=63279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2155.112768] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-a2668d1a-4c43-4202-b04d-63b7f5aec853 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] [instance: 4acfb474-d861-467a-983c-0dd5641e66f3] Fetch image to [datastore1] OSTACK_IMG_b6c868df-78dc-489b-8164-a7a2ef5181d6/OSTACK_IMG_b6c868df-78dc-489b-8164-a7a2ef5181d6.vmdk {{(pid=63279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2155.112953] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-a2668d1a-4c43-4202-b04d-63b7f5aec853 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] [instance: 4acfb474-d861-467a-983c-0dd5641e66f3] Downloading stream optimized image d2d04a47-fc99-4ff0-adbc-a27616252dd0 to [datastore1] OSTACK_IMG_b6c868df-78dc-489b-8164-a7a2ef5181d6/OSTACK_IMG_b6c868df-78dc-489b-8164-a7a2ef5181d6.vmdk on the data store datastore1 as vApp {{(pid=63279) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 2155.113137] env[63279]: DEBUG nova.virt.vmwareapi.images [None req-a2668d1a-4c43-4202-b04d-63b7f5aec853 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] [instance: 4acfb474-d861-467a-983c-0dd5641e66f3] Downloading image file data d2d04a47-fc99-4ff0-adbc-a27616252dd0 to the ESX as VM named 'OSTACK_IMG_b6c868df-78dc-489b-8164-a7a2ef5181d6' {{(pid=63279) fetch_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 2155.215093] env[63279]: DEBUG oslo_vmware.rw_handles [None req-a2668d1a-4c43-4202-b04d-63b7f5aec853 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 2155.215093] env[63279]: value = "resgroup-9" [ 2155.215093] env[63279]: _type = "ResourcePool" [ 2155.215093] env[63279]: }. 
{{(pid=63279) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 2155.216625] env[63279]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-31c840ba-a1bb-4805-9587-3ceb30852787 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2155.240260] env[63279]: DEBUG nova.network.neutron [None req-45c03385-d84a-4fe1-8f36-6d1fe6e38b41 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] [instance: eca98392-98be-405b-b799-463ef9ee3dc8] Successfully created port: 753324ac-cff9-4577-92da-27cf8cceab06 {{(pid=63279) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2155.250029] env[63279]: DEBUG oslo_vmware.rw_handles [None req-a2668d1a-4c43-4202-b04d-63b7f5aec853 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Lease: (returnval){ [ 2155.250029] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52899d9d-fed4-6ad3-2a09-69f31a0509e3" [ 2155.250029] env[63279]: _type = "HttpNfcLease" [ 2155.250029] env[63279]: } obtained for vApp import into resource pool (val){ [ 2155.250029] env[63279]: value = "resgroup-9" [ 2155.250029] env[63279]: _type = "ResourcePool" [ 2155.250029] env[63279]: }. {{(pid=63279) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 2155.250029] env[63279]: DEBUG oslo_vmware.api [None req-a2668d1a-4c43-4202-b04d-63b7f5aec853 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Waiting for the lease: (returnval){ [ 2155.250029] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52899d9d-fed4-6ad3-2a09-69f31a0509e3" [ 2155.250029] env[63279]: _type = "HttpNfcLease" [ 2155.250029] env[63279]: } to be ready. {{(pid=63279) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 2155.259347] env[63279]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 2155.259347] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52899d9d-fed4-6ad3-2a09-69f31a0509e3" [ 2155.259347] env[63279]: _type = "HttpNfcLease" [ 2155.259347] env[63279]: } is initializing. 
{{(pid=63279) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 2155.353661] env[63279]: DEBUG oslo_concurrency.lockutils [None req-082a3afc-69fa-402f-b3d2-e85c1a1a4788 tempest-ServersTestMultiNic-465204663 tempest-ServersTestMultiNic-465204663-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2155.440620] env[63279]: DEBUG nova.compute.manager [req-7dc6133d-f96f-462e-b3bf-5be5ea609f43 req-d108905a-df56-46f5-bdc0-126d6ee96904 service nova] [instance: 32dbef6d-d314-4fa6-972a-e7b1f22eb11d] Received event network-vif-deleted-d88bfcf3-27a0-4cbc-b13c-12cb6a612415 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2155.440852] env[63279]: INFO nova.compute.manager [req-7dc6133d-f96f-462e-b3bf-5be5ea609f43 req-d108905a-df56-46f5-bdc0-126d6ee96904 service nova] [instance: 32dbef6d-d314-4fa6-972a-e7b1f22eb11d] Neutron deleted interface d88bfcf3-27a0-4cbc-b13c-12cb6a612415; detaching it from the instance and deleting it from the info cache [ 2155.441075] env[63279]: DEBUG nova.network.neutron [req-7dc6133d-f96f-462e-b3bf-5be5ea609f43 req-d108905a-df56-46f5-bdc0-126d6ee96904 service nova] [instance: 32dbef6d-d314-4fa6-972a-e7b1f22eb11d] Updating instance_info_cache with network_info: [] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2155.465041] env[63279]: INFO nova.compute.manager [None req-9fa89649-cf23-4a4b-9d0e-97ebfb015392 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] [instance: 4ce17bdb-4bed-4e06-af13-e4097b55e17d] Unrescuing [ 2155.465343] env[63279]: DEBUG oslo_concurrency.lockutils [None req-9fa89649-cf23-4a4b-9d0e-97ebfb015392 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Acquiring lock "refresh_cache-4ce17bdb-4bed-4e06-af13-e4097b55e17d" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2155.465572] env[63279]: DEBUG oslo_concurrency.lockutils [None req-9fa89649-cf23-4a4b-9d0e-97ebfb015392 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Acquired lock "refresh_cache-4ce17bdb-4bed-4e06-af13-e4097b55e17d" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2155.465801] env[63279]: DEBUG nova.network.neutron [None req-9fa89649-cf23-4a4b-9d0e-97ebfb015392 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] [instance: 4ce17bdb-4bed-4e06-af13-e4097b55e17d] Building network info cache for instance {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2155.532393] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae9d1df1-7626-4c92-ab68-22d5f5806a34 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2155.540182] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7394a59-749e-49c3-9c5b-755f3c192719 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2155.572741] env[63279]: DEBUG 
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c40b7cf3-9915-4cd4-8354-9e5a9b78df48 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2155.581100] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bda71653-e87c-4689-81fc-00718be27d61 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2155.595153] env[63279]: DEBUG nova.compute.provider_tree [None req-b6feae97-fa7d-41e8-b8d5-106af4509856 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Inventory has not changed in ProviderTree for provider: 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2155.598123] env[63279]: DEBUG nova.network.neutron [-] [instance: 32dbef6d-d314-4fa6-972a-e7b1f22eb11d] Updating instance_info_cache with network_info: [] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2155.665045] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-1e17e85f-930c-4f26-80d6-8e5a59771f74 tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] [instance: 5b5f87cb-cf35-418f-b5bd-b953524a285c] Unregistered the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2155.665304] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-1e17e85f-930c-4f26-80d6-8e5a59771f74 tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] [instance: 5b5f87cb-cf35-418f-b5bd-b953524a285c] Deleting contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2155.665532] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-1e17e85f-930c-4f26-80d6-8e5a59771f74 tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] Deleting the datastore file [datastore1] 5b5f87cb-cf35-418f-b5bd-b953524a285c {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2155.665796] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0318a4d9-20d0-4cb9-8951-4dbb8af8364a {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2155.672440] env[63279]: DEBUG oslo_vmware.api [None req-1e17e85f-930c-4f26-80d6-8e5a59771f74 tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] Waiting for the task: (returnval){ [ 2155.672440] env[63279]: value = "task-2087641" [ 2155.672440] env[63279]: _type = "Task" [ 2155.672440] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2155.680796] env[63279]: DEBUG oslo_vmware.api [None req-1e17e85f-930c-4f26-80d6-8e5a59771f74 tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] Task: {'id': task-2087641, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2155.758674] env[63279]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 2155.758674] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52899d9d-fed4-6ad3-2a09-69f31a0509e3" [ 2155.758674] env[63279]: _type = "HttpNfcLease" [ 2155.758674] env[63279]: } is initializing. {{(pid=63279) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 2155.854698] env[63279]: DEBUG oslo_concurrency.lockutils [None req-8d343f0d-29ec-4ca4-9558-62f940177dd9 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Acquiring lock "6388f912-ae70-4e8f-b8e4-ceb02e0f8a51" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2155.855117] env[63279]: DEBUG oslo_concurrency.lockutils [None req-8d343f0d-29ec-4ca4-9558-62f940177dd9 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Lock "6388f912-ae70-4e8f-b8e4-ceb02e0f8a51" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2155.855444] env[63279]: DEBUG oslo_concurrency.lockutils [None req-8d343f0d-29ec-4ca4-9558-62f940177dd9 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Acquiring lock "6388f912-ae70-4e8f-b8e4-ceb02e0f8a51-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2155.855716] env[63279]: DEBUG oslo_concurrency.lockutils [None req-8d343f0d-29ec-4ca4-9558-62f940177dd9 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Lock "6388f912-ae70-4e8f-b8e4-ceb02e0f8a51-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2155.856010] env[63279]: DEBUG oslo_concurrency.lockutils [None req-8d343f0d-29ec-4ca4-9558-62f940177dd9 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Lock "6388f912-ae70-4e8f-b8e4-ceb02e0f8a51-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2155.858279] env[63279]: INFO nova.compute.manager [None req-8d343f0d-29ec-4ca4-9558-62f940177dd9 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] [instance: 6388f912-ae70-4e8f-b8e4-ceb02e0f8a51] Terminating instance [ 2155.946883] env[63279]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-5b7bf20a-90c2-48d2-9651-58631a28043d {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2155.958733] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aedb84a0-ff3c-4690-8d5a-da71836a1c7b {{(pid=63279) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2155.994312] env[63279]: DEBUG nova.compute.manager [req-7dc6133d-f96f-462e-b3bf-5be5ea609f43 req-d108905a-df56-46f5-bdc0-126d6ee96904 service nova] [instance: 32dbef6d-d314-4fa6-972a-e7b1f22eb11d] Detach interface failed, port_id=d88bfcf3-27a0-4cbc-b13c-12cb6a612415, reason: Instance 32dbef6d-d314-4fa6-972a-e7b1f22eb11d could not be found. {{(pid=63279) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11484}} [ 2156.098414] env[63279]: DEBUG nova.scheduler.client.report [None req-b6feae97-fa7d-41e8-b8d5-106af4509856 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Inventory has not changed for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2156.102033] env[63279]: INFO nova.compute.manager [-] [instance: 32dbef6d-d314-4fa6-972a-e7b1f22eb11d] Took 1.87 seconds to deallocate network for instance. [ 2156.183802] env[63279]: DEBUG oslo_vmware.api [None req-1e17e85f-930c-4f26-80d6-8e5a59771f74 tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] Task: {'id': task-2087641, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2156.233622] env[63279]: DEBUG nova.network.neutron [None req-9fa89649-cf23-4a4b-9d0e-97ebfb015392 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] [instance: 4ce17bdb-4bed-4e06-af13-e4097b55e17d] Updating instance_info_cache with network_info: [{"id": "72b975f4-3f92-4d65-8d8f-119eeb59c2ab", "address": "fa:16:3e:f0:d6:f2", "network": {"id": "c1d7406d-6852-47cd-a4a3-de7373d03ab4", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1990733857-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1fc9b60ae304455097b8be9a276796fa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0549820d-5649-40bc-ad6e-9ae27b384d90", "external-id": "nsx-vlan-transportzone-434", "segmentation_id": 434, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap72b975f4-3f", "ovs_interfaceid": "72b975f4-3f92-4d65-8d8f-119eeb59c2ab", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2156.258929] env[63279]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 2156.258929] env[63279]: value = 
"session[527edb51-86b9-2668-51b3-676f2d4ed098]52899d9d-fed4-6ad3-2a09-69f31a0509e3" [ 2156.258929] env[63279]: _type = "HttpNfcLease" [ 2156.258929] env[63279]: } is initializing. {{(pid=63279) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 2156.363488] env[63279]: DEBUG nova.compute.manager [None req-8d343f0d-29ec-4ca4-9558-62f940177dd9 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] [instance: 6388f912-ae70-4e8f-b8e4-ceb02e0f8a51] Start destroying the instance on the hypervisor. {{(pid=63279) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2156.363790] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-8d343f0d-29ec-4ca4-9558-62f940177dd9 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] [instance: 6388f912-ae70-4e8f-b8e4-ceb02e0f8a51] Destroying instance {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2156.364761] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69804083-09a8-4d31-b997-9a042e2b116d {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2156.373518] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-8d343f0d-29ec-4ca4-9558-62f940177dd9 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] [instance: 6388f912-ae70-4e8f-b8e4-ceb02e0f8a51] Powering off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2156.373795] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-216f2fe6-2571-42a4-a708-c2bd3d6548f2 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2156.381443] env[63279]: DEBUG oslo_vmware.api [None req-8d343f0d-29ec-4ca4-9558-62f940177dd9 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Waiting for the task: (returnval){ [ 2156.381443] env[63279]: value = "task-2087642" [ 2156.381443] env[63279]: _type = "Task" [ 2156.381443] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2156.389745] env[63279]: DEBUG oslo_vmware.api [None req-8d343f0d-29ec-4ca4-9558-62f940177dd9 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Task: {'id': task-2087642, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2156.438853] env[63279]: INFO nova.compute.manager [None req-e9065ddc-cad5-41a4-aca0-4a89875ea722 tempest-ServerActionsV293TestJSON-360186716 tempest-ServerActionsV293TestJSON-360186716-project-member] [instance: f375b54b-f9de-4529-b752-52c240aed532] Get console output [ 2156.439490] env[63279]: WARNING nova.virt.vmwareapi.driver [None req-e9065ddc-cad5-41a4-aca0-4a89875ea722 tempest-ServerActionsV293TestJSON-360186716 tempest-ServerActionsV293TestJSON-360186716-project-member] [instance: f375b54b-f9de-4529-b752-52c240aed532] The console log is missing. 
Check your VSPC configuration [ 2156.604156] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b6feae97-fa7d-41e8-b8d5-106af4509856 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.463s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2156.604815] env[63279]: DEBUG nova.compute.manager [None req-b6feae97-fa7d-41e8-b8d5-106af4509856 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: 736ab3ac-b9e0-4f9e-885b-765ca7a92ed0] Start building networks asynchronously for instance. {{(pid=63279) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 2156.608440] env[63279]: DEBUG oslo_concurrency.lockutils [None req-718a590a-91c9-4ba7-b80f-c0d0ab309a1d tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 9.875s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2156.610169] env[63279]: INFO nova.compute.claims [None req-718a590a-91c9-4ba7-b80f-c0d0ab309a1d tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 686a7ce2-2d07-411e-91d6-0471c55c3728] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2156.614201] env[63279]: DEBUG oslo_concurrency.lockutils [None req-88931e76-94a7-4e04-9054-f6a3f97882cc tempest-ServersV294TestFqdnHostnames-110312040 tempest-ServersV294TestFqdnHostnames-110312040-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2156.684352] env[63279]: DEBUG oslo_vmware.api [None req-1e17e85f-930c-4f26-80d6-8e5a59771f74 tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] Task: {'id': task-2087641, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.586612} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2156.684558] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-1e17e85f-930c-4f26-80d6-8e5a59771f74 tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] Deleted the datastore file {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2156.684766] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-1e17e85f-930c-4f26-80d6-8e5a59771f74 tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] [instance: 5b5f87cb-cf35-418f-b5bd-b953524a285c] Deleted contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2156.684994] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-1e17e85f-930c-4f26-80d6-8e5a59771f74 tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] [instance: 5b5f87cb-cf35-418f-b5bd-b953524a285c] Instance destroyed {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2156.685265] env[63279]: INFO nova.compute.manager [None req-1e17e85f-930c-4f26-80d6-8e5a59771f74 tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] [instance: 5b5f87cb-cf35-418f-b5bd-b953524a285c] Took 3.18 seconds to destroy the instance on the hypervisor. [ 2156.685593] env[63279]: DEBUG oslo.service.loopingcall [None req-1e17e85f-930c-4f26-80d6-8e5a59771f74 tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2156.685875] env[63279]: DEBUG nova.compute.manager [-] [instance: 5b5f87cb-cf35-418f-b5bd-b953524a285c] Deallocating network for instance {{(pid=63279) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2156.686042] env[63279]: DEBUG nova.network.neutron [-] [instance: 5b5f87cb-cf35-418f-b5bd-b953524a285c] deallocate_for_instance() {{(pid=63279) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2156.737859] env[63279]: DEBUG oslo_concurrency.lockutils [None req-9fa89649-cf23-4a4b-9d0e-97ebfb015392 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Releasing lock "refresh_cache-4ce17bdb-4bed-4e06-af13-e4097b55e17d" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2156.738429] env[63279]: DEBUG nova.objects.instance [None req-9fa89649-cf23-4a4b-9d0e-97ebfb015392 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Lazy-loading 'flavor' on Instance uuid 4ce17bdb-4bed-4e06-af13-e4097b55e17d {{(pid=63279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2156.740437] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d865c7f-e053-4cb8-9ed5-2a58b56ecf88 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2156.749834] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-d76172fe-0a30-49a4-88fe-65e26705614d tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] [instance: 2f5e22f6-ba70-4848-965b-eb1553115323] Suspending the VM {{(pid=63279) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1162}} [ 2156.749834] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.SuspendVM_Task with opID=oslo.vmware-77405adc-edf7-461e-84c4-c853ad5f5569 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2156.758731] env[63279]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 2156.758731] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52899d9d-fed4-6ad3-2a09-69f31a0509e3" [ 2156.758731] env[63279]: _type = "HttpNfcLease" [ 2156.758731] env[63279]: } is initializing. {{(pid=63279) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 2156.760468] env[63279]: DEBUG oslo_vmware.api [None req-d76172fe-0a30-49a4-88fe-65e26705614d tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Waiting for the task: (returnval){ [ 2156.760468] env[63279]: value = "task-2087643" [ 2156.760468] env[63279]: _type = "Task" [ 2156.760468] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2156.768635] env[63279]: DEBUG oslo_vmware.api [None req-d76172fe-0a30-49a4-88fe-65e26705614d tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Task: {'id': task-2087643, 'name': SuspendVM_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2156.880144] env[63279]: DEBUG nova.network.neutron [None req-45c03385-d84a-4fe1-8f36-6d1fe6e38b41 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] [instance: eca98392-98be-405b-b799-463ef9ee3dc8] Successfully updated port: 753324ac-cff9-4577-92da-27cf8cceab06 {{(pid=63279) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2156.893102] env[63279]: DEBUG oslo_vmware.api [None req-8d343f0d-29ec-4ca4-9558-62f940177dd9 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Task: {'id': task-2087642, 'name': PowerOffVM_Task, 'duration_secs': 0.38142} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2156.894165] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-8d343f0d-29ec-4ca4-9558-62f940177dd9 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] [instance: 6388f912-ae70-4e8f-b8e4-ceb02e0f8a51] Powered off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2156.894369] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-8d343f0d-29ec-4ca4-9558-62f940177dd9 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] [instance: 6388f912-ae70-4e8f-b8e4-ceb02e0f8a51] Unregistering the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2156.894764] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-effa9f33-1882-450d-a94d-5989299fd59a {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2157.109956] env[63279]: DEBUG nova.compute.utils [None req-b6feae97-fa7d-41e8-b8d5-106af4509856 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Using /dev/sd instead of None {{(pid=63279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2157.111684] env[63279]: DEBUG nova.compute.manager [None req-b6feae97-fa7d-41e8-b8d5-106af4509856 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: 736ab3ac-b9e0-4f9e-885b-765ca7a92ed0] Allocating IP information in the background. 
{{(pid=63279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 2157.111891] env[63279]: DEBUG nova.network.neutron [None req-b6feae97-fa7d-41e8-b8d5-106af4509856 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: 736ab3ac-b9e0-4f9e-885b-765ca7a92ed0] allocate_for_instance() {{(pid=63279) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2157.164758] env[63279]: DEBUG nova.policy [None req-b6feae97-fa7d-41e8-b8d5-106af4509856 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ab1648b445ab458d92404e3a5ddb8619', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7c98de1240f14b058e58f6a707096ef1', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63279) authorize /opt/stack/nova/nova/policy.py:192}} [ 2157.249880] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6e93d02-a9df-4c3e-9493-9a46195c2ce6 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2157.277933] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-9fa89649-cf23-4a4b-9d0e-97ebfb015392 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] [instance: 4ce17bdb-4bed-4e06-af13-e4097b55e17d] Powering off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2157.282751] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-fb937a57-d43a-47df-b049-75c414dd28b4 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2157.284349] env[63279]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 2157.284349] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52899d9d-fed4-6ad3-2a09-69f31a0509e3" [ 2157.284349] env[63279]: _type = "HttpNfcLease" [ 2157.284349] env[63279]: } is initializing. {{(pid=63279) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 2157.291332] env[63279]: DEBUG oslo_vmware.api [None req-d76172fe-0a30-49a4-88fe-65e26705614d tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Task: {'id': task-2087643, 'name': SuspendVM_Task} progress is 45%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2157.292687] env[63279]: DEBUG oslo_vmware.api [None req-9fa89649-cf23-4a4b-9d0e-97ebfb015392 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Waiting for the task: (returnval){ [ 2157.292687] env[63279]: value = "task-2087645" [ 2157.292687] env[63279]: _type = "Task" [ 2157.292687] env[63279]: } to complete. 
{{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2157.301150] env[63279]: DEBUG oslo_vmware.api [None req-9fa89649-cf23-4a4b-9d0e-97ebfb015392 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Task: {'id': task-2087645, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2157.384324] env[63279]: DEBUG oslo_concurrency.lockutils [None req-45c03385-d84a-4fe1-8f36-6d1fe6e38b41 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Acquiring lock "refresh_cache-eca98392-98be-405b-b799-463ef9ee3dc8" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2157.384324] env[63279]: DEBUG oslo_concurrency.lockutils [None req-45c03385-d84a-4fe1-8f36-6d1fe6e38b41 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Acquired lock "refresh_cache-eca98392-98be-405b-b799-463ef9ee3dc8" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2157.384324] env[63279]: DEBUG nova.network.neutron [None req-45c03385-d84a-4fe1-8f36-6d1fe6e38b41 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] [instance: eca98392-98be-405b-b799-463ef9ee3dc8] Building network info cache for instance {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2157.425395] env[63279]: DEBUG nova.network.neutron [None req-b6feae97-fa7d-41e8-b8d5-106af4509856 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: 736ab3ac-b9e0-4f9e-885b-765ca7a92ed0] Successfully created port: 5a61eb6f-d235-4f00-a65b-76f20c2c3d8d {{(pid=63279) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2157.497463] env[63279]: DEBUG nova.network.neutron [-] [instance: 5b5f87cb-cf35-418f-b5bd-b953524a285c] Updating instance_info_cache with network_info: [] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2157.617845] env[63279]: DEBUG nova.compute.manager [None req-b6feae97-fa7d-41e8-b8d5-106af4509856 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: 736ab3ac-b9e0-4f9e-885b-765ca7a92ed0] Start building block device mappings for instance. 
{{(pid=63279) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 2157.709618] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-8d343f0d-29ec-4ca4-9558-62f940177dd9 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] [instance: 6388f912-ae70-4e8f-b8e4-ceb02e0f8a51] Unregistered the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2157.709943] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-8d343f0d-29ec-4ca4-9558-62f940177dd9 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] [instance: 6388f912-ae70-4e8f-b8e4-ceb02e0f8a51] Deleting contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2157.710172] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-8d343f0d-29ec-4ca4-9558-62f940177dd9 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Deleting the datastore file [datastore1] 6388f912-ae70-4e8f-b8e4-ceb02e0f8a51 {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2157.711564] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c43802a6-7d64-4c14-800b-80347cf12f11 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2157.718007] env[63279]: DEBUG oslo_vmware.api [None req-8d343f0d-29ec-4ca4-9558-62f940177dd9 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Waiting for the task: (returnval){ [ 2157.718007] env[63279]: value = "task-2087646" [ 2157.718007] env[63279]: _type = "Task" [ 2157.718007] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2157.730075] env[63279]: DEBUG oslo_vmware.api [None req-8d343f0d-29ec-4ca4-9558-62f940177dd9 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Task: {'id': task-2087646, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2157.761564] env[63279]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 2157.761564] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52899d9d-fed4-6ad3-2a09-69f31a0509e3" [ 2157.761564] env[63279]: _type = "HttpNfcLease" [ 2157.761564] env[63279]: } is initializing. {{(pid=63279) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 2157.788756] env[63279]: DEBUG oslo_vmware.api [None req-d76172fe-0a30-49a4-88fe-65e26705614d tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Task: {'id': task-2087643, 'name': SuspendVM_Task} progress is 87%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2157.801972] env[63279]: DEBUG oslo_vmware.api [None req-9fa89649-cf23-4a4b-9d0e-97ebfb015392 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Task: {'id': task-2087645, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2157.922111] env[63279]: WARNING nova.network.neutron [None req-45c03385-d84a-4fe1-8f36-6d1fe6e38b41 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] [instance: eca98392-98be-405b-b799-463ef9ee3dc8] e0e614b7-de4b-485e-8824-582faae8febd already exists in list: networks containing: ['e0e614b7-de4b-485e-8824-582faae8febd']. ignoring it [ 2157.936979] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7b9f809-91af-418d-aed8-1a5d57948874 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2157.944737] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be056779-6197-4fea-a02d-f2a2a8afb58d {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2157.977945] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-961161f1-9a02-4496-ac00-ebe703832bb4 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2157.986955] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7a51e41-3d89-4de4-8276-e0228ee2ac88 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2158.004035] env[63279]: INFO nova.compute.manager [-] [instance: 5b5f87cb-cf35-418f-b5bd-b953524a285c] Took 1.32 seconds to deallocate network for instance. [ 2158.004584] env[63279]: DEBUG nova.compute.provider_tree [None req-718a590a-91c9-4ba7-b80f-c0d0ab309a1d tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Inventory has not changed in ProviderTree for provider: 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2158.078526] env[63279]: DEBUG nova.compute.manager [req-277e746e-4256-41c0-bb4d-5d9212582271 req-f8b928dc-519e-4011-aaed-9938eb0905c3 service nova] [instance: eca98392-98be-405b-b799-463ef9ee3dc8] Received event network-vif-plugged-753324ac-cff9-4577-92da-27cf8cceab06 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2158.078762] env[63279]: DEBUG oslo_concurrency.lockutils [req-277e746e-4256-41c0-bb4d-5d9212582271 req-f8b928dc-519e-4011-aaed-9938eb0905c3 service nova] Acquiring lock "eca98392-98be-405b-b799-463ef9ee3dc8-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2158.078975] env[63279]: DEBUG oslo_concurrency.lockutils [req-277e746e-4256-41c0-bb4d-5d9212582271 req-f8b928dc-519e-4011-aaed-9938eb0905c3 service nova] Lock "eca98392-98be-405b-b799-463ef9ee3dc8-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2158.079196] env[63279]: DEBUG oslo_concurrency.lockutils [req-277e746e-4256-41c0-bb4d-5d9212582271 req-f8b928dc-519e-4011-aaed-9938eb0905c3 service nova] Lock "eca98392-98be-405b-b799-463ef9ee3dc8-events" "released" by 
"nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2158.079367] env[63279]: DEBUG nova.compute.manager [req-277e746e-4256-41c0-bb4d-5d9212582271 req-f8b928dc-519e-4011-aaed-9938eb0905c3 service nova] [instance: eca98392-98be-405b-b799-463ef9ee3dc8] No waiting events found dispatching network-vif-plugged-753324ac-cff9-4577-92da-27cf8cceab06 {{(pid=63279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 2158.079567] env[63279]: WARNING nova.compute.manager [req-277e746e-4256-41c0-bb4d-5d9212582271 req-f8b928dc-519e-4011-aaed-9938eb0905c3 service nova] [instance: eca98392-98be-405b-b799-463ef9ee3dc8] Received unexpected event network-vif-plugged-753324ac-cff9-4577-92da-27cf8cceab06 for instance with vm_state active and task_state None. [ 2158.079778] env[63279]: DEBUG nova.compute.manager [req-277e746e-4256-41c0-bb4d-5d9212582271 req-f8b928dc-519e-4011-aaed-9938eb0905c3 service nova] [instance: eca98392-98be-405b-b799-463ef9ee3dc8] Received event network-changed-753324ac-cff9-4577-92da-27cf8cceab06 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2158.079959] env[63279]: DEBUG nova.compute.manager [req-277e746e-4256-41c0-bb4d-5d9212582271 req-f8b928dc-519e-4011-aaed-9938eb0905c3 service nova] [instance: eca98392-98be-405b-b799-463ef9ee3dc8] Refreshing instance network info cache due to event network-changed-753324ac-cff9-4577-92da-27cf8cceab06. {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11655}} [ 2158.080171] env[63279]: DEBUG oslo_concurrency.lockutils [req-277e746e-4256-41c0-bb4d-5d9212582271 req-f8b928dc-519e-4011-aaed-9938eb0905c3 service nova] Acquiring lock "refresh_cache-eca98392-98be-405b-b799-463ef9ee3dc8" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2158.206252] env[63279]: DEBUG oslo_concurrency.lockutils [None req-3b4d6d87-fd3c-4686-b1f3-07b747c233dd tempest-ServerActionsV293TestJSON-360186716 tempest-ServerActionsV293TestJSON-360186716-project-member] Acquiring lock "f375b54b-f9de-4529-b752-52c240aed532" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2158.206512] env[63279]: DEBUG oslo_concurrency.lockutils [None req-3b4d6d87-fd3c-4686-b1f3-07b747c233dd tempest-ServerActionsV293TestJSON-360186716 tempest-ServerActionsV293TestJSON-360186716-project-member] Lock "f375b54b-f9de-4529-b752-52c240aed532" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2158.206719] env[63279]: DEBUG oslo_concurrency.lockutils [None req-3b4d6d87-fd3c-4686-b1f3-07b747c233dd tempest-ServerActionsV293TestJSON-360186716 tempest-ServerActionsV293TestJSON-360186716-project-member] Acquiring lock "f375b54b-f9de-4529-b752-52c240aed532-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2158.206903] env[63279]: DEBUG oslo_concurrency.lockutils [None req-3b4d6d87-fd3c-4686-b1f3-07b747c233dd tempest-ServerActionsV293TestJSON-360186716 
tempest-ServerActionsV293TestJSON-360186716-project-member] Lock "f375b54b-f9de-4529-b752-52c240aed532-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2158.207092] env[63279]: DEBUG oslo_concurrency.lockutils [None req-3b4d6d87-fd3c-4686-b1f3-07b747c233dd tempest-ServerActionsV293TestJSON-360186716 tempest-ServerActionsV293TestJSON-360186716-project-member] Lock "f375b54b-f9de-4529-b752-52c240aed532-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2158.209666] env[63279]: INFO nova.compute.manager [None req-3b4d6d87-fd3c-4686-b1f3-07b747c233dd tempest-ServerActionsV293TestJSON-360186716 tempest-ServerActionsV293TestJSON-360186716-project-member] [instance: f375b54b-f9de-4529-b752-52c240aed532] Terminating instance [ 2158.230749] env[63279]: DEBUG oslo_vmware.api [None req-8d343f0d-29ec-4ca4-9558-62f940177dd9 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Task: {'id': task-2087646, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2158.263017] env[63279]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 2158.263017] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52899d9d-fed4-6ad3-2a09-69f31a0509e3" [ 2158.263017] env[63279]: _type = "HttpNfcLease" [ 2158.263017] env[63279]: } is ready. {{(pid=63279) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 2158.263435] env[63279]: DEBUG oslo_vmware.rw_handles [None req-a2668d1a-4c43-4202-b04d-63b7f5aec853 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 2158.263435] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52899d9d-fed4-6ad3-2a09-69f31a0509e3" [ 2158.263435] env[63279]: _type = "HttpNfcLease" [ 2158.263435] env[63279]: }. {{(pid=63279) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 2158.264295] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d498bb18-7909-4c06-9ef4-8117a7293ed7 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2158.272374] env[63279]: DEBUG oslo_vmware.rw_handles [None req-a2668d1a-4c43-4202-b04d-63b7f5aec853 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5293819a-8485-91f2-4d6e-a3ef87cdaa6d/disk-0.vmdk from lease info. {{(pid=63279) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 2158.272435] env[63279]: DEBUG oslo_vmware.rw_handles [None req-a2668d1a-4c43-4202-b04d-63b7f5aec853 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Creating HTTP connection to write to file with size = 21334016 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5293819a-8485-91f2-4d6e-a3ef87cdaa6d/disk-0.vmdk. 
{{(pid=63279) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2158.331861] env[63279]: DEBUG nova.network.neutron [None req-45c03385-d84a-4fe1-8f36-6d1fe6e38b41 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] [instance: eca98392-98be-405b-b799-463ef9ee3dc8] Updating instance_info_cache with network_info: [{"id": "d609f907-0c7e-4293-8b21-721e712e8dc2", "address": "fa:16:3e:0e:eb:3d", "network": {"id": "e0e614b7-de4b-485e-8824-582faae8febd", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1102025073-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.184", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b5b21bc5072e4945a19a782dd9561709", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "41278529-8bd2-44a1-97c8-03967faa3ff7", "external-id": "nsx-vlan-transportzone-749", "segmentation_id": 749, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd609f907-0c", "ovs_interfaceid": "d609f907-0c7e-4293-8b21-721e712e8dc2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "753324ac-cff9-4577-92da-27cf8cceab06", "address": "fa:16:3e:58:99:55", "network": {"id": "e0e614b7-de4b-485e-8824-582faae8febd", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1102025073-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b5b21bc5072e4945a19a782dd9561709", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "41278529-8bd2-44a1-97c8-03967faa3ff7", "external-id": "nsx-vlan-transportzone-749", "segmentation_id": 749, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap753324ac-cf", "ovs_interfaceid": "753324ac-cff9-4577-92da-27cf8cceab06", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2158.351943] env[63279]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-79b651ec-5eac-433f-9b68-4be1872c3d55 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2158.353720] env[63279]: DEBUG oslo_vmware.api [None req-9fa89649-cf23-4a4b-9d0e-97ebfb015392 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Task: {'id': task-2087645, 'name': PowerOffVM_Task, 'duration_secs': 0.870717} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2158.353962] env[63279]: DEBUG oslo_vmware.api [None req-d76172fe-0a30-49a4-88fe-65e26705614d tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Task: {'id': task-2087643, 'name': SuspendVM_Task} progress is 87%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2158.358846] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-9fa89649-cf23-4a4b-9d0e-97ebfb015392 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] [instance: 4ce17bdb-4bed-4e06-af13-e4097b55e17d] Powered off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2158.360198] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-9fa89649-cf23-4a4b-9d0e-97ebfb015392 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] [instance: 4ce17bdb-4bed-4e06-af13-e4097b55e17d] Reconfiguring VM instance instance-0000004b to detach disk 2001 {{(pid=63279) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 2158.361157] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ca977704-82bf-4906-9a70-6311d11deaa2 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2158.381854] env[63279]: DEBUG oslo_vmware.api [None req-9fa89649-cf23-4a4b-9d0e-97ebfb015392 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Waiting for the task: (returnval){ [ 2158.381854] env[63279]: value = "task-2087647" [ 2158.381854] env[63279]: _type = "Task" [ 2158.381854] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2158.390567] env[63279]: DEBUG oslo_vmware.api [None req-9fa89649-cf23-4a4b-9d0e-97ebfb015392 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Task: {'id': task-2087647, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2158.511790] env[63279]: DEBUG nova.scheduler.client.report [None req-718a590a-91c9-4ba7-b80f-c0d0ab309a1d tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Inventory has not changed for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2158.515951] env[63279]: DEBUG oslo_concurrency.lockutils [None req-1e17e85f-930c-4f26-80d6-8e5a59771f74 tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2158.631897] env[63279]: DEBUG nova.compute.manager [None req-b6feae97-fa7d-41e8-b8d5-106af4509856 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: 736ab3ac-b9e0-4f9e-885b-765ca7a92ed0] Start spawning the instance on the hypervisor. {{(pid=63279) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 2158.714661] env[63279]: DEBUG nova.compute.manager [None req-3b4d6d87-fd3c-4686-b1f3-07b747c233dd tempest-ServerActionsV293TestJSON-360186716 tempest-ServerActionsV293TestJSON-360186716-project-member] [instance: f375b54b-f9de-4529-b752-52c240aed532] Start destroying the instance on the hypervisor. {{(pid=63279) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2158.714978] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-3b4d6d87-fd3c-4686-b1f3-07b747c233dd tempest-ServerActionsV293TestJSON-360186716 tempest-ServerActionsV293TestJSON-360186716-project-member] [instance: f375b54b-f9de-4529-b752-52c240aed532] Powering off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2158.715323] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-574fc99e-3267-4307-9a19-41e740da4060 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2158.724075] env[63279]: DEBUG oslo_vmware.api [None req-3b4d6d87-fd3c-4686-b1f3-07b747c233dd tempest-ServerActionsV293TestJSON-360186716 tempest-ServerActionsV293TestJSON-360186716-project-member] Waiting for the task: (returnval){ [ 2158.724075] env[63279]: value = "task-2087648" [ 2158.724075] env[63279]: _type = "Task" [ 2158.724075] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2158.730448] env[63279]: DEBUG oslo_vmware.api [None req-8d343f0d-29ec-4ca4-9558-62f940177dd9 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Task: {'id': task-2087646, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.517537} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2158.730988] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-8d343f0d-29ec-4ca4-9558-62f940177dd9 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Deleted the datastore file {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2158.731207] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-8d343f0d-29ec-4ca4-9558-62f940177dd9 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] [instance: 6388f912-ae70-4e8f-b8e4-ceb02e0f8a51] Deleted contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2158.731390] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-8d343f0d-29ec-4ca4-9558-62f940177dd9 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] [instance: 6388f912-ae70-4e8f-b8e4-ceb02e0f8a51] Instance destroyed {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2158.731580] env[63279]: INFO nova.compute.manager [None req-8d343f0d-29ec-4ca4-9558-62f940177dd9 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] [instance: 6388f912-ae70-4e8f-b8e4-ceb02e0f8a51] Took 2.37 seconds to destroy the instance on the hypervisor. [ 2158.732388] env[63279]: DEBUG oslo.service.loopingcall [None req-8d343f0d-29ec-4ca4-9558-62f940177dd9 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2158.735585] env[63279]: DEBUG nova.compute.manager [-] [instance: 6388f912-ae70-4e8f-b8e4-ceb02e0f8a51] Deallocating network for instance {{(pid=63279) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2158.735701] env[63279]: DEBUG nova.network.neutron [-] [instance: 6388f912-ae70-4e8f-b8e4-ceb02e0f8a51] deallocate_for_instance() {{(pid=63279) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2158.737428] env[63279]: DEBUG oslo_vmware.api [None req-3b4d6d87-fd3c-4686-b1f3-07b747c233dd tempest-ServerActionsV293TestJSON-360186716 tempest-ServerActionsV293TestJSON-360186716-project-member] Task: {'id': task-2087648, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2158.841628] env[63279]: DEBUG oslo_concurrency.lockutils [None req-45c03385-d84a-4fe1-8f36-6d1fe6e38b41 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Releasing lock "refresh_cache-eca98392-98be-405b-b799-463ef9ee3dc8" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2158.842247] env[63279]: DEBUG oslo_concurrency.lockutils [None req-45c03385-d84a-4fe1-8f36-6d1fe6e38b41 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Acquiring lock "eca98392-98be-405b-b799-463ef9ee3dc8" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2158.842400] env[63279]: DEBUG oslo_concurrency.lockutils [None req-45c03385-d84a-4fe1-8f36-6d1fe6e38b41 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Acquired lock "eca98392-98be-405b-b799-463ef9ee3dc8" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2158.842682] env[63279]: DEBUG oslo_vmware.api [None req-d76172fe-0a30-49a4-88fe-65e26705614d tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Task: {'id': task-2087643, 'name': SuspendVM_Task, 'duration_secs': 1.727344} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2158.842897] env[63279]: DEBUG oslo_concurrency.lockutils [req-277e746e-4256-41c0-bb4d-5d9212582271 req-f8b928dc-519e-4011-aaed-9938eb0905c3 service nova] Acquired lock "refresh_cache-eca98392-98be-405b-b799-463ef9ee3dc8" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2158.843088] env[63279]: DEBUG nova.network.neutron [req-277e746e-4256-41c0-bb4d-5d9212582271 req-f8b928dc-519e-4011-aaed-9938eb0905c3 service nova] [instance: eca98392-98be-405b-b799-463ef9ee3dc8] Refreshing network info cache for port 753324ac-cff9-4577-92da-27cf8cceab06 {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2158.844906] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-084d4d39-1e69-430c-a0cb-84c9109ef31c {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2158.847794] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-d76172fe-0a30-49a4-88fe-65e26705614d tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] [instance: 2f5e22f6-ba70-4848-965b-eb1553115323] Suspended the VM {{(pid=63279) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1166}} [ 2158.847975] env[63279]: DEBUG nova.compute.manager [None req-d76172fe-0a30-49a4-88fe-65e26705614d tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] [instance: 2f5e22f6-ba70-4848-965b-eb1553115323] Checking state {{(pid=63279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2158.852311] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1046297b-f256-457b-aae3-055d191a2aa1 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
2158.878458] env[63279]: DEBUG nova.virt.hardware [None req-45c03385-d84a-4fe1-8f36-6d1fe6e38b41 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-13T17:30:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=,min_ram=,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2158.878806] env[63279]: DEBUG nova.virt.hardware [None req-45c03385-d84a-4fe1-8f36-6d1fe6e38b41 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Flavor limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2158.879015] env[63279]: DEBUG nova.virt.hardware [None req-45c03385-d84a-4fe1-8f36-6d1fe6e38b41 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Image limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2158.879272] env[63279]: DEBUG nova.virt.hardware [None req-45c03385-d84a-4fe1-8f36-6d1fe6e38b41 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Flavor pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2158.879796] env[63279]: DEBUG nova.virt.hardware [None req-45c03385-d84a-4fe1-8f36-6d1fe6e38b41 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Image pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2158.880043] env[63279]: DEBUG nova.virt.hardware [None req-45c03385-d84a-4fe1-8f36-6d1fe6e38b41 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2158.880455] env[63279]: DEBUG nova.virt.hardware [None req-45c03385-d84a-4fe1-8f36-6d1fe6e38b41 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 2158.880665] env[63279]: DEBUG nova.virt.hardware [None req-45c03385-d84a-4fe1-8f36-6d1fe6e38b41 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 2158.880900] env[63279]: DEBUG nova.virt.hardware [None req-45c03385-d84a-4fe1-8f36-6d1fe6e38b41 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Got 1 possible topologies {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 
2158.881294] env[63279]: DEBUG nova.virt.hardware [None req-45c03385-d84a-4fe1-8f36-6d1fe6e38b41 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2158.881535] env[63279]: DEBUG nova.virt.hardware [None req-45c03385-d84a-4fe1-8f36-6d1fe6e38b41 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2158.889510] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-45c03385-d84a-4fe1-8f36-6d1fe6e38b41 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] [instance: eca98392-98be-405b-b799-463ef9ee3dc8] Reconfiguring VM to attach interface {{(pid=63279) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1923}} [ 2158.892213] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-62806428-5c10-4d81-9af6-6092b017fbac {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2158.926337] env[63279]: DEBUG oslo_vmware.api [None req-9fa89649-cf23-4a4b-9d0e-97ebfb015392 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Task: {'id': task-2087647, 'name': ReconfigVM_Task, 'duration_secs': 0.46855} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2158.927698] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-9fa89649-cf23-4a4b-9d0e-97ebfb015392 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] [instance: 4ce17bdb-4bed-4e06-af13-e4097b55e17d] Reconfigured VM instance instance-0000004b to detach disk 2001 {{(pid=63279) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 2158.927900] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-9fa89649-cf23-4a4b-9d0e-97ebfb015392 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] [instance: 4ce17bdb-4bed-4e06-af13-e4097b55e17d] Powering on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2158.928256] env[63279]: DEBUG oslo_vmware.api [None req-45c03385-d84a-4fe1-8f36-6d1fe6e38b41 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Waiting for the task: (returnval){ [ 2158.928256] env[63279]: value = "task-2087649" [ 2158.928256] env[63279]: _type = "Task" [ 2158.928256] env[63279]: } to complete. 
{{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2158.929937] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-618c5690-d855-4e53-b7b1-217ac97e8a66 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2158.941277] env[63279]: DEBUG oslo_vmware.api [None req-45c03385-d84a-4fe1-8f36-6d1fe6e38b41 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Task: {'id': task-2087649, 'name': ReconfigVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2158.943928] env[63279]: DEBUG oslo_vmware.api [None req-9fa89649-cf23-4a4b-9d0e-97ebfb015392 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Waiting for the task: (returnval){ [ 2158.943928] env[63279]: value = "task-2087650" [ 2158.943928] env[63279]: _type = "Task" [ 2158.943928] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2158.952680] env[63279]: DEBUG oslo_vmware.api [None req-9fa89649-cf23-4a4b-9d0e-97ebfb015392 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Task: {'id': task-2087650, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2159.017891] env[63279]: DEBUG oslo_concurrency.lockutils [None req-718a590a-91c9-4ba7-b80f-c0d0ab309a1d tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.409s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2159.018512] env[63279]: DEBUG nova.compute.manager [None req-718a590a-91c9-4ba7-b80f-c0d0ab309a1d tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 686a7ce2-2d07-411e-91d6-0471c55c3728] Start building networks asynchronously for instance. 
{{(pid=63279) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 2159.023392] env[63279]: DEBUG oslo_concurrency.lockutils [None req-ad336584-5e82-4338-9e96-d69e82f0c758 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 6.877s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2159.023682] env[63279]: DEBUG oslo_concurrency.lockutils [None req-ad336584-5e82-4338-9e96-d69e82f0c758 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2159.025790] env[63279]: DEBUG oslo_concurrency.lockutils [None req-76da4d21-f26f-47ac-ad08-4cf713f445bd tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 5.885s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2159.026050] env[63279]: DEBUG nova.objects.instance [None req-76da4d21-f26f-47ac-ad08-4cf713f445bd tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Lazy-loading 'resources' on Instance uuid 5656c853-ac83-47be-83c4-979a9e87ab91 {{(pid=63279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2159.057673] env[63279]: INFO nova.scheduler.client.report [None req-ad336584-5e82-4338-9e96-d69e82f0c758 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Deleted allocations for instance b981ac83-6c23-4d44-bd28-12da30d746bd [ 2159.080970] env[63279]: DEBUG nova.virt.hardware [None req-b6feae97-fa7d-41e8-b8d5-106af4509856 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-13T17:30:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-13T17:30:01Z,direct_url=,disk_format='vmdk',id=30887889-e45b-4f67-8b3c-16216e594a90,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a935e86eee0a4c38adfe0367d2097a61',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-13T17:30:02Z,virtual_size=,visibility=), allow threads: False {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2159.088019] env[63279]: DEBUG nova.virt.hardware [None req-b6feae97-fa7d-41e8-b8d5-106af4509856 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Flavor limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2159.088019] env[63279]: DEBUG nova.virt.hardware [None req-b6feae97-fa7d-41e8-b8d5-106af4509856 tempest-ServerDiskConfigTestJSON-726033015 
tempest-ServerDiskConfigTestJSON-726033015-project-member] Image limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2159.088019] env[63279]: DEBUG nova.virt.hardware [None req-b6feae97-fa7d-41e8-b8d5-106af4509856 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Flavor pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2159.088019] env[63279]: DEBUG nova.virt.hardware [None req-b6feae97-fa7d-41e8-b8d5-106af4509856 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Image pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2159.088019] env[63279]: DEBUG nova.virt.hardware [None req-b6feae97-fa7d-41e8-b8d5-106af4509856 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2159.088019] env[63279]: DEBUG nova.virt.hardware [None req-b6feae97-fa7d-41e8-b8d5-106af4509856 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 2159.088019] env[63279]: DEBUG nova.virt.hardware [None req-b6feae97-fa7d-41e8-b8d5-106af4509856 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 2159.088019] env[63279]: DEBUG nova.virt.hardware [None req-b6feae97-fa7d-41e8-b8d5-106af4509856 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Got 1 possible topologies {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2159.088019] env[63279]: DEBUG nova.virt.hardware [None req-b6feae97-fa7d-41e8-b8d5-106af4509856 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2159.088019] env[63279]: DEBUG nova.virt.hardware [None req-b6feae97-fa7d-41e8-b8d5-106af4509856 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2159.088019] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b49b0b3f-41a3-4bda-84ba-b5ed02802422 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2159.096035] env[63279]: DEBUG nova.network.neutron [None req-b6feae97-fa7d-41e8-b8d5-106af4509856 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: 736ab3ac-b9e0-4f9e-885b-765ca7a92ed0] Successfully updated port: 5a61eb6f-d235-4f00-a65b-76f20c2c3d8d {{(pid=63279) _update_port 
/opt/stack/nova/nova/network/neutron.py:586}} [ 2159.104773] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6419c750-186e-4727-8c1d-d9399ee523f2 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2159.234248] env[63279]: DEBUG oslo_vmware.api [None req-3b4d6d87-fd3c-4686-b1f3-07b747c233dd tempest-ServerActionsV293TestJSON-360186716 tempest-ServerActionsV293TestJSON-360186716-project-member] Task: {'id': task-2087648, 'name': PowerOffVM_Task, 'duration_secs': 0.322782} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2159.234546] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-3b4d6d87-fd3c-4686-b1f3-07b747c233dd tempest-ServerActionsV293TestJSON-360186716 tempest-ServerActionsV293TestJSON-360186716-project-member] [instance: f375b54b-f9de-4529-b752-52c240aed532] Powered off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2159.234752] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-3b4d6d87-fd3c-4686-b1f3-07b747c233dd tempest-ServerActionsV293TestJSON-360186716 tempest-ServerActionsV293TestJSON-360186716-project-member] [instance: f375b54b-f9de-4529-b752-52c240aed532] Volume detach. Driver type: vmdk {{(pid=63279) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 2159.234950] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-3b4d6d87-fd3c-4686-b1f3-07b747c233dd tempest-ServerActionsV293TestJSON-360186716 tempest-ServerActionsV293TestJSON-360186716-project-member] [instance: f375b54b-f9de-4529-b752-52c240aed532] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-427536', 'volume_id': '29ec2591-5e66-4668-ba47-c425dc65074c', 'name': 'volume-29ec2591-5e66-4668-ba47-c425dc65074c', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'f375b54b-f9de-4529-b752-52c240aed532', 'attached_at': '', 'detached_at': '', 'volume_id': '29ec2591-5e66-4668-ba47-c425dc65074c', 'serial': '29ec2591-5e66-4668-ba47-c425dc65074c'} {{(pid=63279) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 2159.237215] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9903c2b-9d34-4d2f-8786-09902f5bd518 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2159.263411] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-782e72c6-780d-4ae9-826a-6602e6d86a9f {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2159.271580] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55b1485a-c0d7-4298-bcb0-466319a577b4 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2159.292437] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-480da7e3-b9e9-4b5a-b905-0ea8347a6f3e {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2159.309572] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None 
req-3b4d6d87-fd3c-4686-b1f3-07b747c233dd tempest-ServerActionsV293TestJSON-360186716 tempest-ServerActionsV293TestJSON-360186716-project-member] The volume has not been displaced from its original location: [datastore1] volume-29ec2591-5e66-4668-ba47-c425dc65074c/volume-29ec2591-5e66-4668-ba47-c425dc65074c.vmdk. No consolidation needed. {{(pid=63279) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 2159.315320] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-3b4d6d87-fd3c-4686-b1f3-07b747c233dd tempest-ServerActionsV293TestJSON-360186716 tempest-ServerActionsV293TestJSON-360186716-project-member] [instance: f375b54b-f9de-4529-b752-52c240aed532] Reconfiguring VM instance instance-00000024 to detach disk 2000 {{(pid=63279) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 2159.319928] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-391d1408-763c-43ab-89fb-5caca6a1b07d {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2159.339394] env[63279]: DEBUG oslo_vmware.api [None req-3b4d6d87-fd3c-4686-b1f3-07b747c233dd tempest-ServerActionsV293TestJSON-360186716 tempest-ServerActionsV293TestJSON-360186716-project-member] Waiting for the task: (returnval){ [ 2159.339394] env[63279]: value = "task-2087651" [ 2159.339394] env[63279]: _type = "Task" [ 2159.339394] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2159.349627] env[63279]: DEBUG oslo_vmware.api [None req-3b4d6d87-fd3c-4686-b1f3-07b747c233dd tempest-ServerActionsV293TestJSON-360186716 tempest-ServerActionsV293TestJSON-360186716-project-member] Task: {'id': task-2087651, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2159.444221] env[63279]: DEBUG oslo_vmware.api [None req-45c03385-d84a-4fe1-8f36-6d1fe6e38b41 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Task: {'id': task-2087649, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2159.454457] env[63279]: DEBUG oslo_vmware.api [None req-9fa89649-cf23-4a4b-9d0e-97ebfb015392 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Task: {'id': task-2087650, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2159.529050] env[63279]: DEBUG nova.compute.utils [None req-718a590a-91c9-4ba7-b80f-c0d0ab309a1d tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Using /dev/sd instead of None {{(pid=63279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2159.534810] env[63279]: DEBUG nova.compute.manager [None req-718a590a-91c9-4ba7-b80f-c0d0ab309a1d tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 686a7ce2-2d07-411e-91d6-0471c55c3728] Allocating IP information in the background. 
{{(pid=63279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 2159.535022] env[63279]: DEBUG nova.network.neutron [None req-718a590a-91c9-4ba7-b80f-c0d0ab309a1d tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 686a7ce2-2d07-411e-91d6-0471c55c3728] allocate_for_instance() {{(pid=63279) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2159.572402] env[63279]: DEBUG oslo_concurrency.lockutils [None req-ad336584-5e82-4338-9e96-d69e82f0c758 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Lock "b981ac83-6c23-4d44-bd28-12da30d746bd" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 17.291s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2159.593033] env[63279]: DEBUG oslo_vmware.rw_handles [None req-a2668d1a-4c43-4202-b04d-63b7f5aec853 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Completed reading data from the image iterator. {{(pid=63279) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2159.593282] env[63279]: DEBUG oslo_vmware.rw_handles [None req-a2668d1a-4c43-4202-b04d-63b7f5aec853 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5293819a-8485-91f2-4d6e-a3ef87cdaa6d/disk-0.vmdk. {{(pid=63279) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 2159.594463] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bab0644a-a489-4853-a81d-d264b2f01f8b {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2159.602023] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b6feae97-fa7d-41e8-b8d5-106af4509856 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Acquiring lock "refresh_cache-736ab3ac-b9e0-4f9e-885b-765ca7a92ed0" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2159.602252] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b6feae97-fa7d-41e8-b8d5-106af4509856 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Acquired lock "refresh_cache-736ab3ac-b9e0-4f9e-885b-765ca7a92ed0" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2159.602411] env[63279]: DEBUG nova.network.neutron [None req-b6feae97-fa7d-41e8-b8d5-106af4509856 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: 736ab3ac-b9e0-4f9e-885b-765ca7a92ed0] Building network info cache for instance {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2159.607383] env[63279]: DEBUG nova.policy [None req-718a590a-91c9-4ba7-b80f-c0d0ab309a1d tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'efd8e2a39ad843b79762189fcc8aa8ad', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '996f8d6e14a14ac39f207eced547ef33', 'project_domain_id': 
'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63279) authorize /opt/stack/nova/nova/policy.py:192}} [ 2159.612439] env[63279]: DEBUG oslo_vmware.rw_handles [None req-a2668d1a-4c43-4202-b04d-63b7f5aec853 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5293819a-8485-91f2-4d6e-a3ef87cdaa6d/disk-0.vmdk is in state: ready. {{(pid=63279) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 2159.612439] env[63279]: DEBUG oslo_vmware.rw_handles [None req-a2668d1a-4c43-4202-b04d-63b7f5aec853 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Releasing lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5293819a-8485-91f2-4d6e-a3ef87cdaa6d/disk-0.vmdk. {{(pid=63279) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 2159.612439] env[63279]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-180d4671-27cd-4dc8-a30e-2ded9bf48159 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2159.664340] env[63279]: DEBUG nova.network.neutron [-] [instance: 6388f912-ae70-4e8f-b8e4-ceb02e0f8a51] Updating instance_info_cache with network_info: [] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2159.682520] env[63279]: DEBUG nova.network.neutron [req-277e746e-4256-41c0-bb4d-5d9212582271 req-f8b928dc-519e-4011-aaed-9938eb0905c3 service nova] [instance: eca98392-98be-405b-b799-463ef9ee3dc8] Updated VIF entry in instance network info cache for port 753324ac-cff9-4577-92da-27cf8cceab06. 
{{(pid=63279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2159.682520] env[63279]: DEBUG nova.network.neutron [req-277e746e-4256-41c0-bb4d-5d9212582271 req-f8b928dc-519e-4011-aaed-9938eb0905c3 service nova] [instance: eca98392-98be-405b-b799-463ef9ee3dc8] Updating instance_info_cache with network_info: [{"id": "d609f907-0c7e-4293-8b21-721e712e8dc2", "address": "fa:16:3e:0e:eb:3d", "network": {"id": "e0e614b7-de4b-485e-8824-582faae8febd", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1102025073-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.184", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b5b21bc5072e4945a19a782dd9561709", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "41278529-8bd2-44a1-97c8-03967faa3ff7", "external-id": "nsx-vlan-transportzone-749", "segmentation_id": 749, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd609f907-0c", "ovs_interfaceid": "d609f907-0c7e-4293-8b21-721e712e8dc2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "753324ac-cff9-4577-92da-27cf8cceab06", "address": "fa:16:3e:58:99:55", "network": {"id": "e0e614b7-de4b-485e-8824-582faae8febd", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1102025073-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b5b21bc5072e4945a19a782dd9561709", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "41278529-8bd2-44a1-97c8-03967faa3ff7", "external-id": "nsx-vlan-transportzone-749", "segmentation_id": 749, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap753324ac-cf", "ovs_interfaceid": "753324ac-cff9-4577-92da-27cf8cceab06", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2159.817864] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a32bf65-8094-4909-9e98-22e25c7e515f {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2159.825726] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0e61bcc-cb68-4527-bdd3-fcbd99df76e4 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2159.863115] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-d25b4e43-3f3f-4b25-8431-6659a7ac413f {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2159.872583] env[63279]: DEBUG oslo_vmware.api [None req-3b4d6d87-fd3c-4686-b1f3-07b747c233dd tempest-ServerActionsV293TestJSON-360186716 tempest-ServerActionsV293TestJSON-360186716-project-member] Task: {'id': task-2087651, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2159.873845] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d6c5788-23a4-4cf1-ad63-1fc5c05a4065 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2159.888546] env[63279]: DEBUG nova.compute.provider_tree [None req-76da4d21-f26f-47ac-ad08-4cf713f445bd tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Inventory has not changed in ProviderTree for provider: 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2159.944149] env[63279]: DEBUG oslo_vmware.api [None req-45c03385-d84a-4fe1-8f36-6d1fe6e38b41 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Task: {'id': task-2087649, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2159.954245] env[63279]: DEBUG oslo_vmware.api [None req-9fa89649-cf23-4a4b-9d0e-97ebfb015392 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Task: {'id': task-2087650, 'name': PowerOnVM_Task} progress is 89%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2160.034586] env[63279]: DEBUG nova.compute.manager [None req-718a590a-91c9-4ba7-b80f-c0d0ab309a1d tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 686a7ce2-2d07-411e-91d6-0471c55c3728] Start building block device mappings for instance. {{(pid=63279) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 2160.148728] env[63279]: DEBUG nova.network.neutron [None req-b6feae97-fa7d-41e8-b8d5-106af4509856 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: 736ab3ac-b9e0-4f9e-885b-765ca7a92ed0] Instance cache missing network info. {{(pid=63279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2160.166483] env[63279]: INFO nova.compute.manager [-] [instance: 6388f912-ae70-4e8f-b8e4-ceb02e0f8a51] Took 1.43 seconds to deallocate network for instance. 
[ 2160.182343] env[63279]: DEBUG nova.network.neutron [None req-718a590a-91c9-4ba7-b80f-c0d0ab309a1d tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 686a7ce2-2d07-411e-91d6-0471c55c3728] Successfully created port: 55a22eb2-68fd-4cc1-8372-6fed483f16d0 {{(pid=63279) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2160.184599] env[63279]: DEBUG oslo_concurrency.lockutils [req-277e746e-4256-41c0-bb4d-5d9212582271 req-f8b928dc-519e-4011-aaed-9938eb0905c3 service nova] Releasing lock "refresh_cache-eca98392-98be-405b-b799-463ef9ee3dc8" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2160.184837] env[63279]: DEBUG nova.compute.manager [req-277e746e-4256-41c0-bb4d-5d9212582271 req-f8b928dc-519e-4011-aaed-9938eb0905c3 service nova] [instance: 5b5f87cb-cf35-418f-b5bd-b953524a285c] Received event network-vif-deleted-5576a5f4-6c18-428c-82f9-6cedc1a2b828 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2160.310870] env[63279]: DEBUG nova.compute.manager [req-699c4d7b-01ff-47f9-8750-a0b630e21360 req-97ac9dce-2cad-4d6a-a5fd-68c21dda6c21 service nova] [instance: 736ab3ac-b9e0-4f9e-885b-765ca7a92ed0] Received event network-vif-plugged-5a61eb6f-d235-4f00-a65b-76f20c2c3d8d {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2160.311101] env[63279]: DEBUG oslo_concurrency.lockutils [req-699c4d7b-01ff-47f9-8750-a0b630e21360 req-97ac9dce-2cad-4d6a-a5fd-68c21dda6c21 service nova] Acquiring lock "736ab3ac-b9e0-4f9e-885b-765ca7a92ed0-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2160.311328] env[63279]: DEBUG oslo_concurrency.lockutils [req-699c4d7b-01ff-47f9-8750-a0b630e21360 req-97ac9dce-2cad-4d6a-a5fd-68c21dda6c21 service nova] Lock "736ab3ac-b9e0-4f9e-885b-765ca7a92ed0-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2160.311493] env[63279]: DEBUG oslo_concurrency.lockutils [req-699c4d7b-01ff-47f9-8750-a0b630e21360 req-97ac9dce-2cad-4d6a-a5fd-68c21dda6c21 service nova] Lock "736ab3ac-b9e0-4f9e-885b-765ca7a92ed0-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2160.311693] env[63279]: DEBUG nova.compute.manager [req-699c4d7b-01ff-47f9-8750-a0b630e21360 req-97ac9dce-2cad-4d6a-a5fd-68c21dda6c21 service nova] [instance: 736ab3ac-b9e0-4f9e-885b-765ca7a92ed0] No waiting events found dispatching network-vif-plugged-5a61eb6f-d235-4f00-a65b-76f20c2c3d8d {{(pid=63279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 2160.311863] env[63279]: WARNING nova.compute.manager [req-699c4d7b-01ff-47f9-8750-a0b630e21360 req-97ac9dce-2cad-4d6a-a5fd-68c21dda6c21 service nova] [instance: 736ab3ac-b9e0-4f9e-885b-765ca7a92ed0] Received unexpected event network-vif-plugged-5a61eb6f-d235-4f00-a65b-76f20c2c3d8d for instance with vm_state building and task_state spawning. 
[ 2160.312053] env[63279]: DEBUG nova.compute.manager [req-699c4d7b-01ff-47f9-8750-a0b630e21360 req-97ac9dce-2cad-4d6a-a5fd-68c21dda6c21 service nova] [instance: 736ab3ac-b9e0-4f9e-885b-765ca7a92ed0] Received event network-changed-5a61eb6f-d235-4f00-a65b-76f20c2c3d8d {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2160.312251] env[63279]: DEBUG nova.compute.manager [req-699c4d7b-01ff-47f9-8750-a0b630e21360 req-97ac9dce-2cad-4d6a-a5fd-68c21dda6c21 service nova] [instance: 736ab3ac-b9e0-4f9e-885b-765ca7a92ed0] Refreshing instance network info cache due to event network-changed-5a61eb6f-d235-4f00-a65b-76f20c2c3d8d. {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11655}} [ 2160.312449] env[63279]: DEBUG oslo_concurrency.lockutils [req-699c4d7b-01ff-47f9-8750-a0b630e21360 req-97ac9dce-2cad-4d6a-a5fd-68c21dda6c21 service nova] Acquiring lock "refresh_cache-736ab3ac-b9e0-4f9e-885b-765ca7a92ed0" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2160.365458] env[63279]: DEBUG oslo_vmware.api [None req-3b4d6d87-fd3c-4686-b1f3-07b747c233dd tempest-ServerActionsV293TestJSON-360186716 tempest-ServerActionsV293TestJSON-360186716-project-member] Task: {'id': task-2087651, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2160.388521] env[63279]: DEBUG nova.network.neutron [None req-b6feae97-fa7d-41e8-b8d5-106af4509856 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: 736ab3ac-b9e0-4f9e-885b-765ca7a92ed0] Updating instance_info_cache with network_info: [{"id": "5a61eb6f-d235-4f00-a65b-76f20c2c3d8d", "address": "fa:16:3e:b2:2a:f4", "network": {"id": "d673eabe-0173-4c69-9163-88f4d673ca51", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1798523777-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7c98de1240f14b058e58f6a707096ef1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a555680e-4721-4509-97e4-ced9dc17c13e", "external-id": "nsx-vlan-transportzone-4", "segmentation_id": 4, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5a61eb6f-d2", "ovs_interfaceid": "5a61eb6f-d235-4f00-a65b-76f20c2c3d8d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2160.391753] env[63279]: DEBUG nova.scheduler.client.report [None req-76da4d21-f26f-47ac-ad08-4cf713f445bd tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Inventory has not changed for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 
1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2160.445038] env[63279]: DEBUG oslo_vmware.api [None req-45c03385-d84a-4fe1-8f36-6d1fe6e38b41 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Task: {'id': task-2087649, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2160.455320] env[63279]: DEBUG oslo_vmware.api [None req-9fa89649-cf23-4a4b-9d0e-97ebfb015392 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Task: {'id': task-2087650, 'name': PowerOnVM_Task} progress is 89%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2160.490702] env[63279]: INFO nova.compute.manager [None req-10cf71d9-81cc-488e-a857-1d057f27f6e7 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] [instance: 2f5e22f6-ba70-4848-965b-eb1553115323] Resuming [ 2160.491396] env[63279]: DEBUG nova.objects.instance [None req-10cf71d9-81cc-488e-a857-1d057f27f6e7 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Lazy-loading 'flavor' on Instance uuid 2f5e22f6-ba70-4848-965b-eb1553115323 {{(pid=63279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2160.676253] env[63279]: DEBUG oslo_concurrency.lockutils [None req-8d343f0d-29ec-4ca4-9558-62f940177dd9 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2160.865709] env[63279]: DEBUG oslo_vmware.api [None req-3b4d6d87-fd3c-4686-b1f3-07b747c233dd tempest-ServerActionsV293TestJSON-360186716 tempest-ServerActionsV293TestJSON-360186716-project-member] Task: {'id': task-2087651, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2160.895027] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b6feae97-fa7d-41e8-b8d5-106af4509856 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Releasing lock "refresh_cache-736ab3ac-b9e0-4f9e-885b-765ca7a92ed0" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2160.895027] env[63279]: DEBUG nova.compute.manager [None req-b6feae97-fa7d-41e8-b8d5-106af4509856 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: 736ab3ac-b9e0-4f9e-885b-765ca7a92ed0] Instance network_info: |[{"id": "5a61eb6f-d235-4f00-a65b-76f20c2c3d8d", "address": "fa:16:3e:b2:2a:f4", "network": {"id": "d673eabe-0173-4c69-9163-88f4d673ca51", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1798523777-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7c98de1240f14b058e58f6a707096ef1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a555680e-4721-4509-97e4-ced9dc17c13e", "external-id": "nsx-vlan-transportzone-4", "segmentation_id": 4, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5a61eb6f-d2", "ovs_interfaceid": "5a61eb6f-d235-4f00-a65b-76f20c2c3d8d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 2160.895027] env[63279]: DEBUG oslo_concurrency.lockutils [req-699c4d7b-01ff-47f9-8750-a0b630e21360 req-97ac9dce-2cad-4d6a-a5fd-68c21dda6c21 service nova] Acquired lock "refresh_cache-736ab3ac-b9e0-4f9e-885b-765ca7a92ed0" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2160.895027] env[63279]: DEBUG nova.network.neutron [req-699c4d7b-01ff-47f9-8750-a0b630e21360 req-97ac9dce-2cad-4d6a-a5fd-68c21dda6c21 service nova] [instance: 736ab3ac-b9e0-4f9e-885b-765ca7a92ed0] Refreshing network info cache for port 5a61eb6f-d235-4f00-a65b-76f20c2c3d8d {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2160.895027] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-b6feae97-fa7d-41e8-b8d5-106af4509856 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: 736ab3ac-b9e0-4f9e-885b-765ca7a92ed0] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b2:2a:f4', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a555680e-4721-4509-97e4-ced9dc17c13e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '5a61eb6f-d235-4f00-a65b-76f20c2c3d8d', 'vif_model': 'vmxnet3'}] {{(pid=63279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2160.912027] env[63279]: DEBUG oslo.service.loopingcall [None req-b6feae97-fa7d-41e8-b8d5-106af4509856 tempest-ServerDiskConfigTestJSON-726033015 
tempest-ServerDiskConfigTestJSON-726033015-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2160.914121] env[63279]: DEBUG oslo_concurrency.lockutils [None req-76da4d21-f26f-47ac-ad08-4cf713f445bd tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.888s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2160.916210] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 736ab3ac-b9e0-4f9e-885b-765ca7a92ed0] Creating VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2160.917094] env[63279]: DEBUG oslo_concurrency.lockutils [None req-082a3afc-69fa-402f-b3d2-e85c1a1a4788 tempest-ServersTestMultiNic-465204663 tempest-ServersTestMultiNic-465204663-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 5.564s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2160.919017] env[63279]: INFO nova.compute.claims [None req-082a3afc-69fa-402f-b3d2-e85c1a1a4788 tempest-ServersTestMultiNic-465204663 tempest-ServersTestMultiNic-465204663-project-member] [instance: 75f5b49f-14e7-4a8e-a0cb-b955edc13dd5] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2160.921702] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0c0a5c7c-c8ba-436d-843a-2a9738f908cb {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2160.939134] env[63279]: INFO nova.scheduler.client.report [None req-76da4d21-f26f-47ac-ad08-4cf713f445bd tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Deleted allocations for instance 5656c853-ac83-47be-83c4-979a9e87ab91 [ 2160.951993] env[63279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2160.951993] env[63279]: value = "task-2087652" [ 2160.951993] env[63279]: _type = "Task" [ 2160.951993] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2160.957223] env[63279]: DEBUG oslo_vmware.api [None req-45c03385-d84a-4fe1-8f36-6d1fe6e38b41 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Task: {'id': task-2087649, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2160.963935] env[63279]: DEBUG oslo_vmware.api [None req-9fa89649-cf23-4a4b-9d0e-97ebfb015392 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Task: {'id': task-2087650, 'name': PowerOnVM_Task} progress is 100%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2160.972337] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087652, 'name': CreateVM_Task} progress is 6%. 
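Note: the repeated "Task: {...} progress is N%" entries above come from the driver polling vCenter task objects until they report success or error. The sketch below is not the oslo.vmware implementation; it is a minimal, hypothetical poll loop in plain Python that mirrors the behaviour visible in the log (poll at a fixed interval, report progress, stop on success, raise on error). The `fetch_task_info` callable and the `TaskInfo` shape are assumptions made for illustration only.

```python
import time
from dataclasses import dataclass
from typing import Callable

@dataclass
class TaskInfo:
    # Minimal stand-in for a vSphere TaskInfo object (assumed shape).
    state: str           # 'queued' | 'running' | 'success' | 'error'
    progress: int        # 0-100, as in the "progress is N%" log lines
    error: str | None = None

def wait_for_task(fetch_task_info: Callable[[], TaskInfo],
                  poll_interval: float = 0.5) -> None:
    """Poll a task until it completes, mirroring the log's _poll_task loop."""
    while True:
        info = fetch_task_info()
        if info.state == "success":
            return
        if info.state == "error":
            raise RuntimeError(f"task failed: {info.error}")
        # Tasks such as CreateVM_Task report intermediate progress (6%, 25%, ...).
        print(f"progress is {info.progress}%")
        time.sleep(poll_interval)
```

In the real driver the equivalent call is the session's wait_for_task helper; the log shows it waiting on task-2087652 (CreateVM_Task) through several progress updates until the task completes after roughly 2.4 seconds.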
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2161.050055] env[63279]: DEBUG nova.compute.manager [None req-718a590a-91c9-4ba7-b80f-c0d0ab309a1d tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 686a7ce2-2d07-411e-91d6-0471c55c3728] Start spawning the instance on the hypervisor. {{(pid=63279) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 2161.079255] env[63279]: DEBUG nova.virt.hardware [None req-718a590a-91c9-4ba7-b80f-c0d0ab309a1d tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-13T17:30:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-13T17:30:01Z,direct_url=,disk_format='vmdk',id=30887889-e45b-4f67-8b3c-16216e594a90,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a935e86eee0a4c38adfe0367d2097a61',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-13T17:30:02Z,virtual_size=,visibility=), allow threads: False {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2161.079671] env[63279]: DEBUG nova.virt.hardware [None req-718a590a-91c9-4ba7-b80f-c0d0ab309a1d tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Flavor limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2161.079947] env[63279]: DEBUG nova.virt.hardware [None req-718a590a-91c9-4ba7-b80f-c0d0ab309a1d tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Image limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2161.080306] env[63279]: DEBUG nova.virt.hardware [None req-718a590a-91c9-4ba7-b80f-c0d0ab309a1d tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Flavor pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2161.080581] env[63279]: DEBUG nova.virt.hardware [None req-718a590a-91c9-4ba7-b80f-c0d0ab309a1d tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Image pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2161.080861] env[63279]: DEBUG nova.virt.hardware [None req-718a590a-91c9-4ba7-b80f-c0d0ab309a1d tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2161.081237] env[63279]: DEBUG nova.virt.hardware [None req-718a590a-91c9-4ba7-b80f-c0d0ab309a1d tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63279) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:573}} [ 2161.081532] env[63279]: DEBUG nova.virt.hardware [None req-718a590a-91c9-4ba7-b80f-c0d0ab309a1d tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 2161.081802] env[63279]: DEBUG nova.virt.hardware [None req-718a590a-91c9-4ba7-b80f-c0d0ab309a1d tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Got 1 possible topologies {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2161.082156] env[63279]: DEBUG nova.virt.hardware [None req-718a590a-91c9-4ba7-b80f-c0d0ab309a1d tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2161.082419] env[63279]: DEBUG nova.virt.hardware [None req-718a590a-91c9-4ba7-b80f-c0d0ab309a1d tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2161.084419] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f72b24b1-af4c-4125-8e59-36bd72470ea1 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2161.096599] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ef378c1-cc39-4b0f-a4ea-b6b65870861b {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2161.163157] env[63279]: DEBUG nova.network.neutron [req-699c4d7b-01ff-47f9-8750-a0b630e21360 req-97ac9dce-2cad-4d6a-a5fd-68c21dda6c21 service nova] [instance: 736ab3ac-b9e0-4f9e-885b-765ca7a92ed0] Updated VIF entry in instance network info cache for port 5a61eb6f-d235-4f00-a65b-76f20c2c3d8d. 
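Note: the hardware.py entries above walk through topology selection for the 1-vCPU m1.nano flavor: with no flavor or image limits (0:0:0) the maxima default to 65536 sockets/cores/threads, and the only possible topology for one vCPU is 1:1:1. The snippet below is a simplified, hypothetical re-derivation of that enumeration, not Nova's `_get_possible_cpu_topologies`; it only demonstrates why a single vCPU yields exactly one topology.

```python
from collections import namedtuple

VirtCPUTopology = namedtuple("VirtCPUTopology", "sockets cores threads")

def possible_topologies(vcpus: int,
                        max_sockets: int = 65536,
                        max_cores: int = 65536,
                        max_threads: int = 65536):
    """Enumerate sockets*cores*threads combinations that exactly use `vcpus`."""
    for sockets in range(1, min(vcpus, max_sockets) + 1):
        for cores in range(1, min(vcpus, max_cores) + 1):
            for threads in range(1, min(vcpus, max_threads) + 1):
                if sockets * cores * threads == vcpus:
                    yield VirtCPUTopology(sockets, cores, threads)

# For vcpus=1 this yields a single topology, matching the
# "Got 1 possible topologies ... (cores=1,sockets=1,threads=1)" log lines.
print(list(possible_topologies(1)))
```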
{{(pid=63279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2161.163553] env[63279]: DEBUG nova.network.neutron [req-699c4d7b-01ff-47f9-8750-a0b630e21360 req-97ac9dce-2cad-4d6a-a5fd-68c21dda6c21 service nova] [instance: 736ab3ac-b9e0-4f9e-885b-765ca7a92ed0] Updating instance_info_cache with network_info: [{"id": "5a61eb6f-d235-4f00-a65b-76f20c2c3d8d", "address": "fa:16:3e:b2:2a:f4", "network": {"id": "d673eabe-0173-4c69-9163-88f4d673ca51", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1798523777-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7c98de1240f14b058e58f6a707096ef1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a555680e-4721-4509-97e4-ced9dc17c13e", "external-id": "nsx-vlan-transportzone-4", "segmentation_id": 4, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5a61eb6f-d2", "ovs_interfaceid": "5a61eb6f-d235-4f00-a65b-76f20c2c3d8d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2161.366432] env[63279]: DEBUG oslo_vmware.api [None req-3b4d6d87-fd3c-4686-b1f3-07b747c233dd tempest-ServerActionsV293TestJSON-360186716 tempest-ServerActionsV293TestJSON-360186716-project-member] Task: {'id': task-2087651, 'name': ReconfigVM_Task} progress is 99%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2161.452054] env[63279]: DEBUG oslo_vmware.api [None req-45c03385-d84a-4fe1-8f36-6d1fe6e38b41 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Task: {'id': task-2087649, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2161.452516] env[63279]: DEBUG oslo_concurrency.lockutils [None req-76da4d21-f26f-47ac-ad08-4cf713f445bd tempest-MigrationsAdminTest-1378842037 tempest-MigrationsAdminTest-1378842037-project-member] Lock "5656c853-ac83-47be-83c4-979a9e87ab91" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 13.734s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2161.463858] env[63279]: DEBUG oslo_vmware.api [None req-9fa89649-cf23-4a4b-9d0e-97ebfb015392 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Task: {'id': task-2087650, 'name': PowerOnVM_Task} progress is 100%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2161.469894] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087652, 'name': CreateVM_Task} progress is 25%. 
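Note: the instance_info_cache update above stores the port's network_info as a JSON-like list of VIF dictionaries. As a quick illustration (not Nova's NetworkInfo model), the sketch below pulls out the fields most often needed when reading these entries (MAC, fixed IPs, MTU, segmentation ID); the `vif` literal is abbreviated from the entry logged for port 5a61eb6f-d235-4f00-a65b-76f20c2c3d8d.

```python
# Abbreviated copy of one VIF entry from the instance_info_cache above.
vif = {
    "id": "5a61eb6f-d235-4f00-a65b-76f20c2c3d8d",
    "address": "fa:16:3e:b2:2a:f4",
    "type": "ovs",
    "devname": "tap5a61eb6f-d2",
    "network": {
        "id": "d673eabe-0173-4c69-9163-88f4d673ca51",
        "bridge": "br-int",
        "subnets": [{
            "cidr": "192.168.128.0/28",
            "ips": [{"address": "192.168.128.12", "type": "fixed"}],
        }],
        "meta": {"mtu": 8950},
    },
    "details": {"segmentation_id": 4},
}

def summarize_vif(vif: dict) -> dict:
    """Flatten the nested network_info structure into a small summary."""
    fixed_ips = [
        ip["address"]
        for subnet in vif["network"]["subnets"]
        for ip in subnet["ips"]
        if ip.get("type") == "fixed"
    ]
    return {
        "port_id": vif["id"],
        "mac": vif["address"],
        "fixed_ips": fixed_ips,
        "mtu": vif["network"]["meta"].get("mtu"),
        "segmentation_id": vif["details"].get("segmentation_id"),
        "devname": vif["devname"],
    }

print(summarize_vif(vif))
```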
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2161.666826] env[63279]: DEBUG oslo_concurrency.lockutils [req-699c4d7b-01ff-47f9-8750-a0b630e21360 req-97ac9dce-2cad-4d6a-a5fd-68c21dda6c21 service nova] Releasing lock "refresh_cache-736ab3ac-b9e0-4f9e-885b-765ca7a92ed0" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2161.667075] env[63279]: DEBUG nova.compute.manager [req-699c4d7b-01ff-47f9-8750-a0b630e21360 req-97ac9dce-2cad-4d6a-a5fd-68c21dda6c21 service nova] [instance: 6388f912-ae70-4e8f-b8e4-ceb02e0f8a51] Received event network-vif-deleted-bc9f3899-95c1-4e79-b121-03c9a2c0bc44 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2161.756688] env[63279]: DEBUG oslo_vmware.rw_handles [None req-a2668d1a-4c43-4202-b04d-63b7f5aec853 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Closed VMDK write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5293819a-8485-91f2-4d6e-a3ef87cdaa6d/disk-0.vmdk. {{(pid=63279) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 2161.756985] env[63279]: INFO nova.virt.vmwareapi.images [None req-a2668d1a-4c43-4202-b04d-63b7f5aec853 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] [instance: 4acfb474-d861-467a-983c-0dd5641e66f3] Downloaded image file data d2d04a47-fc99-4ff0-adbc-a27616252dd0 [ 2161.762131] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b061286-44af-4c12-8c18-120e7d8f2ad0 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2161.778390] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8ec5a6d8-97b5-48ab-82c2-adbb7b216ff1 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2161.799410] env[63279]: DEBUG nova.network.neutron [None req-718a590a-91c9-4ba7-b80f-c0d0ab309a1d tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 686a7ce2-2d07-411e-91d6-0471c55c3728] Successfully updated port: 55a22eb2-68fd-4cc1-8372-6fed483f16d0 {{(pid=63279) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2161.809040] env[63279]: INFO nova.virt.vmwareapi.images [None req-a2668d1a-4c43-4202-b04d-63b7f5aec853 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] [instance: 4acfb474-d861-467a-983c-0dd5641e66f3] The imported VM was unregistered [ 2161.814462] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-a2668d1a-4c43-4202-b04d-63b7f5aec853 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] [instance: 4acfb474-d861-467a-983c-0dd5641e66f3] Caching image {{(pid=63279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2161.814462] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-a2668d1a-4c43-4202-b04d-63b7f5aec853 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Creating directory with path [datastore1] devstack-image-cache_base/d2d04a47-fc99-4ff0-adbc-a27616252dd0 {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2161.814462] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c4af8b39-e515-4bf1-9c3a-a4fd36b614c3 
{{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2161.826746] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-a2668d1a-4c43-4202-b04d-63b7f5aec853 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Created directory with path [datastore1] devstack-image-cache_base/d2d04a47-fc99-4ff0-adbc-a27616252dd0 {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2161.827550] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-a2668d1a-4c43-4202-b04d-63b7f5aec853 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Moving virtual disk from [datastore1] OSTACK_IMG_b6c868df-78dc-489b-8164-a7a2ef5181d6/OSTACK_IMG_b6c868df-78dc-489b-8164-a7a2ef5181d6.vmdk to [datastore1] devstack-image-cache_base/d2d04a47-fc99-4ff0-adbc-a27616252dd0/d2d04a47-fc99-4ff0-adbc-a27616252dd0.vmdk. {{(pid=63279) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 2161.827914] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-47581d1d-5d84-4be0-b30e-beb0dd98a262 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2161.837058] env[63279]: DEBUG oslo_vmware.api [None req-a2668d1a-4c43-4202-b04d-63b7f5aec853 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Waiting for the task: (returnval){ [ 2161.837058] env[63279]: value = "task-2087654" [ 2161.837058] env[63279]: _type = "Task" [ 2161.837058] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2161.846893] env[63279]: DEBUG oslo_vmware.api [None req-a2668d1a-4c43-4202-b04d-63b7f5aec853 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Task: {'id': task-2087654, 'name': MoveVirtualDisk_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2161.868800] env[63279]: DEBUG oslo_vmware.api [None req-3b4d6d87-fd3c-4686-b1f3-07b747c233dd tempest-ServerActionsV293TestJSON-360186716 tempest-ServerActionsV293TestJSON-360186716-project-member] Task: {'id': task-2087651, 'name': ReconfigVM_Task, 'duration_secs': 2.435462} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2161.868800] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-3b4d6d87-fd3c-4686-b1f3-07b747c233dd tempest-ServerActionsV293TestJSON-360186716 tempest-ServerActionsV293TestJSON-360186716-project-member] [instance: f375b54b-f9de-4529-b752-52c240aed532] Reconfigured VM instance instance-00000024 to detach disk 2000 {{(pid=63279) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 2161.873586] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3cdbba21-5541-4f9e-a54a-6fe4165ca57c {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2161.890229] env[63279]: DEBUG oslo_vmware.api [None req-3b4d6d87-fd3c-4686-b1f3-07b747c233dd tempest-ServerActionsV293TestJSON-360186716 tempest-ServerActionsV293TestJSON-360186716-project-member] Waiting for the task: (returnval){ [ 2161.890229] env[63279]: value = "task-2087655" [ 2161.890229] env[63279]: _type = "Task" [ 2161.890229] env[63279]: } to complete. 
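Note: the ImagesTestJSON entries above show the image-cache workflow for image d2d04a47-fc99-4ff0-adbc-a27616252dd0: the imported VMDK lands in a temporary OSTACK_IMG_* folder, a per-image directory is created under devstack-image-cache_base, and the disk is moved into it with MoveVirtualDisk_Task. The helper below only reconstructs the datastore path strings involved; it is a hypothetical illustration of the naming convention seen in the log, not the ds_util code.

```python
def image_cache_move_paths(datastore: str, tmp_folder: str, image_id: str) -> tuple[str, str]:
    """Return (source, destination) datastore paths for caching a fetched image."""
    src = f"[{datastore}] {tmp_folder}/{tmp_folder}.vmdk"
    dst = f"[{datastore}] devstack-image-cache_base/{image_id}/{image_id}.vmdk"
    return src, dst

src, dst = image_cache_move_paths(
    "datastore1",
    "OSTACK_IMG_b6c868df-78dc-489b-8164-a7a2ef5181d6",
    "d2d04a47-fc99-4ff0-adbc-a27616252dd0",
)
# Matches the "Moving virtual disk from [...] to [...]" log line above.
print(src)
print(dst)
```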
{{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2161.901826] env[63279]: DEBUG oslo_vmware.api [None req-3b4d6d87-fd3c-4686-b1f3-07b747c233dd tempest-ServerActionsV293TestJSON-360186716 tempest-ServerActionsV293TestJSON-360186716-project-member] Task: {'id': task-2087655, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2161.968691] env[63279]: DEBUG oslo_vmware.api [None req-45c03385-d84a-4fe1-8f36-6d1fe6e38b41 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Task: {'id': task-2087649, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2161.975707] env[63279]: DEBUG oslo_vmware.api [None req-9fa89649-cf23-4a4b-9d0e-97ebfb015392 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Task: {'id': task-2087650, 'name': PowerOnVM_Task, 'duration_secs': 2.819076} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2161.982290] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-9fa89649-cf23-4a4b-9d0e-97ebfb015392 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] [instance: 4ce17bdb-4bed-4e06-af13-e4097b55e17d] Powered on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2161.982555] env[63279]: DEBUG nova.compute.manager [None req-9fa89649-cf23-4a4b-9d0e-97ebfb015392 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] [instance: 4ce17bdb-4bed-4e06-af13-e4097b55e17d] Checking state {{(pid=63279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2161.982836] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087652, 'name': CreateVM_Task} progress is 25%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2161.983797] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ecc5fd53-3e82-4d1d-b28e-49231238bb0f {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2162.001168] env[63279]: DEBUG oslo_concurrency.lockutils [None req-10cf71d9-81cc-488e-a857-1d057f27f6e7 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Acquiring lock "refresh_cache-2f5e22f6-ba70-4848-965b-eb1553115323" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2162.001346] env[63279]: DEBUG oslo_concurrency.lockutils [None req-10cf71d9-81cc-488e-a857-1d057f27f6e7 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Acquired lock "refresh_cache-2f5e22f6-ba70-4848-965b-eb1553115323" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2162.001552] env[63279]: DEBUG nova.network.neutron [None req-10cf71d9-81cc-488e-a857-1d057f27f6e7 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] [instance: 2f5e22f6-ba70-4848-965b-eb1553115323] Building network info cache for instance {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2162.255702] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c388f403-795c-4198-8bec-a66152cc9f79 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2162.268323] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e73530e4-5599-4da8-92a1-1ec720bd80ff {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2162.305019] env[63279]: DEBUG oslo_concurrency.lockutils [None req-718a590a-91c9-4ba7-b80f-c0d0ab309a1d tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Acquiring lock "refresh_cache-686a7ce2-2d07-411e-91d6-0471c55c3728" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2162.305192] env[63279]: DEBUG oslo_concurrency.lockutils [None req-718a590a-91c9-4ba7-b80f-c0d0ab309a1d tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Acquired lock "refresh_cache-686a7ce2-2d07-411e-91d6-0471c55c3728" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2162.305374] env[63279]: DEBUG nova.network.neutron [None req-718a590a-91c9-4ba7-b80f-c0d0ab309a1d tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 686a7ce2-2d07-411e-91d6-0471c55c3728] Building network info cache for instance {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2162.307596] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff3ab1f6-97ef-4191-b196-35348467eec7 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2162.322201] env[63279]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26f90dce-88c7-4b1c-b20b-bf1f3c8b482e {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2162.343666] env[63279]: DEBUG nova.compute.provider_tree [None req-082a3afc-69fa-402f-b3d2-e85c1a1a4788 tempest-ServersTestMultiNic-465204663 tempest-ServersTestMultiNic-465204663-project-member] Inventory has not changed in ProviderTree for provider: 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2162.359830] env[63279]: DEBUG oslo_vmware.api [None req-a2668d1a-4c43-4202-b04d-63b7f5aec853 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Task: {'id': task-2087654, 'name': MoveVirtualDisk_Task} progress is 21%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2162.410346] env[63279]: DEBUG oslo_vmware.api [None req-3b4d6d87-fd3c-4686-b1f3-07b747c233dd tempest-ServerActionsV293TestJSON-360186716 tempest-ServerActionsV293TestJSON-360186716-project-member] Task: {'id': task-2087655, 'name': ReconfigVM_Task, 'duration_secs': 0.164673} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2162.410607] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-3b4d6d87-fd3c-4686-b1f3-07b747c233dd tempest-ServerActionsV293TestJSON-360186716 tempest-ServerActionsV293TestJSON-360186716-project-member] [instance: f375b54b-f9de-4529-b752-52c240aed532] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-427536', 'volume_id': '29ec2591-5e66-4668-ba47-c425dc65074c', 'name': 'volume-29ec2591-5e66-4668-ba47-c425dc65074c', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'f375b54b-f9de-4529-b752-52c240aed532', 'attached_at': '', 'detached_at': '', 'volume_id': '29ec2591-5e66-4668-ba47-c425dc65074c', 'serial': '29ec2591-5e66-4668-ba47-c425dc65074c'} {{(pid=63279) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 2162.410909] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-3b4d6d87-fd3c-4686-b1f3-07b747c233dd tempest-ServerActionsV293TestJSON-360186716 tempest-ServerActionsV293TestJSON-360186716-project-member] [instance: f375b54b-f9de-4529-b752-52c240aed532] Destroying instance {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2162.411744] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4aab0cd1-c54d-4f51-83a1-f46e013d990d {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2162.423013] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-3b4d6d87-fd3c-4686-b1f3-07b747c233dd tempest-ServerActionsV293TestJSON-360186716 tempest-ServerActionsV293TestJSON-360186716-project-member] [instance: f375b54b-f9de-4529-b752-52c240aed532] Unregistering the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2162.423375] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-adf1c811-cf09-45e6-99d5-6dfa05db85ad {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2162.451472] env[63279]: DEBUG oslo_vmware.api [None 
req-45c03385-d84a-4fe1-8f36-6d1fe6e38b41 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Task: {'id': task-2087649, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2162.474940] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087652, 'name': CreateVM_Task} progress is 25%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2162.513520] env[63279]: DEBUG nova.compute.manager [req-f501714d-0d88-494b-b4e5-68c4a4bd0fcb req-9a0b631a-c21c-4f6d-b7b6-219a329901f1 service nova] [instance: 686a7ce2-2d07-411e-91d6-0471c55c3728] Received event network-vif-plugged-55a22eb2-68fd-4cc1-8372-6fed483f16d0 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2162.513929] env[63279]: DEBUG oslo_concurrency.lockutils [req-f501714d-0d88-494b-b4e5-68c4a4bd0fcb req-9a0b631a-c21c-4f6d-b7b6-219a329901f1 service nova] Acquiring lock "686a7ce2-2d07-411e-91d6-0471c55c3728-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2162.514190] env[63279]: DEBUG oslo_concurrency.lockutils [req-f501714d-0d88-494b-b4e5-68c4a4bd0fcb req-9a0b631a-c21c-4f6d-b7b6-219a329901f1 service nova] Lock "686a7ce2-2d07-411e-91d6-0471c55c3728-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2162.514352] env[63279]: DEBUG oslo_concurrency.lockutils [req-f501714d-0d88-494b-b4e5-68c4a4bd0fcb req-9a0b631a-c21c-4f6d-b7b6-219a329901f1 service nova] Lock "686a7ce2-2d07-411e-91d6-0471c55c3728-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2162.514516] env[63279]: DEBUG nova.compute.manager [req-f501714d-0d88-494b-b4e5-68c4a4bd0fcb req-9a0b631a-c21c-4f6d-b7b6-219a329901f1 service nova] [instance: 686a7ce2-2d07-411e-91d6-0471c55c3728] No waiting events found dispatching network-vif-plugged-55a22eb2-68fd-4cc1-8372-6fed483f16d0 {{(pid=63279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 2162.514714] env[63279]: WARNING nova.compute.manager [req-f501714d-0d88-494b-b4e5-68c4a4bd0fcb req-9a0b631a-c21c-4f6d-b7b6-219a329901f1 service nova] [instance: 686a7ce2-2d07-411e-91d6-0471c55c3728] Received unexpected event network-vif-plugged-55a22eb2-68fd-4cc1-8372-6fed483f16d0 for instance with vm_state building and task_state spawning. [ 2162.514891] env[63279]: DEBUG nova.compute.manager [req-f501714d-0d88-494b-b4e5-68c4a4bd0fcb req-9a0b631a-c21c-4f6d-b7b6-219a329901f1 service nova] [instance: 686a7ce2-2d07-411e-91d6-0471c55c3728] Received event network-changed-55a22eb2-68fd-4cc1-8372-6fed483f16d0 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2162.515111] env[63279]: DEBUG nova.compute.manager [req-f501714d-0d88-494b-b4e5-68c4a4bd0fcb req-9a0b631a-c21c-4f6d-b7b6-219a329901f1 service nova] [instance: 686a7ce2-2d07-411e-91d6-0471c55c3728] Refreshing instance network info cache due to event network-changed-55a22eb2-68fd-4cc1-8372-6fed483f16d0. 
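Note: the network-vif-plugged handling above illustrates Nova's external instance events: Neutron notifies the compute manager that a port is plugged, a waiter keyed by (event name, port id) is popped, and when nobody is waiting (as here, during spawn) the event is logged as unexpected. The sketch below is a deliberately simplified analogue using a dictionary of `threading.Event` objects; `InstanceEventWaiter` is an invented name for illustration and is not a Nova class.

```python
import threading

class InstanceEventWaiter:
    """Toy analogue of waiting for an external event such as network-vif-plugged."""

    def __init__(self) -> None:
        self._lock = threading.Lock()
        self._events: dict[tuple[str, str], threading.Event] = {}

    def expect(self, name: str, tag: str) -> threading.Event:
        # Register interest before triggering the operation that produces the event.
        with self._lock:
            return self._events.setdefault((name, tag), threading.Event())

    def pop(self, name: str, tag: str) -> bool:
        # Called by the notification path; returns False for "unexpected" events.
        with self._lock:
            ev = self._events.pop((name, tag), None)
        if ev is None:
            return False  # analogous to the WARNING "Received unexpected event"
        ev.set()
        return True

waiter = InstanceEventWaiter()
port = "55a22eb2-68fd-4cc1-8372-6fed483f16d0"
print(waiter.pop("network-vif-plugged", port))  # False: nobody was waiting yet
ev = waiter.expect("network-vif-plugged", port)
print(waiter.pop("network-vif-plugged", port))  # True: the waiter is released
print(ev.is_set())
```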
{{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11655}} [ 2162.515301] env[63279]: DEBUG oslo_concurrency.lockutils [req-f501714d-0d88-494b-b4e5-68c4a4bd0fcb req-9a0b631a-c21c-4f6d-b7b6-219a329901f1 service nova] Acquiring lock "refresh_cache-686a7ce2-2d07-411e-91d6-0471c55c3728" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2162.728424] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-3b4d6d87-fd3c-4686-b1f3-07b747c233dd tempest-ServerActionsV293TestJSON-360186716 tempest-ServerActionsV293TestJSON-360186716-project-member] [instance: f375b54b-f9de-4529-b752-52c240aed532] Unregistered the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2162.728648] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-3b4d6d87-fd3c-4686-b1f3-07b747c233dd tempest-ServerActionsV293TestJSON-360186716 tempest-ServerActionsV293TestJSON-360186716-project-member] [instance: f375b54b-f9de-4529-b752-52c240aed532] Deleting contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2162.728816] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-3b4d6d87-fd3c-4686-b1f3-07b747c233dd tempest-ServerActionsV293TestJSON-360186716 tempest-ServerActionsV293TestJSON-360186716-project-member] Deleting the datastore file [datastore1] f375b54b-f9de-4529-b752-52c240aed532 {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2162.729079] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-de4d1c83-79ac-4fb9-9395-b0e739ab936e {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2162.739856] env[63279]: DEBUG oslo_vmware.api [None req-3b4d6d87-fd3c-4686-b1f3-07b747c233dd tempest-ServerActionsV293TestJSON-360186716 tempest-ServerActionsV293TestJSON-360186716-project-member] Waiting for the task: (returnval){ [ 2162.739856] env[63279]: value = "task-2087657" [ 2162.739856] env[63279]: _type = "Task" [ 2162.739856] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2162.757268] env[63279]: DEBUG oslo_vmware.api [None req-3b4d6d87-fd3c-4686-b1f3-07b747c233dd tempest-ServerActionsV293TestJSON-360186716 tempest-ServerActionsV293TestJSON-360186716-project-member] Task: {'id': task-2087657, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2162.769725] env[63279]: DEBUG nova.network.neutron [None req-10cf71d9-81cc-488e-a857-1d057f27f6e7 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] [instance: 2f5e22f6-ba70-4848-965b-eb1553115323] Updating instance_info_cache with network_info: [{"id": "8383ed07-21e7-43e0-82a2-0afc179ca995", "address": "fa:16:3e:b6:12:ab", "network": {"id": "a7b83e75-3b16-41db-9395-90dead128e80", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-756195345-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0fd8bdc7d8664db698236f56d82adcf0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "53955f0e-c162-4cef-8bd5-335b369c36b6", "external-id": "nsx-vlan-transportzone-623", "segmentation_id": 623, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8383ed07-21", "ovs_interfaceid": "8383ed07-21e7-43e0-82a2-0afc179ca995", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2162.838848] env[63279]: DEBUG nova.network.neutron [None req-718a590a-91c9-4ba7-b80f-c0d0ab309a1d tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 686a7ce2-2d07-411e-91d6-0471c55c3728] Instance cache missing network info. {{(pid=63279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2162.849892] env[63279]: DEBUG nova.scheduler.client.report [None req-082a3afc-69fa-402f-b3d2-e85c1a1a4788 tempest-ServersTestMultiNic-465204663 tempest-ServersTestMultiNic-465204663-project-member] Inventory has not changed for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2162.858585] env[63279]: DEBUG oslo_vmware.api [None req-a2668d1a-4c43-4202-b04d-63b7f5aec853 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Task: {'id': task-2087654, 'name': MoveVirtualDisk_Task} progress is 40%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2162.953972] env[63279]: DEBUG oslo_vmware.api [None req-45c03385-d84a-4fe1-8f36-6d1fe6e38b41 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Task: {'id': task-2087649, 'name': ReconfigVM_Task} progress is 99%. 
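Note: the scheduler report client above concludes "Inventory has not changed" by comparing the provider's current inventory with the payload it would send to Placement. The dictionary literal below is copied from the log line for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137; the comparison helper is a simplified illustration that treats an exact dict match as "unchanged" (the real client also normalizes defaults before comparing).

```python
# Inventory payload as logged for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137.
inventory = {
    "VCPU": {"total": 48, "reserved": 0, "min_unit": 1, "max_unit": 16,
             "step_size": 1, "allocation_ratio": 4.0},
    "MEMORY_MB": {"total": 196590, "reserved": 512, "min_unit": 1,
                  "max_unit": 65530, "step_size": 1, "allocation_ratio": 1.0},
    "DISK_GB": {"total": 400, "reserved": 0, "min_unit": 1, "max_unit": 169,
                "step_size": 1, "allocation_ratio": 1.0},
}

def inventory_changed(current: dict, proposed: dict) -> bool:
    """Simplified 'has the inventory changed?' check: any class or field differs."""
    return current != proposed

# Usable capacity per resource class is (total - reserved) * allocation_ratio,
# e.g. VCPU: (48 - 0) * 4.0 = 192 schedulable vCPUs on this provider.
print(inventory_changed(inventory, dict(inventory)))  # False -> no update sent
```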
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2162.982922] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087652, 'name': CreateVM_Task} progress is 99%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2163.011565] env[63279]: DEBUG nova.network.neutron [None req-718a590a-91c9-4ba7-b80f-c0d0ab309a1d tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 686a7ce2-2d07-411e-91d6-0471c55c3728] Updating instance_info_cache with network_info: [{"id": "55a22eb2-68fd-4cc1-8372-6fed483f16d0", "address": "fa:16:3e:6d:af:2f", "network": {"id": "1237aa69-f92f-4996-893d-4007ba590d1d", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-544783943-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "996f8d6e14a14ac39f207eced547ef33", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d88b750a-0e7d-4f16-8bd5-8e6d5743b720", "external-id": "nsx-vlan-transportzone-715", "segmentation_id": 715, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap55a22eb2-68", "ovs_interfaceid": "55a22eb2-68fd-4cc1-8372-6fed483f16d0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2163.254781] env[63279]: DEBUG oslo_vmware.api [None req-3b4d6d87-fd3c-4686-b1f3-07b747c233dd tempest-ServerActionsV293TestJSON-360186716 tempest-ServerActionsV293TestJSON-360186716-project-member] Task: {'id': task-2087657, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2163.273755] env[63279]: DEBUG oslo_concurrency.lockutils [None req-10cf71d9-81cc-488e-a857-1d057f27f6e7 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Releasing lock "refresh_cache-2f5e22f6-ba70-4848-965b-eb1553115323" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2163.274873] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5088f0a4-310a-41c7-8447-82d30bfd8496 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2163.285701] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-10cf71d9-81cc-488e-a857-1d057f27f6e7 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] [instance: 2f5e22f6-ba70-4848-965b-eb1553115323] Resuming the VM {{(pid=63279) resume /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1183}} [ 2163.285987] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2fb1fef7-b611-4cac-b40d-539aa911f1c9 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2163.296369] env[63279]: DEBUG oslo_vmware.api [None req-10cf71d9-81cc-488e-a857-1d057f27f6e7 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Waiting for the task: (returnval){ [ 2163.296369] env[63279]: value = "task-2087658" [ 2163.296369] env[63279]: _type = "Task" [ 2163.296369] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2163.310601] env[63279]: DEBUG oslo_vmware.api [None req-10cf71d9-81cc-488e-a857-1d057f27f6e7 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Task: {'id': task-2087658, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2163.355859] env[63279]: DEBUG oslo_vmware.api [None req-a2668d1a-4c43-4202-b04d-63b7f5aec853 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Task: {'id': task-2087654, 'name': MoveVirtualDisk_Task} progress is 60%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2163.360528] env[63279]: DEBUG oslo_concurrency.lockutils [None req-082a3afc-69fa-402f-b3d2-e85c1a1a4788 tempest-ServersTestMultiNic-465204663 tempest-ServersTestMultiNic-465204663-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.443s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2163.360528] env[63279]: DEBUG nova.compute.manager [None req-082a3afc-69fa-402f-b3d2-e85c1a1a4788 tempest-ServersTestMultiNic-465204663 tempest-ServersTestMultiNic-465204663-project-member] [instance: 75f5b49f-14e7-4a8e-a0cb-b955edc13dd5] Start building networks asynchronously for instance. 
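Note: the lockutils entries throughout this section (Acquiring / acquired after waiting N s / released after holding N s) come from oslo.concurrency's named locks, which the resource tracker uses to serialize claims and usage updates against "compute_resources". A minimal sketch of the same pattern, assuming oslo.concurrency is installed and using only the public `lockutils.synchronized` decorator and `lockutils.lock` context manager:

```python
from oslo_concurrency import lockutils

@lockutils.synchronized("compute_resources")
def instance_claim(instance_uuid: str) -> None:
    # Runs with the named lock held, so concurrent claims (like the 5.564s
    # wait seen in the log) queue up behind each other.
    print(f"claiming resources for {instance_uuid}")

def update_usage(instance_uuid: str) -> None:
    # Equivalent context-manager form of the same named lock.
    with lockutils.lock("compute_resources"):
        print(f"updating usage for {instance_uuid}")

instance_claim("75f5b49f-14e7-4a8e-a0cb-b955edc13dd5")
update_usage("75f5b49f-14e7-4a8e-a0cb-b955edc13dd5")
```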
{{(pid=63279) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 2163.363102] env[63279]: DEBUG oslo_concurrency.lockutils [None req-88931e76-94a7-4e04-9054-f6a3f97882cc tempest-ServersV294TestFqdnHostnames-110312040 tempest-ServersV294TestFqdnHostnames-110312040-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 6.749s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2163.363859] env[63279]: DEBUG nova.objects.instance [None req-88931e76-94a7-4e04-9054-f6a3f97882cc tempest-ServersV294TestFqdnHostnames-110312040 tempest-ServersV294TestFqdnHostnames-110312040-project-member] Lazy-loading 'resources' on Instance uuid 32dbef6d-d314-4fa6-972a-e7b1f22eb11d {{(pid=63279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2163.456341] env[63279]: DEBUG oslo_vmware.api [None req-45c03385-d84a-4fe1-8f36-6d1fe6e38b41 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Task: {'id': task-2087649, 'name': ReconfigVM_Task, 'duration_secs': 4.095276} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2163.457102] env[63279]: DEBUG oslo_concurrency.lockutils [None req-45c03385-d84a-4fe1-8f36-6d1fe6e38b41 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Releasing lock "eca98392-98be-405b-b799-463ef9ee3dc8" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2163.457265] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-45c03385-d84a-4fe1-8f36-6d1fe6e38b41 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] [instance: eca98392-98be-405b-b799-463ef9ee3dc8] Reconfigured VM to attach interface {{(pid=63279) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1937}} [ 2163.479867] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087652, 'name': CreateVM_Task, 'duration_secs': 2.366122} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2163.480089] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 736ab3ac-b9e0-4f9e-885b-765ca7a92ed0] Created VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2163.480741] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b6feae97-fa7d-41e8-b8d5-106af4509856 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2163.480945] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b6feae97-fa7d-41e8-b8d5-106af4509856 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2163.481298] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b6feae97-fa7d-41e8-b8d5-106af4509856 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2163.481854] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d82fcc18-c13e-42ec-a520-2f271b158b89 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2163.490625] env[63279]: DEBUG oslo_vmware.api [None req-b6feae97-fa7d-41e8-b8d5-106af4509856 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Waiting for the task: (returnval){ [ 2163.490625] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]5236543d-d53a-66cd-0041-039e08e4c67c" [ 2163.490625] env[63279]: _type = "Task" [ 2163.490625] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2163.504505] env[63279]: DEBUG oslo_vmware.api [None req-b6feae97-fa7d-41e8-b8d5-106af4509856 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]5236543d-d53a-66cd-0041-039e08e4c67c, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2163.515037] env[63279]: DEBUG oslo_concurrency.lockutils [None req-718a590a-91c9-4ba7-b80f-c0d0ab309a1d tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Releasing lock "refresh_cache-686a7ce2-2d07-411e-91d6-0471c55c3728" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2163.515239] env[63279]: DEBUG nova.compute.manager [None req-718a590a-91c9-4ba7-b80f-c0d0ab309a1d tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 686a7ce2-2d07-411e-91d6-0471c55c3728] Instance network_info: |[{"id": "55a22eb2-68fd-4cc1-8372-6fed483f16d0", "address": "fa:16:3e:6d:af:2f", "network": {"id": "1237aa69-f92f-4996-893d-4007ba590d1d", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-544783943-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "996f8d6e14a14ac39f207eced547ef33", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d88b750a-0e7d-4f16-8bd5-8e6d5743b720", "external-id": "nsx-vlan-transportzone-715", "segmentation_id": 715, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap55a22eb2-68", "ovs_interfaceid": "55a22eb2-68fd-4cc1-8372-6fed483f16d0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 2163.515602] env[63279]: DEBUG oslo_concurrency.lockutils [req-f501714d-0d88-494b-b4e5-68c4a4bd0fcb req-9a0b631a-c21c-4f6d-b7b6-219a329901f1 service nova] Acquired lock "refresh_cache-686a7ce2-2d07-411e-91d6-0471c55c3728" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2163.515796] env[63279]: DEBUG nova.network.neutron [req-f501714d-0d88-494b-b4e5-68c4a4bd0fcb req-9a0b631a-c21c-4f6d-b7b6-219a329901f1 service nova] [instance: 686a7ce2-2d07-411e-91d6-0471c55c3728] Refreshing network info cache for port 55a22eb2-68fd-4cc1-8372-6fed483f16d0 {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2163.517156] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-718a590a-91c9-4ba7-b80f-c0d0ab309a1d tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 686a7ce2-2d07-411e-91d6-0471c55c3728] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:6d:af:2f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'd88b750a-0e7d-4f16-8bd5-8e6d5743b720', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '55a22eb2-68fd-4cc1-8372-6fed483f16d0', 'vif_model': 'vmxnet3'}] {{(pid=63279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2163.526298] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-718a590a-91c9-4ba7-b80f-c0d0ab309a1d tempest-ServerActionsTestOtherB-605016737 
tempest-ServerActionsTestOtherB-605016737-project-member] Creating folder: Project (996f8d6e14a14ac39f207eced547ef33). Parent ref: group-v427491. {{(pid=63279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 2163.527575] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-84cc8dfe-bf5e-4f91-9ced-788b826b775f {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2163.543262] env[63279]: INFO nova.virt.vmwareapi.vm_util [None req-718a590a-91c9-4ba7-b80f-c0d0ab309a1d tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Created folder: Project (996f8d6e14a14ac39f207eced547ef33) in parent group-v427491. [ 2163.543527] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-718a590a-91c9-4ba7-b80f-c0d0ab309a1d tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Creating folder: Instances. Parent ref: group-v427714. {{(pid=63279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 2163.544077] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-06b77181-e907-424c-a55a-7fea610dd545 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2163.558145] env[63279]: INFO nova.virt.vmwareapi.vm_util [None req-718a590a-91c9-4ba7-b80f-c0d0ab309a1d tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Created folder: Instances in parent group-v427714. [ 2163.558506] env[63279]: DEBUG oslo.service.loopingcall [None req-718a590a-91c9-4ba7-b80f-c0d0ab309a1d tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2163.558639] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 686a7ce2-2d07-411e-91d6-0471c55c3728] Creating VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2163.558859] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d9400cc2-51b3-4fbd-9677-eb6a8f5f26f5 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2163.583181] env[63279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2163.583181] env[63279]: value = "task-2087661" [ 2163.583181] env[63279]: _type = "Task" [ 2163.583181] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2163.595656] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087661, 'name': CreateVM_Task} progress is 0%. 
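Note: before CreateVM_Task is invoked, the driver turns each network_info entry into the VIF info logged above (an OpaqueNetwork reference to the NSX logical switch plus a vmxnet3 adapter) and ensures a "Project (<tenant_id>)/Instances" folder pair exists under the parent group. The snippet below just rebuilds those two pieces of data from the logged values; the function names are invented for illustration.

```python
def build_vif_info(vif: dict) -> dict:
    """Map a Neutron VIF dict onto the structure the vmwareapi driver logs."""
    return {
        "network_name": vif["network"]["bridge"],            # 'br-int'
        "mac_address": vif["address"],
        "network_ref": {
            "type": "OpaqueNetwork",
            "network-id": vif["details"]["nsx-logical-switch-id"],
            "network-type": "nsx.LogicalSwitch",
            "use-external-id": True,
        },
        "iface_id": vif["id"],
        "vif_model": "vmxnet3",
    }

def project_folder_names(tenant_id: str) -> tuple[str, str]:
    """Folder names created under the parent group (e.g. group-v427491)."""
    return f"Project ({tenant_id})", "Instances"

vif = {
    "id": "55a22eb2-68fd-4cc1-8372-6fed483f16d0",
    "address": "fa:16:3e:6d:af:2f",
    "network": {"bridge": "br-int"},
    "details": {"nsx-logical-switch-id": "d88b750a-0e7d-4f16-8bd5-8e6d5743b720"},
}
print(build_vif_info(vif))
print(project_folder_names("996f8d6e14a14ac39f207eced547ef33"))
```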
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2163.604951] env[63279]: DEBUG oslo_concurrency.lockutils [None req-1cd07031-a029-4d28-82f1-ec2398e9fd52 tempest-ServersTestJSON-1697234190 tempest-ServersTestJSON-1697234190-project-member] Acquiring lock "c8b42e3b-b841-4b79-a4f3-ef62577d4902" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2163.605276] env[63279]: DEBUG oslo_concurrency.lockutils [None req-1cd07031-a029-4d28-82f1-ec2398e9fd52 tempest-ServersTestJSON-1697234190 tempest-ServersTestJSON-1697234190-project-member] Lock "c8b42e3b-b841-4b79-a4f3-ef62577d4902" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2163.605510] env[63279]: DEBUG oslo_concurrency.lockutils [None req-1cd07031-a029-4d28-82f1-ec2398e9fd52 tempest-ServersTestJSON-1697234190 tempest-ServersTestJSON-1697234190-project-member] Acquiring lock "c8b42e3b-b841-4b79-a4f3-ef62577d4902-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2163.605839] env[63279]: DEBUG oslo_concurrency.lockutils [None req-1cd07031-a029-4d28-82f1-ec2398e9fd52 tempest-ServersTestJSON-1697234190 tempest-ServersTestJSON-1697234190-project-member] Lock "c8b42e3b-b841-4b79-a4f3-ef62577d4902-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2163.605928] env[63279]: DEBUG oslo_concurrency.lockutils [None req-1cd07031-a029-4d28-82f1-ec2398e9fd52 tempest-ServersTestJSON-1697234190 tempest-ServersTestJSON-1697234190-project-member] Lock "c8b42e3b-b841-4b79-a4f3-ef62577d4902-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2163.608267] env[63279]: INFO nova.compute.manager [None req-1cd07031-a029-4d28-82f1-ec2398e9fd52 tempest-ServersTestJSON-1697234190 tempest-ServersTestJSON-1697234190-project-member] [instance: c8b42e3b-b841-4b79-a4f3-ef62577d4902] Terminating instance [ 2163.755029] env[63279]: DEBUG oslo_vmware.api [None req-3b4d6d87-fd3c-4686-b1f3-07b747c233dd tempest-ServerActionsV293TestJSON-360186716 tempest-ServerActionsV293TestJSON-360186716-project-member] Task: {'id': task-2087657, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.999632} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2163.755029] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-3b4d6d87-fd3c-4686-b1f3-07b747c233dd tempest-ServerActionsV293TestJSON-360186716 tempest-ServerActionsV293TestJSON-360186716-project-member] Deleted the datastore file {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2163.755260] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-3b4d6d87-fd3c-4686-b1f3-07b747c233dd tempest-ServerActionsV293TestJSON-360186716 tempest-ServerActionsV293TestJSON-360186716-project-member] [instance: f375b54b-f9de-4529-b752-52c240aed532] Deleted contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2163.755484] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-3b4d6d87-fd3c-4686-b1f3-07b747c233dd tempest-ServerActionsV293TestJSON-360186716 tempest-ServerActionsV293TestJSON-360186716-project-member] [instance: f375b54b-f9de-4529-b752-52c240aed532] Instance destroyed {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2163.755659] env[63279]: INFO nova.compute.manager [None req-3b4d6d87-fd3c-4686-b1f3-07b747c233dd tempest-ServerActionsV293TestJSON-360186716 tempest-ServerActionsV293TestJSON-360186716-project-member] [instance: f375b54b-f9de-4529-b752-52c240aed532] Took 5.04 seconds to destroy the instance on the hypervisor. [ 2163.755914] env[63279]: DEBUG oslo.service.loopingcall [None req-3b4d6d87-fd3c-4686-b1f3-07b747c233dd tempest-ServerActionsV293TestJSON-360186716 tempest-ServerActionsV293TestJSON-360186716-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2163.756131] env[63279]: DEBUG nova.compute.manager [-] [instance: f375b54b-f9de-4529-b752-52c240aed532] Deallocating network for instance {{(pid=63279) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2163.756248] env[63279]: DEBUG nova.network.neutron [-] [instance: f375b54b-f9de-4529-b752-52c240aed532] deallocate_for_instance() {{(pid=63279) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2163.812285] env[63279]: DEBUG oslo_vmware.api [None req-10cf71d9-81cc-488e-a857-1d057f27f6e7 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Task: {'id': task-2087658, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2163.859827] env[63279]: DEBUG oslo_vmware.api [None req-a2668d1a-4c43-4202-b04d-63b7f5aec853 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Task: {'id': task-2087654, 'name': MoveVirtualDisk_Task} progress is 83%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2163.870043] env[63279]: DEBUG nova.compute.utils [None req-082a3afc-69fa-402f-b3d2-e85c1a1a4788 tempest-ServersTestMultiNic-465204663 tempest-ServersTestMultiNic-465204663-project-member] Using /dev/sd instead of None {{(pid=63279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2163.873286] env[63279]: DEBUG nova.compute.manager [None req-082a3afc-69fa-402f-b3d2-e85c1a1a4788 tempest-ServersTestMultiNic-465204663 tempest-ServersTestMultiNic-465204663-project-member] [instance: 75f5b49f-14e7-4a8e-a0cb-b955edc13dd5] Allocating IP information in the background. {{(pid=63279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 2163.873286] env[63279]: DEBUG nova.network.neutron [None req-082a3afc-69fa-402f-b3d2-e85c1a1a4788 tempest-ServersTestMultiNic-465204663 tempest-ServersTestMultiNic-465204663-project-member] [instance: 75f5b49f-14e7-4a8e-a0cb-b955edc13dd5] allocate_for_instance() {{(pid=63279) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2163.947339] env[63279]: DEBUG nova.policy [None req-082a3afc-69fa-402f-b3d2-e85c1a1a4788 tempest-ServersTestMultiNic-465204663 tempest-ServersTestMultiNic-465204663-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '7d9d348f73904de5adc5d521c18bfb6c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '161d1caf722349c188ca8fc647989c05', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63279) authorize /opt/stack/nova/nova/policy.py:192}} [ 2163.965619] env[63279]: DEBUG oslo_concurrency.lockutils [None req-45c03385-d84a-4fe1-8f36-6d1fe6e38b41 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Lock "interface-eca98392-98be-405b-b799-463ef9ee3dc8-None" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 10.456s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2164.016021] env[63279]: DEBUG oslo_vmware.api [None req-b6feae97-fa7d-41e8-b8d5-106af4509856 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]5236543d-d53a-66cd-0041-039e08e4c67c, 'name': SearchDatastore_Task, 'duration_secs': 0.080491} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2164.016021] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b6feae97-fa7d-41e8-b8d5-106af4509856 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2164.016021] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-b6feae97-fa7d-41e8-b8d5-106af4509856 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: 736ab3ac-b9e0-4f9e-885b-765ca7a92ed0] Processing image 30887889-e45b-4f67-8b3c-16216e594a90 {{(pid=63279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2164.016021] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b6feae97-fa7d-41e8-b8d5-106af4509856 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2164.016021] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b6feae97-fa7d-41e8-b8d5-106af4509856 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2164.016021] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-b6feae97-fa7d-41e8-b8d5-106af4509856 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2164.016021] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0ea5e83d-c07f-4b57-b33a-a84ff7490c95 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2164.038058] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-b6feae97-fa7d-41e8-b8d5-106af4509856 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2164.038311] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-b6feae97-fa7d-41e8-b8d5-106af4509856 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2164.042208] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-328374e3-6ac2-4c3f-94dc-478beac0591a {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2164.054634] env[63279]: DEBUG oslo_vmware.api [None req-b6feae97-fa7d-41e8-b8d5-106af4509856 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Waiting for the task: (returnval){ [ 2164.054634] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52a8c6c3-525c-691d-2fd5-baad8543af57" [ 2164.054634] env[63279]: _type = "Task" [ 2164.054634] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2164.069967] env[63279]: DEBUG oslo_vmware.api [None req-b6feae97-fa7d-41e8-b8d5-106af4509856 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52a8c6c3-525c-691d-2fd5-baad8543af57, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2164.098560] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087661, 'name': CreateVM_Task} progress is 99%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2164.112853] env[63279]: DEBUG nova.compute.manager [None req-1cd07031-a029-4d28-82f1-ec2398e9fd52 tempest-ServersTestJSON-1697234190 tempest-ServersTestJSON-1697234190-project-member] [instance: c8b42e3b-b841-4b79-a4f3-ef62577d4902] Start destroying the instance on the hypervisor. {{(pid=63279) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2164.113170] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-1cd07031-a029-4d28-82f1-ec2398e9fd52 tempest-ServersTestJSON-1697234190 tempest-ServersTestJSON-1697234190-project-member] [instance: c8b42e3b-b841-4b79-a4f3-ef62577d4902] Destroying instance {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2164.118123] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1a28e15-55c5-4a93-ae4b-e07bb68149b7 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2164.128181] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-1cd07031-a029-4d28-82f1-ec2398e9fd52 tempest-ServersTestJSON-1697234190 tempest-ServersTestJSON-1697234190-project-member] [instance: c8b42e3b-b841-4b79-a4f3-ef62577d4902] Powering off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2164.131933] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-32c86793-a9a4-44bc-8282-01a9c0bf6a1d {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2164.147116] env[63279]: DEBUG oslo_vmware.api [None req-1cd07031-a029-4d28-82f1-ec2398e9fd52 tempest-ServersTestJSON-1697234190 tempest-ServersTestJSON-1697234190-project-member] Waiting for the task: (returnval){ [ 2164.147116] env[63279]: value = "task-2087662" [ 2164.147116] env[63279]: _type = "Task" [ 2164.147116] env[63279]: } to complete. 
{{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2164.169108] env[63279]: DEBUG oslo_vmware.api [None req-1cd07031-a029-4d28-82f1-ec2398e9fd52 tempest-ServersTestJSON-1697234190 tempest-ServersTestJSON-1697234190-project-member] Task: {'id': task-2087662, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2164.260613] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e9c8221-260f-4ea9-a014-83c2ae931f9e {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2164.281024] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22ed4d98-9e05-453a-9d5f-ded6920c9679 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2164.330750] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cee0f3e3-cd33-4958-9dcc-4881b34514c0 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2164.345746] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1353d434-3444-4de9-b42b-2be237ef21ea {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2164.351998] env[63279]: DEBUG oslo_vmware.api [None req-10cf71d9-81cc-488e-a857-1d057f27f6e7 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Task: {'id': task-2087658, 'name': PowerOnVM_Task} progress is 93%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2164.362138] env[63279]: DEBUG oslo_vmware.api [None req-a2668d1a-4c43-4202-b04d-63b7f5aec853 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Task: {'id': task-2087654, 'name': MoveVirtualDisk_Task} progress is 100%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2164.372338] env[63279]: DEBUG nova.compute.provider_tree [None req-88931e76-94a7-4e04-9054-f6a3f97882cc tempest-ServersV294TestFqdnHostnames-110312040 tempest-ServersV294TestFqdnHostnames-110312040-project-member] Inventory has not changed in ProviderTree for provider: 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2164.376057] env[63279]: DEBUG nova.compute.manager [None req-082a3afc-69fa-402f-b3d2-e85c1a1a4788 tempest-ServersTestMultiNic-465204663 tempest-ServersTestMultiNic-465204663-project-member] [instance: 75f5b49f-14e7-4a8e-a0cb-b955edc13dd5] Start building block device mappings for instance. {{(pid=63279) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 2164.383825] env[63279]: DEBUG nova.network.neutron [req-f501714d-0d88-494b-b4e5-68c4a4bd0fcb req-9a0b631a-c21c-4f6d-b7b6-219a329901f1 service nova] [instance: 686a7ce2-2d07-411e-91d6-0471c55c3728] Updated VIF entry in instance network info cache for port 55a22eb2-68fd-4cc1-8372-6fed483f16d0. 
{{(pid=63279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2164.384362] env[63279]: DEBUG nova.network.neutron [req-f501714d-0d88-494b-b4e5-68c4a4bd0fcb req-9a0b631a-c21c-4f6d-b7b6-219a329901f1 service nova] [instance: 686a7ce2-2d07-411e-91d6-0471c55c3728] Updating instance_info_cache with network_info: [{"id": "55a22eb2-68fd-4cc1-8372-6fed483f16d0", "address": "fa:16:3e:6d:af:2f", "network": {"id": "1237aa69-f92f-4996-893d-4007ba590d1d", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-544783943-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "996f8d6e14a14ac39f207eced547ef33", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d88b750a-0e7d-4f16-8bd5-8e6d5743b720", "external-id": "nsx-vlan-transportzone-715", "segmentation_id": 715, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap55a22eb2-68", "ovs_interfaceid": "55a22eb2-68fd-4cc1-8372-6fed483f16d0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2164.530852] env[63279]: DEBUG nova.network.neutron [None req-082a3afc-69fa-402f-b3d2-e85c1a1a4788 tempest-ServersTestMultiNic-465204663 tempest-ServersTestMultiNic-465204663-project-member] [instance: 75f5b49f-14e7-4a8e-a0cb-b955edc13dd5] Successfully created port: f5f17197-3d49-4ae2-a895-9bab23e07301 {{(pid=63279) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2164.570844] env[63279]: DEBUG oslo_vmware.api [None req-b6feae97-fa7d-41e8-b8d5-106af4509856 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52a8c6c3-525c-691d-2fd5-baad8543af57, 'name': SearchDatastore_Task, 'duration_secs': 0.084077} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2164.572020] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0d38d810-d83d-4a55-b780-9ac3f5f259a6 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2164.583263] env[63279]: DEBUG oslo_vmware.api [None req-b6feae97-fa7d-41e8-b8d5-106af4509856 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Waiting for the task: (returnval){ [ 2164.583263] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]5296636c-3a25-6f08-d253-aabf2260722c" [ 2164.583263] env[63279]: _type = "Task" [ 2164.583263] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2164.598579] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087661, 'name': CreateVM_Task, 'duration_secs': 0.608445} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2164.602138] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 686a7ce2-2d07-411e-91d6-0471c55c3728] Created VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2164.602296] env[63279]: DEBUG oslo_vmware.api [None req-b6feae97-fa7d-41e8-b8d5-106af4509856 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]5296636c-3a25-6f08-d253-aabf2260722c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2164.603547] env[63279]: DEBUG oslo_concurrency.lockutils [None req-718a590a-91c9-4ba7-b80f-c0d0ab309a1d tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2164.603547] env[63279]: DEBUG oslo_concurrency.lockutils [None req-718a590a-91c9-4ba7-b80f-c0d0ab309a1d tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2164.603547] env[63279]: DEBUG oslo_concurrency.lockutils [None req-718a590a-91c9-4ba7-b80f-c0d0ab309a1d tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2164.603825] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f4840b0c-5962-4423-b305-f8032c87ed2c {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2164.612906] env[63279]: DEBUG oslo_vmware.api [None req-718a590a-91c9-4ba7-b80f-c0d0ab309a1d tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Waiting for the task: (returnval){ [ 2164.612906] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]520325d3-f8e0-4a74-cf86-f3b93b8bab7c" [ 2164.612906] env[63279]: _type = "Task" [ 2164.612906] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2164.629523] env[63279]: DEBUG oslo_vmware.api [None req-718a590a-91c9-4ba7-b80f-c0d0ab309a1d tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]520325d3-f8e0-4a74-cf86-f3b93b8bab7c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2164.657692] env[63279]: DEBUG oslo_vmware.api [None req-1cd07031-a029-4d28-82f1-ec2398e9fd52 tempest-ServersTestJSON-1697234190 tempest-ServersTestJSON-1697234190-project-member] Task: {'id': task-2087662, 'name': PowerOffVM_Task, 'duration_secs': 0.280673} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2164.658030] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-1cd07031-a029-4d28-82f1-ec2398e9fd52 tempest-ServersTestJSON-1697234190 tempest-ServersTestJSON-1697234190-project-member] [instance: c8b42e3b-b841-4b79-a4f3-ef62577d4902] Powered off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2164.658231] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-1cd07031-a029-4d28-82f1-ec2398e9fd52 tempest-ServersTestJSON-1697234190 tempest-ServersTestJSON-1697234190-project-member] [instance: c8b42e3b-b841-4b79-a4f3-ef62577d4902] Unregistering the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2164.658523] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6c024f95-03e5-4129-bb57-427e8694f06c {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2164.700169] env[63279]: DEBUG oslo_vmware.rw_handles [None req-1b4c0c4c-9a01-4ca3-8f97-810c4c0e8a75 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/521ff1c1-eb08-13e0-998f-ea5cbff36e1b/disk-0.vmdk. {{(pid=63279) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 2164.700809] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7596e540-2eeb-4939-8262-a37d9ab02ce8 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2164.707443] env[63279]: DEBUG oslo_vmware.rw_handles [None req-1b4c0c4c-9a01-4ca3-8f97-810c4c0e8a75 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/521ff1c1-eb08-13e0-998f-ea5cbff36e1b/disk-0.vmdk is in state: ready. {{(pid=63279) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 2164.708588] env[63279]: ERROR oslo_vmware.rw_handles [None req-1b4c0c4c-9a01-4ca3-8f97-810c4c0e8a75 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/521ff1c1-eb08-13e0-998f-ea5cbff36e1b/disk-0.vmdk due to incomplete transfer. [ 2164.709066] env[63279]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-b2e4094f-6450-433c-88c1-2cfcd018f67f {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2164.719129] env[63279]: DEBUG oslo_vmware.rw_handles [None req-1b4c0c4c-9a01-4ca3-8f97-810c4c0e8a75 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/521ff1c1-eb08-13e0-998f-ea5cbff36e1b/disk-0.vmdk. 
{{(pid=63279) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 2164.719129] env[63279]: DEBUG nova.virt.vmwareapi.images [None req-1b4c0c4c-9a01-4ca3-8f97-810c4c0e8a75 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: d6e40dbc-f20e-4164-b460-18de6ea72906] Uploaded image a718bd62-8e3b-4c6b-8cc7-172dc4b359d0 to the Glance image server {{(pid=63279) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 2164.720080] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-1b4c0c4c-9a01-4ca3-8f97-810c4c0e8a75 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: d6e40dbc-f20e-4164-b460-18de6ea72906] Destroying the VM {{(pid=63279) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 2164.720493] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-b6698956-507c-4c07-86b0-3a1491c56173 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2164.726851] env[63279]: DEBUG oslo_vmware.api [None req-1b4c0c4c-9a01-4ca3-8f97-810c4c0e8a75 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Waiting for the task: (returnval){ [ 2164.726851] env[63279]: value = "task-2087664" [ 2164.726851] env[63279]: _type = "Task" [ 2164.726851] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2164.735886] env[63279]: DEBUG oslo_vmware.api [None req-1b4c0c4c-9a01-4ca3-8f97-810c4c0e8a75 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Task: {'id': task-2087664, 'name': Destroy_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2164.833827] env[63279]: DEBUG oslo_vmware.api [None req-10cf71d9-81cc-488e-a857-1d057f27f6e7 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Task: {'id': task-2087658, 'name': PowerOnVM_Task, 'duration_secs': 1.208758} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2164.834428] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-10cf71d9-81cc-488e-a857-1d057f27f6e7 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] [instance: 2f5e22f6-ba70-4848-965b-eb1553115323] Resumed the VM {{(pid=63279) resume /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1188}} [ 2164.838020] env[63279]: DEBUG nova.compute.manager [None req-10cf71d9-81cc-488e-a857-1d057f27f6e7 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] [instance: 2f5e22f6-ba70-4848-965b-eb1553115323] Checking state {{(pid=63279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2164.838020] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28f2ad7f-a17b-4f0a-aff7-1b91581a0e7d {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2164.868914] env[63279]: DEBUG oslo_vmware.api [None req-a2668d1a-4c43-4202-b04d-63b7f5aec853 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Task: {'id': task-2087654, 'name': MoveVirtualDisk_Task, 'duration_secs': 2.582041} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2164.870821] env[63279]: INFO nova.virt.vmwareapi.ds_util [None req-a2668d1a-4c43-4202-b04d-63b7f5aec853 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Moved virtual disk from [datastore1] OSTACK_IMG_b6c868df-78dc-489b-8164-a7a2ef5181d6/OSTACK_IMG_b6c868df-78dc-489b-8164-a7a2ef5181d6.vmdk to [datastore1] devstack-image-cache_base/d2d04a47-fc99-4ff0-adbc-a27616252dd0/d2d04a47-fc99-4ff0-adbc-a27616252dd0.vmdk. 
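Aside: the records above and below all drive long-running vCenter operations (CreateVM_Task, PowerOffVM_Task, MoveVirtualDisk_Task, CopyVirtualDisk_Task, DeleteDatastoreFile_Task) through the same loop, in which wait_for_task registers interest in a task and _poll_task logs a progress line on each pass until the task "completed successfully" or errors out. Below is a minimal plain-Python sketch of that poll-until-done pattern, included only as an illustration; get_task_info, the state strings and the fixed interval are assumptions standing in for the real oslo_vmware / PropertyCollector machinery, not the actual implementation.

import time

# Illustrative sketch of the wait_for_task/_poll_task loop visible in the log:
# poll a task handle on a fixed interval, log its progress, and return its
# result once the backend reports success (or raise on error).
# `get_task_info` is a hypothetical callable standing in for the vSphere
# PropertyCollector lookup that the real driver performs.
def wait_for_task(get_task_info, task_ref, interval=0.5):
    while True:
        info = get_task_info(task_ref)   # e.g. {'state': 'running', 'progress': 66}
        if info['state'] == 'success':
            return info.get('result')
        if info['state'] == 'error':
            raise RuntimeError("Task %s failed: %s" % (task_ref, info.get('error')))
        print("Task: {'id': %s} progress is %s%%." % (task_ref, info.get('progress', 0)))
        time.sleep(interval)             # the real code uses a looping call, not sleep()

# Example: a fake task that completes after three polls.
_states = iter([{'state': 'running', 'progress': 0},
                {'state': 'running', 'progress': 66},
                {'state': 'success', 'result': 'vm-123'}])
print(wait_for_task(lambda ref: next(_states), 'task-0000001', interval=0.0))
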
[ 2164.870821] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-a2668d1a-4c43-4202-b04d-63b7f5aec853 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] [instance: 4acfb474-d861-467a-983c-0dd5641e66f3] Cleaning up location [datastore1] OSTACK_IMG_b6c868df-78dc-489b-8164-a7a2ef5181d6 {{(pid=63279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 2164.870821] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-a2668d1a-4c43-4202-b04d-63b7f5aec853 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Deleting the datastore file [datastore1] OSTACK_IMG_b6c868df-78dc-489b-8164-a7a2ef5181d6 {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2164.870821] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4e1dafd5-afcb-4788-b098-2140e1b7298a {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2164.880706] env[63279]: DEBUG nova.scheduler.client.report [None req-88931e76-94a7-4e04-9054-f6a3f97882cc tempest-ServersV294TestFqdnHostnames-110312040 tempest-ServersV294TestFqdnHostnames-110312040-project-member] Inventory has not changed for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2164.891862] env[63279]: DEBUG oslo_vmware.api [None req-a2668d1a-4c43-4202-b04d-63b7f5aec853 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Waiting for the task: (returnval){ [ 2164.891862] env[63279]: value = "task-2087665" [ 2164.891862] env[63279]: _type = "Task" [ 2164.891862] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2164.893164] env[63279]: DEBUG oslo_concurrency.lockutils [req-f501714d-0d88-494b-b4e5-68c4a4bd0fcb req-9a0b631a-c21c-4f6d-b7b6-219a329901f1 service nova] Releasing lock "refresh_cache-686a7ce2-2d07-411e-91d6-0471c55c3728" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2164.905737] env[63279]: DEBUG oslo_vmware.api [None req-a2668d1a-4c43-4202-b04d-63b7f5aec853 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Task: {'id': task-2087665, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2164.916408] env[63279]: DEBUG nova.compute.manager [req-447fd7d6-857f-4b18-9983-978d5850b3ba req-b9dda67c-8b8c-40e3-9dbf-000da1a0b617 service nova] [instance: f375b54b-f9de-4529-b752-52c240aed532] Received event network-vif-deleted-7dc16370-0621-49ff-9730-abdfd18ff164 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2164.916633] env[63279]: INFO nova.compute.manager [req-447fd7d6-857f-4b18-9983-978d5850b3ba req-b9dda67c-8b8c-40e3-9dbf-000da1a0b617 service nova] [instance: f375b54b-f9de-4529-b752-52c240aed532] Neutron deleted interface 7dc16370-0621-49ff-9730-abdfd18ff164; detaching it from the instance and deleting it from the info cache [ 2164.917403] env[63279]: DEBUG nova.network.neutron [req-447fd7d6-857f-4b18-9983-978d5850b3ba req-b9dda67c-8b8c-40e3-9dbf-000da1a0b617 service nova] [instance: f375b54b-f9de-4529-b752-52c240aed532] Updating instance_info_cache with network_info: [] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2165.003751] env[63279]: DEBUG nova.network.neutron [None req-082a3afc-69fa-402f-b3d2-e85c1a1a4788 tempest-ServersTestMultiNic-465204663 tempest-ServersTestMultiNic-465204663-project-member] [instance: 75f5b49f-14e7-4a8e-a0cb-b955edc13dd5] Successfully created port: 2053aee1-c71d-4f3d-99d3-989823b04e65 {{(pid=63279) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2165.072077] env[63279]: DEBUG nova.network.neutron [-] [instance: f375b54b-f9de-4529-b752-52c240aed532] Updating instance_info_cache with network_info: [] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2165.096207] env[63279]: DEBUG oslo_vmware.api [None req-b6feae97-fa7d-41e8-b8d5-106af4509856 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]5296636c-3a25-6f08-d253-aabf2260722c, 'name': SearchDatastore_Task, 'duration_secs': 0.017969} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2165.096613] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b6feae97-fa7d-41e8-b8d5-106af4509856 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2165.097093] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-b6feae97-fa7d-41e8-b8d5-106af4509856 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] 736ab3ac-b9e0-4f9e-885b-765ca7a92ed0/736ab3ac-b9e0-4f9e-885b-765ca7a92ed0.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2165.097428] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5b286b3c-de36-4f59-b606-1e78fcb58791 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2165.104517] env[63279]: DEBUG oslo_vmware.api [None req-b6feae97-fa7d-41e8-b8d5-106af4509856 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Waiting for the task: (returnval){ [ 2165.104517] env[63279]: value = "task-2087666" [ 2165.104517] env[63279]: _type = "Task" [ 2165.104517] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2165.118859] env[63279]: DEBUG oslo_vmware.api [None req-b6feae97-fa7d-41e8-b8d5-106af4509856 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Task: {'id': task-2087666, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2165.126305] env[63279]: DEBUG oslo_vmware.api [None req-718a590a-91c9-4ba7-b80f-c0d0ab309a1d tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]520325d3-f8e0-4a74-cf86-f3b93b8bab7c, 'name': SearchDatastore_Task, 'duration_secs': 0.026246} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2165.126687] env[63279]: DEBUG oslo_concurrency.lockutils [None req-718a590a-91c9-4ba7-b80f-c0d0ab309a1d tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2165.127201] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-718a590a-91c9-4ba7-b80f-c0d0ab309a1d tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 686a7ce2-2d07-411e-91d6-0471c55c3728] Processing image 30887889-e45b-4f67-8b3c-16216e594a90 {{(pid=63279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2165.127480] env[63279]: DEBUG oslo_concurrency.lockutils [None req-718a590a-91c9-4ba7-b80f-c0d0ab309a1d tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2165.127664] env[63279]: DEBUG oslo_concurrency.lockutils [None req-718a590a-91c9-4ba7-b80f-c0d0ab309a1d tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2165.127894] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-718a590a-91c9-4ba7-b80f-c0d0ab309a1d tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2165.128562] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b2404a66-71ac-4b26-9a7c-9907d46e86c8 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2165.138037] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-718a590a-91c9-4ba7-b80f-c0d0ab309a1d tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2165.138037] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-718a590a-91c9-4ba7-b80f-c0d0ab309a1d tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2165.138037] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b2940039-c052-4b5d-92e3-eef4cf23cea8 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2165.144194] env[63279]: DEBUG oslo_vmware.api [None req-718a590a-91c9-4ba7-b80f-c0d0ab309a1d tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Waiting for the task: (returnval){ [ 2165.144194] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52dc4895-29ba-f9ae-a6db-1bc3211836f9" [ 2165.144194] env[63279]: _type = "Task" [ 2165.144194] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2165.153733] env[63279]: DEBUG oslo_vmware.api [None req-718a590a-91c9-4ba7-b80f-c0d0ab309a1d tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52dc4895-29ba-f9ae-a6db-1bc3211836f9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2165.243854] env[63279]: DEBUG oslo_vmware.api [None req-1b4c0c4c-9a01-4ca3-8f97-810c4c0e8a75 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Task: {'id': task-2087664, 'name': Destroy_Task} progress is 33%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2165.389026] env[63279]: DEBUG oslo_concurrency.lockutils [None req-88931e76-94a7-4e04-9054-f6a3f97882cc tempest-ServersV294TestFqdnHostnames-110312040 tempest-ServersV294TestFqdnHostnames-110312040-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.025s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2165.391522] env[63279]: DEBUG oslo_concurrency.lockutils [None req-1e17e85f-930c-4f26-80d6-8e5a59771f74 tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 6.875s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2165.391973] env[63279]: DEBUG nova.objects.instance [None req-1e17e85f-930c-4f26-80d6-8e5a59771f74 tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] Lazy-loading 'resources' on Instance uuid 5b5f87cb-cf35-418f-b5bd-b953524a285c {{(pid=63279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2165.395789] env[63279]: DEBUG nova.compute.manager [None req-082a3afc-69fa-402f-b3d2-e85c1a1a4788 tempest-ServersTestMultiNic-465204663 tempest-ServersTestMultiNic-465204663-project-member] [instance: 75f5b49f-14e7-4a8e-a0cb-b955edc13dd5] Start spawning the instance on the hypervisor. 
{{(pid=63279) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 2165.422251] env[63279]: DEBUG oslo_vmware.api [None req-a2668d1a-4c43-4202-b04d-63b7f5aec853 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Task: {'id': task-2087665, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.03749} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2165.423092] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-a2668d1a-4c43-4202-b04d-63b7f5aec853 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Deleted the datastore file {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2165.423343] env[63279]: DEBUG oslo_concurrency.lockutils [None req-a2668d1a-4c43-4202-b04d-63b7f5aec853 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d2d04a47-fc99-4ff0-adbc-a27616252dd0/d2d04a47-fc99-4ff0-adbc-a27616252dd0.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2165.426037] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-a2668d1a-4c43-4202-b04d-63b7f5aec853 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/d2d04a47-fc99-4ff0-adbc-a27616252dd0/d2d04a47-fc99-4ff0-adbc-a27616252dd0.vmdk to [datastore1] 4acfb474-d861-467a-983c-0dd5641e66f3/4acfb474-d861-467a-983c-0dd5641e66f3.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2165.426037] env[63279]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-9097dee6-2432-46b1-a848-78f920ca51d1 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2165.427592] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-769a04b6-d41d-4e81-9e65-21c95e0f85bf {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2165.439846] env[63279]: INFO nova.scheduler.client.report [None req-88931e76-94a7-4e04-9054-f6a3f97882cc tempest-ServersV294TestFqdnHostnames-110312040 tempest-ServersV294TestFqdnHostnames-110312040-project-member] Deleted allocations for instance 32dbef6d-d314-4fa6-972a-e7b1f22eb11d [ 2165.443595] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2408916-afbd-4bc2-874f-1c4719147309 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2165.455319] env[63279]: DEBUG oslo_vmware.api [None req-a2668d1a-4c43-4202-b04d-63b7f5aec853 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Waiting for the task: (returnval){ [ 2165.455319] env[63279]: value = "task-2087667" [ 2165.455319] env[63279]: _type = "Task" [ 2165.455319] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2165.467913] env[63279]: DEBUG oslo_vmware.api [None req-a2668d1a-4c43-4202-b04d-63b7f5aec853 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Task: {'id': task-2087667, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2165.491898] env[63279]: DEBUG nova.compute.manager [req-447fd7d6-857f-4b18-9983-978d5850b3ba req-b9dda67c-8b8c-40e3-9dbf-000da1a0b617 service nova] [instance: f375b54b-f9de-4529-b752-52c240aed532] Detach interface failed, port_id=7dc16370-0621-49ff-9730-abdfd18ff164, reason: Instance f375b54b-f9de-4529-b752-52c240aed532 could not be found. {{(pid=63279) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11484}} [ 2165.496028] env[63279]: DEBUG nova.virt.hardware [None req-082a3afc-69fa-402f-b3d2-e85c1a1a4788 tempest-ServersTestMultiNic-465204663 tempest-ServersTestMultiNic-465204663-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-13T17:30:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-13T17:30:01Z,direct_url=,disk_format='vmdk',id=30887889-e45b-4f67-8b3c-16216e594a90,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a935e86eee0a4c38adfe0367d2097a61',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-13T17:30:02Z,virtual_size=,visibility=), allow threads: False {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2165.496694] env[63279]: DEBUG nova.virt.hardware [None req-082a3afc-69fa-402f-b3d2-e85c1a1a4788 tempest-ServersTestMultiNic-465204663 tempest-ServersTestMultiNic-465204663-project-member] Flavor limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2165.497033] env[63279]: DEBUG nova.virt.hardware [None req-082a3afc-69fa-402f-b3d2-e85c1a1a4788 tempest-ServersTestMultiNic-465204663 tempest-ServersTestMultiNic-465204663-project-member] Image limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2165.497415] env[63279]: DEBUG nova.virt.hardware [None req-082a3afc-69fa-402f-b3d2-e85c1a1a4788 tempest-ServersTestMultiNic-465204663 tempest-ServersTestMultiNic-465204663-project-member] Flavor pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2165.497699] env[63279]: DEBUG nova.virt.hardware [None req-082a3afc-69fa-402f-b3d2-e85c1a1a4788 tempest-ServersTestMultiNic-465204663 tempest-ServersTestMultiNic-465204663-project-member] Image pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2165.497971] env[63279]: DEBUG nova.virt.hardware [None req-082a3afc-69fa-402f-b3d2-e85c1a1a4788 tempest-ServersTestMultiNic-465204663 tempest-ServersTestMultiNic-465204663-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2165.498378] env[63279]: DEBUG nova.virt.hardware [None req-082a3afc-69fa-402f-b3d2-e85c1a1a4788 tempest-ServersTestMultiNic-465204663 tempest-ServersTestMultiNic-465204663-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63279) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:573}} [ 2165.498699] env[63279]: DEBUG nova.virt.hardware [None req-082a3afc-69fa-402f-b3d2-e85c1a1a4788 tempest-ServersTestMultiNic-465204663 tempest-ServersTestMultiNic-465204663-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 2165.499048] env[63279]: DEBUG nova.virt.hardware [None req-082a3afc-69fa-402f-b3d2-e85c1a1a4788 tempest-ServersTestMultiNic-465204663 tempest-ServersTestMultiNic-465204663-project-member] Got 1 possible topologies {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2165.500362] env[63279]: DEBUG nova.virt.hardware [None req-082a3afc-69fa-402f-b3d2-e85c1a1a4788 tempest-ServersTestMultiNic-465204663 tempest-ServersTestMultiNic-465204663-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2165.500362] env[63279]: DEBUG nova.virt.hardware [None req-082a3afc-69fa-402f-b3d2-e85c1a1a4788 tempest-ServersTestMultiNic-465204663 tempest-ServersTestMultiNic-465204663-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2165.500977] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70620234-9112-4ada-9802-570b5454f317 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2165.513733] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3dee660a-7c18-4b5f-bdec-d884e208f514 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2165.575811] env[63279]: DEBUG nova.network.neutron [None req-082a3afc-69fa-402f-b3d2-e85c1a1a4788 tempest-ServersTestMultiNic-465204663 tempest-ServersTestMultiNic-465204663-project-member] [instance: 75f5b49f-14e7-4a8e-a0cb-b955edc13dd5] Successfully created port: 06151b64-defb-4dca-a89e-f4d66bcbeb62 {{(pid=63279) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2165.578617] env[63279]: INFO nova.compute.manager [-] [instance: f375b54b-f9de-4529-b752-52c240aed532] Took 1.82 seconds to deallocate network for instance. [ 2165.622897] env[63279]: DEBUG oslo_vmware.api [None req-b6feae97-fa7d-41e8-b8d5-106af4509856 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Task: {'id': task-2087666, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2165.656896] env[63279]: DEBUG oslo_vmware.api [None req-718a590a-91c9-4ba7-b80f-c0d0ab309a1d tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52dc4895-29ba-f9ae-a6db-1bc3211836f9, 'name': SearchDatastore_Task, 'duration_secs': 0.008773} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2165.657900] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ff21dbbd-2593-4865-a5ed-f895f11f0db7 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2165.664766] env[63279]: DEBUG oslo_vmware.api [None req-718a590a-91c9-4ba7-b80f-c0d0ab309a1d tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Waiting for the task: (returnval){ [ 2165.664766] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]5220c174-e25e-69a6-6327-6b41ac30a690" [ 2165.664766] env[63279]: _type = "Task" [ 2165.664766] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2165.674624] env[63279]: DEBUG oslo_vmware.api [None req-718a590a-91c9-4ba7-b80f-c0d0ab309a1d tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]5220c174-e25e-69a6-6327-6b41ac30a690, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2165.741678] env[63279]: DEBUG oslo_vmware.api [None req-1b4c0c4c-9a01-4ca3-8f97-810c4c0e8a75 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Task: {'id': task-2087664, 'name': Destroy_Task} progress is 33%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2165.969898] env[63279]: DEBUG oslo_concurrency.lockutils [None req-88931e76-94a7-4e04-9054-f6a3f97882cc tempest-ServersV294TestFqdnHostnames-110312040 tempest-ServersV294TestFqdnHostnames-110312040-project-member] Lock "32dbef6d-d314-4fa6-972a-e7b1f22eb11d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 13.517s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2165.974365] env[63279]: DEBUG oslo_vmware.api [None req-a2668d1a-4c43-4202-b04d-63b7f5aec853 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Task: {'id': task-2087667, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2166.087488] env[63279]: DEBUG oslo_concurrency.lockutils [None req-a07898be-a6bd-404e-bc74-d9e85bd714b6 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Acquiring lock "fcdd3076-2b53-4850-9730-2f877e2cabfd" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2166.087843] env[63279]: DEBUG oslo_concurrency.lockutils [None req-a07898be-a6bd-404e-bc74-d9e85bd714b6 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Lock "fcdd3076-2b53-4850-9730-2f877e2cabfd" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.001s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2166.088063] env[63279]: DEBUG nova.compute.manager [None req-a07898be-a6bd-404e-bc74-d9e85bd714b6 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: fcdd3076-2b53-4850-9730-2f877e2cabfd] Checking state {{(pid=63279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2166.089825] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68c1bc33-a1fd-4830-be2a-7b4ae03afd28 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2166.101073] env[63279]: DEBUG nova.compute.manager [None req-a07898be-a6bd-404e-bc74-d9e85bd714b6 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: fcdd3076-2b53-4850-9730-2f877e2cabfd] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=63279) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 2166.101683] env[63279]: DEBUG nova.objects.instance [None req-a07898be-a6bd-404e-bc74-d9e85bd714b6 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Lazy-loading 'flavor' on Instance uuid fcdd3076-2b53-4850-9730-2f877e2cabfd {{(pid=63279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2166.123884] env[63279]: DEBUG oslo_vmware.api [None req-b6feae97-fa7d-41e8-b8d5-106af4509856 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Task: {'id': task-2087666, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.563754} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2166.126606] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-b6feae97-fa7d-41e8-b8d5-106af4509856 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] 736ab3ac-b9e0-4f9e-885b-765ca7a92ed0/736ab3ac-b9e0-4f9e-885b-765ca7a92ed0.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2166.126825] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-b6feae97-fa7d-41e8-b8d5-106af4509856 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: 736ab3ac-b9e0-4f9e-885b-765ca7a92ed0] Extending root virtual disk to 1048576 {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2166.127597] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-90640178-a09d-4eae-b67c-6b40eb385a4c {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2166.134406] env[63279]: DEBUG oslo_vmware.api [None req-b6feae97-fa7d-41e8-b8d5-106af4509856 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Waiting for the task: (returnval){ [ 2166.134406] env[63279]: value = "task-2087668" [ 2166.134406] env[63279]: _type = "Task" [ 2166.134406] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2166.143883] env[63279]: DEBUG oslo_vmware.api [None req-b6feae97-fa7d-41e8-b8d5-106af4509856 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Task: {'id': task-2087668, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2166.148261] env[63279]: INFO nova.compute.manager [None req-3b4d6d87-fd3c-4686-b1f3-07b747c233dd tempest-ServerActionsV293TestJSON-360186716 tempest-ServerActionsV293TestJSON-360186716-project-member] [instance: f375b54b-f9de-4529-b752-52c240aed532] Took 0.57 seconds to detach 1 volumes for instance. [ 2166.149537] env[63279]: DEBUG nova.compute.manager [None req-3b4d6d87-fd3c-4686-b1f3-07b747c233dd tempest-ServerActionsV293TestJSON-360186716 tempest-ServerActionsV293TestJSON-360186716-project-member] [instance: f375b54b-f9de-4529-b752-52c240aed532] Deleting volume: 29ec2591-5e66-4668-ba47-c425dc65074c {{(pid=63279) _cleanup_volumes /opt/stack/nova/nova/compute/manager.py:3282}} [ 2166.178196] env[63279]: DEBUG oslo_vmware.api [None req-718a590a-91c9-4ba7-b80f-c0d0ab309a1d tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]5220c174-e25e-69a6-6327-6b41ac30a690, 'name': SearchDatastore_Task, 'duration_secs': 0.017674} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2166.182152] env[63279]: DEBUG oslo_concurrency.lockutils [None req-718a590a-91c9-4ba7-b80f-c0d0ab309a1d tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2166.182406] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-718a590a-91c9-4ba7-b80f-c0d0ab309a1d tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] 686a7ce2-2d07-411e-91d6-0471c55c3728/686a7ce2-2d07-411e-91d6-0471c55c3728.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2166.182902] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0b061cfb-4cf6-48f2-ac09-227f1baca4c2 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2166.194142] env[63279]: DEBUG oslo_vmware.api [None req-718a590a-91c9-4ba7-b80f-c0d0ab309a1d tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Waiting for the task: (returnval){ [ 2166.194142] env[63279]: value = "task-2087669" [ 2166.194142] env[63279]: _type = "Task" [ 2166.194142] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2166.215247] env[63279]: DEBUG oslo_vmware.api [None req-718a590a-91c9-4ba7-b80f-c0d0ab309a1d tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Task: {'id': task-2087669, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2166.243064] env[63279]: DEBUG oslo_vmware.api [None req-1b4c0c4c-9a01-4ca3-8f97-810c4c0e8a75 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Task: {'id': task-2087664, 'name': Destroy_Task} progress is 33%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2166.280430] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83f6f881-a6e0-4169-bc1c-1cbe98220926 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2166.294119] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d017e6bb-8e51-415a-b0ac-94826dc0ebb6 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2166.350918] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4036967-dbe8-4e95-ac10-f63ccf74125d {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2166.363089] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f45a8e88-9def-49ca-9de8-a49184840552 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2166.392901] env[63279]: DEBUG nova.compute.provider_tree [None req-1e17e85f-930c-4f26-80d6-8e5a59771f74 tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] Inventory has not changed in ProviderTree for provider: 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2166.474929] env[63279]: DEBUG oslo_vmware.api [None req-a2668d1a-4c43-4202-b04d-63b7f5aec853 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Task: {'id': task-2087667, 'name': CopyVirtualDisk_Task} progress is 18%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2166.645793] env[63279]: DEBUG oslo_vmware.api [None req-b6feae97-fa7d-41e8-b8d5-106af4509856 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Task: {'id': task-2087668, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.165585} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2166.646118] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-b6feae97-fa7d-41e8-b8d5-106af4509856 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: 736ab3ac-b9e0-4f9e-885b-765ca7a92ed0] Extended root virtual disk {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2166.648487] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a8ec297-8433-4538-943b-49819d154c66 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2166.679776] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-b6feae97-fa7d-41e8-b8d5-106af4509856 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: 736ab3ac-b9e0-4f9e-885b-765ca7a92ed0] Reconfiguring VM instance instance-00000050 to attach disk [datastore1] 736ab3ac-b9e0-4f9e-885b-765ca7a92ed0/736ab3ac-b9e0-4f9e-885b-765ca7a92ed0.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2166.680148] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-24cc913f-d885-4c41-a0d1-8e21ef783d6d {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2166.708220] env[63279]: DEBUG oslo_concurrency.lockutils [None req-1a1560e9-3891-4b82-b4ac-f78b55448c2c tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Acquiring lock "interface-eca98392-98be-405b-b799-463ef9ee3dc8-753324ac-cff9-4577-92da-27cf8cceab06" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2166.708786] env[63279]: DEBUG oslo_concurrency.lockutils [None req-1a1560e9-3891-4b82-b4ac-f78b55448c2c tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Lock "interface-eca98392-98be-405b-b799-463ef9ee3dc8-753324ac-cff9-4577-92da-27cf8cceab06" acquired by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: waited 0.001s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2166.712023] env[63279]: DEBUG oslo_vmware.api [None req-718a590a-91c9-4ba7-b80f-c0d0ab309a1d tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Task: {'id': task-2087669, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2166.716966] env[63279]: DEBUG oslo_vmware.api [None req-b6feae97-fa7d-41e8-b8d5-106af4509856 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Waiting for the task: (returnval){ [ 2166.716966] env[63279]: value = "task-2087671" [ 2166.716966] env[63279]: _type = "Task" [ 2166.716966] env[63279]: } to complete. 
{{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2166.729835] env[63279]: DEBUG oslo_concurrency.lockutils [None req-3b4d6d87-fd3c-4686-b1f3-07b747c233dd tempest-ServerActionsV293TestJSON-360186716 tempest-ServerActionsV293TestJSON-360186716-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2166.730429] env[63279]: DEBUG oslo_vmware.api [None req-b6feae97-fa7d-41e8-b8d5-106af4509856 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Task: {'id': task-2087671, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2166.746410] env[63279]: DEBUG oslo_vmware.api [None req-1b4c0c4c-9a01-4ca3-8f97-810c4c0e8a75 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Task: {'id': task-2087664, 'name': Destroy_Task} progress is 33%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2166.966755] env[63279]: DEBUG nova.scheduler.client.report [None req-1e17e85f-930c-4f26-80d6-8e5a59771f74 tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] Inventory has not changed for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2166.973707] env[63279]: DEBUG oslo_vmware.api [None req-a2668d1a-4c43-4202-b04d-63b7f5aec853 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Task: {'id': task-2087667, 'name': CopyVirtualDisk_Task} progress is 35%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2167.112173] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-a07898be-a6bd-404e-bc74-d9e85bd714b6 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: fcdd3076-2b53-4850-9730-2f877e2cabfd] Powering off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2167.112173] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-bce0fa6c-0fda-4f2a-8f44-633ea726977d {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2167.121085] env[63279]: DEBUG oslo_vmware.api [None req-a07898be-a6bd-404e-bc74-d9e85bd714b6 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Waiting for the task: (returnval){ [ 2167.121085] env[63279]: value = "task-2087672" [ 2167.121085] env[63279]: _type = "Task" [ 2167.121085] env[63279]: } to complete. 
{{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2167.130883] env[63279]: DEBUG oslo_vmware.api [None req-a07898be-a6bd-404e-bc74-d9e85bd714b6 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Task: {'id': task-2087672, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2167.208767] env[63279]: DEBUG oslo_vmware.api [None req-718a590a-91c9-4ba7-b80f-c0d0ab309a1d tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Task: {'id': task-2087669, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2167.218681] env[63279]: DEBUG oslo_concurrency.lockutils [None req-1a1560e9-3891-4b82-b4ac-f78b55448c2c tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Acquiring lock "eca98392-98be-405b-b799-463ef9ee3dc8" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2167.218905] env[63279]: DEBUG oslo_concurrency.lockutils [None req-1a1560e9-3891-4b82-b4ac-f78b55448c2c tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Acquired lock "eca98392-98be-405b-b799-463ef9ee3dc8" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2167.219988] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-301f47a3-79f2-4944-ab2c-a912186ed85a {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2167.252277] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7523d2be-874b-4f15-875f-2e59106e1e99 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2167.256280] env[63279]: DEBUG oslo_vmware.api [None req-b6feae97-fa7d-41e8-b8d5-106af4509856 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Task: {'id': task-2087671, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2167.263514] env[63279]: DEBUG oslo_vmware.api [None req-1b4c0c4c-9a01-4ca3-8f97-810c4c0e8a75 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Task: {'id': task-2087664, 'name': Destroy_Task} progress is 33%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2167.290751] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-1a1560e9-3891-4b82-b4ac-f78b55448c2c tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] [instance: eca98392-98be-405b-b799-463ef9ee3dc8] Reconfiguring VM to detach interface {{(pid=63279) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1968}} [ 2167.290751] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-430bc5a3-e555-4429-842c-3fda1c8844f9 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2167.310043] env[63279]: DEBUG oslo_vmware.api [None req-1a1560e9-3891-4b82-b4ac-f78b55448c2c tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Waiting for the task: (returnval){ [ 2167.310043] env[63279]: value = "task-2087673" [ 2167.310043] env[63279]: _type = "Task" [ 2167.310043] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2167.321296] env[63279]: DEBUG oslo_vmware.api [None req-1a1560e9-3891-4b82-b4ac-f78b55448c2c tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Task: {'id': task-2087673, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2167.408546] env[63279]: DEBUG oslo_concurrency.lockutils [None req-1e17e85f-930c-4f26-80d6-8e5a59771f74 tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.017s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2167.414940] env[63279]: DEBUG oslo_concurrency.lockutils [None req-8d343f0d-29ec-4ca4-9558-62f940177dd9 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 6.739s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2167.415428] env[63279]: DEBUG nova.objects.instance [None req-8d343f0d-29ec-4ca4-9558-62f940177dd9 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Lazy-loading 'resources' on Instance uuid 6388f912-ae70-4e8f-b8e4-ceb02e0f8a51 {{(pid=63279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2167.451120] env[63279]: INFO nova.scheduler.client.report [None req-1e17e85f-930c-4f26-80d6-8e5a59771f74 tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] Deleted allocations for instance 5b5f87cb-cf35-418f-b5bd-b953524a285c [ 2167.479044] env[63279]: DEBUG oslo_vmware.api [None req-a2668d1a-4c43-4202-b04d-63b7f5aec853 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Task: {'id': task-2087667, 'name': CopyVirtualDisk_Task} progress is 54%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2167.636969] env[63279]: DEBUG oslo_vmware.api [None req-a07898be-a6bd-404e-bc74-d9e85bd714b6 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Task: {'id': task-2087672, 'name': PowerOffVM_Task, 'duration_secs': 0.29395} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2167.637570] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-a07898be-a6bd-404e-bc74-d9e85bd714b6 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: fcdd3076-2b53-4850-9730-2f877e2cabfd] Powered off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2167.637935] env[63279]: DEBUG nova.compute.manager [None req-a07898be-a6bd-404e-bc74-d9e85bd714b6 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: fcdd3076-2b53-4850-9730-2f877e2cabfd] Checking state {{(pid=63279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2167.639630] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78ebcf61-c671-4937-acde-99f310dd190c {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2167.709445] env[63279]: DEBUG oslo_vmware.api [None req-718a590a-91c9-4ba7-b80f-c0d0ab309a1d tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Task: {'id': task-2087669, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2167.729856] env[63279]: DEBUG oslo_vmware.api [None req-b6feae97-fa7d-41e8-b8d5-106af4509856 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Task: {'id': task-2087671, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2167.754187] env[63279]: DEBUG oslo_vmware.api [None req-1b4c0c4c-9a01-4ca3-8f97-810c4c0e8a75 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Task: {'id': task-2087664, 'name': Destroy_Task} progress is 33%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2167.824055] env[63279]: DEBUG oslo_vmware.api [None req-1a1560e9-3891-4b82-b4ac-f78b55448c2c tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Task: {'id': task-2087673, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2167.965079] env[63279]: DEBUG oslo_concurrency.lockutils [None req-1e17e85f-930c-4f26-80d6-8e5a59771f74 tempest-SecurityGroupsTestJSON-236425111 tempest-SecurityGroupsTestJSON-236425111-project-member] Lock "5b5f87cb-cf35-418f-b5bd-b953524a285c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 14.997s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2167.978697] env[63279]: DEBUG oslo_vmware.api [None req-a2668d1a-4c43-4202-b04d-63b7f5aec853 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Task: {'id': task-2087667, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2168.009258] env[63279]: DEBUG oslo_concurrency.lockutils [None req-c2c89b32-678a-48f7-a70f-bc8265b6ba5a tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Acquiring lock "2fba60ba-7ef0-4e61-9f7d-147cad8ab7ed" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2168.009649] env[63279]: DEBUG oslo_concurrency.lockutils [None req-c2c89b32-678a-48f7-a70f-bc8265b6ba5a tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Lock "2fba60ba-7ef0-4e61-9f7d-147cad8ab7ed" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2168.155705] env[63279]: DEBUG oslo_concurrency.lockutils [None req-a07898be-a6bd-404e-bc74-d9e85bd714b6 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Lock "fcdd3076-2b53-4850-9730-2f877e2cabfd" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.068s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2168.214761] env[63279]: DEBUG oslo_vmware.api [None req-718a590a-91c9-4ba7-b80f-c0d0ab309a1d tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Task: {'id': task-2087669, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2168.233989] env[63279]: DEBUG oslo_vmware.api [None req-b6feae97-fa7d-41e8-b8d5-106af4509856 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Task: {'id': task-2087671, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2168.258843] env[63279]: DEBUG oslo_vmware.api [None req-1b4c0c4c-9a01-4ca3-8f97-810c4c0e8a75 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Task: {'id': task-2087664, 'name': Destroy_Task} progress is 33%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2168.267128] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7fa86efc-1ecf-4e47-8e0f-8fefb72fb2ee {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2168.275535] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b9795b6-716a-43e4-adb9-f795207f7839 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2168.310938] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-816296ee-d984-46fc-8bac-0c2fa4ec45ff {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2168.328247] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d648ccc-ce9f-4f27-913d-d534ad45e46d {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2168.332879] env[63279]: DEBUG oslo_vmware.api [None req-1a1560e9-3891-4b82-b4ac-f78b55448c2c tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Task: {'id': task-2087673, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2168.345783] env[63279]: DEBUG nova.compute.provider_tree [None req-8d343f0d-29ec-4ca4-9558-62f940177dd9 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Inventory has not changed in ProviderTree for provider: 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2168.479140] env[63279]: DEBUG oslo_vmware.api [None req-a2668d1a-4c43-4202-b04d-63b7f5aec853 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Task: {'id': task-2087667, 'name': CopyVirtualDisk_Task} progress is 94%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2168.513280] env[63279]: DEBUG nova.compute.manager [None req-c2c89b32-678a-48f7-a70f-bc8265b6ba5a tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] [instance: 2fba60ba-7ef0-4e61-9f7d-147cad8ab7ed] Starting instance... {{(pid=63279) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 2168.712631] env[63279]: DEBUG oslo_vmware.api [None req-718a590a-91c9-4ba7-b80f-c0d0ab309a1d tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Task: {'id': task-2087669, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2168.731512] env[63279]: DEBUG oslo_vmware.api [None req-b6feae97-fa7d-41e8-b8d5-106af4509856 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Task: {'id': task-2087671, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2168.760848] env[63279]: DEBUG oslo_vmware.api [None req-1b4c0c4c-9a01-4ca3-8f97-810c4c0e8a75 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Task: {'id': task-2087664, 'name': Destroy_Task} progress is 33%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2168.825058] env[63279]: DEBUG oslo_vmware.api [None req-1a1560e9-3891-4b82-b4ac-f78b55448c2c tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Task: {'id': task-2087673, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2168.848297] env[63279]: DEBUG nova.scheduler.client.report [None req-8d343f0d-29ec-4ca4-9558-62f940177dd9 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Inventory has not changed for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2168.978780] env[63279]: DEBUG oslo_vmware.api [None req-a2668d1a-4c43-4202-b04d-63b7f5aec853 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Task: {'id': task-2087667, 'name': CopyVirtualDisk_Task, 'duration_secs': 3.300877} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2168.979075] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-a2668d1a-4c43-4202-b04d-63b7f5aec853 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/d2d04a47-fc99-4ff0-adbc-a27616252dd0/d2d04a47-fc99-4ff0-adbc-a27616252dd0.vmdk to [datastore1] 4acfb474-d861-467a-983c-0dd5641e66f3/4acfb474-d861-467a-983c-0dd5641e66f3.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2168.979937] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15bb5338-686b-42fb-a847-873344d2cd06 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2169.005125] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-a2668d1a-4c43-4202-b04d-63b7f5aec853 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] [instance: 4acfb474-d861-467a-983c-0dd5641e66f3] Reconfiguring VM instance instance-0000004f to attach disk [datastore1] 4acfb474-d861-467a-983c-0dd5641e66f3/4acfb474-d861-467a-983c-0dd5641e66f3.vmdk or device None with type streamOptimized {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2169.005507] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fbfd3a6d-f1a1-45c5-87f4-61ec266942e1 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2169.028740] env[63279]: DEBUG oslo_vmware.api [None req-a2668d1a-4c43-4202-b04d-63b7f5aec853 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Waiting for the task: (returnval){ [ 2169.028740] env[63279]: value = "task-2087674" [ 2169.028740] env[63279]: _type = "Task" [ 2169.028740] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2169.038348] env[63279]: DEBUG oslo_vmware.api [None req-a2668d1a-4c43-4202-b04d-63b7f5aec853 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Task: {'id': task-2087674, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2169.044351] env[63279]: DEBUG oslo_concurrency.lockutils [None req-c2c89b32-678a-48f7-a70f-bc8265b6ba5a tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2169.212230] env[63279]: DEBUG oslo_vmware.api [None req-718a590a-91c9-4ba7-b80f-c0d0ab309a1d tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Task: {'id': task-2087669, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.570462} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2169.212502] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-718a590a-91c9-4ba7-b80f-c0d0ab309a1d tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] 686a7ce2-2d07-411e-91d6-0471c55c3728/686a7ce2-2d07-411e-91d6-0471c55c3728.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2169.212711] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-718a590a-91c9-4ba7-b80f-c0d0ab309a1d tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 686a7ce2-2d07-411e-91d6-0471c55c3728] Extending root virtual disk to 1048576 {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2169.213019] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-12e08c35-85e9-42ae-9306-97b0a71707a8 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2169.219898] env[63279]: DEBUG oslo_vmware.api [None req-718a590a-91c9-4ba7-b80f-c0d0ab309a1d tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Waiting for the task: (returnval){ [ 2169.219898] env[63279]: value = "task-2087675" [ 2169.219898] env[63279]: _type = "Task" [ 2169.219898] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2169.249552] env[63279]: DEBUG oslo_vmware.api [None req-718a590a-91c9-4ba7-b80f-c0d0ab309a1d tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Task: {'id': task-2087675, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2169.251110] env[63279]: DEBUG oslo_vmware.api [None req-b6feae97-fa7d-41e8-b8d5-106af4509856 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Task: {'id': task-2087671, 'name': ReconfigVM_Task, 'duration_secs': 2.27771} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2169.256536] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-b6feae97-fa7d-41e8-b8d5-106af4509856 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: 736ab3ac-b9e0-4f9e-885b-765ca7a92ed0] Reconfigured VM instance instance-00000050 to attach disk [datastore1] 736ab3ac-b9e0-4f9e-885b-765ca7a92ed0/736ab3ac-b9e0-4f9e-885b-765ca7a92ed0.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2169.260590] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-1d920072-2540-4faa-8485-a472b11e5aea {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2169.269063] env[63279]: DEBUG oslo_vmware.api [None req-1b4c0c4c-9a01-4ca3-8f97-810c4c0e8a75 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Task: {'id': task-2087664, 'name': Destroy_Task} progress is 33%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2169.270541] env[63279]: DEBUG oslo_vmware.api [None req-b6feae97-fa7d-41e8-b8d5-106af4509856 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Waiting for the task: (returnval){ [ 2169.270541] env[63279]: value = "task-2087676" [ 2169.270541] env[63279]: _type = "Task" [ 2169.270541] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2169.279885] env[63279]: DEBUG oslo_vmware.api [None req-b6feae97-fa7d-41e8-b8d5-106af4509856 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Task: {'id': task-2087676, 'name': Rename_Task} progress is 5%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2169.326946] env[63279]: DEBUG oslo_vmware.api [None req-1a1560e9-3891-4b82-b4ac-f78b55448c2c tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Task: {'id': task-2087673, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2169.353973] env[63279]: DEBUG oslo_concurrency.lockutils [None req-8d343f0d-29ec-4ca4-9558-62f940177dd9 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.939s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2169.356426] env[63279]: DEBUG oslo_concurrency.lockutils [None req-3b4d6d87-fd3c-4686-b1f3-07b747c233dd tempest-ServerActionsV293TestJSON-360186716 tempest-ServerActionsV293TestJSON-360186716-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 2.627s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2169.356678] env[63279]: DEBUG nova.objects.instance [None req-3b4d6d87-fd3c-4686-b1f3-07b747c233dd tempest-ServerActionsV293TestJSON-360186716 tempest-ServerActionsV293TestJSON-360186716-project-member] Lazy-loading 'resources' on Instance uuid f375b54b-f9de-4529-b752-52c240aed532 {{(pid=63279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2169.389956] env[63279]: INFO nova.scheduler.client.report [None req-8d343f0d-29ec-4ca4-9558-62f940177dd9 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Deleted allocations for instance 6388f912-ae70-4e8f-b8e4-ceb02e0f8a51 [ 2169.540922] env[63279]: DEBUG oslo_vmware.api [None req-a2668d1a-4c43-4202-b04d-63b7f5aec853 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Task: {'id': task-2087674, 'name': ReconfigVM_Task} progress is 99%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2169.734022] env[63279]: DEBUG oslo_vmware.api [None req-718a590a-91c9-4ba7-b80f-c0d0ab309a1d tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Task: {'id': task-2087675, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.067551} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2169.734022] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-718a590a-91c9-4ba7-b80f-c0d0ab309a1d tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 686a7ce2-2d07-411e-91d6-0471c55c3728] Extended root virtual disk {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2169.734996] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-188866c6-bff7-4a5d-a238-455c3c09a4ea {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2169.758946] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-718a590a-91c9-4ba7-b80f-c0d0ab309a1d tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 686a7ce2-2d07-411e-91d6-0471c55c3728] Reconfiguring VM instance instance-00000051 to attach disk [datastore1] 686a7ce2-2d07-411e-91d6-0471c55c3728/686a7ce2-2d07-411e-91d6-0471c55c3728.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2169.759842] env[63279]: DEBUG nova.objects.instance [None req-837a40d2-b2cf-457a-9bbe-1c1cdc807f4c tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Lazy-loading 'flavor' on Instance uuid fcdd3076-2b53-4850-9730-2f877e2cabfd {{(pid=63279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2169.761364] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ac514d16-7192-4969-8efe-a21a42ac00da {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2169.786894] env[63279]: DEBUG oslo_concurrency.lockutils [None req-837a40d2-b2cf-457a-9bbe-1c1cdc807f4c tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Acquiring lock "refresh_cache-fcdd3076-2b53-4850-9730-2f877e2cabfd" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2169.787299] env[63279]: DEBUG oslo_concurrency.lockutils [None req-837a40d2-b2cf-457a-9bbe-1c1cdc807f4c tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Acquired lock "refresh_cache-fcdd3076-2b53-4850-9730-2f877e2cabfd" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2169.787516] env[63279]: DEBUG nova.network.neutron [None req-837a40d2-b2cf-457a-9bbe-1c1cdc807f4c tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: fcdd3076-2b53-4850-9730-2f877e2cabfd] Building network info cache for instance {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2169.787858] env[63279]: DEBUG nova.objects.instance [None req-837a40d2-b2cf-457a-9bbe-1c1cdc807f4c tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Lazy-loading 'info_cache' on Instance uuid fcdd3076-2b53-4850-9730-2f877e2cabfd {{(pid=63279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2169.806565] env[63279]: DEBUG oslo_vmware.api [None req-1b4c0c4c-9a01-4ca3-8f97-810c4c0e8a75 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] 
Task: {'id': task-2087664, 'name': Destroy_Task} progress is 33%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2169.806884] env[63279]: DEBUG oslo_vmware.api [None req-718a590a-91c9-4ba7-b80f-c0d0ab309a1d tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Waiting for the task: (returnval){ [ 2169.806884] env[63279]: value = "task-2087677" [ 2169.806884] env[63279]: _type = "Task" [ 2169.806884] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2169.807110] env[63279]: DEBUG oslo_vmware.api [None req-b6feae97-fa7d-41e8-b8d5-106af4509856 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Task: {'id': task-2087676, 'name': Rename_Task, 'duration_secs': 0.252742} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2169.807419] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-b6feae97-fa7d-41e8-b8d5-106af4509856 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: 736ab3ac-b9e0-4f9e-885b-765ca7a92ed0] Powering on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2169.811406] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2619f294-3d8e-4014-a4b8-a5e541ef54dd {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2169.819547] env[63279]: DEBUG oslo_vmware.api [None req-718a590a-91c9-4ba7-b80f-c0d0ab309a1d tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Task: {'id': task-2087677, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2169.824727] env[63279]: DEBUG oslo_vmware.api [None req-b6feae97-fa7d-41e8-b8d5-106af4509856 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Waiting for the task: (returnval){ [ 2169.824727] env[63279]: value = "task-2087678" [ 2169.824727] env[63279]: _type = "Task" [ 2169.824727] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2169.833356] env[63279]: DEBUG oslo_vmware.api [None req-1a1560e9-3891-4b82-b4ac-f78b55448c2c tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Task: {'id': task-2087673, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2169.837659] env[63279]: DEBUG oslo_vmware.api [None req-b6feae97-fa7d-41e8-b8d5-106af4509856 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Task: {'id': task-2087678, 'name': PowerOnVM_Task} progress is 33%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2169.906432] env[63279]: DEBUG oslo_concurrency.lockutils [None req-8d343f0d-29ec-4ca4-9558-62f940177dd9 tempest-ServersAdminTestJSON-1578757811 tempest-ServersAdminTestJSON-1578757811-project-member] Lock "6388f912-ae70-4e8f-b8e4-ceb02e0f8a51" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 14.051s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2170.042449] env[63279]: DEBUG oslo_vmware.api [None req-a2668d1a-4c43-4202-b04d-63b7f5aec853 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Task: {'id': task-2087674, 'name': ReconfigVM_Task, 'duration_secs': 0.535373} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2170.042827] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-a2668d1a-4c43-4202-b04d-63b7f5aec853 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] [instance: 4acfb474-d861-467a-983c-0dd5641e66f3] Reconfigured VM instance instance-0000004f to attach disk [datastore1] 4acfb474-d861-467a-983c-0dd5641e66f3/4acfb474-d861-467a-983c-0dd5641e66f3.vmdk or device None with type streamOptimized {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2170.043491] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a7440f9d-8f99-4ded-9575-f9f3eb0553f9 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2170.050866] env[63279]: DEBUG oslo_vmware.api [None req-a2668d1a-4c43-4202-b04d-63b7f5aec853 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Waiting for the task: (returnval){ [ 2170.050866] env[63279]: value = "task-2087679" [ 2170.050866] env[63279]: _type = "Task" [ 2170.050866] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2170.061928] env[63279]: DEBUG oslo_vmware.api [None req-a2668d1a-4c43-4202-b04d-63b7f5aec853 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Task: {'id': task-2087679, 'name': Rename_Task} progress is 5%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2170.163432] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41c7f828-3bd8-473c-9fbf-bd0b4508a201 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2170.171126] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34d0d8b4-6aa0-4d2c-a31c-3cb966dbdc6f {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2170.205486] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cacc7731-2466-4be8-aece-8416490c1c6b {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2170.214202] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46ebaf8f-17e3-4c04-92f2-0edc973a5ed8 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2170.231209] env[63279]: DEBUG nova.compute.provider_tree [None req-3b4d6d87-fd3c-4686-b1f3-07b747c233dd tempest-ServerActionsV293TestJSON-360186716 tempest-ServerActionsV293TestJSON-360186716-project-member] Inventory has not changed in ProviderTree for provider: 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2170.270159] env[63279]: DEBUG oslo_vmware.api [None req-1b4c0c4c-9a01-4ca3-8f97-810c4c0e8a75 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Task: {'id': task-2087664, 'name': Destroy_Task} progress is 33%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2170.300086] env[63279]: DEBUG nova.objects.base [None req-837a40d2-b2cf-457a-9bbe-1c1cdc807f4c tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Object Instance lazy-loaded attributes: flavor,info_cache {{(pid=63279) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 2170.324496] env[63279]: DEBUG oslo_vmware.api [None req-718a590a-91c9-4ba7-b80f-c0d0ab309a1d tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Task: {'id': task-2087677, 'name': ReconfigVM_Task, 'duration_secs': 0.39127} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2170.325991] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-718a590a-91c9-4ba7-b80f-c0d0ab309a1d tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 686a7ce2-2d07-411e-91d6-0471c55c3728] Reconfigured VM instance instance-00000051 to attach disk [datastore1] 686a7ce2-2d07-411e-91d6-0471c55c3728/686a7ce2-2d07-411e-91d6-0471c55c3728.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2170.327136] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-231111d1-ec0e-4c24-8b03-3f34e83100f4 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2170.342641] env[63279]: DEBUG oslo_vmware.api [None req-1a1560e9-3891-4b82-b4ac-f78b55448c2c tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Task: {'id': task-2087673, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2170.347313] env[63279]: DEBUG oslo_vmware.api [None req-718a590a-91c9-4ba7-b80f-c0d0ab309a1d tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Waiting for the task: (returnval){ [ 2170.347313] env[63279]: value = "task-2087680" [ 2170.347313] env[63279]: _type = "Task" [ 2170.347313] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2170.347851] env[63279]: DEBUG oslo_vmware.api [None req-b6feae97-fa7d-41e8-b8d5-106af4509856 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Task: {'id': task-2087678, 'name': PowerOnVM_Task, 'duration_secs': 0.517301} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2170.348010] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-b6feae97-fa7d-41e8-b8d5-106af4509856 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: 736ab3ac-b9e0-4f9e-885b-765ca7a92ed0] Powered on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2170.348268] env[63279]: INFO nova.compute.manager [None req-b6feae97-fa7d-41e8-b8d5-106af4509856 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: 736ab3ac-b9e0-4f9e-885b-765ca7a92ed0] Took 11.72 seconds to spawn the instance on the hypervisor. 
[ 2170.348461] env[63279]: DEBUG nova.compute.manager [None req-b6feae97-fa7d-41e8-b8d5-106af4509856 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: 736ab3ac-b9e0-4f9e-885b-765ca7a92ed0] Checking state {{(pid=63279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2170.352428] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc4f5534-e602-4239-a19a-64f2f7ea06f5 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2170.364147] env[63279]: DEBUG oslo_vmware.api [None req-718a590a-91c9-4ba7-b80f-c0d0ab309a1d tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Task: {'id': task-2087680, 'name': Rename_Task} progress is 14%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2170.563219] env[63279]: DEBUG oslo_vmware.api [None req-a2668d1a-4c43-4202-b04d-63b7f5aec853 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Task: {'id': task-2087679, 'name': Rename_Task, 'duration_secs': 0.22194} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2170.563512] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-a2668d1a-4c43-4202-b04d-63b7f5aec853 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] [instance: 4acfb474-d861-467a-983c-0dd5641e66f3] Powering on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2170.563763] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-23635aac-c237-4f34-bc4e-7a880c37f1b6 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2170.573670] env[63279]: DEBUG oslo_vmware.api [None req-a2668d1a-4c43-4202-b04d-63b7f5aec853 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Waiting for the task: (returnval){ [ 2170.573670] env[63279]: value = "task-2087681" [ 2170.573670] env[63279]: _type = "Task" [ 2170.573670] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2170.586016] env[63279]: DEBUG oslo_vmware.api [None req-a2668d1a-4c43-4202-b04d-63b7f5aec853 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Task: {'id': task-2087681, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2170.736375] env[63279]: DEBUG nova.scheduler.client.report [None req-3b4d6d87-fd3c-4686-b1f3-07b747c233dd tempest-ServerActionsV293TestJSON-360186716 tempest-ServerActionsV293TestJSON-360186716-project-member] Inventory has not changed for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2170.772547] env[63279]: DEBUG oslo_vmware.api [None req-1b4c0c4c-9a01-4ca3-8f97-810c4c0e8a75 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Task: {'id': task-2087664, 'name': Destroy_Task} progress is 33%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2170.839838] env[63279]: DEBUG oslo_vmware.api [None req-1a1560e9-3891-4b82-b4ac-f78b55448c2c tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Task: {'id': task-2087673, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2170.861033] env[63279]: DEBUG oslo_vmware.api [None req-718a590a-91c9-4ba7-b80f-c0d0ab309a1d tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Task: {'id': task-2087680, 'name': Rename_Task, 'duration_secs': 0.144082} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2170.861467] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-718a590a-91c9-4ba7-b80f-c0d0ab309a1d tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 686a7ce2-2d07-411e-91d6-0471c55c3728] Powering on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2170.861884] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f22b2e50-dc65-4ac2-bb3f-7744a2780340 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2170.869890] env[63279]: DEBUG oslo_vmware.api [None req-718a590a-91c9-4ba7-b80f-c0d0ab309a1d tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Waiting for the task: (returnval){ [ 2170.869890] env[63279]: value = "task-2087682" [ 2170.869890] env[63279]: _type = "Task" [ 2170.869890] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2170.889520] env[63279]: DEBUG oslo_vmware.api [None req-718a590a-91c9-4ba7-b80f-c0d0ab309a1d tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Task: {'id': task-2087682, 'name': PowerOnVM_Task} progress is 33%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2170.894929] env[63279]: INFO nova.compute.manager [None req-b6feae97-fa7d-41e8-b8d5-106af4509856 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: 736ab3ac-b9e0-4f9e-885b-765ca7a92ed0] Took 26.81 seconds to build instance. [ 2171.084932] env[63279]: DEBUG oslo_vmware.api [None req-a2668d1a-4c43-4202-b04d-63b7f5aec853 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Task: {'id': task-2087681, 'name': PowerOnVM_Task} progress is 89%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2171.242049] env[63279]: DEBUG oslo_concurrency.lockutils [None req-3b4d6d87-fd3c-4686-b1f3-07b747c233dd tempest-ServerActionsV293TestJSON-360186716 tempest-ServerActionsV293TestJSON-360186716-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.886s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2171.245395] env[63279]: DEBUG oslo_concurrency.lockutils [None req-c2c89b32-678a-48f7-a70f-bc8265b6ba5a tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 2.201s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2171.247250] env[63279]: INFO nova.compute.claims [None req-c2c89b32-678a-48f7-a70f-bc8265b6ba5a tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] [instance: 2fba60ba-7ef0-4e61-9f7d-147cad8ab7ed] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2171.272970] env[63279]: DEBUG oslo_vmware.api [None req-1b4c0c4c-9a01-4ca3-8f97-810c4c0e8a75 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Task: {'id': task-2087664, 'name': Destroy_Task} progress is 33%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2171.286855] env[63279]: INFO nova.scheduler.client.report [None req-3b4d6d87-fd3c-4686-b1f3-07b747c233dd tempest-ServerActionsV293TestJSON-360186716 tempest-ServerActionsV293TestJSON-360186716-project-member] Deleted allocations for instance f375b54b-f9de-4529-b752-52c240aed532 [ 2171.340639] env[63279]: DEBUG oslo_vmware.api [None req-1a1560e9-3891-4b82-b4ac-f78b55448c2c tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Task: {'id': task-2087673, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2171.383695] env[63279]: DEBUG oslo_vmware.api [None req-718a590a-91c9-4ba7-b80f-c0d0ab309a1d tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Task: {'id': task-2087682, 'name': PowerOnVM_Task, 'duration_secs': 0.512719} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2171.384092] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-718a590a-91c9-4ba7-b80f-c0d0ab309a1d tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 686a7ce2-2d07-411e-91d6-0471c55c3728] Powered on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2171.384225] env[63279]: INFO nova.compute.manager [None req-718a590a-91c9-4ba7-b80f-c0d0ab309a1d tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 686a7ce2-2d07-411e-91d6-0471c55c3728] Took 10.33 seconds to spawn the instance on the hypervisor. [ 2171.384392] env[63279]: DEBUG nova.compute.manager [None req-718a590a-91c9-4ba7-b80f-c0d0ab309a1d tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 686a7ce2-2d07-411e-91d6-0471c55c3728] Checking state {{(pid=63279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2171.385339] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20ca1af9-fc0f-493e-8b00-f51bd5edf772 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2171.396379] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b6feae97-fa7d-41e8-b8d5-106af4509856 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Lock "736ab3ac-b9e0-4f9e-885b-765ca7a92ed0" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 28.325s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2171.446214] env[63279]: DEBUG nova.network.neutron [None req-837a40d2-b2cf-457a-9bbe-1c1cdc807f4c tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: fcdd3076-2b53-4850-9730-2f877e2cabfd] Updating instance_info_cache with network_info: [{"id": "7baa1106-63c3-49cd-8d80-54c763d1b2b3", "address": "fa:16:3e:cc:16:0a", "network": {"id": "82ca068b-acea-4f60-b0b3-68ea1e08aebe", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-571699353-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.165", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f39174e9ff5649e0ade4391da383dfb2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "57d4be17-536f-4a81-bea9-6547bd50f4a3", "external-id": "nsx-vlan-transportzone-163", "segmentation_id": 163, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7baa1106-63", "ovs_interfaceid": "7baa1106-63c3-49cd-8d80-54c763d1b2b3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2171.585299] env[63279]: DEBUG 
oslo_vmware.api [None req-a2668d1a-4c43-4202-b04d-63b7f5aec853 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Task: {'id': task-2087681, 'name': PowerOnVM_Task, 'duration_secs': 0.572547} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2171.585671] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-a2668d1a-4c43-4202-b04d-63b7f5aec853 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] [instance: 4acfb474-d861-467a-983c-0dd5641e66f3] Powered on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2171.585943] env[63279]: INFO nova.compute.manager [None req-a2668d1a-4c43-4202-b04d-63b7f5aec853 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] [instance: 4acfb474-d861-467a-983c-0dd5641e66f3] Took 27.91 seconds to spawn the instance on the hypervisor. [ 2171.586212] env[63279]: DEBUG nova.compute.manager [None req-a2668d1a-4c43-4202-b04d-63b7f5aec853 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] [instance: 4acfb474-d861-467a-983c-0dd5641e66f3] Checking state {{(pid=63279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2171.587098] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6c5165d-10e9-44ab-b206-1aa1452dea54 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2171.776652] env[63279]: DEBUG oslo_vmware.api [None req-1b4c0c4c-9a01-4ca3-8f97-810c4c0e8a75 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Task: {'id': task-2087664, 'name': Destroy_Task} progress is 33%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2171.797748] env[63279]: DEBUG oslo_concurrency.lockutils [None req-3b4d6d87-fd3c-4686-b1f3-07b747c233dd tempest-ServerActionsV293TestJSON-360186716 tempest-ServerActionsV293TestJSON-360186716-project-member] Lock "f375b54b-f9de-4529-b752-52c240aed532" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 13.591s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2171.834150] env[63279]: DEBUG oslo_vmware.api [None req-1a1560e9-3891-4b82-b4ac-f78b55448c2c tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Task: {'id': task-2087673, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2171.906129] env[63279]: INFO nova.compute.manager [None req-718a590a-91c9-4ba7-b80f-c0d0ab309a1d tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 686a7ce2-2d07-411e-91d6-0471c55c3728] Took 25.20 seconds to build instance. 
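Editor's note: the oslo_concurrency.lockutils entries in this stretch report how long each caller waited for a named lock and how long it was held (for example "waited 2.201s", "held 28.325s"). The sketch below mimics only that waited/held accounting with a plain threading.Lock; it is not oslo.concurrency and omits its external/fair lock features.

# Sketch of the waited/held timing behind the lockutils messages, using a
# plain threading.Lock as a simplified stand-in for oslo.concurrency.
import threading
import time
from contextlib import contextmanager

_locks: dict = {}
_registry_guard = threading.Lock()

@contextmanager
def timed_lock(name: str, owner: str):
    with _registry_guard:
        lock = _locks.setdefault(name, threading.Lock())
    t0 = time.monotonic()
    lock.acquire()
    waited = time.monotonic() - t0
    print(f'Lock "{name}" acquired by "{owner}" :: waited {waited:.3f}s')
    t1 = time.monotonic()
    try:
        yield
    finally:
        lock.release()
        held = time.monotonic() - t1
        print(f'Lock "{name}" "released" by "{owner}" :: held {held:.3f}s')

if __name__ == "__main__":
    with timed_lock("compute_resources", "ResourceTracker.instance_claim"):
        time.sleep(0.05)  # simulate work done while holding the lock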
[ 2171.919286] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-1cd07031-a029-4d28-82f1-ec2398e9fd52 tempest-ServersTestJSON-1697234190 tempest-ServersTestJSON-1697234190-project-member] [instance: c8b42e3b-b841-4b79-a4f3-ef62577d4902] Unregistered the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2171.919286] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-1cd07031-a029-4d28-82f1-ec2398e9fd52 tempest-ServersTestJSON-1697234190 tempest-ServersTestJSON-1697234190-project-member] [instance: c8b42e3b-b841-4b79-a4f3-ef62577d4902] Deleting contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2171.919286] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-1cd07031-a029-4d28-82f1-ec2398e9fd52 tempest-ServersTestJSON-1697234190 tempest-ServersTestJSON-1697234190-project-member] Deleting the datastore file [datastore1] c8b42e3b-b841-4b79-a4f3-ef62577d4902 {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2171.919286] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0843762c-029a-4c52-a1cf-c36f6acfc3cf {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2171.929182] env[63279]: DEBUG oslo_vmware.api [None req-1cd07031-a029-4d28-82f1-ec2398e9fd52 tempest-ServersTestJSON-1697234190 tempest-ServersTestJSON-1697234190-project-member] Waiting for the task: (returnval){ [ 2171.929182] env[63279]: value = "task-2087683" [ 2171.929182] env[63279]: _type = "Task" [ 2171.929182] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2171.938952] env[63279]: DEBUG oslo_vmware.api [None req-1cd07031-a029-4d28-82f1-ec2398e9fd52 tempest-ServersTestJSON-1697234190 tempest-ServersTestJSON-1697234190-project-member] Task: {'id': task-2087683, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2171.949588] env[63279]: DEBUG oslo_concurrency.lockutils [None req-837a40d2-b2cf-457a-9bbe-1c1cdc807f4c tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Releasing lock "refresh_cache-fcdd3076-2b53-4850-9730-2f877e2cabfd" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2172.107777] env[63279]: INFO nova.compute.manager [None req-a2668d1a-4c43-4202-b04d-63b7f5aec853 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] [instance: 4acfb474-d861-467a-983c-0dd5641e66f3] Took 45.12 seconds to build instance. [ 2172.280931] env[63279]: DEBUG oslo_vmware.api [None req-1b4c0c4c-9a01-4ca3-8f97-810c4c0e8a75 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Task: {'id': task-2087664, 'name': Destroy_Task, 'duration_secs': 7.352573} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2172.283016] env[63279]: INFO nova.virt.vmwareapi.vm_util [None req-1b4c0c4c-9a01-4ca3-8f97-810c4c0e8a75 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: d6e40dbc-f20e-4164-b460-18de6ea72906] Destroyed the VM [ 2172.283016] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-1b4c0c4c-9a01-4ca3-8f97-810c4c0e8a75 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: d6e40dbc-f20e-4164-b460-18de6ea72906] Deleting Snapshot of the VM instance {{(pid=63279) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 2172.283016] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-532366e9-b559-4ead-9dc4-0795f47302e7 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2172.293547] env[63279]: DEBUG oslo_vmware.api [None req-1b4c0c4c-9a01-4ca3-8f97-810c4c0e8a75 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Waiting for the task: (returnval){ [ 2172.293547] env[63279]: value = "task-2087684" [ 2172.293547] env[63279]: _type = "Task" [ 2172.293547] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2172.309386] env[63279]: DEBUG oslo_vmware.api [None req-1b4c0c4c-9a01-4ca3-8f97-810c4c0e8a75 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Task: {'id': task-2087684, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2172.338983] env[63279]: DEBUG oslo_vmware.api [None req-1a1560e9-3891-4b82-b4ac-f78b55448c2c tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Task: {'id': task-2087673, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2172.414601] env[63279]: DEBUG oslo_concurrency.lockutils [None req-718a590a-91c9-4ba7-b80f-c0d0ab309a1d tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Lock "686a7ce2-2d07-411e-91d6-0471c55c3728" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 26.715s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2172.438464] env[63279]: DEBUG oslo_vmware.api [None req-1cd07031-a029-4d28-82f1-ec2398e9fd52 tempest-ServersTestJSON-1697234190 tempest-ServersTestJSON-1697234190-project-member] Task: {'id': task-2087683, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.353934} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2172.438606] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-1cd07031-a029-4d28-82f1-ec2398e9fd52 tempest-ServersTestJSON-1697234190 tempest-ServersTestJSON-1697234190-project-member] Deleted the datastore file {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2172.438847] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-1cd07031-a029-4d28-82f1-ec2398e9fd52 tempest-ServersTestJSON-1697234190 tempest-ServersTestJSON-1697234190-project-member] [instance: c8b42e3b-b841-4b79-a4f3-ef62577d4902] Deleted contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2172.439029] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-1cd07031-a029-4d28-82f1-ec2398e9fd52 tempest-ServersTestJSON-1697234190 tempest-ServersTestJSON-1697234190-project-member] [instance: c8b42e3b-b841-4b79-a4f3-ef62577d4902] Instance destroyed {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2172.439218] env[63279]: INFO nova.compute.manager [None req-1cd07031-a029-4d28-82f1-ec2398e9fd52 tempest-ServersTestJSON-1697234190 tempest-ServersTestJSON-1697234190-project-member] [instance: c8b42e3b-b841-4b79-a4f3-ef62577d4902] Took 8.33 seconds to destroy the instance on the hypervisor. [ 2172.439592] env[63279]: DEBUG oslo.service.loopingcall [None req-1cd07031-a029-4d28-82f1-ec2398e9fd52 tempest-ServersTestJSON-1697234190 tempest-ServersTestJSON-1697234190-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2172.439812] env[63279]: DEBUG nova.compute.manager [-] [instance: c8b42e3b-b841-4b79-a4f3-ef62577d4902] Deallocating network for instance {{(pid=63279) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2172.439909] env[63279]: DEBUG nova.network.neutron [-] [instance: c8b42e3b-b841-4b79-a4f3-ef62577d4902] deallocate_for_instance() {{(pid=63279) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2172.587159] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45d96027-f822-4043-8e48-9bc0e56c3b21 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2172.597031] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91d9bbd4-4904-4fc9-ada9-d4a1e7549bc0 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2172.627398] env[63279]: DEBUG oslo_concurrency.lockutils [None req-a2668d1a-4c43-4202-b04d-63b7f5aec853 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Lock "4acfb474-d861-467a-983c-0dd5641e66f3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 46.650s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2172.628428] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa00a0cc-5b2e-449f-b5e8-9b4bb5d8ad34 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
2172.636643] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2816a9fa-58c3-45d7-9811-fdf316df10c5 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2172.654313] env[63279]: DEBUG nova.compute.provider_tree [None req-c2c89b32-678a-48f7-a70f-bc8265b6ba5a tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Updating inventory in ProviderTree for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2172.804919] env[63279]: DEBUG oslo_vmware.api [None req-1b4c0c4c-9a01-4ca3-8f97-810c4c0e8a75 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Task: {'id': task-2087684, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2172.838035] env[63279]: DEBUG oslo_vmware.api [None req-1a1560e9-3891-4b82-b4ac-f78b55448c2c tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Task: {'id': task-2087673, 'name': ReconfigVM_Task} progress is 18%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2172.958731] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-837a40d2-b2cf-457a-9bbe-1c1cdc807f4c tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: fcdd3076-2b53-4850-9730-2f877e2cabfd] Powering on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2172.959090] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-86a11539-f2aa-4f50-8f3f-99bc07ed31b4 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2172.967948] env[63279]: DEBUG oslo_vmware.api [None req-837a40d2-b2cf-457a-9bbe-1c1cdc807f4c tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Waiting for the task: (returnval){ [ 2172.967948] env[63279]: value = "task-2087685" [ 2172.967948] env[63279]: _type = "Task" [ 2172.967948] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2172.976816] env[63279]: DEBUG oslo_vmware.api [None req-837a40d2-b2cf-457a-9bbe-1c1cdc807f4c tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Task: {'id': task-2087685, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2173.185977] env[63279]: ERROR nova.scheduler.client.report [None req-c2c89b32-678a-48f7-a70f-bc8265b6ba5a tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] [req-1c13959b-182f-4d30-852b-067baa8431cc] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 0ba7c625-a0fc-4d3c-b804-196d00f00137. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-1c13959b-182f-4d30-852b-067baa8431cc"}]} [ 2173.210617] env[63279]: DEBUG nova.scheduler.client.report [None req-c2c89b32-678a-48f7-a70f-bc8265b6ba5a tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Refreshing inventories for resource provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 2173.221823] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b5627f37-2a24-479b-99ea-e01b4f65ca3e tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Acquiring lock "4acfb474-d861-467a-983c-0dd5641e66f3" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2173.221823] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b5627f37-2a24-479b-99ea-e01b4f65ca3e tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Lock "4acfb474-d861-467a-983c-0dd5641e66f3" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2173.221823] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b5627f37-2a24-479b-99ea-e01b4f65ca3e tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Acquiring lock "4acfb474-d861-467a-983c-0dd5641e66f3-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2173.221823] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b5627f37-2a24-479b-99ea-e01b4f65ca3e tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Lock "4acfb474-d861-467a-983c-0dd5641e66f3-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2173.221823] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b5627f37-2a24-479b-99ea-e01b4f65ca3e tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Lock "4acfb474-d861-467a-983c-0dd5641e66f3-events" "released" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2173.224327] env[63279]: INFO nova.compute.manager [None req-b5627f37-2a24-479b-99ea-e01b4f65ca3e tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] [instance: 4acfb474-d861-467a-983c-0dd5641e66f3] Terminating instance [ 2173.232512] env[63279]: DEBUG nova.scheduler.client.report [None req-c2c89b32-678a-48f7-a70f-bc8265b6ba5a tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Updating ProviderTree inventory for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 2173.232744] env[63279]: DEBUG nova.compute.provider_tree [None req-c2c89b32-678a-48f7-a70f-bc8265b6ba5a tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Updating inventory in ProviderTree for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2173.248256] env[63279]: DEBUG nova.scheduler.client.report [None req-c2c89b32-678a-48f7-a70f-bc8265b6ba5a tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Refreshing aggregate associations for resource provider 0ba7c625-a0fc-4d3c-b804-196d00f00137, aggregates: None {{(pid=63279) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 2173.270912] env[63279]: DEBUG nova.scheduler.client.report [None req-c2c89b32-678a-48f7-a70f-bc8265b6ba5a tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Refreshing trait associations for resource provider 0ba7c625-a0fc-4d3c-b804-196d00f00137, traits: COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK {{(pid=63279) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 2173.305983] env[63279]: DEBUG oslo_vmware.api [None req-1b4c0c4c-9a01-4ca3-8f97-810c4c0e8a75 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Task: {'id': task-2087684, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2173.341671] env[63279]: DEBUG oslo_vmware.api [None req-1a1560e9-3891-4b82-b4ac-f78b55448c2c tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Task: {'id': task-2087673, 'name': ReconfigVM_Task, 'duration_secs': 5.940785} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2173.345329] env[63279]: DEBUG oslo_concurrency.lockutils [None req-1a1560e9-3891-4b82-b4ac-f78b55448c2c tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Releasing lock "eca98392-98be-405b-b799-463ef9ee3dc8" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2173.345329] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-1a1560e9-3891-4b82-b4ac-f78b55448c2c tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] [instance: eca98392-98be-405b-b799-463ef9ee3dc8] Reconfigured VM to detach interface {{(pid=63279) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1978}} [ 2173.481932] env[63279]: DEBUG oslo_vmware.api [None req-837a40d2-b2cf-457a-9bbe-1c1cdc807f4c tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Task: {'id': task-2087685, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2173.545517] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c9c824d-ce81-439e-8f09-6d5e743b2e23 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2173.552919] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc2c1c22-0416-4050-a693-e7986d9ff14f {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2173.582921] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0acee4f-689c-4b83-8807-687b1a2f3574 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2173.590797] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ddef1d6-ff71-49a9-bb37-545cf152387d {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2173.606547] env[63279]: DEBUG nova.compute.provider_tree [None req-c2c89b32-678a-48f7-a70f-bc8265b6ba5a tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Updating inventory in ProviderTree for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2173.716112] env[63279]: DEBUG nova.network.neutron [None req-082a3afc-69fa-402f-b3d2-e85c1a1a4788 
tempest-ServersTestMultiNic-465204663 tempest-ServersTestMultiNic-465204663-project-member] [instance: 75f5b49f-14e7-4a8e-a0cb-b955edc13dd5] Successfully updated port: f5f17197-3d49-4ae2-a895-9bab23e07301 {{(pid=63279) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2173.728059] env[63279]: DEBUG nova.compute.manager [None req-b5627f37-2a24-479b-99ea-e01b4f65ca3e tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] [instance: 4acfb474-d861-467a-983c-0dd5641e66f3] Start destroying the instance on the hypervisor. {{(pid=63279) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2173.728308] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-b5627f37-2a24-479b-99ea-e01b4f65ca3e tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] [instance: 4acfb474-d861-467a-983c-0dd5641e66f3] Destroying instance {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2173.729510] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ff7a50f-78d2-4314-9c9a-ee3d9da57235 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2173.741728] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-b5627f37-2a24-479b-99ea-e01b4f65ca3e tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] [instance: 4acfb474-d861-467a-983c-0dd5641e66f3] Powering off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2173.742193] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-405d3da6-831a-4c6b-896b-d86163bc671b {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2173.749390] env[63279]: DEBUG oslo_vmware.api [None req-b5627f37-2a24-479b-99ea-e01b4f65ca3e tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Waiting for the task: (returnval){ [ 2173.749390] env[63279]: value = "task-2087686" [ 2173.749390] env[63279]: _type = "Task" [ 2173.749390] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2173.759028] env[63279]: DEBUG oslo_vmware.api [None req-b5627f37-2a24-479b-99ea-e01b4f65ca3e tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Task: {'id': task-2087686, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2173.788593] env[63279]: DEBUG nova.compute.manager [req-a0ef0e77-d26c-43af-bb86-03faadc0ed1c req-3dc568b4-661f-4388-b324-232f190aa18f service nova] [instance: 75f5b49f-14e7-4a8e-a0cb-b955edc13dd5] Received event network-vif-plugged-f5f17197-3d49-4ae2-a895-9bab23e07301 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2173.788593] env[63279]: DEBUG oslo_concurrency.lockutils [req-a0ef0e77-d26c-43af-bb86-03faadc0ed1c req-3dc568b4-661f-4388-b324-232f190aa18f service nova] Acquiring lock "75f5b49f-14e7-4a8e-a0cb-b955edc13dd5-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2173.788741] env[63279]: DEBUG oslo_concurrency.lockutils [req-a0ef0e77-d26c-43af-bb86-03faadc0ed1c req-3dc568b4-661f-4388-b324-232f190aa18f service nova] Lock "75f5b49f-14e7-4a8e-a0cb-b955edc13dd5-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2173.789749] env[63279]: DEBUG oslo_concurrency.lockutils [req-a0ef0e77-d26c-43af-bb86-03faadc0ed1c req-3dc568b4-661f-4388-b324-232f190aa18f service nova] Lock "75f5b49f-14e7-4a8e-a0cb-b955edc13dd5-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2173.789749] env[63279]: DEBUG nova.compute.manager [req-a0ef0e77-d26c-43af-bb86-03faadc0ed1c req-3dc568b4-661f-4388-b324-232f190aa18f service nova] [instance: 75f5b49f-14e7-4a8e-a0cb-b955edc13dd5] No waiting events found dispatching network-vif-plugged-f5f17197-3d49-4ae2-a895-9bab23e07301 {{(pid=63279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 2173.790149] env[63279]: WARNING nova.compute.manager [req-a0ef0e77-d26c-43af-bb86-03faadc0ed1c req-3dc568b4-661f-4388-b324-232f190aa18f service nova] [instance: 75f5b49f-14e7-4a8e-a0cb-b955edc13dd5] Received unexpected event network-vif-plugged-f5f17197-3d49-4ae2-a895-9bab23e07301 for instance with vm_state building and task_state spawning. [ 2173.807123] env[63279]: DEBUG oslo_vmware.api [None req-1b4c0c4c-9a01-4ca3-8f97-810c4c0e8a75 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Task: {'id': task-2087684, 'name': RemoveSnapshot_Task, 'duration_secs': 1.373722} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2173.807492] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-1b4c0c4c-9a01-4ca3-8f97-810c4c0e8a75 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: d6e40dbc-f20e-4164-b460-18de6ea72906] Deleted Snapshot of the VM instance {{(pid=63279) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 2173.807837] env[63279]: DEBUG nova.compute.manager [None req-1b4c0c4c-9a01-4ca3-8f97-810c4c0e8a75 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: d6e40dbc-f20e-4164-b460-18de6ea72906] Checking state {{(pid=63279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2173.808652] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7bb3a646-4ab4-4859-970f-0fb8b53cc582 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2173.980973] env[63279]: DEBUG oslo_vmware.api [None req-837a40d2-b2cf-457a-9bbe-1c1cdc807f4c tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Task: {'id': task-2087685, 'name': PowerOnVM_Task, 'duration_secs': 0.574556} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2173.981291] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-837a40d2-b2cf-457a-9bbe-1c1cdc807f4c tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: fcdd3076-2b53-4850-9730-2f877e2cabfd] Powered on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2173.981516] env[63279]: DEBUG nova.compute.manager [None req-837a40d2-b2cf-457a-9bbe-1c1cdc807f4c tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: fcdd3076-2b53-4850-9730-2f877e2cabfd] Checking state {{(pid=63279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2173.982597] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1e86024-d239-4c4e-837f-1af75133e3d8 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2174.130508] env[63279]: ERROR nova.scheduler.client.report [None req-c2c89b32-678a-48f7-a70f-bc8265b6ba5a tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] [req-718b4066-a9c6-46d3-a362-3a1460d2eb4c] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 0ba7c625-a0fc-4d3c-b804-196d00f00137. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-718b4066-a9c6-46d3-a362-3a1460d2eb4c"}]} [ 2174.160503] env[63279]: DEBUG nova.scheduler.client.report [None req-c2c89b32-678a-48f7-a70f-bc8265b6ba5a tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Refreshing inventories for resource provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 2174.176568] env[63279]: DEBUG nova.scheduler.client.report [None req-c2c89b32-678a-48f7-a70f-bc8265b6ba5a tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Updating ProviderTree inventory for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 2174.178015] env[63279]: DEBUG nova.compute.provider_tree [None req-c2c89b32-678a-48f7-a70f-bc8265b6ba5a tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Updating inventory in ProviderTree for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2174.191630] env[63279]: DEBUG nova.scheduler.client.report [None req-c2c89b32-678a-48f7-a70f-bc8265b6ba5a tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Refreshing aggregate associations for resource provider 0ba7c625-a0fc-4d3c-b804-196d00f00137, aggregates: None {{(pid=63279) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 2174.214819] env[63279]: DEBUG nova.scheduler.client.report [None req-c2c89b32-678a-48f7-a70f-bc8265b6ba5a tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Refreshing trait associations for resource provider 0ba7c625-a0fc-4d3c-b804-196d00f00137, traits: COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK {{(pid=63279) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 2174.227341] env[63279]: DEBUG nova.compute.manager [None req-8d18390d-60e2-4c12-b76a-90c1efe64631 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: 736ab3ac-b9e0-4f9e-885b-765ca7a92ed0] Stashing vm_state: active {{(pid=63279) _prep_resize /opt/stack/nova/nova/compute/manager.py:6136}} [ 2174.265784] env[63279]: DEBUG 
oslo_vmware.api [None req-b5627f37-2a24-479b-99ea-e01b4f65ca3e tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Task: {'id': task-2087686, 'name': PowerOffVM_Task, 'duration_secs': 0.254531} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2174.268980] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-b5627f37-2a24-479b-99ea-e01b4f65ca3e tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] [instance: 4acfb474-d861-467a-983c-0dd5641e66f3] Powered off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2174.268980] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-b5627f37-2a24-479b-99ea-e01b4f65ca3e tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] [instance: 4acfb474-d861-467a-983c-0dd5641e66f3] Unregistering the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2174.269490] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-057e3428-1381-446b-9df5-485c696cdf25 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2174.280119] env[63279]: DEBUG nova.network.neutron [-] [instance: c8b42e3b-b841-4b79-a4f3-ef62577d4902] Updating instance_info_cache with network_info: [] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2174.326196] env[63279]: INFO nova.compute.manager [None req-1b4c0c4c-9a01-4ca3-8f97-810c4c0e8a75 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: d6e40dbc-f20e-4164-b460-18de6ea72906] Shelve offloading [ 2174.402585] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-b5627f37-2a24-479b-99ea-e01b4f65ca3e tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] [instance: 4acfb474-d861-467a-983c-0dd5641e66f3] Unregistered the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2174.402814] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-b5627f37-2a24-479b-99ea-e01b4f65ca3e tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] [instance: 4acfb474-d861-467a-983c-0dd5641e66f3] Deleting contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2174.403010] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-b5627f37-2a24-479b-99ea-e01b4f65ca3e tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Deleting the datastore file [datastore1] 4acfb474-d861-467a-983c-0dd5641e66f3 {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2174.403280] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-fb647681-8927-40dd-b52e-7f061c7826b6 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2174.409990] env[63279]: DEBUG oslo_vmware.api [None req-b5627f37-2a24-479b-99ea-e01b4f65ca3e tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Waiting for the task: (returnval){ [ 2174.409990] env[63279]: value = "task-2087688" [ 2174.409990] env[63279]: _type = "Task" [ 2174.409990] env[63279]: } to complete. 
{{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2174.420693] env[63279]: DEBUG oslo_vmware.api [None req-b5627f37-2a24-479b-99ea-e01b4f65ca3e tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Task: {'id': task-2087688, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2174.546171] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0259d1f0-3da9-4dfc-8a8c-f050d2e3424c {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2174.554608] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6dca2939-c4ce-4479-86a9-0c8ec9156e09 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2174.591615] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d59d0b1a-0fe2-445a-8494-4686374aa3f6 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2174.601411] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd3a2b4c-c1ef-401e-8b2b-dd46f62b01e2 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2174.616682] env[63279]: DEBUG nova.compute.provider_tree [None req-c2c89b32-678a-48f7-a70f-bc8265b6ba5a tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Updating inventory in ProviderTree for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2174.624943] env[63279]: DEBUG oslo_concurrency.lockutils [None req-31ef8cf6-74f0-48ac-b617-703694abb219 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Acquiring lock "2f5e22f6-ba70-4848-965b-eb1553115323" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2174.625230] env[63279]: DEBUG oslo_concurrency.lockutils [None req-31ef8cf6-74f0-48ac-b617-703694abb219 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Lock "2f5e22f6-ba70-4848-965b-eb1553115323" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2174.625447] env[63279]: DEBUG oslo_concurrency.lockutils [None req-31ef8cf6-74f0-48ac-b617-703694abb219 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Acquiring lock "2f5e22f6-ba70-4848-965b-eb1553115323-events" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2174.625643] env[63279]: DEBUG oslo_concurrency.lockutils [None req-31ef8cf6-74f0-48ac-b617-703694abb219 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Lock "2f5e22f6-ba70-4848-965b-eb1553115323-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2174.625820] env[63279]: DEBUG oslo_concurrency.lockutils [None req-31ef8cf6-74f0-48ac-b617-703694abb219 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Lock "2f5e22f6-ba70-4848-965b-eb1553115323-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2174.628227] env[63279]: INFO nova.compute.manager [None req-31ef8cf6-74f0-48ac-b617-703694abb219 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] [instance: 2f5e22f6-ba70-4848-965b-eb1553115323] Terminating instance [ 2174.720581] env[63279]: DEBUG nova.compute.manager [req-955d6031-7f89-4152-89f1-ab3ef64c939d req-22a0c564-1a7e-4fd6-adaa-22df0d050ea4 service nova] [instance: eca98392-98be-405b-b799-463ef9ee3dc8] Received event network-vif-deleted-753324ac-cff9-4577-92da-27cf8cceab06 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2174.720799] env[63279]: INFO nova.compute.manager [req-955d6031-7f89-4152-89f1-ab3ef64c939d req-22a0c564-1a7e-4fd6-adaa-22df0d050ea4 service nova] [instance: eca98392-98be-405b-b799-463ef9ee3dc8] Neutron deleted interface 753324ac-cff9-4577-92da-27cf8cceab06; detaching it from the instance and deleting it from the info cache [ 2174.721127] env[63279]: DEBUG nova.network.neutron [req-955d6031-7f89-4152-89f1-ab3ef64c939d req-22a0c564-1a7e-4fd6-adaa-22df0d050ea4 service nova] [instance: eca98392-98be-405b-b799-463ef9ee3dc8] Updating instance_info_cache with network_info: [{"id": "d609f907-0c7e-4293-8b21-721e712e8dc2", "address": "fa:16:3e:0e:eb:3d", "network": {"id": "e0e614b7-de4b-485e-8824-582faae8febd", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1102025073-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.184", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b5b21bc5072e4945a19a782dd9561709", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "41278529-8bd2-44a1-97c8-03967faa3ff7", "external-id": "nsx-vlan-transportzone-749", "segmentation_id": 749, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd609f907-0c", "ovs_interfaceid": "d609f907-0c7e-4293-8b21-721e712e8dc2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": 
false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2174.751272] env[63279]: DEBUG oslo_concurrency.lockutils [None req-8d18390d-60e2-4c12-b76a-90c1efe64631 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2174.783085] env[63279]: INFO nova.compute.manager [-] [instance: c8b42e3b-b841-4b79-a4f3-ef62577d4902] Took 2.34 seconds to deallocate network for instance. [ 2174.831802] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-1b4c0c4c-9a01-4ca3-8f97-810c4c0e8a75 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: d6e40dbc-f20e-4164-b460-18de6ea72906] Powering off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2174.832341] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9347bb9d-9c92-490e-b36b-d58f5d283ddb {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2174.841442] env[63279]: DEBUG oslo_vmware.api [None req-1b4c0c4c-9a01-4ca3-8f97-810c4c0e8a75 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Waiting for the task: (returnval){ [ 2174.841442] env[63279]: value = "task-2087689" [ 2174.841442] env[63279]: _type = "Task" [ 2174.841442] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2174.852707] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-1b4c0c4c-9a01-4ca3-8f97-810c4c0e8a75 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: d6e40dbc-f20e-4164-b460-18de6ea72906] VM already powered off {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 2174.852707] env[63279]: DEBUG nova.compute.manager [None req-1b4c0c4c-9a01-4ca3-8f97-810c4c0e8a75 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: d6e40dbc-f20e-4164-b460-18de6ea72906] Checking state {{(pid=63279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2174.854787] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-574c861f-5f27-4871-b038-4d6ba92f3768 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2174.865071] env[63279]: DEBUG oslo_concurrency.lockutils [None req-1b4c0c4c-9a01-4ca3-8f97-810c4c0e8a75 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Acquiring lock "refresh_cache-d6e40dbc-f20e-4164-b460-18de6ea72906" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2174.865071] env[63279]: DEBUG oslo_concurrency.lockutils [None req-1b4c0c4c-9a01-4ca3-8f97-810c4c0e8a75 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Acquired lock "refresh_cache-d6e40dbc-f20e-4164-b460-18de6ea72906" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 
2174.865071] env[63279]: DEBUG nova.network.neutron [None req-1b4c0c4c-9a01-4ca3-8f97-810c4c0e8a75 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: d6e40dbc-f20e-4164-b460-18de6ea72906] Building network info cache for instance {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2174.920676] env[63279]: DEBUG oslo_vmware.api [None req-b5627f37-2a24-479b-99ea-e01b4f65ca3e tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Task: {'id': task-2087688, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.383777} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2174.920936] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-b5627f37-2a24-479b-99ea-e01b4f65ca3e tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Deleted the datastore file {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2174.921140] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-b5627f37-2a24-479b-99ea-e01b4f65ca3e tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] [instance: 4acfb474-d861-467a-983c-0dd5641e66f3] Deleted contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2174.921411] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-b5627f37-2a24-479b-99ea-e01b4f65ca3e tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] [instance: 4acfb474-d861-467a-983c-0dd5641e66f3] Instance destroyed {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2174.921487] env[63279]: INFO nova.compute.manager [None req-b5627f37-2a24-479b-99ea-e01b4f65ca3e tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] [instance: 4acfb474-d861-467a-983c-0dd5641e66f3] Took 1.19 seconds to destroy the instance on the hypervisor. [ 2174.921729] env[63279]: DEBUG oslo.service.loopingcall [None req-b5627f37-2a24-479b-99ea-e01b4f65ca3e tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2174.921942] env[63279]: DEBUG nova.compute.manager [-] [instance: 4acfb474-d861-467a-983c-0dd5641e66f3] Deallocating network for instance {{(pid=63279) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2174.922049] env[63279]: DEBUG nova.network.neutron [-] [instance: 4acfb474-d861-467a-983c-0dd5641e66f3] deallocate_for_instance() {{(pid=63279) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2175.132998] env[63279]: DEBUG nova.compute.manager [None req-31ef8cf6-74f0-48ac-b617-703694abb219 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] [instance: 2f5e22f6-ba70-4848-965b-eb1553115323] Start destroying the instance on the hypervisor. 
{{(pid=63279) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2175.133158] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-31ef8cf6-74f0-48ac-b617-703694abb219 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] [instance: 2f5e22f6-ba70-4848-965b-eb1553115323] Destroying instance {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2175.134083] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4de48d40-d42a-4c67-8a36-c48956f68b91 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2175.150868] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-31ef8cf6-74f0-48ac-b617-703694abb219 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] [instance: 2f5e22f6-ba70-4848-965b-eb1553115323] Powering off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2175.152753] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2e886ed7-2121-44d9-9be9-49090553b724 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2175.161423] env[63279]: DEBUG oslo_vmware.api [None req-31ef8cf6-74f0-48ac-b617-703694abb219 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Waiting for the task: (returnval){ [ 2175.161423] env[63279]: value = "task-2087690" [ 2175.161423] env[63279]: _type = "Task" [ 2175.161423] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2175.166754] env[63279]: DEBUG nova.scheduler.client.report [None req-c2c89b32-678a-48f7-a70f-bc8265b6ba5a tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Updated inventory for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 with generation 114 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 2175.166936] env[63279]: DEBUG nova.compute.provider_tree [None req-c2c89b32-678a-48f7-a70f-bc8265b6ba5a tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Updating resource provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 generation from 114 to 115 during operation: update_inventory {{(pid=63279) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 2175.167060] env[63279]: DEBUG nova.compute.provider_tree [None req-c2c89b32-678a-48f7-a70f-bc8265b6ba5a tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Updating inventory in ProviderTree for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 
'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2175.181205] env[63279]: DEBUG oslo_vmware.api [None req-31ef8cf6-74f0-48ac-b617-703694abb219 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Task: {'id': task-2087690, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2175.225482] env[63279]: DEBUG oslo_concurrency.lockutils [req-955d6031-7f89-4152-89f1-ab3ef64c939d req-22a0c564-1a7e-4fd6-adaa-22df0d050ea4 service nova] Acquiring lock "eca98392-98be-405b-b799-463ef9ee3dc8" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2175.225837] env[63279]: DEBUG oslo_concurrency.lockutils [req-955d6031-7f89-4152-89f1-ab3ef64c939d req-22a0c564-1a7e-4fd6-adaa-22df0d050ea4 service nova] Acquired lock "eca98392-98be-405b-b799-463ef9ee3dc8" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2175.227898] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43668895-e87f-4b92-ab4b-308b3d068198 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2175.252850] env[63279]: DEBUG oslo_concurrency.lockutils [req-955d6031-7f89-4152-89f1-ab3ef64c939d req-22a0c564-1a7e-4fd6-adaa-22df0d050ea4 service nova] Releasing lock "eca98392-98be-405b-b799-463ef9ee3dc8" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2175.253207] env[63279]: WARNING nova.compute.manager [req-955d6031-7f89-4152-89f1-ab3ef64c939d req-22a0c564-1a7e-4fd6-adaa-22df0d050ea4 service nova] [instance: eca98392-98be-405b-b799-463ef9ee3dc8] Detach interface failed, port_id=753324ac-cff9-4577-92da-27cf8cceab06, reason: No device with interface-id 753324ac-cff9-4577-92da-27cf8cceab06 exists on VM: nova.exception.NotFound: No device with interface-id 753324ac-cff9-4577-92da-27cf8cceab06 exists on VM [ 2175.290928] env[63279]: DEBUG oslo_concurrency.lockutils [None req-1cd07031-a029-4d28-82f1-ec2398e9fd52 tempest-ServersTestJSON-1697234190 tempest-ServersTestJSON-1697234190-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2175.672360] env[63279]: DEBUG oslo_concurrency.lockutils [None req-c2c89b32-678a-48f7-a70f-bc8265b6ba5a tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 4.427s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2175.672768] env[63279]: DEBUG nova.compute.manager [None req-c2c89b32-678a-48f7-a70f-bc8265b6ba5a tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] [instance: 2fba60ba-7ef0-4e61-9f7d-147cad8ab7ed] Start building networks asynchronously for instance. 
{{(pid=63279) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 2175.676427] env[63279]: DEBUG oslo_vmware.api [None req-31ef8cf6-74f0-48ac-b617-703694abb219 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Task: {'id': task-2087690, 'name': PowerOffVM_Task, 'duration_secs': 0.36555} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2175.676788] env[63279]: DEBUG oslo_concurrency.lockutils [None req-8d18390d-60e2-4c12-b76a-90c1efe64631 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 0.927s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2175.678457] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-31ef8cf6-74f0-48ac-b617-703694abb219 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] [instance: 2f5e22f6-ba70-4848-965b-eb1553115323] Powered off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2175.678857] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-31ef8cf6-74f0-48ac-b617-703694abb219 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] [instance: 2f5e22f6-ba70-4848-965b-eb1553115323] Unregistering the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2175.678985] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d65f2cf0-0bab-4a3d-9e31-af49b4089b86 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2175.685456] env[63279]: DEBUG oslo_concurrency.lockutils [None req-1a1560e9-3891-4b82-b4ac-f78b55448c2c tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Acquiring lock "refresh_cache-eca98392-98be-405b-b799-463ef9ee3dc8" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2175.685749] env[63279]: DEBUG oslo_concurrency.lockutils [None req-1a1560e9-3891-4b82-b4ac-f78b55448c2c tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Acquired lock "refresh_cache-eca98392-98be-405b-b799-463ef9ee3dc8" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2175.685946] env[63279]: DEBUG nova.network.neutron [None req-1a1560e9-3891-4b82-b4ac-f78b55448c2c tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] [instance: eca98392-98be-405b-b799-463ef9ee3dc8] Building network info cache for instance {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2175.719470] env[63279]: DEBUG oslo_concurrency.lockutils [None req-01bff562-4997-463d-b37e-eeeb7be900cc tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Acquiring lock "eca98392-98be-405b-b799-463ef9ee3dc8" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2175.719924] env[63279]: DEBUG 
oslo_concurrency.lockutils [None req-01bff562-4997-463d-b37e-eeeb7be900cc tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Lock "eca98392-98be-405b-b799-463ef9ee3dc8" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2175.720903] env[63279]: DEBUG oslo_concurrency.lockutils [None req-01bff562-4997-463d-b37e-eeeb7be900cc tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Acquiring lock "eca98392-98be-405b-b799-463ef9ee3dc8-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2175.720903] env[63279]: DEBUG oslo_concurrency.lockutils [None req-01bff562-4997-463d-b37e-eeeb7be900cc tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Lock "eca98392-98be-405b-b799-463ef9ee3dc8-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.001s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2175.721148] env[63279]: DEBUG oslo_concurrency.lockutils [None req-01bff562-4997-463d-b37e-eeeb7be900cc tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Lock "eca98392-98be-405b-b799-463ef9ee3dc8-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2175.725291] env[63279]: INFO nova.compute.manager [None req-01bff562-4997-463d-b37e-eeeb7be900cc tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] [instance: eca98392-98be-405b-b799-463ef9ee3dc8] Terminating instance [ 2175.756180] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-31ef8cf6-74f0-48ac-b617-703694abb219 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] [instance: 2f5e22f6-ba70-4848-965b-eb1553115323] Unregistered the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2175.756417] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-31ef8cf6-74f0-48ac-b617-703694abb219 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] [instance: 2f5e22f6-ba70-4848-965b-eb1553115323] Deleting contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2175.756604] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-31ef8cf6-74f0-48ac-b617-703694abb219 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Deleting the datastore file [datastore1] 2f5e22f6-ba70-4848-965b-eb1553115323 {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2175.756875] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-aea21457-d5db-4e1e-9a1c-03426a80bf2c {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2175.764410] env[63279]: DEBUG 
oslo_vmware.api [None req-31ef8cf6-74f0-48ac-b617-703694abb219 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Waiting for the task: (returnval){ [ 2175.764410] env[63279]: value = "task-2087692" [ 2175.764410] env[63279]: _type = "Task" [ 2175.764410] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2175.773370] env[63279]: DEBUG oslo_vmware.api [None req-31ef8cf6-74f0-48ac-b617-703694abb219 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Task: {'id': task-2087692, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2176.030108] env[63279]: DEBUG nova.compute.manager [req-4034f36c-991f-48b2-a409-af9dcf254618 req-e3bb5125-36a7-4ed4-ab57-2af8a54f1f48 service nova] [instance: 75f5b49f-14e7-4a8e-a0cb-b955edc13dd5] Received event network-changed-f5f17197-3d49-4ae2-a895-9bab23e07301 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2176.030406] env[63279]: DEBUG nova.compute.manager [req-4034f36c-991f-48b2-a409-af9dcf254618 req-e3bb5125-36a7-4ed4-ab57-2af8a54f1f48 service nova] [instance: 75f5b49f-14e7-4a8e-a0cb-b955edc13dd5] Refreshing instance network info cache due to event network-changed-f5f17197-3d49-4ae2-a895-9bab23e07301. {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11655}} [ 2176.030587] env[63279]: DEBUG oslo_concurrency.lockutils [req-4034f36c-991f-48b2-a409-af9dcf254618 req-e3bb5125-36a7-4ed4-ab57-2af8a54f1f48 service nova] Acquiring lock "refresh_cache-75f5b49f-14e7-4a8e-a0cb-b955edc13dd5" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2176.031392] env[63279]: DEBUG oslo_concurrency.lockutils [req-4034f36c-991f-48b2-a409-af9dcf254618 req-e3bb5125-36a7-4ed4-ab57-2af8a54f1f48 service nova] Acquired lock "refresh_cache-75f5b49f-14e7-4a8e-a0cb-b955edc13dd5" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2176.031597] env[63279]: DEBUG nova.network.neutron [req-4034f36c-991f-48b2-a409-af9dcf254618 req-e3bb5125-36a7-4ed4-ab57-2af8a54f1f48 service nova] [instance: 75f5b49f-14e7-4a8e-a0cb-b955edc13dd5] Refreshing network info cache for port f5f17197-3d49-4ae2-a895-9bab23e07301 {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2176.180380] env[63279]: DEBUG nova.compute.utils [None req-c2c89b32-678a-48f7-a70f-bc8265b6ba5a tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Using /dev/sd instead of None {{(pid=63279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2176.180380] env[63279]: DEBUG nova.compute.manager [None req-c2c89b32-678a-48f7-a70f-bc8265b6ba5a tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] [instance: 2fba60ba-7ef0-4e61-9f7d-147cad8ab7ed] Allocating IP information in the background. 
{{(pid=63279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 2176.180380] env[63279]: DEBUG nova.network.neutron [None req-c2c89b32-678a-48f7-a70f-bc8265b6ba5a tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] [instance: 2fba60ba-7ef0-4e61-9f7d-147cad8ab7ed] allocate_for_instance() {{(pid=63279) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2176.188030] env[63279]: INFO nova.compute.claims [None req-8d18390d-60e2-4c12-b76a-90c1efe64631 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: 736ab3ac-b9e0-4f9e-885b-765ca7a92ed0] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2176.232261] env[63279]: DEBUG nova.compute.manager [None req-01bff562-4997-463d-b37e-eeeb7be900cc tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] [instance: eca98392-98be-405b-b799-463ef9ee3dc8] Start destroying the instance on the hypervisor. {{(pid=63279) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2176.234300] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-01bff562-4997-463d-b37e-eeeb7be900cc tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] [instance: eca98392-98be-405b-b799-463ef9ee3dc8] Destroying instance {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2176.234547] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc6399a7-47f9-4112-84d1-b310a6871f15 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2176.246581] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-01bff562-4997-463d-b37e-eeeb7be900cc tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] [instance: eca98392-98be-405b-b799-463ef9ee3dc8] Powering off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2176.246581] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-de301ea2-6bf3-4223-b026-dd7f770b9ef1 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2176.257424] env[63279]: DEBUG oslo_vmware.api [None req-01bff562-4997-463d-b37e-eeeb7be900cc tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Waiting for the task: (returnval){ [ 2176.257424] env[63279]: value = "task-2087693" [ 2176.257424] env[63279]: _type = "Task" [ 2176.257424] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2176.269267] env[63279]: DEBUG oslo_vmware.api [None req-01bff562-4997-463d-b37e-eeeb7be900cc tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Task: {'id': task-2087693, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2176.278526] env[63279]: DEBUG oslo_vmware.api [None req-31ef8cf6-74f0-48ac-b617-703694abb219 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Task: {'id': task-2087692, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2176.280118] env[63279]: DEBUG nova.network.neutron [None req-1b4c0c4c-9a01-4ca3-8f97-810c4c0e8a75 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: d6e40dbc-f20e-4164-b460-18de6ea72906] Updating instance_info_cache with network_info: [{"id": "374ef31e-8b37-4d61-8e90-7ac4e4279a08", "address": "fa:16:3e:0b:c3:96", "network": {"id": "e2728625-1c28-407c-946b-97923d57b409", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1735124510-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a5a719a21fe248c49d0d0151d218866b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a9abd00f-2cea-40f8-9804-a56b6431192d", "external-id": "nsx-vlan-transportzone-639", "segmentation_id": 639, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap374ef31e-8b", "ovs_interfaceid": "374ef31e-8b37-4d61-8e90-7ac4e4279a08", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2176.377736] env[63279]: DEBUG nova.policy [None req-c2c89b32-678a-48f7-a70f-bc8265b6ba5a tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3c15683ebb604c9ba7d18e112476a8a5', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '1fc9b60ae304455097b8be9a276796fa', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63279) authorize /opt/stack/nova/nova/policy.py:192}} [ 2176.379605] env[63279]: DEBUG nova.network.neutron [-] [instance: 4acfb474-d861-467a-983c-0dd5641e66f3] Updating instance_info_cache with network_info: [] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2176.691268] env[63279]: DEBUG nova.network.neutron [req-4034f36c-991f-48b2-a409-af9dcf254618 req-e3bb5125-36a7-4ed4-ab57-2af8a54f1f48 service nova] [instance: 75f5b49f-14e7-4a8e-a0cb-b955edc13dd5] Instance cache missing network info. {{(pid=63279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2176.697470] env[63279]: DEBUG nova.compute.manager [None req-c2c89b32-678a-48f7-a70f-bc8265b6ba5a tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] [instance: 2fba60ba-7ef0-4e61-9f7d-147cad8ab7ed] Start building block device mappings for instance. 
{{(pid=63279) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 2176.704199] env[63279]: INFO nova.compute.resource_tracker [None req-8d18390d-60e2-4c12-b76a-90c1efe64631 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: 736ab3ac-b9e0-4f9e-885b-765ca7a92ed0] Updating resource usage from migration 90100f45-a8e0-4d84-adb1-bbecb55989d8 [ 2176.768197] env[63279]: DEBUG oslo_vmware.api [None req-01bff562-4997-463d-b37e-eeeb7be900cc tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Task: {'id': task-2087693, 'name': PowerOffVM_Task, 'duration_secs': 0.307991} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2176.768625] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-01bff562-4997-463d-b37e-eeeb7be900cc tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] [instance: eca98392-98be-405b-b799-463ef9ee3dc8] Powered off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2176.768828] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-01bff562-4997-463d-b37e-eeeb7be900cc tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] [instance: eca98392-98be-405b-b799-463ef9ee3dc8] Unregistering the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2176.772565] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-259d560c-999d-43af-818f-885c9b3f69a9 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2176.782560] env[63279]: DEBUG oslo_concurrency.lockutils [None req-1b4c0c4c-9a01-4ca3-8f97-810c4c0e8a75 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Releasing lock "refresh_cache-d6e40dbc-f20e-4164-b460-18de6ea72906" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2176.784466] env[63279]: DEBUG oslo_vmware.api [None req-31ef8cf6-74f0-48ac-b617-703694abb219 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Task: {'id': task-2087692, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.645527} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2176.784724] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-31ef8cf6-74f0-48ac-b617-703694abb219 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Deleted the datastore file {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2176.784915] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-31ef8cf6-74f0-48ac-b617-703694abb219 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] [instance: 2f5e22f6-ba70-4848-965b-eb1553115323] Deleted contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2176.785134] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-31ef8cf6-74f0-48ac-b617-703694abb219 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] [instance: 2f5e22f6-ba70-4848-965b-eb1553115323] Instance destroyed {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2176.785286] env[63279]: INFO nova.compute.manager [None req-31ef8cf6-74f0-48ac-b617-703694abb219 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] [instance: 2f5e22f6-ba70-4848-965b-eb1553115323] Took 1.65 seconds to destroy the instance on the hypervisor. [ 2176.785539] env[63279]: DEBUG oslo.service.loopingcall [None req-31ef8cf6-74f0-48ac-b617-703694abb219 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2176.786011] env[63279]: DEBUG nova.compute.manager [-] [instance: 2f5e22f6-ba70-4848-965b-eb1553115323] Deallocating network for instance {{(pid=63279) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2176.786118] env[63279]: DEBUG nova.network.neutron [-] [instance: 2f5e22f6-ba70-4848-965b-eb1553115323] deallocate_for_instance() {{(pid=63279) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2176.882451] env[63279]: INFO nova.compute.manager [-] [instance: 4acfb474-d861-467a-983c-0dd5641e66f3] Took 1.96 seconds to deallocate network for instance. 
[ 2176.929538] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-01bff562-4997-463d-b37e-eeeb7be900cc tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] [instance: eca98392-98be-405b-b799-463ef9ee3dc8] Unregistered the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2176.929978] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-01bff562-4997-463d-b37e-eeeb7be900cc tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] [instance: eca98392-98be-405b-b799-463ef9ee3dc8] Deleting contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2176.929978] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-01bff562-4997-463d-b37e-eeeb7be900cc tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Deleting the datastore file [datastore1] eca98392-98be-405b-b799-463ef9ee3dc8 {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2176.932812] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c6b6a4ac-dc9f-4c11-a3b5-2e809a115f80 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2176.940980] env[63279]: DEBUG oslo_vmware.api [None req-01bff562-4997-463d-b37e-eeeb7be900cc tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Waiting for the task: (returnval){ [ 2176.940980] env[63279]: value = "task-2087695" [ 2176.940980] env[63279]: _type = "Task" [ 2176.940980] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2176.949350] env[63279]: DEBUG oslo_vmware.api [None req-01bff562-4997-463d-b37e-eeeb7be900cc tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Task: {'id': task-2087695, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2177.017513] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-248e5599-8766-4673-ad01-ce88626d01f6 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2177.025730] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31882279-8dc1-4692-ba6d-6bc13d065d86 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2177.060201] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-487b2395-1bda-4888-bf07-bddd1d7f35e9 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2177.068117] env[63279]: DEBUG nova.network.neutron [req-4034f36c-991f-48b2-a409-af9dcf254618 req-e3bb5125-36a7-4ed4-ab57-2af8a54f1f48 service nova] [instance: 75f5b49f-14e7-4a8e-a0cb-b955edc13dd5] Updating instance_info_cache with network_info: [] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2177.070326] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c3240a2-2d52-4a46-9419-8f2b1e379c12 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2177.088334] env[63279]: DEBUG nova.compute.provider_tree [None req-8d18390d-60e2-4c12-b76a-90c1efe64631 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Inventory has not changed in ProviderTree for provider: 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2177.109956] env[63279]: DEBUG nova.network.neutron [None req-1a1560e9-3891-4b82-b4ac-f78b55448c2c tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] [instance: eca98392-98be-405b-b799-463ef9ee3dc8] Updating instance_info_cache with network_info: [{"id": "d609f907-0c7e-4293-8b21-721e712e8dc2", "address": "fa:16:3e:0e:eb:3d", "network": {"id": "e0e614b7-de4b-485e-8824-582faae8febd", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1102025073-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.184", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b5b21bc5072e4945a19a782dd9561709", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "41278529-8bd2-44a1-97c8-03967faa3ff7", "external-id": "nsx-vlan-transportzone-749", "segmentation_id": 749, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd609f907-0c", "ovs_interfaceid": "d609f907-0c7e-4293-8b21-721e712e8dc2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info 
/opt/stack/nova/nova/network/neutron.py:116}} [ 2177.113302] env[63279]: DEBUG nova.compute.manager [req-c50600c0-4866-4d4e-9cdc-a0ce52ea3f07 req-2fe29543-5db2-4bdc-9ef2-eebdee7a9578 service nova] [instance: 686a7ce2-2d07-411e-91d6-0471c55c3728] Received event network-changed-55a22eb2-68fd-4cc1-8372-6fed483f16d0 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2177.113496] env[63279]: DEBUG nova.compute.manager [req-c50600c0-4866-4d4e-9cdc-a0ce52ea3f07 req-2fe29543-5db2-4bdc-9ef2-eebdee7a9578 service nova] [instance: 686a7ce2-2d07-411e-91d6-0471c55c3728] Refreshing instance network info cache due to event network-changed-55a22eb2-68fd-4cc1-8372-6fed483f16d0. {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11655}} [ 2177.113718] env[63279]: DEBUG oslo_concurrency.lockutils [req-c50600c0-4866-4d4e-9cdc-a0ce52ea3f07 req-2fe29543-5db2-4bdc-9ef2-eebdee7a9578 service nova] Acquiring lock "refresh_cache-686a7ce2-2d07-411e-91d6-0471c55c3728" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2177.114220] env[63279]: DEBUG oslo_concurrency.lockutils [req-c50600c0-4866-4d4e-9cdc-a0ce52ea3f07 req-2fe29543-5db2-4bdc-9ef2-eebdee7a9578 service nova] Acquired lock "refresh_cache-686a7ce2-2d07-411e-91d6-0471c55c3728" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2177.114523] env[63279]: DEBUG nova.network.neutron [req-c50600c0-4866-4d4e-9cdc-a0ce52ea3f07 req-2fe29543-5db2-4bdc-9ef2-eebdee7a9578 service nova] [instance: 686a7ce2-2d07-411e-91d6-0471c55c3728] Refreshing network info cache for port 55a22eb2-68fd-4cc1-8372-6fed483f16d0 {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2177.367852] env[63279]: DEBUG nova.network.neutron [None req-082a3afc-69fa-402f-b3d2-e85c1a1a4788 tempest-ServersTestMultiNic-465204663 tempest-ServersTestMultiNic-465204663-project-member] [instance: 75f5b49f-14e7-4a8e-a0cb-b955edc13dd5] Successfully updated port: 2053aee1-c71d-4f3d-99d3-989823b04e65 {{(pid=63279) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2177.389887] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b5627f37-2a24-479b-99ea-e01b4f65ca3e tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2177.453129] env[63279]: DEBUG oslo_vmware.api [None req-01bff562-4997-463d-b37e-eeeb7be900cc tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Task: {'id': task-2087695, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2177.576714] env[63279]: DEBUG oslo_concurrency.lockutils [req-4034f36c-991f-48b2-a409-af9dcf254618 req-e3bb5125-36a7-4ed4-ab57-2af8a54f1f48 service nova] Releasing lock "refresh_cache-75f5b49f-14e7-4a8e-a0cb-b955edc13dd5" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2177.577458] env[63279]: DEBUG nova.compute.manager [req-4034f36c-991f-48b2-a409-af9dcf254618 req-e3bb5125-36a7-4ed4-ab57-2af8a54f1f48 service nova] [instance: c8b42e3b-b841-4b79-a4f3-ef62577d4902] Received event network-vif-deleted-2cc659c6-5797-4426-a92a-3924cc611395 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2177.593503] env[63279]: DEBUG nova.scheduler.client.report [None req-8d18390d-60e2-4c12-b76a-90c1efe64631 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Inventory has not changed for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2177.617032] env[63279]: DEBUG oslo_concurrency.lockutils [None req-1a1560e9-3891-4b82-b4ac-f78b55448c2c tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Releasing lock "refresh_cache-eca98392-98be-405b-b799-463ef9ee3dc8" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2177.709789] env[63279]: DEBUG nova.compute.manager [None req-c2c89b32-678a-48f7-a70f-bc8265b6ba5a tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] [instance: 2fba60ba-7ef0-4e61-9f7d-147cad8ab7ed] Start spawning the instance on the hypervisor. 
{{(pid=63279) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 2177.740219] env[63279]: DEBUG nova.virt.hardware [None req-c2c89b32-678a-48f7-a70f-bc8265b6ba5a tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-13T17:30:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-13T17:30:01Z,direct_url=,disk_format='vmdk',id=30887889-e45b-4f67-8b3c-16216e594a90,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a935e86eee0a4c38adfe0367d2097a61',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-13T17:30:02Z,virtual_size=,visibility=), allow threads: False {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2177.740608] env[63279]: DEBUG nova.virt.hardware [None req-c2c89b32-678a-48f7-a70f-bc8265b6ba5a tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Flavor limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2177.740671] env[63279]: DEBUG nova.virt.hardware [None req-c2c89b32-678a-48f7-a70f-bc8265b6ba5a tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Image limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2177.740851] env[63279]: DEBUG nova.virt.hardware [None req-c2c89b32-678a-48f7-a70f-bc8265b6ba5a tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Flavor pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2177.741033] env[63279]: DEBUG nova.virt.hardware [None req-c2c89b32-678a-48f7-a70f-bc8265b6ba5a tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Image pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2177.744982] env[63279]: DEBUG nova.virt.hardware [None req-c2c89b32-678a-48f7-a70f-bc8265b6ba5a tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2177.744982] env[63279]: DEBUG nova.virt.hardware [None req-c2c89b32-678a-48f7-a70f-bc8265b6ba5a tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 2177.744982] env[63279]: DEBUG nova.virt.hardware [None req-c2c89b32-678a-48f7-a70f-bc8265b6ba5a tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63279) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:475}} [ 2177.744982] env[63279]: DEBUG nova.virt.hardware [None req-c2c89b32-678a-48f7-a70f-bc8265b6ba5a tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Got 1 possible topologies {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2177.744982] env[63279]: DEBUG nova.virt.hardware [None req-c2c89b32-678a-48f7-a70f-bc8265b6ba5a tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2177.744982] env[63279]: DEBUG nova.virt.hardware [None req-c2c89b32-678a-48f7-a70f-bc8265b6ba5a tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2177.745875] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19eefed7-ff47-4c29-a2f7-3c29d58ab97e {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2177.756243] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f49228fc-5c84-4637-b59c-86ac8cabbbdf {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2177.900625] env[63279]: DEBUG nova.network.neutron [None req-c2c89b32-678a-48f7-a70f-bc8265b6ba5a tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] [instance: 2fba60ba-7ef0-4e61-9f7d-147cad8ab7ed] Successfully created port: 8f12bb0d-eec1-4c21-b319-372b37e319ca {{(pid=63279) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2177.952888] env[63279]: DEBUG oslo_vmware.api [None req-01bff562-4997-463d-b37e-eeeb7be900cc tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Task: {'id': task-2087695, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.543356} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2177.953233] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-01bff562-4997-463d-b37e-eeeb7be900cc tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Deleted the datastore file {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2177.953433] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-01bff562-4997-463d-b37e-eeeb7be900cc tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] [instance: eca98392-98be-405b-b799-463ef9ee3dc8] Deleted contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2177.953619] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-01bff562-4997-463d-b37e-eeeb7be900cc tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] [instance: eca98392-98be-405b-b799-463ef9ee3dc8] Instance destroyed {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2177.953800] env[63279]: INFO nova.compute.manager [None req-01bff562-4997-463d-b37e-eeeb7be900cc tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] [instance: eca98392-98be-405b-b799-463ef9ee3dc8] Took 1.72 seconds to destroy the instance on the hypervisor. [ 2177.954063] env[63279]: DEBUG oslo.service.loopingcall [None req-01bff562-4997-463d-b37e-eeeb7be900cc tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2177.954267] env[63279]: DEBUG nova.compute.manager [-] [instance: eca98392-98be-405b-b799-463ef9ee3dc8] Deallocating network for instance {{(pid=63279) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2177.954363] env[63279]: DEBUG nova.network.neutron [-] [instance: eca98392-98be-405b-b799-463ef9ee3dc8] deallocate_for_instance() {{(pid=63279) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2178.104452] env[63279]: DEBUG oslo_concurrency.lockutils [None req-8d18390d-60e2-4c12-b76a-90c1efe64631 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.426s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2178.104452] env[63279]: INFO nova.compute.manager [None req-8d18390d-60e2-4c12-b76a-90c1efe64631 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: 736ab3ac-b9e0-4f9e-885b-765ca7a92ed0] Migrating [ 2178.112476] env[63279]: DEBUG oslo_concurrency.lockutils [None req-1cd07031-a029-4d28-82f1-ec2398e9fd52 tempest-ServersTestJSON-1697234190 tempest-ServersTestJSON-1697234190-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 2.822s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2178.113254] env[63279]: DEBUG nova.objects.instance [None req-1cd07031-a029-4d28-82f1-ec2398e9fd52 tempest-ServersTestJSON-1697234190 tempest-ServersTestJSON-1697234190-project-member] Lazy-loading 'resources' on Instance uuid c8b42e3b-b841-4b79-a4f3-ef62577d4902 {{(pid=63279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2178.128342] env[63279]: DEBUG oslo_concurrency.lockutils [None req-1a1560e9-3891-4b82-b4ac-f78b55448c2c tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Lock "interface-eca98392-98be-405b-b799-463ef9ee3dc8-753324ac-cff9-4577-92da-27cf8cceab06" "released" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: held 11.420s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2178.245625] env[63279]: DEBUG nova.compute.manager [req-ecc7cfc6-a414-4deb-b288-8bb91f8befea req-9db4ebfd-7e4d-42c6-86bc-8a68201ce73a service nova] [instance: 75f5b49f-14e7-4a8e-a0cb-b955edc13dd5] Received event network-vif-plugged-2053aee1-c71d-4f3d-99d3-989823b04e65 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2178.245915] env[63279]: DEBUG oslo_concurrency.lockutils [req-ecc7cfc6-a414-4deb-b288-8bb91f8befea req-9db4ebfd-7e4d-42c6-86bc-8a68201ce73a service nova] Acquiring lock "75f5b49f-14e7-4a8e-a0cb-b955edc13dd5-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2178.246162] env[63279]: DEBUG oslo_concurrency.lockutils [req-ecc7cfc6-a414-4deb-b288-8bb91f8befea req-9db4ebfd-7e4d-42c6-86bc-8a68201ce73a service nova] Lock "75f5b49f-14e7-4a8e-a0cb-b955edc13dd5-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: 
waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2178.246522] env[63279]: DEBUG oslo_concurrency.lockutils [req-ecc7cfc6-a414-4deb-b288-8bb91f8befea req-9db4ebfd-7e4d-42c6-86bc-8a68201ce73a service nova] Lock "75f5b49f-14e7-4a8e-a0cb-b955edc13dd5-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2178.246752] env[63279]: DEBUG nova.compute.manager [req-ecc7cfc6-a414-4deb-b288-8bb91f8befea req-9db4ebfd-7e4d-42c6-86bc-8a68201ce73a service nova] [instance: 75f5b49f-14e7-4a8e-a0cb-b955edc13dd5] No waiting events found dispatching network-vif-plugged-2053aee1-c71d-4f3d-99d3-989823b04e65 {{(pid=63279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 2178.247495] env[63279]: WARNING nova.compute.manager [req-ecc7cfc6-a414-4deb-b288-8bb91f8befea req-9db4ebfd-7e4d-42c6-86bc-8a68201ce73a service nova] [instance: 75f5b49f-14e7-4a8e-a0cb-b955edc13dd5] Received unexpected event network-vif-plugged-2053aee1-c71d-4f3d-99d3-989823b04e65 for instance with vm_state building and task_state spawning. [ 2178.247495] env[63279]: DEBUG nova.compute.manager [req-ecc7cfc6-a414-4deb-b288-8bb91f8befea req-9db4ebfd-7e4d-42c6-86bc-8a68201ce73a service nova] [instance: 75f5b49f-14e7-4a8e-a0cb-b955edc13dd5] Received event network-changed-2053aee1-c71d-4f3d-99d3-989823b04e65 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2178.247495] env[63279]: DEBUG nova.compute.manager [req-ecc7cfc6-a414-4deb-b288-8bb91f8befea req-9db4ebfd-7e4d-42c6-86bc-8a68201ce73a service nova] [instance: 75f5b49f-14e7-4a8e-a0cb-b955edc13dd5] Refreshing instance network info cache due to event network-changed-2053aee1-c71d-4f3d-99d3-989823b04e65. 
{{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11655}} [ 2178.247899] env[63279]: DEBUG oslo_concurrency.lockutils [req-ecc7cfc6-a414-4deb-b288-8bb91f8befea req-9db4ebfd-7e4d-42c6-86bc-8a68201ce73a service nova] Acquiring lock "refresh_cache-75f5b49f-14e7-4a8e-a0cb-b955edc13dd5" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2178.247959] env[63279]: DEBUG oslo_concurrency.lockutils [req-ecc7cfc6-a414-4deb-b288-8bb91f8befea req-9db4ebfd-7e4d-42c6-86bc-8a68201ce73a service nova] Acquired lock "refresh_cache-75f5b49f-14e7-4a8e-a0cb-b955edc13dd5" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2178.248222] env[63279]: DEBUG nova.network.neutron [req-ecc7cfc6-a414-4deb-b288-8bb91f8befea req-9db4ebfd-7e4d-42c6-86bc-8a68201ce73a service nova] [instance: 75f5b49f-14e7-4a8e-a0cb-b955edc13dd5] Refreshing network info cache for port 2053aee1-c71d-4f3d-99d3-989823b04e65 {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2178.343649] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-1b4c0c4c-9a01-4ca3-8f97-810c4c0e8a75 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: d6e40dbc-f20e-4164-b460-18de6ea72906] Destroying instance {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2178.344762] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0ac11f2-ed04-4cad-9cf4-92abb2b64f12 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2178.357161] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-1b4c0c4c-9a01-4ca3-8f97-810c4c0e8a75 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: d6e40dbc-f20e-4164-b460-18de6ea72906] Unregistering the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2178.357435] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c282fe54-753d-42e7-9127-f4c0e99e6805 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2178.448898] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1405739c-259b-48ca-9be2-dbf0112e60fc {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2178.459747] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6c6da46-f9c3-4e21-bd83-61018d024e19 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2178.495028] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f05f8a0-686b-49fb-b9ff-67c24145c47c {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2178.502808] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6bdb0de-39b1-47e8-ae6f-745c84163fe3 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2178.516765] env[63279]: DEBUG nova.compute.provider_tree [None req-1cd07031-a029-4d28-82f1-ec2398e9fd52 
tempest-ServersTestJSON-1697234190 tempest-ServersTestJSON-1697234190-project-member] Inventory has not changed in ProviderTree for provider: 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2178.559501] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-1b4c0c4c-9a01-4ca3-8f97-810c4c0e8a75 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: d6e40dbc-f20e-4164-b460-18de6ea72906] Unregistered the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2178.560541] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-1b4c0c4c-9a01-4ca3-8f97-810c4c0e8a75 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: d6e40dbc-f20e-4164-b460-18de6ea72906] Deleting contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2178.560541] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-1b4c0c4c-9a01-4ca3-8f97-810c4c0e8a75 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Deleting the datastore file [datastore1] d6e40dbc-f20e-4164-b460-18de6ea72906 {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2178.560541] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3cf614ff-cbb6-41a9-8ac8-9239c9ce1201 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2178.567198] env[63279]: DEBUG oslo_vmware.api [None req-1b4c0c4c-9a01-4ca3-8f97-810c4c0e8a75 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Waiting for the task: (returnval){ [ 2178.567198] env[63279]: value = "task-2087697" [ 2178.567198] env[63279]: _type = "Task" [ 2178.567198] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2178.575774] env[63279]: DEBUG oslo_vmware.api [None req-1b4c0c4c-9a01-4ca3-8f97-810c4c0e8a75 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Task: {'id': task-2087697, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2178.631749] env[63279]: DEBUG oslo_concurrency.lockutils [None req-8d18390d-60e2-4c12-b76a-90c1efe64631 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Acquiring lock "refresh_cache-736ab3ac-b9e0-4f9e-885b-765ca7a92ed0" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2178.631749] env[63279]: DEBUG oslo_concurrency.lockutils [None req-8d18390d-60e2-4c12-b76a-90c1efe64631 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Acquired lock "refresh_cache-736ab3ac-b9e0-4f9e-885b-765ca7a92ed0" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2178.631908] env[63279]: DEBUG nova.network.neutron [None req-8d18390d-60e2-4c12-b76a-90c1efe64631 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: 736ab3ac-b9e0-4f9e-885b-765ca7a92ed0] Building network info cache for instance {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2178.694021] env[63279]: DEBUG nova.network.neutron [req-c50600c0-4866-4d4e-9cdc-a0ce52ea3f07 req-2fe29543-5db2-4bdc-9ef2-eebdee7a9578 service nova] [instance: 686a7ce2-2d07-411e-91d6-0471c55c3728] Updated VIF entry in instance network info cache for port 55a22eb2-68fd-4cc1-8372-6fed483f16d0. {{(pid=63279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2178.694505] env[63279]: DEBUG nova.network.neutron [req-c50600c0-4866-4d4e-9cdc-a0ce52ea3f07 req-2fe29543-5db2-4bdc-9ef2-eebdee7a9578 service nova] [instance: 686a7ce2-2d07-411e-91d6-0471c55c3728] Updating instance_info_cache with network_info: [{"id": "55a22eb2-68fd-4cc1-8372-6fed483f16d0", "address": "fa:16:3e:6d:af:2f", "network": {"id": "1237aa69-f92f-4996-893d-4007ba590d1d", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-544783943-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.237", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "996f8d6e14a14ac39f207eced547ef33", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d88b750a-0e7d-4f16-8bd5-8e6d5743b720", "external-id": "nsx-vlan-transportzone-715", "segmentation_id": 715, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap55a22eb2-68", "ovs_interfaceid": "55a22eb2-68fd-4cc1-8372-6fed483f16d0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2178.822379] env[63279]: DEBUG nova.network.neutron [-] [instance: 2f5e22f6-ba70-4848-965b-eb1553115323] Updating instance_info_cache with network_info: [] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2178.869850] env[63279]: DEBUG nova.network.neutron 
[req-ecc7cfc6-a414-4deb-b288-8bb91f8befea req-9db4ebfd-7e4d-42c6-86bc-8a68201ce73a service nova] [instance: 75f5b49f-14e7-4a8e-a0cb-b955edc13dd5] Instance cache missing network info. {{(pid=63279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2179.021012] env[63279]: DEBUG nova.scheduler.client.report [None req-1cd07031-a029-4d28-82f1-ec2398e9fd52 tempest-ServersTestJSON-1697234190 tempest-ServersTestJSON-1697234190-project-member] Inventory has not changed for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2179.077565] env[63279]: DEBUG oslo_vmware.api [None req-1b4c0c4c-9a01-4ca3-8f97-810c4c0e8a75 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Task: {'id': task-2087697, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.13356} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2179.077918] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-1b4c0c4c-9a01-4ca3-8f97-810c4c0e8a75 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Deleted the datastore file {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2179.078219] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-1b4c0c4c-9a01-4ca3-8f97-810c4c0e8a75 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: d6e40dbc-f20e-4164-b460-18de6ea72906] Deleted contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2179.078486] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-1b4c0c4c-9a01-4ca3-8f97-810c4c0e8a75 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: d6e40dbc-f20e-4164-b460-18de6ea72906] Instance destroyed {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2179.111526] env[63279]: INFO nova.scheduler.client.report [None req-1b4c0c4c-9a01-4ca3-8f97-810c4c0e8a75 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Deleted allocations for instance d6e40dbc-f20e-4164-b460-18de6ea72906 [ 2179.143698] env[63279]: DEBUG nova.network.neutron [req-ecc7cfc6-a414-4deb-b288-8bb91f8befea req-9db4ebfd-7e4d-42c6-86bc-8a68201ce73a service nova] [instance: 75f5b49f-14e7-4a8e-a0cb-b955edc13dd5] Updating instance_info_cache with network_info: [] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2179.197808] env[63279]: DEBUG oslo_concurrency.lockutils [req-c50600c0-4866-4d4e-9cdc-a0ce52ea3f07 req-2fe29543-5db2-4bdc-9ef2-eebdee7a9578 service nova] Releasing lock "refresh_cache-686a7ce2-2d07-411e-91d6-0471c55c3728" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2179.198098] env[63279]: DEBUG nova.compute.manager [req-c50600c0-4866-4d4e-9cdc-a0ce52ea3f07 req-2fe29543-5db2-4bdc-9ef2-eebdee7a9578 
service nova] [instance: 4acfb474-d861-467a-983c-0dd5641e66f3] Received event network-vif-deleted-87fce8ba-c254-4a0f-b9c6-e1db73a6ae03 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2179.311841] env[63279]: DEBUG oslo_concurrency.lockutils [None req-208396b7-55ea-4701-90ef-c6d3ea22cae9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Acquiring lock "a15141bc-424d-48ca-a6d5-c859a3639a0b" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2179.311841] env[63279]: DEBUG oslo_concurrency.lockutils [None req-208396b7-55ea-4701-90ef-c6d3ea22cae9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Lock "a15141bc-424d-48ca-a6d5-c859a3639a0b" acquired by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2179.311841] env[63279]: INFO nova.compute.manager [None req-208396b7-55ea-4701-90ef-c6d3ea22cae9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] [instance: a15141bc-424d-48ca-a6d5-c859a3639a0b] Shelving [ 2179.329333] env[63279]: INFO nova.compute.manager [-] [instance: 2f5e22f6-ba70-4848-965b-eb1553115323] Took 2.54 seconds to deallocate network for instance. [ 2179.419689] env[63279]: DEBUG nova.compute.manager [req-c1872d09-60a6-4d53-80ef-bbc2851feff9 req-8df039dc-43f5-499f-a8d5-8588c8031741 service nova] [instance: d6e40dbc-f20e-4164-b460-18de6ea72906] Received event network-vif-unplugged-374ef31e-8b37-4d61-8e90-7ac4e4279a08 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2179.419689] env[63279]: DEBUG oslo_concurrency.lockutils [req-c1872d09-60a6-4d53-80ef-bbc2851feff9 req-8df039dc-43f5-499f-a8d5-8588c8031741 service nova] Acquiring lock "d6e40dbc-f20e-4164-b460-18de6ea72906-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2179.419689] env[63279]: DEBUG oslo_concurrency.lockutils [req-c1872d09-60a6-4d53-80ef-bbc2851feff9 req-8df039dc-43f5-499f-a8d5-8588c8031741 service nova] Lock "d6e40dbc-f20e-4164-b460-18de6ea72906-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2179.419689] env[63279]: DEBUG oslo_concurrency.lockutils [req-c1872d09-60a6-4d53-80ef-bbc2851feff9 req-8df039dc-43f5-499f-a8d5-8588c8031741 service nova] Lock "d6e40dbc-f20e-4164-b460-18de6ea72906-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2179.419689] env[63279]: DEBUG nova.compute.manager [req-c1872d09-60a6-4d53-80ef-bbc2851feff9 req-8df039dc-43f5-499f-a8d5-8588c8031741 service nova] [instance: d6e40dbc-f20e-4164-b460-18de6ea72906] No waiting events found dispatching network-vif-unplugged-374ef31e-8b37-4d61-8e90-7ac4e4279a08 {{(pid=63279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 2179.419689] env[63279]: 
WARNING nova.compute.manager [req-c1872d09-60a6-4d53-80ef-bbc2851feff9 req-8df039dc-43f5-499f-a8d5-8588c8031741 service nova] [instance: d6e40dbc-f20e-4164-b460-18de6ea72906] Received unexpected event network-vif-unplugged-374ef31e-8b37-4d61-8e90-7ac4e4279a08 for instance with vm_state shelved_offloaded and task_state None. [ 2179.420078] env[63279]: DEBUG nova.compute.manager [req-c1872d09-60a6-4d53-80ef-bbc2851feff9 req-8df039dc-43f5-499f-a8d5-8588c8031741 service nova] [instance: 2f5e22f6-ba70-4848-965b-eb1553115323] Received event network-vif-deleted-8383ed07-21e7-43e0-82a2-0afc179ca995 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2179.420078] env[63279]: DEBUG nova.compute.manager [req-c1872d09-60a6-4d53-80ef-bbc2851feff9 req-8df039dc-43f5-499f-a8d5-8588c8031741 service nova] [instance: d6e40dbc-f20e-4164-b460-18de6ea72906] Received event network-changed-374ef31e-8b37-4d61-8e90-7ac4e4279a08 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2179.420254] env[63279]: DEBUG nova.compute.manager [req-c1872d09-60a6-4d53-80ef-bbc2851feff9 req-8df039dc-43f5-499f-a8d5-8588c8031741 service nova] [instance: d6e40dbc-f20e-4164-b460-18de6ea72906] Refreshing instance network info cache due to event network-changed-374ef31e-8b37-4d61-8e90-7ac4e4279a08. {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11655}} [ 2179.420423] env[63279]: DEBUG oslo_concurrency.lockutils [req-c1872d09-60a6-4d53-80ef-bbc2851feff9 req-8df039dc-43f5-499f-a8d5-8588c8031741 service nova] Acquiring lock "refresh_cache-d6e40dbc-f20e-4164-b460-18de6ea72906" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2179.420669] env[63279]: DEBUG oslo_concurrency.lockutils [req-c1872d09-60a6-4d53-80ef-bbc2851feff9 req-8df039dc-43f5-499f-a8d5-8588c8031741 service nova] Acquired lock "refresh_cache-d6e40dbc-f20e-4164-b460-18de6ea72906" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2179.420669] env[63279]: DEBUG nova.network.neutron [req-c1872d09-60a6-4d53-80ef-bbc2851feff9 req-8df039dc-43f5-499f-a8d5-8588c8031741 service nova] [instance: d6e40dbc-f20e-4164-b460-18de6ea72906] Refreshing network info cache for port 374ef31e-8b37-4d61-8e90-7ac4e4279a08 {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2179.526468] env[63279]: DEBUG oslo_concurrency.lockutils [None req-1cd07031-a029-4d28-82f1-ec2398e9fd52 tempest-ServersTestJSON-1697234190 tempest-ServersTestJSON-1697234190-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.414s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2179.528667] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b5627f37-2a24-479b-99ea-e01b4f65ca3e tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 2.139s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2179.528901] env[63279]: DEBUG nova.objects.instance [None req-b5627f37-2a24-479b-99ea-e01b4f65ca3e tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Lazy-loading 'resources' on Instance uuid 4acfb474-d861-467a-983c-0dd5641e66f3 {{(pid=63279) 
obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2179.558097] env[63279]: INFO nova.scheduler.client.report [None req-1cd07031-a029-4d28-82f1-ec2398e9fd52 tempest-ServersTestJSON-1697234190 tempest-ServersTestJSON-1697234190-project-member] Deleted allocations for instance c8b42e3b-b841-4b79-a4f3-ef62577d4902 [ 2179.616925] env[63279]: DEBUG oslo_concurrency.lockutils [None req-1b4c0c4c-9a01-4ca3-8f97-810c4c0e8a75 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2179.649927] env[63279]: DEBUG oslo_concurrency.lockutils [req-ecc7cfc6-a414-4deb-b288-8bb91f8befea req-9db4ebfd-7e4d-42c6-86bc-8a68201ce73a service nova] Releasing lock "refresh_cache-75f5b49f-14e7-4a8e-a0cb-b955edc13dd5" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2179.713469] env[63279]: DEBUG nova.network.neutron [None req-8d18390d-60e2-4c12-b76a-90c1efe64631 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: 736ab3ac-b9e0-4f9e-885b-765ca7a92ed0] Updating instance_info_cache with network_info: [{"id": "5a61eb6f-d235-4f00-a65b-76f20c2c3d8d", "address": "fa:16:3e:b2:2a:f4", "network": {"id": "d673eabe-0173-4c69-9163-88f4d673ca51", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1798523777-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7c98de1240f14b058e58f6a707096ef1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a555680e-4721-4509-97e4-ced9dc17c13e", "external-id": "nsx-vlan-transportzone-4", "segmentation_id": 4, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5a61eb6f-d2", "ovs_interfaceid": "5a61eb6f-d235-4f00-a65b-76f20c2c3d8d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2179.848382] env[63279]: DEBUG oslo_concurrency.lockutils [None req-31ef8cf6-74f0-48ac-b617-703694abb219 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2179.872246] env[63279]: DEBUG nova.network.neutron [-] [instance: eca98392-98be-405b-b799-463ef9ee3dc8] Updating instance_info_cache with network_info: [] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2180.070286] env[63279]: DEBUG oslo_concurrency.lockutils [None req-1cd07031-a029-4d28-82f1-ec2398e9fd52 tempest-ServersTestJSON-1697234190 tempest-ServersTestJSON-1697234190-project-member] Lock "c8b42e3b-b841-4b79-a4f3-ef62577d4902" 
"released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 16.463s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2180.217866] env[63279]: DEBUG oslo_concurrency.lockutils [None req-8d18390d-60e2-4c12-b76a-90c1efe64631 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Releasing lock "refresh_cache-736ab3ac-b9e0-4f9e-885b-765ca7a92ed0" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2180.301713] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ea1901c-9aa7-4234-a35b-aad0eea4e09c {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2180.314249] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6bba2126-dd7f-46bf-a60d-e81db68e2225 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2180.354142] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-208396b7-55ea-4701-90ef-c6d3ea22cae9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] [instance: a15141bc-424d-48ca-a6d5-c859a3639a0b] Powering off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2180.354834] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-89aa92fc-818c-40db-90b8-c07c1c89369b {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2180.357276] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1dc2966d-e886-40dd-81f9-2d4b7591f1fd {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2180.366491] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d5622a4-8c7f-45a4-a421-da42257d8dea {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2180.373368] env[63279]: DEBUG oslo_vmware.api [None req-208396b7-55ea-4701-90ef-c6d3ea22cae9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Waiting for the task: (returnval){ [ 2180.373368] env[63279]: value = "task-2087698" [ 2180.373368] env[63279]: _type = "Task" [ 2180.373368] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2180.386777] env[63279]: DEBUG nova.network.neutron [req-c1872d09-60a6-4d53-80ef-bbc2851feff9 req-8df039dc-43f5-499f-a8d5-8588c8031741 service nova] [instance: d6e40dbc-f20e-4164-b460-18de6ea72906] Updated VIF entry in instance network info cache for port 374ef31e-8b37-4d61-8e90-7ac4e4279a08. 
{{(pid=63279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2180.386777] env[63279]: DEBUG nova.network.neutron [req-c1872d09-60a6-4d53-80ef-bbc2851feff9 req-8df039dc-43f5-499f-a8d5-8588c8031741 service nova] [instance: d6e40dbc-f20e-4164-b460-18de6ea72906] Updating instance_info_cache with network_info: [{"id": "374ef31e-8b37-4d61-8e90-7ac4e4279a08", "address": "fa:16:3e:0b:c3:96", "network": {"id": "e2728625-1c28-407c-946b-97923d57b409", "bridge": null, "label": "tempest-DeleteServersTestJSON-1735124510-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a5a719a21fe248c49d0d0151d218866b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tap374ef31e-8b", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2180.388309] env[63279]: INFO nova.compute.manager [-] [instance: eca98392-98be-405b-b799-463ef9ee3dc8] Took 2.43 seconds to deallocate network for instance. [ 2180.390280] env[63279]: DEBUG nova.compute.provider_tree [None req-b5627f37-2a24-479b-99ea-e01b4f65ca3e tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Inventory has not changed in ProviderTree for provider: 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2180.406193] env[63279]: DEBUG oslo_vmware.api [None req-208396b7-55ea-4701-90ef-c6d3ea22cae9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Task: {'id': task-2087698, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2180.440510] env[63279]: DEBUG oslo_concurrency.lockutils [None req-93deaab9-b789-400a-b6f9-ed2ce702cd53 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Acquiring lock "d6e40dbc-f20e-4164-b460-18de6ea72906" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2180.464822] env[63279]: DEBUG nova.compute.manager [req-63e5b7d6-5b4d-45d9-853b-7d70f2ff1c91 req-34884388-2485-44ca-be60-5c88d2c37de4 service nova] [instance: eca98392-98be-405b-b799-463ef9ee3dc8] Received event network-vif-deleted-d609f907-0c7e-4293-8b21-721e712e8dc2 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2180.562140] env[63279]: DEBUG nova.network.neutron [None req-c2c89b32-678a-48f7-a70f-bc8265b6ba5a tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] [instance: 2fba60ba-7ef0-4e61-9f7d-147cad8ab7ed] Successfully updated port: 8f12bb0d-eec1-4c21-b319-372b37e319ca {{(pid=63279) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2180.632114] env[63279]: DEBUG nova.network.neutron [None req-082a3afc-69fa-402f-b3d2-e85c1a1a4788 tempest-ServersTestMultiNic-465204663 tempest-ServersTestMultiNic-465204663-project-member] [instance: 75f5b49f-14e7-4a8e-a0cb-b955edc13dd5] Successfully updated port: 06151b64-defb-4dca-a89e-f4d66bcbeb62 {{(pid=63279) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2180.885630] env[63279]: DEBUG oslo_vmware.api [None req-208396b7-55ea-4701-90ef-c6d3ea22cae9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Task: {'id': task-2087698, 'name': PowerOffVM_Task, 'duration_secs': 0.253777} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2180.885630] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-208396b7-55ea-4701-90ef-c6d3ea22cae9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] [instance: a15141bc-424d-48ca-a6d5-c859a3639a0b] Powered off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2180.886393] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d6e237a-8d63-444e-a325-8f022493e191 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2180.909819] env[63279]: DEBUG oslo_concurrency.lockutils [req-c1872d09-60a6-4d53-80ef-bbc2851feff9 req-8df039dc-43f5-499f-a8d5-8588c8031741 service nova] Releasing lock "refresh_cache-d6e40dbc-f20e-4164-b460-18de6ea72906" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2180.910774] env[63279]: DEBUG nova.scheduler.client.report [None req-b5627f37-2a24-479b-99ea-e01b4f65ca3e tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Inventory has not changed for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2180.914144] env[63279]: DEBUG oslo_concurrency.lockutils [None req-c03c3095-5bce-4baa-893b-4ed2dab285cd tempest-ServersTestBootFromVolume-1861685268 tempest-ServersTestBootFromVolume-1861685268-project-member] Acquiring lock "861e4118-6134-40cf-91cb-865b6ee9f347" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2180.914413] env[63279]: DEBUG oslo_concurrency.lockutils [None req-c03c3095-5bce-4baa-893b-4ed2dab285cd tempest-ServersTestBootFromVolume-1861685268 tempest-ServersTestBootFromVolume-1861685268-project-member] Lock "861e4118-6134-40cf-91cb-865b6ee9f347" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2180.914694] env[63279]: DEBUG oslo_concurrency.lockutils [None req-c03c3095-5bce-4baa-893b-4ed2dab285cd tempest-ServersTestBootFromVolume-1861685268 tempest-ServersTestBootFromVolume-1861685268-project-member] Acquiring lock "861e4118-6134-40cf-91cb-865b6ee9f347-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2180.914926] env[63279]: DEBUG oslo_concurrency.lockutils [None req-c03c3095-5bce-4baa-893b-4ed2dab285cd tempest-ServersTestBootFromVolume-1861685268 tempest-ServersTestBootFromVolume-1861685268-project-member] Lock "861e4118-6134-40cf-91cb-865b6ee9f347-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2180.915233] env[63279]: DEBUG oslo_concurrency.lockutils [None req-c03c3095-5bce-4baa-893b-4ed2dab285cd tempest-ServersTestBootFromVolume-1861685268 tempest-ServersTestBootFromVolume-1861685268-project-member] Lock "861e4118-6134-40cf-91cb-865b6ee9f347-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2180.917303] env[63279]: DEBUG oslo_concurrency.lockutils [None req-01bff562-4997-463d-b37e-eeeb7be900cc tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2180.918781] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-733b9281-638e-4a7e-b244-cb009d16e94c {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2180.922544] env[63279]: INFO nova.compute.manager [None req-c03c3095-5bce-4baa-893b-4ed2dab285cd tempest-ServersTestBootFromVolume-1861685268 tempest-ServersTestBootFromVolume-1861685268-project-member] [instance: 861e4118-6134-40cf-91cb-865b6ee9f347] Terminating instance [ 2181.069574] env[63279]: DEBUG oslo_concurrency.lockutils [None req-c2c89b32-678a-48f7-a70f-bc8265b6ba5a tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Acquiring lock "refresh_cache-2fba60ba-7ef0-4e61-9f7d-147cad8ab7ed" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2181.069760] env[63279]: DEBUG oslo_concurrency.lockutils [None req-c2c89b32-678a-48f7-a70f-bc8265b6ba5a tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Acquired lock "refresh_cache-2fba60ba-7ef0-4e61-9f7d-147cad8ab7ed" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2181.069916] env[63279]: DEBUG nova.network.neutron [None req-c2c89b32-678a-48f7-a70f-bc8265b6ba5a tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] [instance: 2fba60ba-7ef0-4e61-9f7d-147cad8ab7ed] Building network info cache for instance {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2181.136543] env[63279]: DEBUG oslo_concurrency.lockutils [None req-082a3afc-69fa-402f-b3d2-e85c1a1a4788 tempest-ServersTestMultiNic-465204663 tempest-ServersTestMultiNic-465204663-project-member] Acquiring lock "refresh_cache-75f5b49f-14e7-4a8e-a0cb-b955edc13dd5" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2181.136543] env[63279]: DEBUG oslo_concurrency.lockutils [None req-082a3afc-69fa-402f-b3d2-e85c1a1a4788 tempest-ServersTestMultiNic-465204663 tempest-ServersTestMultiNic-465204663-project-member] Acquired lock "refresh_cache-75f5b49f-14e7-4a8e-a0cb-b955edc13dd5" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 
2181.136762] env[63279]: DEBUG nova.network.neutron [None req-082a3afc-69fa-402f-b3d2-e85c1a1a4788 tempest-ServersTestMultiNic-465204663 tempest-ServersTestMultiNic-465204663-project-member] [instance: 75f5b49f-14e7-4a8e-a0cb-b955edc13dd5] Building network info cache for instance {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2181.419360] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b5627f37-2a24-479b-99ea-e01b4f65ca3e tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.891s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2181.421720] env[63279]: DEBUG oslo_concurrency.lockutils [None req-1b4c0c4c-9a01-4ca3-8f97-810c4c0e8a75 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.805s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2181.422431] env[63279]: DEBUG nova.objects.instance [None req-1b4c0c4c-9a01-4ca3-8f97-810c4c0e8a75 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Lazy-loading 'resources' on Instance uuid d6e40dbc-f20e-4164-b460-18de6ea72906 {{(pid=63279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2181.432767] env[63279]: DEBUG nova.compute.manager [None req-c03c3095-5bce-4baa-893b-4ed2dab285cd tempest-ServersTestBootFromVolume-1861685268 tempest-ServersTestBootFromVolume-1861685268-project-member] [instance: 861e4118-6134-40cf-91cb-865b6ee9f347] Start destroying the instance on the hypervisor. 
{{(pid=63279) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2181.433008] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-c03c3095-5bce-4baa-893b-4ed2dab285cd tempest-ServersTestBootFromVolume-1861685268 tempest-ServersTestBootFromVolume-1861685268-project-member] [instance: 861e4118-6134-40cf-91cb-865b6ee9f347] Powering off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2181.433935] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-208396b7-55ea-4701-90ef-c6d3ea22cae9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] [instance: a15141bc-424d-48ca-a6d5-c859a3639a0b] Creating Snapshot of the VM instance {{(pid=63279) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 2181.434451] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e337857c-98bb-4650-841f-64ef3d224774 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2181.436415] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-c4bfe9c1-d9ac-405c-b364-b887631ed2f3 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2181.444920] env[63279]: DEBUG oslo_vmware.api [None req-c03c3095-5bce-4baa-893b-4ed2dab285cd tempest-ServersTestBootFromVolume-1861685268 tempest-ServersTestBootFromVolume-1861685268-project-member] Waiting for the task: (returnval){ [ 2181.444920] env[63279]: value = "task-2087699" [ 2181.444920] env[63279]: _type = "Task" [ 2181.444920] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2181.446423] env[63279]: DEBUG oslo_vmware.api [None req-208396b7-55ea-4701-90ef-c6d3ea22cae9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Waiting for the task: (returnval){ [ 2181.446423] env[63279]: value = "task-2087700" [ 2181.446423] env[63279]: _type = "Task" [ 2181.446423] env[63279]: } to complete. 
{{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2181.447337] env[63279]: INFO nova.scheduler.client.report [None req-b5627f37-2a24-479b-99ea-e01b4f65ca3e tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Deleted allocations for instance 4acfb474-d861-467a-983c-0dd5641e66f3 [ 2181.464386] env[63279]: DEBUG nova.compute.manager [req-d791590c-6294-42ac-918f-ff48c835ae6e req-3e49e3b0-0d6a-42b9-97f6-824a055e60e5 service nova] [instance: 2fba60ba-7ef0-4e61-9f7d-147cad8ab7ed] Received event network-vif-plugged-8f12bb0d-eec1-4c21-b319-372b37e319ca {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2181.465151] env[63279]: DEBUG oslo_concurrency.lockutils [req-d791590c-6294-42ac-918f-ff48c835ae6e req-3e49e3b0-0d6a-42b9-97f6-824a055e60e5 service nova] Acquiring lock "2fba60ba-7ef0-4e61-9f7d-147cad8ab7ed-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2181.465151] env[63279]: DEBUG oslo_concurrency.lockutils [req-d791590c-6294-42ac-918f-ff48c835ae6e req-3e49e3b0-0d6a-42b9-97f6-824a055e60e5 service nova] Lock "2fba60ba-7ef0-4e61-9f7d-147cad8ab7ed-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2181.465569] env[63279]: DEBUG oslo_concurrency.lockutils [req-d791590c-6294-42ac-918f-ff48c835ae6e req-3e49e3b0-0d6a-42b9-97f6-824a055e60e5 service nova] Lock "2fba60ba-7ef0-4e61-9f7d-147cad8ab7ed-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2181.465971] env[63279]: DEBUG nova.compute.manager [req-d791590c-6294-42ac-918f-ff48c835ae6e req-3e49e3b0-0d6a-42b9-97f6-824a055e60e5 service nova] [instance: 2fba60ba-7ef0-4e61-9f7d-147cad8ab7ed] No waiting events found dispatching network-vif-plugged-8f12bb0d-eec1-4c21-b319-372b37e319ca {{(pid=63279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 2181.466349] env[63279]: WARNING nova.compute.manager [req-d791590c-6294-42ac-918f-ff48c835ae6e req-3e49e3b0-0d6a-42b9-97f6-824a055e60e5 service nova] [instance: 2fba60ba-7ef0-4e61-9f7d-147cad8ab7ed] Received unexpected event network-vif-plugged-8f12bb0d-eec1-4c21-b319-372b37e319ca for instance with vm_state building and task_state spawning. [ 2181.466976] env[63279]: DEBUG nova.compute.manager [req-d791590c-6294-42ac-918f-ff48c835ae6e req-3e49e3b0-0d6a-42b9-97f6-824a055e60e5 service nova] [instance: 2fba60ba-7ef0-4e61-9f7d-147cad8ab7ed] Received event network-changed-8f12bb0d-eec1-4c21-b319-372b37e319ca {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2181.467249] env[63279]: DEBUG nova.compute.manager [req-d791590c-6294-42ac-918f-ff48c835ae6e req-3e49e3b0-0d6a-42b9-97f6-824a055e60e5 service nova] [instance: 2fba60ba-7ef0-4e61-9f7d-147cad8ab7ed] Refreshing instance network info cache due to event network-changed-8f12bb0d-eec1-4c21-b319-372b37e319ca. 
{{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11655}} [ 2181.468289] env[63279]: DEBUG oslo_concurrency.lockutils [req-d791590c-6294-42ac-918f-ff48c835ae6e req-3e49e3b0-0d6a-42b9-97f6-824a055e60e5 service nova] Acquiring lock "refresh_cache-2fba60ba-7ef0-4e61-9f7d-147cad8ab7ed" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2181.473842] env[63279]: DEBUG oslo_vmware.api [None req-c03c3095-5bce-4baa-893b-4ed2dab285cd tempest-ServersTestBootFromVolume-1861685268 tempest-ServersTestBootFromVolume-1861685268-project-member] Task: {'id': task-2087699, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2181.480606] env[63279]: DEBUG oslo_vmware.api [None req-208396b7-55ea-4701-90ef-c6d3ea22cae9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Task: {'id': task-2087700, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2181.645406] env[63279]: DEBUG nova.network.neutron [None req-c2c89b32-678a-48f7-a70f-bc8265b6ba5a tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] [instance: 2fba60ba-7ef0-4e61-9f7d-147cad8ab7ed] Instance cache missing network info. {{(pid=63279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2181.678661] env[63279]: DEBUG oslo_concurrency.lockutils [None req-90a4661b-87b3-4198-ac53-8b946588d520 tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] Acquiring lock "9b98a316-71da-45fb-b895-553f179fe7d9" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2181.679053] env[63279]: DEBUG oslo_concurrency.lockutils [None req-90a4661b-87b3-4198-ac53-8b946588d520 tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] Lock "9b98a316-71da-45fb-b895-553f179fe7d9" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2181.709073] env[63279]: DEBUG nova.network.neutron [None req-082a3afc-69fa-402f-b3d2-e85c1a1a4788 tempest-ServersTestMultiNic-465204663 tempest-ServersTestMultiNic-465204663-project-member] [instance: 75f5b49f-14e7-4a8e-a0cb-b955edc13dd5] Instance cache missing network info. 
{{(pid=63279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2181.737657] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74e9b9f5-b0d6-43d5-a8ec-a7550c0e3780 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2181.761903] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-8d18390d-60e2-4c12-b76a-90c1efe64631 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: 736ab3ac-b9e0-4f9e-885b-765ca7a92ed0] Updating instance '736ab3ac-b9e0-4f9e-885b-765ca7a92ed0' progress to 0 {{(pid=63279) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2181.924696] env[63279]: DEBUG nova.objects.instance [None req-1b4c0c4c-9a01-4ca3-8f97-810c4c0e8a75 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Lazy-loading 'numa_topology' on Instance uuid d6e40dbc-f20e-4164-b460-18de6ea72906 {{(pid=63279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2181.959283] env[63279]: DEBUG oslo_vmware.api [None req-c03c3095-5bce-4baa-893b-4ed2dab285cd tempest-ServersTestBootFromVolume-1861685268 tempest-ServersTestBootFromVolume-1861685268-project-member] Task: {'id': task-2087699, 'name': PowerOffVM_Task, 'duration_secs': 0.194317} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2181.959972] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-c03c3095-5bce-4baa-893b-4ed2dab285cd tempest-ServersTestBootFromVolume-1861685268 tempest-ServersTestBootFromVolume-1861685268-project-member] [instance: 861e4118-6134-40cf-91cb-865b6ee9f347] Powered off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2181.960199] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-c03c3095-5bce-4baa-893b-4ed2dab285cd tempest-ServersTestBootFromVolume-1861685268 tempest-ServersTestBootFromVolume-1861685268-project-member] [instance: 861e4118-6134-40cf-91cb-865b6ee9f347] Volume detach. 
Driver type: vmdk {{(pid=63279) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 2181.960400] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-c03c3095-5bce-4baa-893b-4ed2dab285cd tempest-ServersTestBootFromVolume-1861685268 tempest-ServersTestBootFromVolume-1861685268-project-member] [instance: 861e4118-6134-40cf-91cb-865b6ee9f347] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-427687', 'volume_id': '62b49f21-7323-46c2-809d-db035fa9fb8c', 'name': 'volume-62b49f21-7323-46c2-809d-db035fa9fb8c', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '861e4118-6134-40cf-91cb-865b6ee9f347', 'attached_at': '', 'detached_at': '', 'volume_id': '62b49f21-7323-46c2-809d-db035fa9fb8c', 'serial': '62b49f21-7323-46c2-809d-db035fa9fb8c'} {{(pid=63279) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 2181.961175] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58f55f2b-52a7-49cf-9b98-05d57213e140 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2181.971438] env[63279]: DEBUG oslo_vmware.api [None req-208396b7-55ea-4701-90ef-c6d3ea22cae9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Task: {'id': task-2087700, 'name': CreateSnapshot_Task, 'duration_secs': 0.430037} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2181.972297] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b5627f37-2a24-479b-99ea-e01b4f65ca3e tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Lock "4acfb474-d861-467a-983c-0dd5641e66f3" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 8.751s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2181.973576] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-208396b7-55ea-4701-90ef-c6d3ea22cae9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] [instance: a15141bc-424d-48ca-a6d5-c859a3639a0b] Created Snapshot of the VM instance {{(pid=63279) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 2181.974547] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22cd3916-d87c-45c1-9ca2-09efe9e451f1 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2181.997572] env[63279]: DEBUG nova.network.neutron [None req-c2c89b32-678a-48f7-a70f-bc8265b6ba5a tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] [instance: 2fba60ba-7ef0-4e61-9f7d-147cad8ab7ed] Updating instance_info_cache with network_info: [{"id": "8f12bb0d-eec1-4c21-b319-372b37e319ca", "address": "fa:16:3e:68:12:c8", "network": {"id": "c1d7406d-6852-47cd-a4a3-de7373d03ab4", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1990733857-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], 
"routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1fc9b60ae304455097b8be9a276796fa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0549820d-5649-40bc-ad6e-9ae27b384d90", "external-id": "nsx-vlan-transportzone-434", "segmentation_id": 434, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8f12bb0d-ee", "ovs_interfaceid": "8f12bb0d-eec1-4c21-b319-372b37e319ca", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2182.002033] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5a9462c-b70c-48b6-a296-ba4d1f93068c {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2182.016255] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3536def8-dc15-45bd-bf54-27f073059572 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2182.035577] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64aa7553-dacb-453c-9e5a-02a06488951d {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2182.053124] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-c03c3095-5bce-4baa-893b-4ed2dab285cd tempest-ServersTestBootFromVolume-1861685268 tempest-ServersTestBootFromVolume-1861685268-project-member] The volume has not been displaced from its original location: [datastore1] volume-62b49f21-7323-46c2-809d-db035fa9fb8c/volume-62b49f21-7323-46c2-809d-db035fa9fb8c.vmdk. No consolidation needed. {{(pid=63279) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 2182.058599] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-c03c3095-5bce-4baa-893b-4ed2dab285cd tempest-ServersTestBootFromVolume-1861685268 tempest-ServersTestBootFromVolume-1861685268-project-member] [instance: 861e4118-6134-40cf-91cb-865b6ee9f347] Reconfiguring VM instance instance-0000004e to detach disk 2000 {{(pid=63279) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 2182.059201] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-cd5d4a45-f8b6-408f-8639-fb0a0bbe07b4 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2182.082717] env[63279]: DEBUG oslo_vmware.api [None req-c03c3095-5bce-4baa-893b-4ed2dab285cd tempest-ServersTestBootFromVolume-1861685268 tempest-ServersTestBootFromVolume-1861685268-project-member] Waiting for the task: (returnval){ [ 2182.082717] env[63279]: value = "task-2087701" [ 2182.082717] env[63279]: _type = "Task" [ 2182.082717] env[63279]: } to complete. 
{{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2182.092133] env[63279]: DEBUG oslo_vmware.api [None req-c03c3095-5bce-4baa-893b-4ed2dab285cd tempest-ServersTestBootFromVolume-1861685268 tempest-ServersTestBootFromVolume-1861685268-project-member] Task: {'id': task-2087701, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2182.184595] env[63279]: DEBUG nova.compute.manager [None req-90a4661b-87b3-4198-ac53-8b946588d520 tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] [instance: 9b98a316-71da-45fb-b895-553f179fe7d9] Starting instance... {{(pid=63279) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 2182.268205] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-8d18390d-60e2-4c12-b76a-90c1efe64631 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: 736ab3ac-b9e0-4f9e-885b-765ca7a92ed0] Powering off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2182.268508] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-23f138bc-e40f-4135-8033-7c245b4b1469 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2182.275245] env[63279]: DEBUG oslo_vmware.api [None req-8d18390d-60e2-4c12-b76a-90c1efe64631 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Waiting for the task: (returnval){ [ 2182.275245] env[63279]: value = "task-2087702" [ 2182.275245] env[63279]: _type = "Task" [ 2182.275245] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2182.283850] env[63279]: DEBUG oslo_vmware.api [None req-8d18390d-60e2-4c12-b76a-90c1efe64631 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Task: {'id': task-2087702, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2182.427275] env[63279]: DEBUG nova.objects.base [None req-1b4c0c4c-9a01-4ca3-8f97-810c4c0e8a75 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Object Instance lazy-loaded attributes: resources,numa_topology {{(pid=63279) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 2182.507391] env[63279]: DEBUG oslo_concurrency.lockutils [None req-c2c89b32-678a-48f7-a70f-bc8265b6ba5a tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Releasing lock "refresh_cache-2fba60ba-7ef0-4e61-9f7d-147cad8ab7ed" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2182.507744] env[63279]: DEBUG nova.compute.manager [None req-c2c89b32-678a-48f7-a70f-bc8265b6ba5a tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] [instance: 2fba60ba-7ef0-4e61-9f7d-147cad8ab7ed] Instance network_info: |[{"id": "8f12bb0d-eec1-4c21-b319-372b37e319ca", "address": "fa:16:3e:68:12:c8", "network": {"id": "c1d7406d-6852-47cd-a4a3-de7373d03ab4", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1990733857-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1fc9b60ae304455097b8be9a276796fa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0549820d-5649-40bc-ad6e-9ae27b384d90", "external-id": "nsx-vlan-transportzone-434", "segmentation_id": 434, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8f12bb0d-ee", "ovs_interfaceid": "8f12bb0d-eec1-4c21-b319-372b37e319ca", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 2182.508061] env[63279]: DEBUG oslo_concurrency.lockutils [req-d791590c-6294-42ac-918f-ff48c835ae6e req-3e49e3b0-0d6a-42b9-97f6-824a055e60e5 service nova] Acquired lock "refresh_cache-2fba60ba-7ef0-4e61-9f7d-147cad8ab7ed" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2182.508248] env[63279]: DEBUG nova.network.neutron [req-d791590c-6294-42ac-918f-ff48c835ae6e req-3e49e3b0-0d6a-42b9-97f6-824a055e60e5 service nova] [instance: 2fba60ba-7ef0-4e61-9f7d-147cad8ab7ed] Refreshing network info cache for port 8f12bb0d-eec1-4c21-b319-372b37e319ca {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2182.510337] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-c2c89b32-678a-48f7-a70f-bc8265b6ba5a tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] [instance: 2fba60ba-7ef0-4e61-9f7d-147cad8ab7ed] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:68:12:c8', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '0549820d-5649-40bc-ad6e-9ae27b384d90', 'network-type': 'nsx.LogicalSwitch', 
'use-external-id': True}, 'iface_id': '8f12bb0d-eec1-4c21-b319-372b37e319ca', 'vif_model': 'vmxnet3'}] {{(pid=63279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2182.522493] env[63279]: DEBUG oslo.service.loopingcall [None req-c2c89b32-678a-48f7-a70f-bc8265b6ba5a tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2182.531444] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-208396b7-55ea-4701-90ef-c6d3ea22cae9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] [instance: a15141bc-424d-48ca-a6d5-c859a3639a0b] Creating linked-clone VM from snapshot {{(pid=63279) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 2182.531726] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2fba60ba-7ef0-4e61-9f7d-147cad8ab7ed] Creating VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2182.535503] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-251f68e0-d8be-4d2b-82be-c08db7e72362 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2182.538768] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-151c9229-c06d-4eb1-9593-3847a06f45ea {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2182.563250] env[63279]: DEBUG oslo_vmware.api [None req-208396b7-55ea-4701-90ef-c6d3ea22cae9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Waiting for the task: (returnval){ [ 2182.563250] env[63279]: value = "task-2087703" [ 2182.563250] env[63279]: _type = "Task" [ 2182.563250] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2182.564939] env[63279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2182.564939] env[63279]: value = "task-2087704" [ 2182.564939] env[63279]: _type = "Task" [ 2182.564939] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2182.576016] env[63279]: DEBUG oslo_vmware.api [None req-208396b7-55ea-4701-90ef-c6d3ea22cae9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Task: {'id': task-2087703, 'name': CloneVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2182.581121] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087704, 'name': CreateVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2182.593965] env[63279]: DEBUG oslo_vmware.api [None req-c03c3095-5bce-4baa-893b-4ed2dab285cd tempest-ServersTestBootFromVolume-1861685268 tempest-ServersTestBootFromVolume-1861685268-project-member] Task: {'id': task-2087701, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2182.660386] env[63279]: DEBUG nova.compute.manager [req-cf6342ea-def9-4c05-b757-bf08bb60f2c6 req-829bce00-01ec-4e69-8343-f61fd46ba29c service nova] [instance: 75f5b49f-14e7-4a8e-a0cb-b955edc13dd5] Received event network-vif-plugged-06151b64-defb-4dca-a89e-f4d66bcbeb62 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2182.660630] env[63279]: DEBUG oslo_concurrency.lockutils [req-cf6342ea-def9-4c05-b757-bf08bb60f2c6 req-829bce00-01ec-4e69-8343-f61fd46ba29c service nova] Acquiring lock "75f5b49f-14e7-4a8e-a0cb-b955edc13dd5-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2182.660908] env[63279]: DEBUG oslo_concurrency.lockutils [req-cf6342ea-def9-4c05-b757-bf08bb60f2c6 req-829bce00-01ec-4e69-8343-f61fd46ba29c service nova] Lock "75f5b49f-14e7-4a8e-a0cb-b955edc13dd5-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2182.661099] env[63279]: DEBUG oslo_concurrency.lockutils [req-cf6342ea-def9-4c05-b757-bf08bb60f2c6 req-829bce00-01ec-4e69-8343-f61fd46ba29c service nova] Lock "75f5b49f-14e7-4a8e-a0cb-b955edc13dd5-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2182.661488] env[63279]: DEBUG nova.compute.manager [req-cf6342ea-def9-4c05-b757-bf08bb60f2c6 req-829bce00-01ec-4e69-8343-f61fd46ba29c service nova] [instance: 75f5b49f-14e7-4a8e-a0cb-b955edc13dd5] No waiting events found dispatching network-vif-plugged-06151b64-defb-4dca-a89e-f4d66bcbeb62 {{(pid=63279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 2182.661676] env[63279]: WARNING nova.compute.manager [req-cf6342ea-def9-4c05-b757-bf08bb60f2c6 req-829bce00-01ec-4e69-8343-f61fd46ba29c service nova] [instance: 75f5b49f-14e7-4a8e-a0cb-b955edc13dd5] Received unexpected event network-vif-plugged-06151b64-defb-4dca-a89e-f4d66bcbeb62 for instance with vm_state building and task_state spawning. [ 2182.661838] env[63279]: DEBUG nova.compute.manager [req-cf6342ea-def9-4c05-b757-bf08bb60f2c6 req-829bce00-01ec-4e69-8343-f61fd46ba29c service nova] [instance: 75f5b49f-14e7-4a8e-a0cb-b955edc13dd5] Received event network-changed-06151b64-defb-4dca-a89e-f4d66bcbeb62 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2182.661989] env[63279]: DEBUG nova.compute.manager [req-cf6342ea-def9-4c05-b757-bf08bb60f2c6 req-829bce00-01ec-4e69-8343-f61fd46ba29c service nova] [instance: 75f5b49f-14e7-4a8e-a0cb-b955edc13dd5] Refreshing instance network info cache due to event network-changed-06151b64-defb-4dca-a89e-f4d66bcbeb62. 
{{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11655}} [ 2182.662171] env[63279]: DEBUG oslo_concurrency.lockutils [req-cf6342ea-def9-4c05-b757-bf08bb60f2c6 req-829bce00-01ec-4e69-8343-f61fd46ba29c service nova] Acquiring lock "refresh_cache-75f5b49f-14e7-4a8e-a0cb-b955edc13dd5" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2182.709758] env[63279]: DEBUG oslo_concurrency.lockutils [None req-90a4661b-87b3-4198-ac53-8b946588d520 tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2182.746728] env[63279]: DEBUG oslo_concurrency.lockutils [None req-73bbe86f-3fab-4cd0-9b07-18088e80054f tempest-ServersTestFqdnHostnames-1241255271 tempest-ServersTestFqdnHostnames-1241255271-project-member] Acquiring lock "27e2917d-3cd0-4ad3-ab65-f85f7d97225f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2182.747186] env[63279]: DEBUG oslo_concurrency.lockutils [None req-73bbe86f-3fab-4cd0-9b07-18088e80054f tempest-ServersTestFqdnHostnames-1241255271 tempest-ServersTestFqdnHostnames-1241255271-project-member] Lock "27e2917d-3cd0-4ad3-ab65-f85f7d97225f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2182.771809] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13cbc098-c4eb-489d-bf08-b5d229c312bd {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2182.782855] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84ecb248-e84c-4b4e-b70b-afe3e2b6fc8a {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2182.791709] env[63279]: DEBUG oslo_vmware.api [None req-8d18390d-60e2-4c12-b76a-90c1efe64631 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Task: {'id': task-2087702, 'name': PowerOffVM_Task, 'duration_secs': 0.225106} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2182.792712] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-8d18390d-60e2-4c12-b76a-90c1efe64631 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: 736ab3ac-b9e0-4f9e-885b-765ca7a92ed0] Powered off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2182.792968] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-8d18390d-60e2-4c12-b76a-90c1efe64631 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: 736ab3ac-b9e0-4f9e-885b-765ca7a92ed0] Updating instance '736ab3ac-b9e0-4f9e-885b-765ca7a92ed0' progress to 17 {{(pid=63279) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2182.829026] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf753ef8-3137-4f10-ba14-40774bd0a0aa {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2182.834516] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d14b9f64-6e13-4bea-9fed-898277fe99a9 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2182.852041] env[63279]: DEBUG nova.compute.provider_tree [None req-1b4c0c4c-9a01-4ca3-8f97-810c4c0e8a75 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Inventory has not changed in ProviderTree for provider: 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2182.880060] env[63279]: DEBUG nova.network.neutron [None req-082a3afc-69fa-402f-b3d2-e85c1a1a4788 tempest-ServersTestMultiNic-465204663 tempest-ServersTestMultiNic-465204663-project-member] [instance: 75f5b49f-14e7-4a8e-a0cb-b955edc13dd5] Updating instance_info_cache with network_info: [{"id": "f5f17197-3d49-4ae2-a895-9bab23e07301", "address": "fa:16:3e:02:12:0c", "network": {"id": "c70e9607-e52f-42ab-baea-1bc752ec674b", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-438199453", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.242", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "161d1caf722349c188ca8fc647989c05", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f67a2790-f2b0-4d03-b606-0bfaee7a4229", "external-id": "nsx-vlan-transportzone-187", "segmentation_id": 187, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf5f17197-3d", "ovs_interfaceid": "f5f17197-3d49-4ae2-a895-9bab23e07301", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "2053aee1-c71d-4f3d-99d3-989823b04e65", "address": "fa:16:3e:75:53:2b", "network": {"id": "a50f0cec-d79c-4af2-8080-2a93dd18da6a", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1626901449", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": 
{"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.179", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "161d1caf722349c188ca8fc647989c05", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "96fdbb91-eb49-4dbf-b234-5b38503d7589", "external-id": "nsx-vlan-transportzone-392", "segmentation_id": 392, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2053aee1-c7", "ovs_interfaceid": "2053aee1-c71d-4f3d-99d3-989823b04e65", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "06151b64-defb-4dca-a89e-f4d66bcbeb62", "address": "fa:16:3e:51:a2:9a", "network": {"id": "c70e9607-e52f-42ab-baea-1bc752ec674b", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-438199453", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.46", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "161d1caf722349c188ca8fc647989c05", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f67a2790-f2b0-4d03-b606-0bfaee7a4229", "external-id": "nsx-vlan-transportzone-187", "segmentation_id": 187, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap06151b64-de", "ovs_interfaceid": "06151b64-defb-4dca-a89e-f4d66bcbeb62", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2183.080092] env[63279]: DEBUG oslo_vmware.api [None req-208396b7-55ea-4701-90ef-c6d3ea22cae9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Task: {'id': task-2087703, 'name': CloneVM_Task} progress is 94%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2183.089315] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087704, 'name': CreateVM_Task} progress is 25%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2183.102451] env[63279]: DEBUG oslo_vmware.api [None req-c03c3095-5bce-4baa-893b-4ed2dab285cd tempest-ServersTestBootFromVolume-1861685268 tempest-ServersTestBootFromVolume-1861685268-project-member] Task: {'id': task-2087701, 'name': ReconfigVM_Task} progress is 99%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2183.250182] env[63279]: DEBUG nova.compute.manager [None req-73bbe86f-3fab-4cd0-9b07-18088e80054f tempest-ServersTestFqdnHostnames-1241255271 tempest-ServersTestFqdnHostnames-1241255271-project-member] [instance: 27e2917d-3cd0-4ad3-ab65-f85f7d97225f] Starting instance... 
{{(pid=63279) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 2183.333789] env[63279]: DEBUG nova.virt.hardware [None req-8d18390d-60e2-4c12-b76a-90c1efe64631 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-13T17:30:21Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=30887889-e45b-4f67-8b3c-16216e594a90,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2183.334065] env[63279]: DEBUG nova.virt.hardware [None req-8d18390d-60e2-4c12-b76a-90c1efe64631 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Flavor limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2183.334235] env[63279]: DEBUG nova.virt.hardware [None req-8d18390d-60e2-4c12-b76a-90c1efe64631 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Image limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2183.334458] env[63279]: DEBUG nova.virt.hardware [None req-8d18390d-60e2-4c12-b76a-90c1efe64631 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Flavor pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2183.334643] env[63279]: DEBUG nova.virt.hardware [None req-8d18390d-60e2-4c12-b76a-90c1efe64631 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Image pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2183.334809] env[63279]: DEBUG nova.virt.hardware [None req-8d18390d-60e2-4c12-b76a-90c1efe64631 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2183.335627] env[63279]: DEBUG nova.virt.hardware [None req-8d18390d-60e2-4c12-b76a-90c1efe64631 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 2183.335907] env[63279]: DEBUG nova.virt.hardware [None req-8d18390d-60e2-4c12-b76a-90c1efe64631 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 2183.336193] env[63279]: DEBUG nova.virt.hardware [None req-8d18390d-60e2-4c12-b76a-90c1efe64631 tempest-ServerDiskConfigTestJSON-726033015 
tempest-ServerDiskConfigTestJSON-726033015-project-member] Got 1 possible topologies {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2183.336454] env[63279]: DEBUG nova.virt.hardware [None req-8d18390d-60e2-4c12-b76a-90c1efe64631 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2183.336741] env[63279]: DEBUG nova.virt.hardware [None req-8d18390d-60e2-4c12-b76a-90c1efe64631 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2183.343587] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2c1ac0bd-d84b-4a31-baa1-05964ba174a7 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2183.356796] env[63279]: DEBUG nova.scheduler.client.report [None req-1b4c0c4c-9a01-4ca3-8f97-810c4c0e8a75 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Inventory has not changed for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2183.365903] env[63279]: DEBUG oslo_concurrency.lockutils [None req-f5adef54-fbc0-48f2-bee2-87da33052f2d tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Acquiring lock "f2a68d73-49d6-4b38-aff1-c2eb850f2ca6" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2183.366270] env[63279]: DEBUG oslo_concurrency.lockutils [None req-f5adef54-fbc0-48f2-bee2-87da33052f2d tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Lock "f2a68d73-49d6-4b38-aff1-c2eb850f2ca6" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2183.366500] env[63279]: DEBUG oslo_concurrency.lockutils [None req-f5adef54-fbc0-48f2-bee2-87da33052f2d tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Acquiring lock "f2a68d73-49d6-4b38-aff1-c2eb850f2ca6-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2183.366740] env[63279]: DEBUG oslo_concurrency.lockutils [None req-f5adef54-fbc0-48f2-bee2-87da33052f2d tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Lock "f2a68d73-49d6-4b38-aff1-c2eb850f2ca6-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2183.366970] env[63279]: DEBUG oslo_concurrency.lockutils [None req-f5adef54-fbc0-48f2-bee2-87da33052f2d tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Lock "f2a68d73-49d6-4b38-aff1-c2eb850f2ca6-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2183.368805] env[63279]: DEBUG oslo_vmware.api [None req-8d18390d-60e2-4c12-b76a-90c1efe64631 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Waiting for the task: (returnval){ [ 2183.368805] env[63279]: value = "task-2087705" [ 2183.368805] env[63279]: _type = "Task" [ 2183.368805] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2183.373108] env[63279]: INFO nova.compute.manager [None req-f5adef54-fbc0-48f2-bee2-87da33052f2d tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] [instance: f2a68d73-49d6-4b38-aff1-c2eb850f2ca6] Terminating instance [ 2183.382443] env[63279]: DEBUG oslo_concurrency.lockutils [None req-082a3afc-69fa-402f-b3d2-e85c1a1a4788 tempest-ServersTestMultiNic-465204663 tempest-ServersTestMultiNic-465204663-project-member] Releasing lock "refresh_cache-75f5b49f-14e7-4a8e-a0cb-b955edc13dd5" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2183.383157] env[63279]: DEBUG nova.compute.manager [None req-082a3afc-69fa-402f-b3d2-e85c1a1a4788 tempest-ServersTestMultiNic-465204663 tempest-ServersTestMultiNic-465204663-project-member] [instance: 75f5b49f-14e7-4a8e-a0cb-b955edc13dd5] Instance network_info: |[{"id": "f5f17197-3d49-4ae2-a895-9bab23e07301", "address": "fa:16:3e:02:12:0c", "network": {"id": "c70e9607-e52f-42ab-baea-1bc752ec674b", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-438199453", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.242", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "161d1caf722349c188ca8fc647989c05", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f67a2790-f2b0-4d03-b606-0bfaee7a4229", "external-id": "nsx-vlan-transportzone-187", "segmentation_id": 187, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf5f17197-3d", "ovs_interfaceid": "f5f17197-3d49-4ae2-a895-9bab23e07301", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "2053aee1-c71d-4f3d-99d3-989823b04e65", "address": "fa:16:3e:75:53:2b", "network": {"id": "a50f0cec-d79c-4af2-8080-2a93dd18da6a", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1626901449", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": 
"192.168.129.179", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "161d1caf722349c188ca8fc647989c05", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "96fdbb91-eb49-4dbf-b234-5b38503d7589", "external-id": "nsx-vlan-transportzone-392", "segmentation_id": 392, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2053aee1-c7", "ovs_interfaceid": "2053aee1-c71d-4f3d-99d3-989823b04e65", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "06151b64-defb-4dca-a89e-f4d66bcbeb62", "address": "fa:16:3e:51:a2:9a", "network": {"id": "c70e9607-e52f-42ab-baea-1bc752ec674b", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-438199453", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.46", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "161d1caf722349c188ca8fc647989c05", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f67a2790-f2b0-4d03-b606-0bfaee7a4229", "external-id": "nsx-vlan-transportzone-187", "segmentation_id": 187, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap06151b64-de", "ovs_interfaceid": "06151b64-defb-4dca-a89e-f4d66bcbeb62", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 2183.383519] env[63279]: DEBUG oslo_vmware.api [None req-8d18390d-60e2-4c12-b76a-90c1efe64631 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Task: {'id': task-2087705, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2183.385253] env[63279]: DEBUG oslo_concurrency.lockutils [req-cf6342ea-def9-4c05-b757-bf08bb60f2c6 req-829bce00-01ec-4e69-8343-f61fd46ba29c service nova] Acquired lock "refresh_cache-75f5b49f-14e7-4a8e-a0cb-b955edc13dd5" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2183.385499] env[63279]: DEBUG nova.network.neutron [req-cf6342ea-def9-4c05-b757-bf08bb60f2c6 req-829bce00-01ec-4e69-8343-f61fd46ba29c service nova] [instance: 75f5b49f-14e7-4a8e-a0cb-b955edc13dd5] Refreshing network info cache for port 06151b64-defb-4dca-a89e-f4d66bcbeb62 {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2183.386787] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-082a3afc-69fa-402f-b3d2-e85c1a1a4788 tempest-ServersTestMultiNic-465204663 tempest-ServersTestMultiNic-465204663-project-member] [instance: 75f5b49f-14e7-4a8e-a0cb-b955edc13dd5] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:02:12:0c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f67a2790-f2b0-4d03-b606-0bfaee7a4229', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f5f17197-3d49-4ae2-a895-9bab23e07301', 'vif_model': 'vmxnet3'}, {'network_name': 'br-int', 'mac_address': 'fa:16:3e:75:53:2b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '96fdbb91-eb49-4dbf-b234-5b38503d7589', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '2053aee1-c71d-4f3d-99d3-989823b04e65', 'vif_model': 'vmxnet3'}, {'network_name': 'br-int', 'mac_address': 'fa:16:3e:51:a2:9a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f67a2790-f2b0-4d03-b606-0bfaee7a4229', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '06151b64-defb-4dca-a89e-f4d66bcbeb62', 'vif_model': 'vmxnet3'}] {{(pid=63279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2183.400277] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-082a3afc-69fa-402f-b3d2-e85c1a1a4788 tempest-ServersTestMultiNic-465204663 tempest-ServersTestMultiNic-465204663-project-member] Creating folder: Project (161d1caf722349c188ca8fc647989c05). Parent ref: group-v427491. {{(pid=63279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 2183.401560] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8d4a6495-1a5e-42e0-9835-83a0c429d8d0 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2183.413774] env[63279]: INFO nova.virt.vmwareapi.vm_util [None req-082a3afc-69fa-402f-b3d2-e85c1a1a4788 tempest-ServersTestMultiNic-465204663 tempest-ServersTestMultiNic-465204663-project-member] Created folder: Project (161d1caf722349c188ca8fc647989c05) in parent group-v427491. [ 2183.413961] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-082a3afc-69fa-402f-b3d2-e85c1a1a4788 tempest-ServersTestMultiNic-465204663 tempest-ServersTestMultiNic-465204663-project-member] Creating folder: Instances. Parent ref: group-v427720. 
{{(pid=63279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 2183.414167] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c2454152-8eab-4403-a3e8-ebc546ff5700 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2183.424371] env[63279]: INFO nova.virt.vmwareapi.vm_util [None req-082a3afc-69fa-402f-b3d2-e85c1a1a4788 tempest-ServersTestMultiNic-465204663 tempest-ServersTestMultiNic-465204663-project-member] Created folder: Instances in parent group-v427720. [ 2183.424909] env[63279]: DEBUG oslo.service.loopingcall [None req-082a3afc-69fa-402f-b3d2-e85c1a1a4788 tempest-ServersTestMultiNic-465204663 tempest-ServersTestMultiNic-465204663-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2183.425388] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 75f5b49f-14e7-4a8e-a0cb-b955edc13dd5] Creating VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2183.428051] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-67744b4e-0ab6-41ad-a797-1c2ed3324998 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2183.458870] env[63279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2183.458870] env[63279]: value = "task-2087708" [ 2183.458870] env[63279]: _type = "Task" [ 2183.458870] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2183.468948] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087708, 'name': CreateVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2183.536332] env[63279]: DEBUG nova.network.neutron [req-d791590c-6294-42ac-918f-ff48c835ae6e req-3e49e3b0-0d6a-42b9-97f6-824a055e60e5 service nova] [instance: 2fba60ba-7ef0-4e61-9f7d-147cad8ab7ed] Updated VIF entry in instance network info cache for port 8f12bb0d-eec1-4c21-b319-372b37e319ca. 
{{(pid=63279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2183.536332] env[63279]: DEBUG nova.network.neutron [req-d791590c-6294-42ac-918f-ff48c835ae6e req-3e49e3b0-0d6a-42b9-97f6-824a055e60e5 service nova] [instance: 2fba60ba-7ef0-4e61-9f7d-147cad8ab7ed] Updating instance_info_cache with network_info: [{"id": "8f12bb0d-eec1-4c21-b319-372b37e319ca", "address": "fa:16:3e:68:12:c8", "network": {"id": "c1d7406d-6852-47cd-a4a3-de7373d03ab4", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1990733857-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1fc9b60ae304455097b8be9a276796fa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0549820d-5649-40bc-ad6e-9ae27b384d90", "external-id": "nsx-vlan-transportzone-434", "segmentation_id": 434, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8f12bb0d-ee", "ovs_interfaceid": "8f12bb0d-eec1-4c21-b319-372b37e319ca", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2183.590403] env[63279]: DEBUG oslo_vmware.api [None req-208396b7-55ea-4701-90ef-c6d3ea22cae9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Task: {'id': task-2087703, 'name': CloneVM_Task} progress is 94%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2183.601142] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087704, 'name': CreateVM_Task, 'duration_secs': 0.775553} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2183.601623] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2fba60ba-7ef0-4e61-9f7d-147cad8ab7ed] Created VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2183.603653] env[63279]: DEBUG oslo_concurrency.lockutils [None req-c2c89b32-678a-48f7-a70f-bc8265b6ba5a tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2183.603844] env[63279]: DEBUG oslo_concurrency.lockutils [None req-c2c89b32-678a-48f7-a70f-bc8265b6ba5a tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2183.604424] env[63279]: DEBUG oslo_concurrency.lockutils [None req-c2c89b32-678a-48f7-a70f-bc8265b6ba5a tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2183.605605] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dba58e62-b5fc-47cb-83c8-c950c24d91b4 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2183.615079] env[63279]: DEBUG oslo_vmware.api [None req-c03c3095-5bce-4baa-893b-4ed2dab285cd tempest-ServersTestBootFromVolume-1861685268 tempest-ServersTestBootFromVolume-1861685268-project-member] Task: {'id': task-2087701, 'name': ReconfigVM_Task, 'duration_secs': 1.207821} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2183.618088] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-c03c3095-5bce-4baa-893b-4ed2dab285cd tempest-ServersTestBootFromVolume-1861685268 tempest-ServersTestBootFromVolume-1861685268-project-member] [instance: 861e4118-6134-40cf-91cb-865b6ee9f347] Reconfigured VM instance instance-0000004e to detach disk 2000 {{(pid=63279) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 2183.630677] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c364664a-2fb0-4250-a835-651890728a58 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2183.648562] env[63279]: DEBUG oslo_vmware.api [None req-c2c89b32-678a-48f7-a70f-bc8265b6ba5a tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Waiting for the task: (returnval){ [ 2183.648562] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52c76ca2-3b6b-250e-ea59-6e42b3c84b76" [ 2183.648562] env[63279]: _type = "Task" [ 2183.648562] env[63279]: } to complete. 
{{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2183.657263] env[63279]: DEBUG oslo_vmware.api [None req-c03c3095-5bce-4baa-893b-4ed2dab285cd tempest-ServersTestBootFromVolume-1861685268 tempest-ServersTestBootFromVolume-1861685268-project-member] Waiting for the task: (returnval){ [ 2183.657263] env[63279]: value = "task-2087709" [ 2183.657263] env[63279]: _type = "Task" [ 2183.657263] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2183.670941] env[63279]: DEBUG oslo_vmware.api [None req-c2c89b32-678a-48f7-a70f-bc8265b6ba5a tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52c76ca2-3b6b-250e-ea59-6e42b3c84b76, 'name': SearchDatastore_Task, 'duration_secs': 0.010149} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2183.671934] env[63279]: DEBUG oslo_concurrency.lockutils [None req-c2c89b32-678a-48f7-a70f-bc8265b6ba5a tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2183.672348] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-c2c89b32-678a-48f7-a70f-bc8265b6ba5a tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] [instance: 2fba60ba-7ef0-4e61-9f7d-147cad8ab7ed] Processing image 30887889-e45b-4f67-8b3c-16216e594a90 {{(pid=63279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2183.672718] env[63279]: DEBUG oslo_concurrency.lockutils [None req-c2c89b32-678a-48f7-a70f-bc8265b6ba5a tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2183.672944] env[63279]: DEBUG oslo_concurrency.lockutils [None req-c2c89b32-678a-48f7-a70f-bc8265b6ba5a tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2183.673248] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-c2c89b32-678a-48f7-a70f-bc8265b6ba5a tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2183.678949] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e68f36e8-2a65-4a87-a809-c853ff825563 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2183.681753] env[63279]: DEBUG oslo_vmware.api [None req-c03c3095-5bce-4baa-893b-4ed2dab285cd 
tempest-ServersTestBootFromVolume-1861685268 tempest-ServersTestBootFromVolume-1861685268-project-member] Task: {'id': task-2087709, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2183.689622] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-c2c89b32-678a-48f7-a70f-bc8265b6ba5a tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2183.689622] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-c2c89b32-678a-48f7-a70f-bc8265b6ba5a tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2183.691483] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b98d41ee-ac67-4e46-bb40-be66adbbce30 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2183.698479] env[63279]: DEBUG oslo_vmware.api [None req-c2c89b32-678a-48f7-a70f-bc8265b6ba5a tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Waiting for the task: (returnval){ [ 2183.698479] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52fd2d30-0630-0392-d11f-7f3b3946ff68" [ 2183.698479] env[63279]: _type = "Task" [ 2183.698479] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2183.710347] env[63279]: DEBUG oslo_vmware.api [None req-c2c89b32-678a-48f7-a70f-bc8265b6ba5a tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52fd2d30-0630-0392-d11f-7f3b3946ff68, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2183.779614] env[63279]: DEBUG oslo_concurrency.lockutils [None req-73bbe86f-3fab-4cd0-9b07-18088e80054f tempest-ServersTestFqdnHostnames-1241255271 tempest-ServersTestFqdnHostnames-1241255271-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2183.862348] env[63279]: DEBUG oslo_concurrency.lockutils [None req-1b4c0c4c-9a01-4ca3-8f97-810c4c0e8a75 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.440s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2183.865889] env[63279]: DEBUG oslo_concurrency.lockutils [None req-31ef8cf6-74f0-48ac-b617-703694abb219 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 4.018s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2183.866182] env[63279]: DEBUG nova.objects.instance [None req-31ef8cf6-74f0-48ac-b617-703694abb219 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Lazy-loading 'resources' on Instance uuid 2f5e22f6-ba70-4848-965b-eb1553115323 {{(pid=63279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2183.885497] env[63279]: DEBUG nova.compute.manager [None req-f5adef54-fbc0-48f2-bee2-87da33052f2d tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] [instance: f2a68d73-49d6-4b38-aff1-c2eb850f2ca6] Start destroying the instance on the hypervisor. {{(pid=63279) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2183.885497] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-f5adef54-fbc0-48f2-bee2-87da33052f2d tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] [instance: f2a68d73-49d6-4b38-aff1-c2eb850f2ca6] Destroying instance {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2183.885497] env[63279]: DEBUG oslo_vmware.api [None req-8d18390d-60e2-4c12-b76a-90c1efe64631 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Task: {'id': task-2087705, 'name': ReconfigVM_Task, 'duration_secs': 0.186446} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2183.885497] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-691107ed-f4f1-4123-bf1d-673faf08f3da {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2183.888662] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-8d18390d-60e2-4c12-b76a-90c1efe64631 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: 736ab3ac-b9e0-4f9e-885b-765ca7a92ed0] Updating instance '736ab3ac-b9e0-4f9e-885b-765ca7a92ed0' progress to 33 {{(pid=63279) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2183.899458] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-f5adef54-fbc0-48f2-bee2-87da33052f2d tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] [instance: f2a68d73-49d6-4b38-aff1-c2eb850f2ca6] Powering off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2183.899458] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-74a1aba5-5720-4cf4-afe7-b8538a2fe58e {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2183.905941] env[63279]: DEBUG oslo_vmware.api [None req-f5adef54-fbc0-48f2-bee2-87da33052f2d tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Waiting for the task: (returnval){ [ 2183.905941] env[63279]: value = "task-2087710" [ 2183.905941] env[63279]: _type = "Task" [ 2183.905941] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2183.916376] env[63279]: DEBUG oslo_vmware.api [None req-f5adef54-fbc0-48f2-bee2-87da33052f2d tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Task: {'id': task-2087710, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2183.970665] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087708, 'name': CreateVM_Task} progress is 99%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2184.041073] env[63279]: DEBUG oslo_concurrency.lockutils [req-d791590c-6294-42ac-918f-ff48c835ae6e req-3e49e3b0-0d6a-42b9-97f6-824a055e60e5 service nova] Releasing lock "refresh_cache-2fba60ba-7ef0-4e61-9f7d-147cad8ab7ed" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2184.078620] env[63279]: DEBUG oslo_vmware.api [None req-208396b7-55ea-4701-90ef-c6d3ea22cae9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Task: {'id': task-2087703, 'name': CloneVM_Task, 'duration_secs': 1.276225} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2184.078983] env[63279]: INFO nova.virt.vmwareapi.vmops [None req-208396b7-55ea-4701-90ef-c6d3ea22cae9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] [instance: a15141bc-424d-48ca-a6d5-c859a3639a0b] Created linked-clone VM from snapshot [ 2184.079932] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c4da7a9-8468-40a3-ac13-0e761e937c91 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2184.090934] env[63279]: DEBUG nova.virt.vmwareapi.images [None req-208396b7-55ea-4701-90ef-c6d3ea22cae9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] [instance: a15141bc-424d-48ca-a6d5-c859a3639a0b] Uploading image f425cf74-2f77-4c7b-99ba-64ff14b01dc6 {{(pid=63279) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 2184.132022] env[63279]: DEBUG oslo_vmware.rw_handles [None req-208396b7-55ea-4701-90ef-c6d3ea22cae9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 2184.132022] env[63279]: value = "vm-427719" [ 2184.132022] env[63279]: _type = "VirtualMachine" [ 2184.132022] env[63279]: }. {{(pid=63279) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 2184.132342] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-84a16bb6-3cd5-4772-90ee-74efb3351239 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2184.147237] env[63279]: DEBUG oslo_vmware.rw_handles [None req-208396b7-55ea-4701-90ef-c6d3ea22cae9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Lease: (returnval){ [ 2184.147237] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52bca73e-e2a6-74cb-ab0a-037f782ac6dc" [ 2184.147237] env[63279]: _type = "HttpNfcLease" [ 2184.147237] env[63279]: } obtained for exporting VM: (result){ [ 2184.147237] env[63279]: value = "vm-427719" [ 2184.147237] env[63279]: _type = "VirtualMachine" [ 2184.147237] env[63279]: }. {{(pid=63279) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 2184.147237] env[63279]: DEBUG oslo_vmware.api [None req-208396b7-55ea-4701-90ef-c6d3ea22cae9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Waiting for the lease: (returnval){ [ 2184.147237] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52bca73e-e2a6-74cb-ab0a-037f782ac6dc" [ 2184.147237] env[63279]: _type = "HttpNfcLease" [ 2184.147237] env[63279]: } to be ready. {{(pid=63279) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 2184.151451] env[63279]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 2184.151451] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52bca73e-e2a6-74cb-ab0a-037f782ac6dc" [ 2184.151451] env[63279]: _type = "HttpNfcLease" [ 2184.151451] env[63279]: } is initializing. 
{{(pid=63279) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 2184.156363] env[63279]: DEBUG nova.network.neutron [req-cf6342ea-def9-4c05-b757-bf08bb60f2c6 req-829bce00-01ec-4e69-8343-f61fd46ba29c service nova] [instance: 75f5b49f-14e7-4a8e-a0cb-b955edc13dd5] Updated VIF entry in instance network info cache for port 06151b64-defb-4dca-a89e-f4d66bcbeb62. {{(pid=63279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2184.156829] env[63279]: DEBUG nova.network.neutron [req-cf6342ea-def9-4c05-b757-bf08bb60f2c6 req-829bce00-01ec-4e69-8343-f61fd46ba29c service nova] [instance: 75f5b49f-14e7-4a8e-a0cb-b955edc13dd5] Updating instance_info_cache with network_info: [{"id": "f5f17197-3d49-4ae2-a895-9bab23e07301", "address": "fa:16:3e:02:12:0c", "network": {"id": "c70e9607-e52f-42ab-baea-1bc752ec674b", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-438199453", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.242", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "161d1caf722349c188ca8fc647989c05", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f67a2790-f2b0-4d03-b606-0bfaee7a4229", "external-id": "nsx-vlan-transportzone-187", "segmentation_id": 187, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf5f17197-3d", "ovs_interfaceid": "f5f17197-3d49-4ae2-a895-9bab23e07301", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "2053aee1-c71d-4f3d-99d3-989823b04e65", "address": "fa:16:3e:75:53:2b", "network": {"id": "a50f0cec-d79c-4af2-8080-2a93dd18da6a", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1626901449", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.179", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "161d1caf722349c188ca8fc647989c05", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "96fdbb91-eb49-4dbf-b234-5b38503d7589", "external-id": "nsx-vlan-transportzone-392", "segmentation_id": 392, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2053aee1-c7", "ovs_interfaceid": "2053aee1-c71d-4f3d-99d3-989823b04e65", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "06151b64-defb-4dca-a89e-f4d66bcbeb62", "address": "fa:16:3e:51:a2:9a", "network": {"id": "c70e9607-e52f-42ab-baea-1bc752ec674b", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-438199453", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.46", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, 
"meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "161d1caf722349c188ca8fc647989c05", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f67a2790-f2b0-4d03-b606-0bfaee7a4229", "external-id": "nsx-vlan-transportzone-187", "segmentation_id": 187, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap06151b64-de", "ovs_interfaceid": "06151b64-defb-4dca-a89e-f4d66bcbeb62", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2184.167939] env[63279]: DEBUG oslo_vmware.api [None req-c03c3095-5bce-4baa-893b-4ed2dab285cd tempest-ServersTestBootFromVolume-1861685268 tempest-ServersTestBootFromVolume-1861685268-project-member] Task: {'id': task-2087709, 'name': ReconfigVM_Task, 'duration_secs': 0.130976} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2184.168227] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-c03c3095-5bce-4baa-893b-4ed2dab285cd tempest-ServersTestBootFromVolume-1861685268 tempest-ServersTestBootFromVolume-1861685268-project-member] [instance: 861e4118-6134-40cf-91cb-865b6ee9f347] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-427687', 'volume_id': '62b49f21-7323-46c2-809d-db035fa9fb8c', 'name': 'volume-62b49f21-7323-46c2-809d-db035fa9fb8c', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '861e4118-6134-40cf-91cb-865b6ee9f347', 'attached_at': '', 'detached_at': '', 'volume_id': '62b49f21-7323-46c2-809d-db035fa9fb8c', 'serial': '62b49f21-7323-46c2-809d-db035fa9fb8c'} {{(pid=63279) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 2184.168510] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-c03c3095-5bce-4baa-893b-4ed2dab285cd tempest-ServersTestBootFromVolume-1861685268 tempest-ServersTestBootFromVolume-1861685268-project-member] [instance: 861e4118-6134-40cf-91cb-865b6ee9f347] Destroying instance {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2184.169277] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb5066eb-0bed-4312-8d7b-4b68d886dac5 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2184.177137] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-c03c3095-5bce-4baa-893b-4ed2dab285cd tempest-ServersTestBootFromVolume-1861685268 tempest-ServersTestBootFromVolume-1861685268-project-member] [instance: 861e4118-6134-40cf-91cb-865b6ee9f347] Unregistering the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2184.177387] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c96dd3dd-386a-4f44-9d10-93c88cf49312 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2184.197531] env[63279]: DEBUG oslo_concurrency.lockutils [None req-d37d85e0-dc3a-4353-a13a-f209b1ba7eb7 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Acquiring lock 
"14c698c8-7459-4843-bb19-f915742e3e53" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2184.197778] env[63279]: DEBUG oslo_concurrency.lockutils [None req-d37d85e0-dc3a-4353-a13a-f209b1ba7eb7 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Lock "14c698c8-7459-4843-bb19-f915742e3e53" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2184.213311] env[63279]: DEBUG oslo_vmware.api [None req-c2c89b32-678a-48f7-a70f-bc8265b6ba5a tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52fd2d30-0630-0392-d11f-7f3b3946ff68, 'name': SearchDatastore_Task, 'duration_secs': 0.009796} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2184.214651] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3708a7f5-a493-4d1c-b1e5-7a28109190fe {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2184.220994] env[63279]: DEBUG oslo_vmware.api [None req-c2c89b32-678a-48f7-a70f-bc8265b6ba5a tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Waiting for the task: (returnval){ [ 2184.220994] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52c654c1-a7e2-d178-7b15-c1e8a5304c7b" [ 2184.220994] env[63279]: _type = "Task" [ 2184.220994] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2184.233115] env[63279]: DEBUG oslo_vmware.api [None req-c2c89b32-678a-48f7-a70f-bc8265b6ba5a tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52c654c1-a7e2-d178-7b15-c1e8a5304c7b, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2184.359979] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-c03c3095-5bce-4baa-893b-4ed2dab285cd tempest-ServersTestBootFromVolume-1861685268 tempest-ServersTestBootFromVolume-1861685268-project-member] [instance: 861e4118-6134-40cf-91cb-865b6ee9f347] Unregistered the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2184.360250] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-c03c3095-5bce-4baa-893b-4ed2dab285cd tempest-ServersTestBootFromVolume-1861685268 tempest-ServersTestBootFromVolume-1861685268-project-member] [instance: 861e4118-6134-40cf-91cb-865b6ee9f347] Deleting contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2184.360441] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-c03c3095-5bce-4baa-893b-4ed2dab285cd tempest-ServersTestBootFromVolume-1861685268 tempest-ServersTestBootFromVolume-1861685268-project-member] Deleting the datastore file [datastore1] 861e4118-6134-40cf-91cb-865b6ee9f347 {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2184.360784] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d5c503f0-570c-493f-9b95-947eda53a381 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2184.367669] env[63279]: DEBUG oslo_vmware.api [None req-c03c3095-5bce-4baa-893b-4ed2dab285cd tempest-ServersTestBootFromVolume-1861685268 tempest-ServersTestBootFromVolume-1861685268-project-member] Waiting for the task: (returnval){ [ 2184.367669] env[63279]: value = "task-2087713" [ 2184.367669] env[63279]: _type = "Task" [ 2184.367669] env[63279]: } to complete. 
{{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2184.381743] env[63279]: DEBUG oslo_concurrency.lockutils [None req-1b4c0c4c-9a01-4ca3-8f97-810c4c0e8a75 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Lock "d6e40dbc-f20e-4164-b460-18de6ea72906" "released" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: held 43.394s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2184.383267] env[63279]: DEBUG oslo_concurrency.lockutils [None req-93deaab9-b789-400a-b6f9-ed2ce702cd53 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Lock "d6e40dbc-f20e-4164-b460-18de6ea72906" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 3.943s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2184.383267] env[63279]: DEBUG oslo_concurrency.lockutils [None req-93deaab9-b789-400a-b6f9-ed2ce702cd53 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Acquiring lock "d6e40dbc-f20e-4164-b460-18de6ea72906-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2184.383766] env[63279]: DEBUG oslo_concurrency.lockutils [None req-93deaab9-b789-400a-b6f9-ed2ce702cd53 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Lock "d6e40dbc-f20e-4164-b460-18de6ea72906-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2184.383766] env[63279]: DEBUG oslo_concurrency.lockutils [None req-93deaab9-b789-400a-b6f9-ed2ce702cd53 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Lock "d6e40dbc-f20e-4164-b460-18de6ea72906-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2184.389987] env[63279]: DEBUG oslo_vmware.api [None req-c03c3095-5bce-4baa-893b-4ed2dab285cd tempest-ServersTestBootFromVolume-1861685268 tempest-ServersTestBootFromVolume-1861685268-project-member] Task: {'id': task-2087713, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2184.390614] env[63279]: INFO nova.compute.manager [None req-93deaab9-b789-400a-b6f9-ed2ce702cd53 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: d6e40dbc-f20e-4164-b460-18de6ea72906] Terminating instance [ 2184.395807] env[63279]: DEBUG nova.virt.hardware [None req-8d18390d-60e2-4c12-b76a-90c1efe64631 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-13T17:30:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=30887889-e45b-4f67-8b3c-16216e594a90,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2184.396044] env[63279]: DEBUG nova.virt.hardware [None req-8d18390d-60e2-4c12-b76a-90c1efe64631 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Flavor limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2184.396207] env[63279]: DEBUG nova.virt.hardware [None req-8d18390d-60e2-4c12-b76a-90c1efe64631 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Image limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2184.396389] env[63279]: DEBUG nova.virt.hardware [None req-8d18390d-60e2-4c12-b76a-90c1efe64631 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Flavor pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2184.396535] env[63279]: DEBUG nova.virt.hardware [None req-8d18390d-60e2-4c12-b76a-90c1efe64631 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Image pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2184.396682] env[63279]: DEBUG nova.virt.hardware [None req-8d18390d-60e2-4c12-b76a-90c1efe64631 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2184.396886] env[63279]: DEBUG nova.virt.hardware [None req-8d18390d-60e2-4c12-b76a-90c1efe64631 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 2184.397053] env[63279]: DEBUG nova.virt.hardware [None req-8d18390d-60e2-4c12-b76a-90c1efe64631 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Build topologies for 1 vcpu(s) 1:1:1 
{{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 2184.397238] env[63279]: DEBUG nova.virt.hardware [None req-8d18390d-60e2-4c12-b76a-90c1efe64631 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Got 1 possible topologies {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2184.397464] env[63279]: DEBUG nova.virt.hardware [None req-8d18390d-60e2-4c12-b76a-90c1efe64631 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2184.397729] env[63279]: DEBUG nova.virt.hardware [None req-8d18390d-60e2-4c12-b76a-90c1efe64631 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2184.404239] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-8d18390d-60e2-4c12-b76a-90c1efe64631 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: 736ab3ac-b9e0-4f9e-885b-765ca7a92ed0] Reconfiguring VM instance instance-00000050 to detach disk 2000 {{(pid=63279) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 2184.408442] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-61aba6e2-5f89-45f0-8726-5131d98263c7 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2184.435010] env[63279]: DEBUG oslo_vmware.api [None req-f5adef54-fbc0-48f2-bee2-87da33052f2d tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Task: {'id': task-2087710, 'name': PowerOffVM_Task, 'duration_secs': 0.22834} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2184.436336] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-f5adef54-fbc0-48f2-bee2-87da33052f2d tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] [instance: f2a68d73-49d6-4b38-aff1-c2eb850f2ca6] Powered off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2184.436537] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-f5adef54-fbc0-48f2-bee2-87da33052f2d tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] [instance: f2a68d73-49d6-4b38-aff1-c2eb850f2ca6] Unregistering the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2184.436826] env[63279]: DEBUG oslo_vmware.api [None req-8d18390d-60e2-4c12-b76a-90c1efe64631 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Waiting for the task: (returnval){ [ 2184.436826] env[63279]: value = "task-2087714" [ 2184.436826] env[63279]: _type = "Task" [ 2184.436826] env[63279]: } to complete. 
{{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2184.437031] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-91daa08e-65b1-4204-b914-a36199325e24 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2184.450676] env[63279]: DEBUG oslo_vmware.api [None req-8d18390d-60e2-4c12-b76a-90c1efe64631 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Task: {'id': task-2087714, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2184.473485] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087708, 'name': CreateVM_Task, 'duration_secs': 0.571462} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2184.473688] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 75f5b49f-14e7-4a8e-a0cb-b955edc13dd5] Created VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2184.474806] env[63279]: DEBUG oslo_concurrency.lockutils [None req-082a3afc-69fa-402f-b3d2-e85c1a1a4788 tempest-ServersTestMultiNic-465204663 tempest-ServersTestMultiNic-465204663-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2184.474985] env[63279]: DEBUG oslo_concurrency.lockutils [None req-082a3afc-69fa-402f-b3d2-e85c1a1a4788 tempest-ServersTestMultiNic-465204663 tempest-ServersTestMultiNic-465204663-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2184.476473] env[63279]: DEBUG oslo_concurrency.lockutils [None req-082a3afc-69fa-402f-b3d2-e85c1a1a4788 tempest-ServersTestMultiNic-465204663 tempest-ServersTestMultiNic-465204663-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2184.479839] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3a31ef1e-8d4a-4ee8-8e4d-b363b776d58f {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2184.486722] env[63279]: DEBUG oslo_vmware.api [None req-082a3afc-69fa-402f-b3d2-e85c1a1a4788 tempest-ServersTestMultiNic-465204663 tempest-ServersTestMultiNic-465204663-project-member] Waiting for the task: (returnval){ [ 2184.486722] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52284109-a040-56b8-5994-8cbe42cd02a5" [ 2184.486722] env[63279]: _type = "Task" [ 2184.486722] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2184.496977] env[63279]: DEBUG oslo_vmware.api [None req-082a3afc-69fa-402f-b3d2-e85c1a1a4788 tempest-ServersTestMultiNic-465204663 tempest-ServersTestMultiNic-465204663-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52284109-a040-56b8-5994-8cbe42cd02a5, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2184.527413] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-f5adef54-fbc0-48f2-bee2-87da33052f2d tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] [instance: f2a68d73-49d6-4b38-aff1-c2eb850f2ca6] Unregistered the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2184.527535] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-f5adef54-fbc0-48f2-bee2-87da33052f2d tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] [instance: f2a68d73-49d6-4b38-aff1-c2eb850f2ca6] Deleting contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2184.527694] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-f5adef54-fbc0-48f2-bee2-87da33052f2d tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Deleting the datastore file [datastore1] f2a68d73-49d6-4b38-aff1-c2eb850f2ca6 {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2184.528248] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3a291978-91d0-453c-872a-0903a174c406 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2184.534936] env[63279]: DEBUG oslo_vmware.api [None req-f5adef54-fbc0-48f2-bee2-87da33052f2d tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Waiting for the task: (returnval){ [ 2184.534936] env[63279]: value = "task-2087716" [ 2184.534936] env[63279]: _type = "Task" [ 2184.534936] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2184.545918] env[63279]: DEBUG oslo_vmware.api [None req-f5adef54-fbc0-48f2-bee2-87da33052f2d tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Task: {'id': task-2087716, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2184.656729] env[63279]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 2184.656729] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52bca73e-e2a6-74cb-ab0a-037f782ac6dc" [ 2184.656729] env[63279]: _type = "HttpNfcLease" [ 2184.656729] env[63279]: } is ready. {{(pid=63279) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 2184.656729] env[63279]: DEBUG oslo_vmware.rw_handles [None req-208396b7-55ea-4701-90ef-c6d3ea22cae9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 2184.656729] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52bca73e-e2a6-74cb-ab0a-037f782ac6dc" [ 2184.656729] env[63279]: _type = "HttpNfcLease" [ 2184.656729] env[63279]: }. 
{{(pid=63279) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 2184.657418] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e12a1e22-7b69-4aba-8bfb-d02f74d0eba6 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2184.668305] env[63279]: DEBUG oslo_concurrency.lockutils [req-cf6342ea-def9-4c05-b757-bf08bb60f2c6 req-829bce00-01ec-4e69-8343-f61fd46ba29c service nova] Releasing lock "refresh_cache-75f5b49f-14e7-4a8e-a0cb-b955edc13dd5" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2184.668305] env[63279]: DEBUG oslo_vmware.rw_handles [None req-208396b7-55ea-4701-90ef-c6d3ea22cae9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/521d34e6-dc66-5407-642d-4c5439c4e1a6/disk-0.vmdk from lease info. {{(pid=63279) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 2184.668305] env[63279]: DEBUG oslo_vmware.rw_handles [None req-208396b7-55ea-4701-90ef-c6d3ea22cae9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/521d34e6-dc66-5407-642d-4c5439c4e1a6/disk-0.vmdk for reading. {{(pid=63279) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 2184.733549] env[63279]: DEBUG nova.compute.manager [None req-d37d85e0-dc3a-4353-a13a-f209b1ba7eb7 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] [instance: 14c698c8-7459-4843-bb19-f915742e3e53] Starting instance... {{(pid=63279) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 2184.748118] env[63279]: DEBUG oslo_vmware.api [None req-c2c89b32-678a-48f7-a70f-bc8265b6ba5a tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52c654c1-a7e2-d178-7b15-c1e8a5304c7b, 'name': SearchDatastore_Task, 'duration_secs': 0.032185} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2184.748425] env[63279]: DEBUG oslo_concurrency.lockutils [None req-c2c89b32-678a-48f7-a70f-bc8265b6ba5a tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2184.748817] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-c2c89b32-678a-48f7-a70f-bc8265b6ba5a tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] 2fba60ba-7ef0-4e61-9f7d-147cad8ab7ed/2fba60ba-7ef0-4e61-9f7d-147cad8ab7ed.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2184.749108] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-46d91a29-1cd3-4c94-986e-bf02d095bfc4 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2184.761041] env[63279]: DEBUG oslo_vmware.api [None req-c2c89b32-678a-48f7-a70f-bc8265b6ba5a tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Waiting for the task: (returnval){ [ 2184.761041] env[63279]: value = "task-2087717" [ 2184.761041] env[63279]: _type = "Task" [ 2184.761041] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2184.773184] env[63279]: DEBUG oslo_vmware.api [None req-c2c89b32-678a-48f7-a70f-bc8265b6ba5a tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Task: {'id': task-2087717, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2184.789084] env[63279]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-1dc2b617-65cc-41c3-8aa3-32a47f7d80bb {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2184.813260] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26b94e34-6caf-4615-a592-22a53dadb767 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2184.824253] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39271049-4866-419a-975b-323a738ee5ca {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2184.872029] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca3e380a-f140-4fec-9263-4e6ba05bf931 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2184.888039] env[63279]: DEBUG oslo_vmware.api [None req-c03c3095-5bce-4baa-893b-4ed2dab285cd tempest-ServersTestBootFromVolume-1861685268 tempest-ServersTestBootFromVolume-1861685268-project-member] Task: {'id': task-2087713, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.156475} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2184.889243] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b97f91d-503d-4b64-a53f-ccaec57e4da0 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2184.893839] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-c03c3095-5bce-4baa-893b-4ed2dab285cd tempest-ServersTestBootFromVolume-1861685268 tempest-ServersTestBootFromVolume-1861685268-project-member] Deleted the datastore file {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2184.896671] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-c03c3095-5bce-4baa-893b-4ed2dab285cd tempest-ServersTestBootFromVolume-1861685268 tempest-ServersTestBootFromVolume-1861685268-project-member] [instance: 861e4118-6134-40cf-91cb-865b6ee9f347] Deleted contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2184.896671] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-c03c3095-5bce-4baa-893b-4ed2dab285cd tempest-ServersTestBootFromVolume-1861685268 tempest-ServersTestBootFromVolume-1861685268-project-member] [instance: 861e4118-6134-40cf-91cb-865b6ee9f347] Instance destroyed {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2184.896671] env[63279]: INFO nova.compute.manager [None req-c03c3095-5bce-4baa-893b-4ed2dab285cd tempest-ServersTestBootFromVolume-1861685268 tempest-ServersTestBootFromVolume-1861685268-project-member] [instance: 861e4118-6134-40cf-91cb-865b6ee9f347] Took 3.46 seconds to destroy the instance on the hypervisor. 
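Editor's note: the wait_for_task / _poll_task entries above (PowerOffVM_Task, CopyVirtualDisk_Task, DeleteDatastoreFile_Task, "progress is N%") all record the same pattern: the driver repeatedly polls a vCenter task object until it reaches a terminal state. The following is a minimal, self-contained sketch of that polling loop in plain Python; the TaskInfo stand-in and wait_for_task helper are illustrative assumptions, not the real oslo.vmware implementation referenced in the log paths.

    import time
    from dataclasses import dataclass


    @dataclass
    class TaskInfo:
        """Minimal stand-in for a vCenter TaskInfo result (illustrative only)."""
        state: str          # 'queued' | 'running' | 'success' | 'error'
        progress: int = 0
        error: str | None = None


    def wait_for_task(get_task_info, poll_interval=0.5, timeout=60.0):
        """Poll a task until it finishes, mirroring the _poll_task DEBUG
        entries above. Sketch only; not the oslo.vmware code itself."""
        deadline = time.monotonic() + timeout
        while True:
            info = get_task_info()
            if info.state == "success":
                return info
            if info.state == "error":
                raise RuntimeError(f"task failed: {info.error}")
            if time.monotonic() > deadline:
                raise TimeoutError("task did not complete in time")
            # Comparable to the "progress is N%" lines in the log.
            print(f"progress is {info.progress}%")
            time.sleep(poll_interval)


    if __name__ == "__main__":
        states = iter([TaskInfo("running", 0), TaskInfo("running", 50),
                       TaskInfo("success", 100)])
        print(wait_for_task(lambda: next(states), poll_interval=0.01))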
[ 2184.896671] env[63279]: DEBUG oslo.service.loopingcall [None req-c03c3095-5bce-4baa-893b-4ed2dab285cd tempest-ServersTestBootFromVolume-1861685268 tempest-ServersTestBootFromVolume-1861685268-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2184.896671] env[63279]: DEBUG nova.compute.manager [None req-93deaab9-b789-400a-b6f9-ed2ce702cd53 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: d6e40dbc-f20e-4164-b460-18de6ea72906] Start destroying the instance on the hypervisor. {{(pid=63279) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2184.896671] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-93deaab9-b789-400a-b6f9-ed2ce702cd53 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: d6e40dbc-f20e-4164-b460-18de6ea72906] Destroying instance {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2184.896671] env[63279]: DEBUG nova.compute.manager [-] [instance: 861e4118-6134-40cf-91cb-865b6ee9f347] Deallocating network for instance {{(pid=63279) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2184.896671] env[63279]: DEBUG nova.network.neutron [-] [instance: 861e4118-6134-40cf-91cb-865b6ee9f347] deallocate_for_instance() {{(pid=63279) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2184.898340] env[63279]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-fbe8ff91-d387-439b-99c4-f2081817b1ba {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2184.916714] env[63279]: DEBUG nova.compute.provider_tree [None req-31ef8cf6-74f0-48ac-b617-703694abb219 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Inventory has not changed in ProviderTree for provider: 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2184.921066] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1bfdeadb-c98c-4020-9c33-b556a3202cdd {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2184.954234] env[63279]: WARNING nova.virt.vmwareapi.vmops [None req-93deaab9-b789-400a-b6f9-ed2ce702cd53 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: d6e40dbc-f20e-4164-b460-18de6ea72906] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance d6e40dbc-f20e-4164-b460-18de6ea72906 could not be found. 
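Editor's note: the WARNING just above shows a destroy proceeding even though the backing VM is already gone (InstanceNotFound), after which the manager still deallocates the instance's networking, retrying via the oslo.service looping-call helper named in the log. A hedged sketch of that tolerate-missing-then-retry shape follows; backend_destroy and deallocate_network are hypothetical stand-ins, not the real Nova methods.

    import time


    class InstanceNotFound(Exception):
        """Stand-in for nova.exception.InstanceNotFound."""


    def destroy_instance(instance_uuid, backend_destroy, deallocate_network,
                         max_retries=3, retry_delay=1.0):
        """Destroy an instance, tolerating a missing backend VM, then retry
        network deallocation. Illustrative only; the real flow lives in
        nova.compute.manager and nova.virt.vmwareapi.vmops."""
        try:
            backend_destroy(instance_uuid)
        except InstanceNotFound:
            # Matches the WARNING above: treat the hypervisor-side destroy
            # as already done and carry on.
            print(f"Instance {instance_uuid} does not exist on backend")

        for attempt in range(1, max_retries + 1):
            try:
                deallocate_network(instance_uuid)
                return
            except Exception as exc:  # transient Neutron failures get retried
                if attempt == max_retries:
                    raise
                print(f"deallocate failed ({exc}); retry {attempt}/{max_retries}")
                time.sleep(retry_delay)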
[ 2184.954471] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-93deaab9-b789-400a-b6f9-ed2ce702cd53 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: d6e40dbc-f20e-4164-b460-18de6ea72906] Instance destroyed {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2184.954662] env[63279]: INFO nova.compute.manager [None req-93deaab9-b789-400a-b6f9-ed2ce702cd53 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: d6e40dbc-f20e-4164-b460-18de6ea72906] Took 0.06 seconds to destroy the instance on the hypervisor. [ 2184.955093] env[63279]: DEBUG oslo.service.loopingcall [None req-93deaab9-b789-400a-b6f9-ed2ce702cd53 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2184.956903] env[63279]: DEBUG nova.compute.manager [-] [instance: d6e40dbc-f20e-4164-b460-18de6ea72906] Deallocating network for instance {{(pid=63279) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2184.956903] env[63279]: DEBUG nova.network.neutron [-] [instance: d6e40dbc-f20e-4164-b460-18de6ea72906] deallocate_for_instance() {{(pid=63279) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2184.962108] env[63279]: DEBUG oslo_vmware.api [None req-8d18390d-60e2-4c12-b76a-90c1efe64631 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Task: {'id': task-2087714, 'name': ReconfigVM_Task, 'duration_secs': 0.203399} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2184.962108] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-8d18390d-60e2-4c12-b76a-90c1efe64631 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: 736ab3ac-b9e0-4f9e-885b-765ca7a92ed0] Reconfigured VM instance instance-00000050 to detach disk 2000 {{(pid=63279) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 2184.962941] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2afd671e-ec25-4add-9d2b-f8c27f1698e9 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2184.990367] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-8d18390d-60e2-4c12-b76a-90c1efe64631 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: 736ab3ac-b9e0-4f9e-885b-765ca7a92ed0] Reconfiguring VM instance instance-00000050 to attach disk [datastore1] 736ab3ac-b9e0-4f9e-885b-765ca7a92ed0/736ab3ac-b9e0-4f9e-885b-765ca7a92ed0.vmdk or device None with type thin {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2184.992019] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b09a7716-fa39-4a83-9f91-b75e5368a805 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2185.013090] env[63279]: DEBUG oslo_vmware.api [None req-082a3afc-69fa-402f-b3d2-e85c1a1a4788 tempest-ServersTestMultiNic-465204663 tempest-ServersTestMultiNic-465204663-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52284109-a040-56b8-5994-8cbe42cd02a5, 'name': SearchDatastore_Task, 'duration_secs': 0.022062} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2185.017182] env[63279]: DEBUG oslo_concurrency.lockutils [None req-082a3afc-69fa-402f-b3d2-e85c1a1a4788 tempest-ServersTestMultiNic-465204663 tempest-ServersTestMultiNic-465204663-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2185.017182] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-082a3afc-69fa-402f-b3d2-e85c1a1a4788 tempest-ServersTestMultiNic-465204663 tempest-ServersTestMultiNic-465204663-project-member] [instance: 75f5b49f-14e7-4a8e-a0cb-b955edc13dd5] Processing image 30887889-e45b-4f67-8b3c-16216e594a90 {{(pid=63279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2185.017182] env[63279]: DEBUG oslo_concurrency.lockutils [None req-082a3afc-69fa-402f-b3d2-e85c1a1a4788 tempest-ServersTestMultiNic-465204663 tempest-ServersTestMultiNic-465204663-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2185.017182] env[63279]: DEBUG oslo_concurrency.lockutils [None req-082a3afc-69fa-402f-b3d2-e85c1a1a4788 tempest-ServersTestMultiNic-465204663 tempest-ServersTestMultiNic-465204663-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2185.017182] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-082a3afc-69fa-402f-b3d2-e85c1a1a4788 tempest-ServersTestMultiNic-465204663 tempest-ServersTestMultiNic-465204663-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2185.017182] env[63279]: DEBUG oslo_vmware.api [None req-8d18390d-60e2-4c12-b76a-90c1efe64631 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Waiting for the task: (returnval){ [ 2185.017182] env[63279]: value = "task-2087718" [ 2185.017182] env[63279]: _type = "Task" [ 2185.017182] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2185.017667] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2a7773ea-a4bb-4279-b89c-401874e171a8 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2185.031197] env[63279]: DEBUG oslo_vmware.api [None req-8d18390d-60e2-4c12-b76a-90c1efe64631 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Task: {'id': task-2087718, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2185.033196] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-082a3afc-69fa-402f-b3d2-e85c1a1a4788 tempest-ServersTestMultiNic-465204663 tempest-ServersTestMultiNic-465204663-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2185.033492] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-082a3afc-69fa-402f-b3d2-e85c1a1a4788 tempest-ServersTestMultiNic-465204663 tempest-ServersTestMultiNic-465204663-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2185.034325] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6ab35722-f4a5-41c8-8fc7-b1aa611e5bec {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2185.042844] env[63279]: DEBUG oslo_vmware.api [None req-082a3afc-69fa-402f-b3d2-e85c1a1a4788 tempest-ServersTestMultiNic-465204663 tempest-ServersTestMultiNic-465204663-project-member] Waiting for the task: (returnval){ [ 2185.042844] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52bf89b7-f7e0-b276-897d-727824f70281" [ 2185.042844] env[63279]: _type = "Task" [ 2185.042844] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2185.046453] env[63279]: DEBUG oslo_vmware.api [None req-f5adef54-fbc0-48f2-bee2-87da33052f2d tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Task: {'id': task-2087716, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.198807} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2185.050065] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-f5adef54-fbc0-48f2-bee2-87da33052f2d tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Deleted the datastore file {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2185.050065] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-f5adef54-fbc0-48f2-bee2-87da33052f2d tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] [instance: f2a68d73-49d6-4b38-aff1-c2eb850f2ca6] Deleted contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2185.050222] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-f5adef54-fbc0-48f2-bee2-87da33052f2d tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] [instance: f2a68d73-49d6-4b38-aff1-c2eb850f2ca6] Instance destroyed {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2185.050323] env[63279]: INFO nova.compute.manager [None req-f5adef54-fbc0-48f2-bee2-87da33052f2d tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] [instance: f2a68d73-49d6-4b38-aff1-c2eb850f2ca6] Took 1.17 seconds to destroy the instance on the hypervisor. 
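Editor's note: the lockutils entries running through this stretch (acquiring/releasing "compute_resources", per-instance UUID locks, the datastore image-cache lock) come from oslo.concurrency. A minimal sketch of the same pattern is below; the lock names mirror the log, but the guarded function bodies are placeholders rather than Nova code, and the locks here are plain in-process locks (external/fair variants are not shown).

    # Requires oslo.concurrency (pip install oslo.concurrency).
    from oslo_concurrency import lockutils


    @lockutils.synchronized("compute_resources")
    def claim_resources(instance_uuid):
        # In Nova, ResourceTracker.instance_claim runs while the
        # "compute_resources" lock is held (see the inner/lockutils.py entries).
        print(f"claiming resources for {instance_uuid}")


    def terminate(instance_uuid):
        # Per-instance lock, analogous to the UUID-named locks in the log.
        with lockutils.lock(instance_uuid):
            print(f"terminating {instance_uuid}")


    if __name__ == "__main__":
        claim_resources("14c698c8-7459-4843-bb19-f915742e3e53")
        terminate("d6e40dbc-f20e-4164-b460-18de6ea72906")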
[ 2185.051238] env[63279]: DEBUG oslo.service.loopingcall [None req-f5adef54-fbc0-48f2-bee2-87da33052f2d tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2185.051238] env[63279]: DEBUG nova.compute.manager [-] [instance: f2a68d73-49d6-4b38-aff1-c2eb850f2ca6] Deallocating network for instance {{(pid=63279) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2185.051238] env[63279]: DEBUG nova.network.neutron [-] [instance: f2a68d73-49d6-4b38-aff1-c2eb850f2ca6] deallocate_for_instance() {{(pid=63279) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2185.058246] env[63279]: DEBUG oslo_vmware.api [None req-082a3afc-69fa-402f-b3d2-e85c1a1a4788 tempest-ServersTestMultiNic-465204663 tempest-ServersTestMultiNic-465204663-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52bf89b7-f7e0-b276-897d-727824f70281, 'name': SearchDatastore_Task, 'duration_secs': 0.008587} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2185.059106] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-79f58411-2ec0-4dda-bbeb-2de15cf16693 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2185.065375] env[63279]: DEBUG oslo_vmware.api [None req-082a3afc-69fa-402f-b3d2-e85c1a1a4788 tempest-ServersTestMultiNic-465204663 tempest-ServersTestMultiNic-465204663-project-member] Waiting for the task: (returnval){ [ 2185.065375] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]520ef4b5-0091-d123-5e2c-7526a081624e" [ 2185.065375] env[63279]: _type = "Task" [ 2185.065375] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2185.073504] env[63279]: DEBUG oslo_vmware.api [None req-082a3afc-69fa-402f-b3d2-e85c1a1a4788 tempest-ServersTestMultiNic-465204663 tempest-ServersTestMultiNic-465204663-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]520ef4b5-0091-d123-5e2c-7526a081624e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2185.276216] env[63279]: DEBUG oslo_concurrency.lockutils [None req-d37d85e0-dc3a-4353-a13a-f209b1ba7eb7 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2185.286162] env[63279]: DEBUG oslo_vmware.api [None req-c2c89b32-678a-48f7-a70f-bc8265b6ba5a tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Task: {'id': task-2087717, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2185.432981] env[63279]: DEBUG nova.scheduler.client.report [None req-31ef8cf6-74f0-48ac-b617-703694abb219 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Inventory has not changed for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2185.533942] env[63279]: DEBUG oslo_vmware.api [None req-8d18390d-60e2-4c12-b76a-90c1efe64631 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Task: {'id': task-2087718, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2185.576746] env[63279]: DEBUG oslo_vmware.api [None req-082a3afc-69fa-402f-b3d2-e85c1a1a4788 tempest-ServersTestMultiNic-465204663 tempest-ServersTestMultiNic-465204663-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]520ef4b5-0091-d123-5e2c-7526a081624e, 'name': SearchDatastore_Task, 'duration_secs': 0.009365} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2185.577303] env[63279]: DEBUG oslo_concurrency.lockutils [None req-082a3afc-69fa-402f-b3d2-e85c1a1a4788 tempest-ServersTestMultiNic-465204663 tempest-ServersTestMultiNic-465204663-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2185.577594] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-082a3afc-69fa-402f-b3d2-e85c1a1a4788 tempest-ServersTestMultiNic-465204663 tempest-ServersTestMultiNic-465204663-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] 75f5b49f-14e7-4a8e-a0cb-b955edc13dd5/75f5b49f-14e7-4a8e-a0cb-b955edc13dd5.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2185.577951] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-88d162d5-e584-4a8f-bd03-0f68d015fe2f {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2185.591085] env[63279]: DEBUG oslo_vmware.api [None req-082a3afc-69fa-402f-b3d2-e85c1a1a4788 tempest-ServersTestMultiNic-465204663 tempest-ServersTestMultiNic-465204663-project-member] Waiting for the task: (returnval){ [ 2185.591085] env[63279]: value = "task-2087719" [ 2185.591085] env[63279]: _type = "Task" [ 2185.591085] env[63279]: } to complete. 
{{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2185.606388] env[63279]: DEBUG oslo_vmware.api [None req-082a3afc-69fa-402f-b3d2-e85c1a1a4788 tempest-ServersTestMultiNic-465204663 tempest-ServersTestMultiNic-465204663-project-member] Task: {'id': task-2087719, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2185.652980] env[63279]: DEBUG nova.compute.manager [req-970db5a2-1362-4c1e-b412-026f7338e815 req-30ed2936-a85e-4564-93e4-14f2ee1dd752 service nova] [instance: f2a68d73-49d6-4b38-aff1-c2eb850f2ca6] Received event network-vif-deleted-036111dc-8280-4649-98b1-f0319d92337f {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2185.653983] env[63279]: INFO nova.compute.manager [req-970db5a2-1362-4c1e-b412-026f7338e815 req-30ed2936-a85e-4564-93e4-14f2ee1dd752 service nova] [instance: f2a68d73-49d6-4b38-aff1-c2eb850f2ca6] Neutron deleted interface 036111dc-8280-4649-98b1-f0319d92337f; detaching it from the instance and deleting it from the info cache [ 2185.654144] env[63279]: DEBUG nova.network.neutron [req-970db5a2-1362-4c1e-b412-026f7338e815 req-30ed2936-a85e-4564-93e4-14f2ee1dd752 service nova] [instance: f2a68d73-49d6-4b38-aff1-c2eb850f2ca6] Updating instance_info_cache with network_info: [] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2185.775932] env[63279]: DEBUG oslo_vmware.api [None req-c2c89b32-678a-48f7-a70f-bc8265b6ba5a tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Task: {'id': task-2087717, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2185.792611] env[63279]: DEBUG nova.network.neutron [-] [instance: d6e40dbc-f20e-4164-b460-18de6ea72906] Updating instance_info_cache with network_info: [] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2185.940324] env[63279]: DEBUG oslo_concurrency.lockutils [None req-31ef8cf6-74f0-48ac-b617-703694abb219 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.075s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2185.943347] env[63279]: DEBUG oslo_concurrency.lockutils [None req-01bff562-4997-463d-b37e-eeeb7be900cc tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 5.026s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2185.943760] env[63279]: DEBUG nova.objects.instance [None req-01bff562-4997-463d-b37e-eeeb7be900cc tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Lazy-loading 'resources' on Instance uuid eca98392-98be-405b-b799-463ef9ee3dc8 {{(pid=63279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2185.977427] env[63279]: INFO nova.scheduler.client.report [None req-31ef8cf6-74f0-48ac-b617-703694abb219 tempest-ServersNegativeTestJSON-1533210711 
tempest-ServersNegativeTestJSON-1533210711-project-member] Deleted allocations for instance 2f5e22f6-ba70-4848-965b-eb1553115323 [ 2186.031973] env[63279]: DEBUG oslo_vmware.api [None req-8d18390d-60e2-4c12-b76a-90c1efe64631 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Task: {'id': task-2087718, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2186.104808] env[63279]: DEBUG oslo_vmware.api [None req-082a3afc-69fa-402f-b3d2-e85c1a1a4788 tempest-ServersTestMultiNic-465204663 tempest-ServersTestMultiNic-465204663-project-member] Task: {'id': task-2087719, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2186.118530] env[63279]: DEBUG nova.network.neutron [-] [instance: f2a68d73-49d6-4b38-aff1-c2eb850f2ca6] Updating instance_info_cache with network_info: [] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2186.164706] env[63279]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-9d957369-8349-4309-952e-8144b0a76bd1 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2186.176857] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-312c5ec0-dd1d-476e-8548-2c40fdc39e0d {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2186.216979] env[63279]: DEBUG nova.compute.manager [req-970db5a2-1362-4c1e-b412-026f7338e815 req-30ed2936-a85e-4564-93e4-14f2ee1dd752 service nova] [instance: f2a68d73-49d6-4b38-aff1-c2eb850f2ca6] Detach interface failed, port_id=036111dc-8280-4649-98b1-f0319d92337f, reason: Instance f2a68d73-49d6-4b38-aff1-c2eb850f2ca6 could not be found. {{(pid=63279) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11484}} [ 2186.278076] env[63279]: DEBUG oslo_vmware.api [None req-c2c89b32-678a-48f7-a70f-bc8265b6ba5a tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Task: {'id': task-2087717, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.262706} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2186.278513] env[63279]: DEBUG nova.network.neutron [-] [instance: 861e4118-6134-40cf-91cb-865b6ee9f347] Updating instance_info_cache with network_info: [] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2186.279899] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-c2c89b32-678a-48f7-a70f-bc8265b6ba5a tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] 2fba60ba-7ef0-4e61-9f7d-147cad8ab7ed/2fba60ba-7ef0-4e61-9f7d-147cad8ab7ed.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2186.280225] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-c2c89b32-678a-48f7-a70f-bc8265b6ba5a tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] [instance: 2fba60ba-7ef0-4e61-9f7d-147cad8ab7ed] Extending root virtual disk to 1048576 {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2186.280914] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a3e33765-14e4-4beb-a01c-4de9d61bb087 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2186.291295] env[63279]: DEBUG oslo_vmware.api [None req-c2c89b32-678a-48f7-a70f-bc8265b6ba5a tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Waiting for the task: (returnval){ [ 2186.291295] env[63279]: value = "task-2087720" [ 2186.291295] env[63279]: _type = "Task" [ 2186.291295] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2186.297462] env[63279]: INFO nova.compute.manager [-] [instance: d6e40dbc-f20e-4164-b460-18de6ea72906] Took 1.34 seconds to deallocate network for instance. [ 2186.313166] env[63279]: DEBUG oslo_vmware.api [None req-c2c89b32-678a-48f7-a70f-bc8265b6ba5a tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Task: {'id': task-2087720, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2186.494702] env[63279]: DEBUG oslo_concurrency.lockutils [None req-31ef8cf6-74f0-48ac-b617-703694abb219 tempest-ServersNegativeTestJSON-1533210711 tempest-ServersNegativeTestJSON-1533210711-project-member] Lock "2f5e22f6-ba70-4848-965b-eb1553115323" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 11.869s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2186.532805] env[63279]: DEBUG oslo_vmware.api [None req-8d18390d-60e2-4c12-b76a-90c1efe64631 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Task: {'id': task-2087718, 'name': ReconfigVM_Task, 'duration_secs': 1.367833} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2186.533074] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-8d18390d-60e2-4c12-b76a-90c1efe64631 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: 736ab3ac-b9e0-4f9e-885b-765ca7a92ed0] Reconfigured VM instance instance-00000050 to attach disk [datastore1] 736ab3ac-b9e0-4f9e-885b-765ca7a92ed0/736ab3ac-b9e0-4f9e-885b-765ca7a92ed0.vmdk or device None with type thin {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2186.534033] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-8d18390d-60e2-4c12-b76a-90c1efe64631 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: 736ab3ac-b9e0-4f9e-885b-765ca7a92ed0] Updating instance '736ab3ac-b9e0-4f9e-885b-765ca7a92ed0' progress to 50 {{(pid=63279) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2186.609110] env[63279]: DEBUG oslo_vmware.api [None req-082a3afc-69fa-402f-b3d2-e85c1a1a4788 tempest-ServersTestMultiNic-465204663 tempest-ServersTestMultiNic-465204663-project-member] Task: {'id': task-2087719, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.73975} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2186.609457] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-082a3afc-69fa-402f-b3d2-e85c1a1a4788 tempest-ServersTestMultiNic-465204663 tempest-ServersTestMultiNic-465204663-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] 75f5b49f-14e7-4a8e-a0cb-b955edc13dd5/75f5b49f-14e7-4a8e-a0cb-b955edc13dd5.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2186.609710] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-082a3afc-69fa-402f-b3d2-e85c1a1a4788 tempest-ServersTestMultiNic-465204663 tempest-ServersTestMultiNic-465204663-project-member] [instance: 75f5b49f-14e7-4a8e-a0cb-b955edc13dd5] Extending root virtual disk to 1048576 {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2186.609999] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-cd715192-5f33-4038-8ea0-4b5e72d5a344 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2186.620087] env[63279]: DEBUG oslo_vmware.api [None req-082a3afc-69fa-402f-b3d2-e85c1a1a4788 tempest-ServersTestMultiNic-465204663 tempest-ServersTestMultiNic-465204663-project-member] Waiting for the task: (returnval){ [ 2186.620087] env[63279]: value = "task-2087721" [ 2186.620087] env[63279]: _type = "Task" [ 2186.620087] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2186.620925] env[63279]: INFO nova.compute.manager [-] [instance: f2a68d73-49d6-4b38-aff1-c2eb850f2ca6] Took 1.57 seconds to deallocate network for instance. [ 2186.639479] env[63279]: DEBUG oslo_vmware.api [None req-082a3afc-69fa-402f-b3d2-e85c1a1a4788 tempest-ServersTestMultiNic-465204663 tempest-ServersTestMultiNic-465204663-project-member] Task: {'id': task-2087721, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2186.774195] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54dae39e-b6ea-4e5e-b43a-39655f2a0a1e {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2186.782503] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1a3a4d3-a014-4d05-adc1-b56dc8948360 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2186.786439] env[63279]: INFO nova.compute.manager [-] [instance: 861e4118-6134-40cf-91cb-865b6ee9f347] Took 1.89 seconds to deallocate network for instance. [ 2186.827737] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0daf0f49-e04b-4b8a-8466-a7a97a122ae1 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2186.834298] env[63279]: DEBUG oslo_vmware.api [None req-c2c89b32-678a-48f7-a70f-bc8265b6ba5a tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Task: {'id': task-2087720, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.117982} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2186.835116] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-c2c89b32-678a-48f7-a70f-bc8265b6ba5a tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] [instance: 2fba60ba-7ef0-4e61-9f7d-147cad8ab7ed] Extended root virtual disk {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2186.835734] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6538c95-bb1f-43e8-8709-40fe39202ef6 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2186.847143] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f9d7d4d-ab30-41ad-9155-59c77fbe2633 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2186.871703] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-c2c89b32-678a-48f7-a70f-bc8265b6ba5a tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] [instance: 2fba60ba-7ef0-4e61-9f7d-147cad8ab7ed] Reconfiguring VM instance instance-00000053 to attach disk [datastore1] 2fba60ba-7ef0-4e61-9f7d-147cad8ab7ed/2fba60ba-7ef0-4e61-9f7d-147cad8ab7ed.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2186.872509] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-65e9589a-621a-4328-8b93-100d710c8bc2 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2186.896236] env[63279]: DEBUG nova.compute.provider_tree [None req-01bff562-4997-463d-b37e-eeeb7be900cc tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Inventory has not changed in ProviderTree for provider: 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) 
update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2186.903377] env[63279]: DEBUG oslo_vmware.api [None req-c2c89b32-678a-48f7-a70f-bc8265b6ba5a tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Waiting for the task: (returnval){ [ 2186.903377] env[63279]: value = "task-2087722" [ 2186.903377] env[63279]: _type = "Task" [ 2186.903377] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2186.911874] env[63279]: DEBUG oslo_vmware.api [None req-c2c89b32-678a-48f7-a70f-bc8265b6ba5a tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Task: {'id': task-2087722, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2187.044638] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2eb5f18-719c-47bd-ac26-48a04d50590c {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2187.067538] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5636814-e468-43f8-89be-1ef7e717bc21 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2187.085058] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-8d18390d-60e2-4c12-b76a-90c1efe64631 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: 736ab3ac-b9e0-4f9e-885b-765ca7a92ed0] Updating instance '736ab3ac-b9e0-4f9e-885b-765ca7a92ed0' progress to 67 {{(pid=63279) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2187.130071] env[63279]: DEBUG oslo_vmware.api [None req-082a3afc-69fa-402f-b3d2-e85c1a1a4788 tempest-ServersTestMultiNic-465204663 tempest-ServersTestMultiNic-465204663-project-member] Task: {'id': task-2087721, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.106058} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2187.131157] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-082a3afc-69fa-402f-b3d2-e85c1a1a4788 tempest-ServersTestMultiNic-465204663 tempest-ServersTestMultiNic-465204663-project-member] [instance: 75f5b49f-14e7-4a8e-a0cb-b955edc13dd5] Extended root virtual disk {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2187.132094] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ece45478-6141-4635-8909-f2125a3fc209 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2187.154639] env[63279]: DEBUG oslo_concurrency.lockutils [None req-f5adef54-fbc0-48f2-bee2-87da33052f2d tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2187.163863] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-082a3afc-69fa-402f-b3d2-e85c1a1a4788 tempest-ServersTestMultiNic-465204663 tempest-ServersTestMultiNic-465204663-project-member] [instance: 75f5b49f-14e7-4a8e-a0cb-b955edc13dd5] Reconfiguring VM instance instance-00000052 to attach disk [datastore1] 75f5b49f-14e7-4a8e-a0cb-b955edc13dd5/75f5b49f-14e7-4a8e-a0cb-b955edc13dd5.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2187.164707] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a9faaf01-c228-4e3a-98d3-7412ebca34df {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2187.185621] env[63279]: DEBUG oslo_vmware.api [None req-082a3afc-69fa-402f-b3d2-e85c1a1a4788 tempest-ServersTestMultiNic-465204663 tempest-ServersTestMultiNic-465204663-project-member] Waiting for the task: (returnval){ [ 2187.185621] env[63279]: value = "task-2087723" [ 2187.185621] env[63279]: _type = "Task" [ 2187.185621] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2187.196249] env[63279]: DEBUG oslo_vmware.api [None req-082a3afc-69fa-402f-b3d2-e85c1a1a4788 tempest-ServersTestMultiNic-465204663 tempest-ServersTestMultiNic-465204663-project-member] Task: {'id': task-2087723, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2187.357875] env[63279]: INFO nova.compute.manager [None req-c03c3095-5bce-4baa-893b-4ed2dab285cd tempest-ServersTestBootFromVolume-1861685268 tempest-ServersTestBootFromVolume-1861685268-project-member] [instance: 861e4118-6134-40cf-91cb-865b6ee9f347] Took 0.57 seconds to detach 1 volumes for instance. 
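The 'Waiting for the task: (returnval){ value = "task-…" } to complete', 'progress is N%' and 'completed successfully' records above come from oslo.vmware's task-polling helper (wait_for_task in oslo_vmware/api.py). A minimal sketch of that calling pattern follows; the vCenter host, credentials and the VirtualMachine moref are placeholders, not values taken from this log.

    # Illustrative oslo.vmware session / task-wait pattern.
    # Host, credentials and the moref below are placeholders.
    from oslo_vmware import api as vmware_api
    from oslo_vmware import vim_util

    session = vmware_api.VMwareAPISession(
        'vcenter.example.org', 'user', 'secret',
        api_retry_count=10, task_poll_interval=0.5)

    vm_ref = vim_util.get_moref('vm-123', 'VirtualMachine')  # hypothetical moref

    # Property reads like this are what show up above as
    # PropertyCollector.RetrievePropertiesEx invocations.
    name = session.invoke_api(vim_util, 'get_object_property',
                              session.vim, vm_ref, 'name')

    # Start an asynchronous vSphere task, then block while wait_for_task()
    # polls it and logs the 'progress is N%' / 'completed successfully' lines.
    task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
    task_info = session.wait_for_task(task)
    print(name, task_info.state)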
[ 2187.359116] env[63279]: DEBUG oslo_concurrency.lockutils [None req-93deaab9-b789-400a-b6f9-ed2ce702cd53 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Lock "d6e40dbc-f20e-4164-b460-18de6ea72906" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 2.976s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2187.360327] env[63279]: DEBUG nova.compute.manager [None req-c03c3095-5bce-4baa-893b-4ed2dab285cd tempest-ServersTestBootFromVolume-1861685268 tempest-ServersTestBootFromVolume-1861685268-project-member] [instance: 861e4118-6134-40cf-91cb-865b6ee9f347] Deleting volume: 62b49f21-7323-46c2-809d-db035fa9fb8c {{(pid=63279) _cleanup_volumes /opt/stack/nova/nova/compute/manager.py:3282}} [ 2187.402929] env[63279]: DEBUG nova.scheduler.client.report [None req-01bff562-4997-463d-b37e-eeeb7be900cc tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Inventory has not changed for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2187.417851] env[63279]: DEBUG oslo_vmware.api [None req-c2c89b32-678a-48f7-a70f-bc8265b6ba5a tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Task: {'id': task-2087722, 'name': ReconfigVM_Task, 'duration_secs': 0.397452} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2187.418209] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-c2c89b32-678a-48f7-a70f-bc8265b6ba5a tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] [instance: 2fba60ba-7ef0-4e61-9f7d-147cad8ab7ed] Reconfigured VM instance instance-00000053 to attach disk [datastore1] 2fba60ba-7ef0-4e61-9f7d-147cad8ab7ed/2fba60ba-7ef0-4e61-9f7d-147cad8ab7ed.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2187.419116] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-64496ab0-0930-4b2a-9c4c-faf22c0e8dc1 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2187.428537] env[63279]: DEBUG oslo_vmware.api [None req-c2c89b32-678a-48f7-a70f-bc8265b6ba5a tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Waiting for the task: (returnval){ [ 2187.428537] env[63279]: value = "task-2087724" [ 2187.428537] env[63279]: _type = "Task" [ 2187.428537] env[63279]: } to complete. 
{{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2187.438047] env[63279]: DEBUG oslo_vmware.api [None req-c2c89b32-678a-48f7-a70f-bc8265b6ba5a tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Task: {'id': task-2087724, 'name': Rename_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2187.691125] env[63279]: DEBUG nova.network.neutron [None req-8d18390d-60e2-4c12-b76a-90c1efe64631 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: 736ab3ac-b9e0-4f9e-885b-765ca7a92ed0] Port 5a61eb6f-d235-4f00-a65b-76f20c2c3d8d binding to destination host cpu-1 is already ACTIVE {{(pid=63279) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 2187.698088] env[63279]: DEBUG oslo_vmware.api [None req-082a3afc-69fa-402f-b3d2-e85c1a1a4788 tempest-ServersTestMultiNic-465204663 tempest-ServersTestMultiNic-465204663-project-member] Task: {'id': task-2087723, 'name': ReconfigVM_Task, 'duration_secs': 0.317055} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2187.698088] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-082a3afc-69fa-402f-b3d2-e85c1a1a4788 tempest-ServersTestMultiNic-465204663 tempest-ServersTestMultiNic-465204663-project-member] [instance: 75f5b49f-14e7-4a8e-a0cb-b955edc13dd5] Reconfigured VM instance instance-00000052 to attach disk [datastore1] 75f5b49f-14e7-4a8e-a0cb-b955edc13dd5/75f5b49f-14e7-4a8e-a0cb-b955edc13dd5.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2187.698088] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-7bc819aa-a1b6-4043-b671-3c07e06a5e4e {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2187.705022] env[63279]: DEBUG oslo_vmware.api [None req-082a3afc-69fa-402f-b3d2-e85c1a1a4788 tempest-ServersTestMultiNic-465204663 tempest-ServersTestMultiNic-465204663-project-member] Waiting for the task: (returnval){ [ 2187.705022] env[63279]: value = "task-2087726" [ 2187.705022] env[63279]: _type = "Task" [ 2187.705022] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2187.716246] env[63279]: DEBUG oslo_vmware.api [None req-082a3afc-69fa-402f-b3d2-e85c1a1a4788 tempest-ServersTestMultiNic-465204663 tempest-ServersTestMultiNic-465204663-project-member] Task: {'id': task-2087726, 'name': Rename_Task} progress is 5%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2187.753639] env[63279]: DEBUG nova.compute.manager [req-4c61e483-3f24-4389-b53f-ffd9d75cee82 req-06395864-647a-4ea0-8230-da3934ad5e03 service nova] [instance: 861e4118-6134-40cf-91cb-865b6ee9f347] Received event network-vif-deleted-4361494d-9864-49bc-a792-be9c831d7486 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2187.912154] env[63279]: DEBUG oslo_concurrency.lockutils [None req-01bff562-4997-463d-b37e-eeeb7be900cc tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.969s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2187.915049] env[63279]: DEBUG oslo_concurrency.lockutils [None req-90a4661b-87b3-4198-ac53-8b946588d520 tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 5.205s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2187.918700] env[63279]: INFO nova.compute.claims [None req-90a4661b-87b3-4198-ac53-8b946588d520 tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] [instance: 9b98a316-71da-45fb-b895-553f179fe7d9] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2187.929970] env[63279]: DEBUG oslo_concurrency.lockutils [None req-c03c3095-5bce-4baa-893b-4ed2dab285cd tempest-ServersTestBootFromVolume-1861685268 tempest-ServersTestBootFromVolume-1861685268-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2187.941418] env[63279]: DEBUG oslo_vmware.api [None req-c2c89b32-678a-48f7-a70f-bc8265b6ba5a tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Task: {'id': task-2087724, 'name': Rename_Task, 'duration_secs': 0.219847} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2187.941809] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-c2c89b32-678a-48f7-a70f-bc8265b6ba5a tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] [instance: 2fba60ba-7ef0-4e61-9f7d-147cad8ab7ed] Powering on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2187.942126] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8a904abc-a6c1-48b1-ac7d-cb68b7034a22 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2187.947887] env[63279]: DEBUG oslo_vmware.api [None req-c2c89b32-678a-48f7-a70f-bc8265b6ba5a tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Waiting for the task: (returnval){ [ 2187.947887] env[63279]: value = "task-2087727" [ 2187.947887] env[63279]: _type = "Task" [ 2187.947887] env[63279]: } to complete. 
{{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2187.952608] env[63279]: INFO nova.scheduler.client.report [None req-01bff562-4997-463d-b37e-eeeb7be900cc tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Deleted allocations for instance eca98392-98be-405b-b799-463ef9ee3dc8 [ 2187.966377] env[63279]: DEBUG oslo_vmware.api [None req-c2c89b32-678a-48f7-a70f-bc8265b6ba5a tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Task: {'id': task-2087727, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2188.226457] env[63279]: DEBUG oslo_vmware.api [None req-082a3afc-69fa-402f-b3d2-e85c1a1a4788 tempest-ServersTestMultiNic-465204663 tempest-ServersTestMultiNic-465204663-project-member] Task: {'id': task-2087726, 'name': Rename_Task, 'duration_secs': 0.154582} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2188.226798] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-082a3afc-69fa-402f-b3d2-e85c1a1a4788 tempest-ServersTestMultiNic-465204663 tempest-ServersTestMultiNic-465204663-project-member] [instance: 75f5b49f-14e7-4a8e-a0cb-b955edc13dd5] Powering on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2188.227191] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-46fdd70e-3032-4d21-86e5-9ad5bc004cb9 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2188.235025] env[63279]: DEBUG oslo_vmware.api [None req-082a3afc-69fa-402f-b3d2-e85c1a1a4788 tempest-ServersTestMultiNic-465204663 tempest-ServersTestMultiNic-465204663-project-member] Waiting for the task: (returnval){ [ 2188.235025] env[63279]: value = "task-2087728" [ 2188.235025] env[63279]: _type = "Task" [ 2188.235025] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2188.244340] env[63279]: DEBUG oslo_vmware.api [None req-082a3afc-69fa-402f-b3d2-e85c1a1a4788 tempest-ServersTestMultiNic-465204663 tempest-ServersTestMultiNic-465204663-project-member] Task: {'id': task-2087728, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2188.460307] env[63279]: DEBUG oslo_vmware.api [None req-c2c89b32-678a-48f7-a70f-bc8265b6ba5a tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Task: {'id': task-2087727, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2188.471335] env[63279]: DEBUG oslo_concurrency.lockutils [None req-01bff562-4997-463d-b37e-eeeb7be900cc tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Lock "eca98392-98be-405b-b799-463ef9ee3dc8" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 12.751s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2188.673770] env[63279]: DEBUG oslo_concurrency.lockutils [None req-0e777e00-fd9d-4c85-a1de-a0c92a46f747 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Acquiring lock "f18d3019-bf1f-4519-a824-7ca80458d793" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2188.673770] env[63279]: DEBUG oslo_concurrency.lockutils [None req-0e777e00-fd9d-4c85-a1de-a0c92a46f747 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Lock "f18d3019-bf1f-4519-a824-7ca80458d793" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2188.726835] env[63279]: DEBUG oslo_concurrency.lockutils [None req-8d18390d-60e2-4c12-b76a-90c1efe64631 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Acquiring lock "736ab3ac-b9e0-4f9e-885b-765ca7a92ed0-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2188.727061] env[63279]: DEBUG oslo_concurrency.lockutils [None req-8d18390d-60e2-4c12-b76a-90c1efe64631 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Lock "736ab3ac-b9e0-4f9e-885b-765ca7a92ed0-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2188.727256] env[63279]: DEBUG oslo_concurrency.lockutils [None req-8d18390d-60e2-4c12-b76a-90c1efe64631 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Lock "736ab3ac-b9e0-4f9e-885b-765ca7a92ed0-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2188.746502] env[63279]: DEBUG oslo_vmware.api [None req-082a3afc-69fa-402f-b3d2-e85c1a1a4788 tempest-ServersTestMultiNic-465204663 tempest-ServersTestMultiNic-465204663-project-member] Task: {'id': task-2087728, 'name': PowerOnVM_Task} progress is 88%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2188.962764] env[63279]: DEBUG oslo_vmware.api [None req-c2c89b32-678a-48f7-a70f-bc8265b6ba5a tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Task: {'id': task-2087727, 'name': PowerOnVM_Task, 'duration_secs': 0.692939} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2188.962911] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-c2c89b32-678a-48f7-a70f-bc8265b6ba5a tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] [instance: 2fba60ba-7ef0-4e61-9f7d-147cad8ab7ed] Powered on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2188.965037] env[63279]: INFO nova.compute.manager [None req-c2c89b32-678a-48f7-a70f-bc8265b6ba5a tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] [instance: 2fba60ba-7ef0-4e61-9f7d-147cad8ab7ed] Took 11.25 seconds to spawn the instance on the hypervisor. [ 2188.965037] env[63279]: DEBUG nova.compute.manager [None req-c2c89b32-678a-48f7-a70f-bc8265b6ba5a tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] [instance: 2fba60ba-7ef0-4e61-9f7d-147cad8ab7ed] Checking state {{(pid=63279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2188.965037] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c8d7b8e-dd33-4697-bc7d-c138e2717fdc {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2189.176712] env[63279]: DEBUG nova.compute.manager [None req-0e777e00-fd9d-4c85-a1de-a0c92a46f747 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: f18d3019-bf1f-4519-a824-7ca80458d793] Starting instance... {{(pid=63279) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 2189.239950] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9b5b5eb-66a0-41bc-88b8-06f41d94b288 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2189.257409] env[63279]: DEBUG oslo_vmware.api [None req-082a3afc-69fa-402f-b3d2-e85c1a1a4788 tempest-ServersTestMultiNic-465204663 tempest-ServersTestMultiNic-465204663-project-member] Task: {'id': task-2087728, 'name': PowerOnVM_Task, 'duration_secs': 0.590154} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2189.260551] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-082a3afc-69fa-402f-b3d2-e85c1a1a4788 tempest-ServersTestMultiNic-465204663 tempest-ServersTestMultiNic-465204663-project-member] [instance: 75f5b49f-14e7-4a8e-a0cb-b955edc13dd5] Powered on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2189.261266] env[63279]: INFO nova.compute.manager [None req-082a3afc-69fa-402f-b3d2-e85c1a1a4788 tempest-ServersTestMultiNic-465204663 tempest-ServersTestMultiNic-465204663-project-member] [instance: 75f5b49f-14e7-4a8e-a0cb-b955edc13dd5] Took 23.86 seconds to spawn the instance on the hypervisor. 
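The interleaved 'Acquiring lock "…" by "…"', 'Lock "…" acquired … :: waited N.NNNs' and '"released" … :: held N.NNNs' records come from oslo.concurrency's lockutils, which times how long each caller waited for and then held a named lock. A small illustrative use of the same helpers; the lock names and function here are examples, not Nova's implementation.

    # Illustrative oslo.concurrency locking; names are examples only.
    from oslo_concurrency import lockutils

    @lockutils.synchronized('compute_resources')
    def update_usage(instance_uuid):
        # Serialized on the 'compute_resources' lock; with DEBUG logging
        # enabled, lockutils emits the waited/held timings seen above.
        return instance_uuid

    # Inline context-manager form (the 'lock lockutils.py:310/313/331'
    # records above, e.g. the refresh_cache-… locks, use this entry point).
    with lockutils.lock('refresh_cache-example'):
        pass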
[ 2189.261266] env[63279]: DEBUG nova.compute.manager [None req-082a3afc-69fa-402f-b3d2-e85c1a1a4788 tempest-ServersTestMultiNic-465204663 tempest-ServersTestMultiNic-465204663-project-member] [instance: 75f5b49f-14e7-4a8e-a0cb-b955edc13dd5] Checking state {{(pid=63279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2189.262779] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3eba0d44-895e-488b-922d-87057960a1d9 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2189.268169] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99419e4b-5ef7-4b47-bc43-145801ccc89d {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2189.339550] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68ed1517-a6f7-4210-a053-3901f911f56c {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2189.345227] env[63279]: INFO nova.compute.manager [None req-082a3afc-69fa-402f-b3d2-e85c1a1a4788 tempest-ServersTestMultiNic-465204663 tempest-ServersTestMultiNic-465204663-project-member] [instance: 75f5b49f-14e7-4a8e-a0cb-b955edc13dd5] Took 34.02 seconds to build instance. [ 2189.353259] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5eaec46-9889-4b3e-bdb6-a49a63d869c2 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2189.371608] env[63279]: DEBUG nova.compute.provider_tree [None req-90a4661b-87b3-4198-ac53-8b946588d520 tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] Inventory has not changed in ProviderTree for provider: 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2189.489073] env[63279]: INFO nova.compute.manager [None req-c2c89b32-678a-48f7-a70f-bc8265b6ba5a tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] [instance: 2fba60ba-7ef0-4e61-9f7d-147cad8ab7ed] Took 20.47 seconds to build instance. 
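The 'Inventory has not changed for provider 0ba7c625-…' records in this section dump the inventory that the resource tracker reports to Placement. Schedulable capacity for each resource class is derived from those fields as (total - reserved) * allocation_ratio; a quick check against the logged values:

    # Capacity implied by the inventory logged for provider
    # 0ba7c625-a0fc-4d3c-b804-196d00f00137 (values copied from the records above).
    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
    }
    for rc, inv in inventory.items():
        capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print(rc, capacity)  # VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0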
[ 2189.704809] env[63279]: DEBUG oslo_concurrency.lockutils [None req-0e777e00-fd9d-4c85-a1de-a0c92a46f747 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2189.775544] env[63279]: DEBUG oslo_concurrency.lockutils [None req-8d18390d-60e2-4c12-b76a-90c1efe64631 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Acquiring lock "refresh_cache-736ab3ac-b9e0-4f9e-885b-765ca7a92ed0" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2189.775544] env[63279]: DEBUG oslo_concurrency.lockutils [None req-8d18390d-60e2-4c12-b76a-90c1efe64631 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Acquired lock "refresh_cache-736ab3ac-b9e0-4f9e-885b-765ca7a92ed0" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2189.775544] env[63279]: DEBUG nova.network.neutron [None req-8d18390d-60e2-4c12-b76a-90c1efe64631 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: 736ab3ac-b9e0-4f9e-885b-765ca7a92ed0] Building network info cache for instance {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2189.846084] env[63279]: DEBUG oslo_concurrency.lockutils [None req-082a3afc-69fa-402f-b3d2-e85c1a1a4788 tempest-ServersTestMultiNic-465204663 tempest-ServersTestMultiNic-465204663-project-member] Lock "75f5b49f-14e7-4a8e-a0cb-b955edc13dd5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 35.528s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2189.878301] env[63279]: DEBUG nova.scheduler.client.report [None req-90a4661b-87b3-4198-ac53-8b946588d520 tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] Inventory has not changed for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2189.990927] env[63279]: DEBUG oslo_concurrency.lockutils [None req-c2c89b32-678a-48f7-a70f-bc8265b6ba5a tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Lock "2fba60ba-7ef0-4e61-9f7d-147cad8ab7ed" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 21.981s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2190.310175] env[63279]: DEBUG oslo_concurrency.lockutils [None req-f1b5e7c2-edce-4a9b-b5a8-d1c8f7da6eb4 tempest-ServersTestMultiNic-465204663 tempest-ServersTestMultiNic-465204663-project-member] Acquiring lock "75f5b49f-14e7-4a8e-a0cb-b955edc13dd5" by 
"nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2190.310499] env[63279]: DEBUG oslo_concurrency.lockutils [None req-f1b5e7c2-edce-4a9b-b5a8-d1c8f7da6eb4 tempest-ServersTestMultiNic-465204663 tempest-ServersTestMultiNic-465204663-project-member] Lock "75f5b49f-14e7-4a8e-a0cb-b955edc13dd5" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2190.310751] env[63279]: DEBUG oslo_concurrency.lockutils [None req-f1b5e7c2-edce-4a9b-b5a8-d1c8f7da6eb4 tempest-ServersTestMultiNic-465204663 tempest-ServersTestMultiNic-465204663-project-member] Acquiring lock "75f5b49f-14e7-4a8e-a0cb-b955edc13dd5-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2190.311017] env[63279]: DEBUG oslo_concurrency.lockutils [None req-f1b5e7c2-edce-4a9b-b5a8-d1c8f7da6eb4 tempest-ServersTestMultiNic-465204663 tempest-ServersTestMultiNic-465204663-project-member] Lock "75f5b49f-14e7-4a8e-a0cb-b955edc13dd5-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2190.311332] env[63279]: DEBUG oslo_concurrency.lockutils [None req-f1b5e7c2-edce-4a9b-b5a8-d1c8f7da6eb4 tempest-ServersTestMultiNic-465204663 tempest-ServersTestMultiNic-465204663-project-member] Lock "75f5b49f-14e7-4a8e-a0cb-b955edc13dd5-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2190.313878] env[63279]: INFO nova.compute.manager [None req-f1b5e7c2-edce-4a9b-b5a8-d1c8f7da6eb4 tempest-ServersTestMultiNic-465204663 tempest-ServersTestMultiNic-465204663-project-member] [instance: 75f5b49f-14e7-4a8e-a0cb-b955edc13dd5] Terminating instance [ 2190.385360] env[63279]: DEBUG oslo_concurrency.lockutils [None req-90a4661b-87b3-4198-ac53-8b946588d520 tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.470s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2190.389182] env[63279]: DEBUG nova.compute.manager [None req-90a4661b-87b3-4198-ac53-8b946588d520 tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] [instance: 9b98a316-71da-45fb-b895-553f179fe7d9] Start building networks asynchronously for instance. 
{{(pid=63279) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 2190.393766] env[63279]: DEBUG oslo_concurrency.lockutils [None req-73bbe86f-3fab-4cd0-9b07-18088e80054f tempest-ServersTestFqdnHostnames-1241255271 tempest-ServersTestFqdnHostnames-1241255271-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 6.614s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2190.396705] env[63279]: INFO nova.compute.claims [None req-73bbe86f-3fab-4cd0-9b07-18088e80054f tempest-ServersTestFqdnHostnames-1241255271 tempest-ServersTestFqdnHostnames-1241255271-project-member] [instance: 27e2917d-3cd0-4ad3-ab65-f85f7d97225f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2190.772694] env[63279]: DEBUG nova.network.neutron [None req-8d18390d-60e2-4c12-b76a-90c1efe64631 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: 736ab3ac-b9e0-4f9e-885b-765ca7a92ed0] Updating instance_info_cache with network_info: [{"id": "5a61eb6f-d235-4f00-a65b-76f20c2c3d8d", "address": "fa:16:3e:b2:2a:f4", "network": {"id": "d673eabe-0173-4c69-9163-88f4d673ca51", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1798523777-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7c98de1240f14b058e58f6a707096ef1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a555680e-4721-4509-97e4-ced9dc17c13e", "external-id": "nsx-vlan-transportzone-4", "segmentation_id": 4, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5a61eb6f-d2", "ovs_interfaceid": "5a61eb6f-d235-4f00-a65b-76f20c2c3d8d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2190.823195] env[63279]: DEBUG nova.compute.manager [None req-f1b5e7c2-edce-4a9b-b5a8-d1c8f7da6eb4 tempest-ServersTestMultiNic-465204663 tempest-ServersTestMultiNic-465204663-project-member] [instance: 75f5b49f-14e7-4a8e-a0cb-b955edc13dd5] Start destroying the instance on the hypervisor. 
{{(pid=63279) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2190.823485] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-f1b5e7c2-edce-4a9b-b5a8-d1c8f7da6eb4 tempest-ServersTestMultiNic-465204663 tempest-ServersTestMultiNic-465204663-project-member] [instance: 75f5b49f-14e7-4a8e-a0cb-b955edc13dd5] Destroying instance {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2190.825104] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af34c226-e542-47f9-80b2-e390e7d49a69 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2190.835062] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-f1b5e7c2-edce-4a9b-b5a8-d1c8f7da6eb4 tempest-ServersTestMultiNic-465204663 tempest-ServersTestMultiNic-465204663-project-member] [instance: 75f5b49f-14e7-4a8e-a0cb-b955edc13dd5] Powering off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2190.835062] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f9f8d756-03f4-4513-a19c-0df1292d6fde {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2190.844419] env[63279]: DEBUG oslo_vmware.api [None req-f1b5e7c2-edce-4a9b-b5a8-d1c8f7da6eb4 tempest-ServersTestMultiNic-465204663 tempest-ServersTestMultiNic-465204663-project-member] Waiting for the task: (returnval){ [ 2190.844419] env[63279]: value = "task-2087729" [ 2190.844419] env[63279]: _type = "Task" [ 2190.844419] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2190.857024] env[63279]: DEBUG oslo_vmware.api [None req-f1b5e7c2-edce-4a9b-b5a8-d1c8f7da6eb4 tempest-ServersTestMultiNic-465204663 tempest-ServersTestMultiNic-465204663-project-member] Task: {'id': task-2087729, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2190.903074] env[63279]: DEBUG nova.compute.utils [None req-90a4661b-87b3-4198-ac53-8b946588d520 tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] Using /dev/sd instead of None {{(pid=63279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2190.907525] env[63279]: DEBUG nova.compute.manager [None req-90a4661b-87b3-4198-ac53-8b946588d520 tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] [instance: 9b98a316-71da-45fb-b895-553f179fe7d9] Allocating IP information in the background. 
{{(pid=63279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 2190.908273] env[63279]: DEBUG nova.network.neutron [None req-90a4661b-87b3-4198-ac53-8b946588d520 tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] [instance: 9b98a316-71da-45fb-b895-553f179fe7d9] allocate_for_instance() {{(pid=63279) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2190.984918] env[63279]: DEBUG nova.policy [None req-90a4661b-87b3-4198-ac53-8b946588d520 tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '591c2211a5f24ffd8d624f966b4ec858', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3d9c0a9c34ba408c829c0b50f3592bb2', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63279) authorize /opt/stack/nova/nova/policy.py:192}} [ 2191.058220] env[63279]: DEBUG nova.compute.manager [req-10e3f510-a0e2-49df-b09d-6a8913e4a30f req-bb4b2b61-49a9-43bc-9159-08758013165d service nova] [instance: 2fba60ba-7ef0-4e61-9f7d-147cad8ab7ed] Received event network-changed-8f12bb0d-eec1-4c21-b319-372b37e319ca {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2191.058402] env[63279]: DEBUG nova.compute.manager [req-10e3f510-a0e2-49df-b09d-6a8913e4a30f req-bb4b2b61-49a9-43bc-9159-08758013165d service nova] [instance: 2fba60ba-7ef0-4e61-9f7d-147cad8ab7ed] Refreshing instance network info cache due to event network-changed-8f12bb0d-eec1-4c21-b319-372b37e319ca. 
{{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11655}} [ 2191.058628] env[63279]: DEBUG oslo_concurrency.lockutils [req-10e3f510-a0e2-49df-b09d-6a8913e4a30f req-bb4b2b61-49a9-43bc-9159-08758013165d service nova] Acquiring lock "refresh_cache-2fba60ba-7ef0-4e61-9f7d-147cad8ab7ed" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2191.058774] env[63279]: DEBUG oslo_concurrency.lockutils [req-10e3f510-a0e2-49df-b09d-6a8913e4a30f req-bb4b2b61-49a9-43bc-9159-08758013165d service nova] Acquired lock "refresh_cache-2fba60ba-7ef0-4e61-9f7d-147cad8ab7ed" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2191.058933] env[63279]: DEBUG nova.network.neutron [req-10e3f510-a0e2-49df-b09d-6a8913e4a30f req-bb4b2b61-49a9-43bc-9159-08758013165d service nova] [instance: 2fba60ba-7ef0-4e61-9f7d-147cad8ab7ed] Refreshing network info cache for port 8f12bb0d-eec1-4c21-b319-372b37e319ca {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2191.275971] env[63279]: DEBUG oslo_concurrency.lockutils [None req-8d18390d-60e2-4c12-b76a-90c1efe64631 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Releasing lock "refresh_cache-736ab3ac-b9e0-4f9e-885b-765ca7a92ed0" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2191.306451] env[63279]: DEBUG oslo_concurrency.lockutils [None req-0334c2c0-9949-449c-a374-eaaae4ef764a tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Acquiring lock "9fd3ea14-3e25-47f4-ae84-1b1fb8b1bbf6" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2191.306833] env[63279]: DEBUG oslo_concurrency.lockutils [None req-0334c2c0-9949-449c-a374-eaaae4ef764a tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Lock "9fd3ea14-3e25-47f4-ae84-1b1fb8b1bbf6" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2191.354337] env[63279]: DEBUG nova.network.neutron [None req-90a4661b-87b3-4198-ac53-8b946588d520 tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] [instance: 9b98a316-71da-45fb-b895-553f179fe7d9] Successfully created port: 162a4844-743a-4cba-b137-f35170a3d072 {{(pid=63279) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2191.361617] env[63279]: DEBUG oslo_vmware.api [None req-f1b5e7c2-edce-4a9b-b5a8-d1c8f7da6eb4 tempest-ServersTestMultiNic-465204663 tempest-ServersTestMultiNic-465204663-project-member] Task: {'id': task-2087729, 'name': PowerOffVM_Task, 'duration_secs': 0.199229} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2191.362183] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-f1b5e7c2-edce-4a9b-b5a8-d1c8f7da6eb4 tempest-ServersTestMultiNic-465204663 tempest-ServersTestMultiNic-465204663-project-member] [instance: 75f5b49f-14e7-4a8e-a0cb-b955edc13dd5] Powered off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2191.362556] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-f1b5e7c2-edce-4a9b-b5a8-d1c8f7da6eb4 tempest-ServersTestMultiNic-465204663 tempest-ServersTestMultiNic-465204663-project-member] [instance: 75f5b49f-14e7-4a8e-a0cb-b955edc13dd5] Unregistering the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2191.362993] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-63683417-2a5d-4a17-93f3-64ed59e815c1 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2191.410860] env[63279]: DEBUG nova.compute.manager [None req-90a4661b-87b3-4198-ac53-8b946588d520 tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] [instance: 9b98a316-71da-45fb-b895-553f179fe7d9] Start building block device mappings for instance. {{(pid=63279) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 2191.723019] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d80d354d-118f-4918-8be3-58a02e7706f9 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2191.733112] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af20c94b-6cf1-481a-82ad-4831a4c0188d {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2191.774213] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21ec6680-eb10-4370-86f8-545df02b881d {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2191.785396] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-254e07b1-1153-4ffb-88dc-b48c081167fa {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2191.806890] env[63279]: DEBUG nova.compute.provider_tree [None req-73bbe86f-3fab-4cd0-9b07-18088e80054f tempest-ServersTestFqdnHostnames-1241255271 tempest-ServersTestFqdnHostnames-1241255271-project-member] Inventory has not changed in ProviderTree for provider: 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2191.809604] env[63279]: DEBUG nova.compute.manager [None req-0334c2c0-9949-449c-a374-eaaae4ef764a tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] [instance: 9fd3ea14-3e25-47f4-ae84-1b1fb8b1bbf6] Starting instance... 
{{(pid=63279) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 2191.813541] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fcc79b0c-abc1-4556-9eb3-769d3b7ddb50 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2191.839885] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-def14283-02dd-4e91-8d32-717eb125f2d3 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2191.847647] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-8d18390d-60e2-4c12-b76a-90c1efe64631 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: 736ab3ac-b9e0-4f9e-885b-765ca7a92ed0] Updating instance '736ab3ac-b9e0-4f9e-885b-765ca7a92ed0' progress to 83 {{(pid=63279) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2191.963847] env[63279]: DEBUG nova.network.neutron [req-10e3f510-a0e2-49df-b09d-6a8913e4a30f req-bb4b2b61-49a9-43bc-9159-08758013165d service nova] [instance: 2fba60ba-7ef0-4e61-9f7d-147cad8ab7ed] Updated VIF entry in instance network info cache for port 8f12bb0d-eec1-4c21-b319-372b37e319ca. {{(pid=63279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2191.964227] env[63279]: DEBUG nova.network.neutron [req-10e3f510-a0e2-49df-b09d-6a8913e4a30f req-bb4b2b61-49a9-43bc-9159-08758013165d service nova] [instance: 2fba60ba-7ef0-4e61-9f7d-147cad8ab7ed] Updating instance_info_cache with network_info: [{"id": "8f12bb0d-eec1-4c21-b319-372b37e319ca", "address": "fa:16:3e:68:12:c8", "network": {"id": "c1d7406d-6852-47cd-a4a3-de7373d03ab4", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1990733857-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.181", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1fc9b60ae304455097b8be9a276796fa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0549820d-5649-40bc-ad6e-9ae27b384d90", "external-id": "nsx-vlan-transportzone-434", "segmentation_id": 434, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8f12bb0d-ee", "ovs_interfaceid": "8f12bb0d-eec1-4c21-b319-372b37e319ca", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2192.323030] env[63279]: DEBUG nova.scheduler.client.report [None req-73bbe86f-3fab-4cd0-9b07-18088e80054f tempest-ServersTestFqdnHostnames-1241255271 tempest-ServersTestFqdnHostnames-1241255271-project-member] Inventory has not changed for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 
'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2192.356189] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-8d18390d-60e2-4c12-b76a-90c1efe64631 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: 736ab3ac-b9e0-4f9e-885b-765ca7a92ed0] Powering on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2192.357513] env[63279]: DEBUG oslo_concurrency.lockutils [None req-0334c2c0-9949-449c-a374-eaaae4ef764a tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2192.358041] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f8d49a97-8e5d-4c9b-9f7d-e39eb85e5768 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2192.369445] env[63279]: DEBUG oslo_vmware.api [None req-8d18390d-60e2-4c12-b76a-90c1efe64631 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Waiting for the task: (returnval){ [ 2192.369445] env[63279]: value = "task-2087731" [ 2192.369445] env[63279]: _type = "Task" [ 2192.369445] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2192.378737] env[63279]: DEBUG oslo_vmware.api [None req-8d18390d-60e2-4c12-b76a-90c1efe64631 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Task: {'id': task-2087731, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2192.422969] env[63279]: DEBUG nova.compute.manager [None req-90a4661b-87b3-4198-ac53-8b946588d520 tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] [instance: 9b98a316-71da-45fb-b895-553f179fe7d9] Start spawning the instance on the hypervisor. 
{{(pid=63279) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 2192.459395] env[63279]: DEBUG nova.virt.hardware [None req-90a4661b-87b3-4198-ac53-8b946588d520 tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-13T17:30:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-13T17:30:01Z,direct_url=,disk_format='vmdk',id=30887889-e45b-4f67-8b3c-16216e594a90,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a935e86eee0a4c38adfe0367d2097a61',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-13T17:30:02Z,virtual_size=,visibility=), allow threads: False {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2192.459395] env[63279]: DEBUG nova.virt.hardware [None req-90a4661b-87b3-4198-ac53-8b946588d520 tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] Flavor limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2192.459395] env[63279]: DEBUG nova.virt.hardware [None req-90a4661b-87b3-4198-ac53-8b946588d520 tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] Image limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2192.459395] env[63279]: DEBUG nova.virt.hardware [None req-90a4661b-87b3-4198-ac53-8b946588d520 tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] Flavor pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2192.459395] env[63279]: DEBUG nova.virt.hardware [None req-90a4661b-87b3-4198-ac53-8b946588d520 tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] Image pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2192.459395] env[63279]: DEBUG nova.virt.hardware [None req-90a4661b-87b3-4198-ac53-8b946588d520 tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2192.459721] env[63279]: DEBUG nova.virt.hardware [None req-90a4661b-87b3-4198-ac53-8b946588d520 tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 2192.459900] env[63279]: DEBUG nova.virt.hardware [None req-90a4661b-87b3-4198-ac53-8b946588d520 tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 2192.460129] env[63279]: DEBUG 
nova.virt.hardware [None req-90a4661b-87b3-4198-ac53-8b946588d520 tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] Got 1 possible topologies {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2192.462198] env[63279]: DEBUG nova.virt.hardware [None req-90a4661b-87b3-4198-ac53-8b946588d520 tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2192.462440] env[63279]: DEBUG nova.virt.hardware [None req-90a4661b-87b3-4198-ac53-8b946588d520 tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2192.463452] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65d9da14-f8b6-4dbc-993e-85b0459ec7ca {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2192.469069] env[63279]: DEBUG oslo_concurrency.lockutils [req-10e3f510-a0e2-49df-b09d-6a8913e4a30f req-bb4b2b61-49a9-43bc-9159-08758013165d service nova] Releasing lock "refresh_cache-2fba60ba-7ef0-4e61-9f7d-147cad8ab7ed" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2192.473817] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e04b3217-a054-4203-8d5a-2333830e424a {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2192.833237] env[63279]: DEBUG oslo_concurrency.lockutils [None req-73bbe86f-3fab-4cd0-9b07-18088e80054f tempest-ServersTestFqdnHostnames-1241255271 tempest-ServersTestFqdnHostnames-1241255271-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.439s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2192.833865] env[63279]: DEBUG nova.compute.manager [None req-73bbe86f-3fab-4cd0-9b07-18088e80054f tempest-ServersTestFqdnHostnames-1241255271 tempest-ServersTestFqdnHostnames-1241255271-project-member] [instance: 27e2917d-3cd0-4ad3-ab65-f85f7d97225f] Start building networks asynchronously for instance. 
{{(pid=63279) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 2192.836993] env[63279]: DEBUG oslo_concurrency.lockutils [None req-d37d85e0-dc3a-4353-a13a-f209b1ba7eb7 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 7.566s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2192.839128] env[63279]: INFO nova.compute.claims [None req-d37d85e0-dc3a-4353-a13a-f209b1ba7eb7 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] [instance: 14c698c8-7459-4843-bb19-f915742e3e53] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2192.879996] env[63279]: DEBUG oslo_vmware.api [None req-8d18390d-60e2-4c12-b76a-90c1efe64631 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Task: {'id': task-2087731, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2193.344542] env[63279]: DEBUG nova.compute.utils [None req-73bbe86f-3fab-4cd0-9b07-18088e80054f tempest-ServersTestFqdnHostnames-1241255271 tempest-ServersTestFqdnHostnames-1241255271-project-member] Using /dev/sd instead of None {{(pid=63279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2193.350966] env[63279]: DEBUG nova.compute.manager [None req-73bbe86f-3fab-4cd0-9b07-18088e80054f tempest-ServersTestFqdnHostnames-1241255271 tempest-ServersTestFqdnHostnames-1241255271-project-member] [instance: 27e2917d-3cd0-4ad3-ab65-f85f7d97225f] Allocating IP information in the background. {{(pid=63279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 2193.350966] env[63279]: DEBUG nova.network.neutron [None req-73bbe86f-3fab-4cd0-9b07-18088e80054f tempest-ServersTestFqdnHostnames-1241255271 tempest-ServersTestFqdnHostnames-1241255271-project-member] [instance: 27e2917d-3cd0-4ad3-ab65-f85f7d97225f] allocate_for_instance() {{(pid=63279) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2193.384837] env[63279]: DEBUG oslo_vmware.api [None req-8d18390d-60e2-4c12-b76a-90c1efe64631 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Task: {'id': task-2087731, 'name': PowerOnVM_Task, 'duration_secs': 0.635142} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2193.385192] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-8d18390d-60e2-4c12-b76a-90c1efe64631 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: 736ab3ac-b9e0-4f9e-885b-765ca7a92ed0] Powered on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2193.385430] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-8d18390d-60e2-4c12-b76a-90c1efe64631 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: 736ab3ac-b9e0-4f9e-885b-765ca7a92ed0] Updating instance '736ab3ac-b9e0-4f9e-885b-765ca7a92ed0' progress to 100 {{(pid=63279) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2193.409931] env[63279]: DEBUG nova.policy [None req-73bbe86f-3fab-4cd0-9b07-18088e80054f tempest-ServersTestFqdnHostnames-1241255271 tempest-ServersTestFqdnHostnames-1241255271-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'fd8ec832ba3f4f20ad6e6d1c9f6d2bc5', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '421b189f81304db9b91f30c710dc2f30', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63279) authorize /opt/stack/nova/nova/policy.py:192}} [ 2193.776390] env[63279]: DEBUG nova.network.neutron [None req-73bbe86f-3fab-4cd0-9b07-18088e80054f tempest-ServersTestFqdnHostnames-1241255271 tempest-ServersTestFqdnHostnames-1241255271-project-member] [instance: 27e2917d-3cd0-4ad3-ab65-f85f7d97225f] Successfully created port: 0f4606f9-e916-43fe-8ad4-57247bfb98a2 {{(pid=63279) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2193.788347] env[63279]: DEBUG oslo_vmware.rw_handles [None req-208396b7-55ea-4701-90ef-c6d3ea22cae9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/521d34e6-dc66-5407-642d-4c5439c4e1a6/disk-0.vmdk. {{(pid=63279) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 2193.789342] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99375dc7-da58-4f99-947d-aae23e4a7f62 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2193.795728] env[63279]: DEBUG oslo_vmware.rw_handles [None req-208396b7-55ea-4701-90ef-c6d3ea22cae9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/521d34e6-dc66-5407-642d-4c5439c4e1a6/disk-0.vmdk is in state: ready. {{(pid=63279) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 2193.795991] env[63279]: ERROR oslo_vmware.rw_handles [None req-208396b7-55ea-4701-90ef-c6d3ea22cae9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/521d34e6-dc66-5407-642d-4c5439c4e1a6/disk-0.vmdk due to incomplete transfer. 
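[editorial aside] The PowerOnVM_Task / Destroy_Task entries above follow the usual oslo.vmware invoke-and-poll pattern that nova's vmwareapi driver relies on: start an asynchronous vCenter task through the session, then block on wait_for_task while the API periodically reports progress (the "progress is N%" lines). A minimal sketch of that pattern, not the Nova code itself; the function name and the vm_ref argument are placeholders supplied by the caller:

    # Sketch of the invoke-and-poll pattern behind the
    # "PowerOnVM_Task ... progress is N%" lines in this log.
    from oslo_vmware import api


    def power_on(session, vm_ref):
        """Start a vCenter power-on task and block until it finishes.

        session is an oslo_vmware.api.VMwareAPISession; vm_ref is a
        VirtualMachine managed object reference obtained elsewhere
        (for example from a PropertyCollector query).
        """
        # invoke_api issues the SOAP call shown as
        # "Invoking VirtualMachine.PowerOnVM_Task" above.
        task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
        # wait_for_task polls TaskInfo at the session's task_poll_interval
        # and raises on task error, mirroring the _poll_task DEBUG lines.
        return session.wait_for_task(task)

    # Usage (placeholder endpoint/credentials, not the ones in this log):
    #   session = api.VMwareAPISession('vcenter.example.test', 'user', 'secret',
    #                                  api_retry_count=10, task_poll_interval=0.5)
    #   power_on(session, vm_ref)
[end aside]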
[ 2193.796252] env[63279]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-984c40d2-9c04-4ef3-9b55-24c67cd40c25 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2193.803789] env[63279]: DEBUG oslo_vmware.rw_handles [None req-208396b7-55ea-4701-90ef-c6d3ea22cae9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/521d34e6-dc66-5407-642d-4c5439c4e1a6/disk-0.vmdk. {{(pid=63279) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 2193.803958] env[63279]: DEBUG nova.virt.vmwareapi.images [None req-208396b7-55ea-4701-90ef-c6d3ea22cae9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] [instance: a15141bc-424d-48ca-a6d5-c859a3639a0b] Uploaded image f425cf74-2f77-4c7b-99ba-64ff14b01dc6 to the Glance image server {{(pid=63279) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 2193.806658] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-208396b7-55ea-4701-90ef-c6d3ea22cae9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] [instance: a15141bc-424d-48ca-a6d5-c859a3639a0b] Destroying the VM {{(pid=63279) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 2193.807084] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-be3e4198-ef16-4e13-a90d-6f6c0a30e064 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2193.814999] env[63279]: DEBUG oslo_vmware.api [None req-208396b7-55ea-4701-90ef-c6d3ea22cae9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Waiting for the task: (returnval){ [ 2193.814999] env[63279]: value = "task-2087732" [ 2193.814999] env[63279]: _type = "Task" [ 2193.814999] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2193.825018] env[63279]: DEBUG oslo_vmware.api [None req-208396b7-55ea-4701-90ef-c6d3ea22cae9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Task: {'id': task-2087732, 'name': Destroy_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2193.835241] env[63279]: DEBUG oslo_concurrency.lockutils [None req-c2199699-eea6-4f35-987c-22394a2587cb tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Acquiring lock "69b3269a-2ba3-4f5f-a29c-62518c93da3d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2193.835492] env[63279]: DEBUG oslo_concurrency.lockutils [None req-c2199699-eea6-4f35-987c-22394a2587cb tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Lock "69b3269a-2ba3-4f5f-a29c-62518c93da3d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2193.849277] env[63279]: DEBUG nova.compute.manager [None req-73bbe86f-3fab-4cd0-9b07-18088e80054f tempest-ServersTestFqdnHostnames-1241255271 tempest-ServersTestFqdnHostnames-1241255271-project-member] [instance: 27e2917d-3cd0-4ad3-ab65-f85f7d97225f] Start building block device mappings for instance. {{(pid=63279) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 2194.142274] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8986247-5e29-4457-9680-dc2905a81a3b {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2194.150838] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d8e8925-d339-40e9-bd83-c82abc1f06c9 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2194.182682] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe6838a3-254d-4d6c-8530-a1b46be48347 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2194.190980] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e7e81eb-beb9-4a13-add4-b4c06335b132 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2194.204431] env[63279]: DEBUG nova.compute.provider_tree [None req-d37d85e0-dc3a-4353-a13a-f209b1ba7eb7 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Inventory has not changed in ProviderTree for provider: 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2194.324755] env[63279]: DEBUG oslo_vmware.api [None req-208396b7-55ea-4701-90ef-c6d3ea22cae9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Task: {'id': task-2087732, 'name': Destroy_Task} progress is 33%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2194.337415] env[63279]: DEBUG nova.compute.manager [None req-c2199699-eea6-4f35-987c-22394a2587cb tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] [instance: 69b3269a-2ba3-4f5f-a29c-62518c93da3d] Starting instance... 
{{(pid=63279) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 2194.713450] env[63279]: DEBUG nova.scheduler.client.report [None req-d37d85e0-dc3a-4353-a13a-f209b1ba7eb7 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Inventory has not changed for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2194.824857] env[63279]: DEBUG oslo_vmware.api [None req-208396b7-55ea-4701-90ef-c6d3ea22cae9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Task: {'id': task-2087732, 'name': Destroy_Task} progress is 33%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2194.861430] env[63279]: DEBUG oslo_concurrency.lockutils [None req-c2199699-eea6-4f35-987c-22394a2587cb tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2194.862638] env[63279]: DEBUG nova.compute.manager [None req-73bbe86f-3fab-4cd0-9b07-18088e80054f tempest-ServersTestFqdnHostnames-1241255271 tempest-ServersTestFqdnHostnames-1241255271-project-member] [instance: 27e2917d-3cd0-4ad3-ab65-f85f7d97225f] Start spawning the instance on the hypervisor. 
{{(pid=63279) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 2194.887815] env[63279]: DEBUG nova.virt.hardware [None req-73bbe86f-3fab-4cd0-9b07-18088e80054f tempest-ServersTestFqdnHostnames-1241255271 tempest-ServersTestFqdnHostnames-1241255271-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-13T17:30:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-13T17:30:01Z,direct_url=,disk_format='vmdk',id=30887889-e45b-4f67-8b3c-16216e594a90,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a935e86eee0a4c38adfe0367d2097a61',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-13T17:30:02Z,virtual_size=,visibility=), allow threads: False {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2194.888099] env[63279]: DEBUG nova.virt.hardware [None req-73bbe86f-3fab-4cd0-9b07-18088e80054f tempest-ServersTestFqdnHostnames-1241255271 tempest-ServersTestFqdnHostnames-1241255271-project-member] Flavor limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2194.888295] env[63279]: DEBUG nova.virt.hardware [None req-73bbe86f-3fab-4cd0-9b07-18088e80054f tempest-ServersTestFqdnHostnames-1241255271 tempest-ServersTestFqdnHostnames-1241255271-project-member] Image limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2194.888556] env[63279]: DEBUG nova.virt.hardware [None req-73bbe86f-3fab-4cd0-9b07-18088e80054f tempest-ServersTestFqdnHostnames-1241255271 tempest-ServersTestFqdnHostnames-1241255271-project-member] Flavor pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2194.888731] env[63279]: DEBUG nova.virt.hardware [None req-73bbe86f-3fab-4cd0-9b07-18088e80054f tempest-ServersTestFqdnHostnames-1241255271 tempest-ServersTestFqdnHostnames-1241255271-project-member] Image pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2194.888950] env[63279]: DEBUG nova.virt.hardware [None req-73bbe86f-3fab-4cd0-9b07-18088e80054f tempest-ServersTestFqdnHostnames-1241255271 tempest-ServersTestFqdnHostnames-1241255271-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2194.889194] env[63279]: DEBUG nova.virt.hardware [None req-73bbe86f-3fab-4cd0-9b07-18088e80054f tempest-ServersTestFqdnHostnames-1241255271 tempest-ServersTestFqdnHostnames-1241255271-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 2194.889362] env[63279]: DEBUG nova.virt.hardware [None req-73bbe86f-3fab-4cd0-9b07-18088e80054f tempest-ServersTestFqdnHostnames-1241255271 tempest-ServersTestFqdnHostnames-1241255271-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 2194.889534] 
env[63279]: DEBUG nova.virt.hardware [None req-73bbe86f-3fab-4cd0-9b07-18088e80054f tempest-ServersTestFqdnHostnames-1241255271 tempest-ServersTestFqdnHostnames-1241255271-project-member] Got 1 possible topologies {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2194.889779] env[63279]: DEBUG nova.virt.hardware [None req-73bbe86f-3fab-4cd0-9b07-18088e80054f tempest-ServersTestFqdnHostnames-1241255271 tempest-ServersTestFqdnHostnames-1241255271-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2194.889979] env[63279]: DEBUG nova.virt.hardware [None req-73bbe86f-3fab-4cd0-9b07-18088e80054f tempest-ServersTestFqdnHostnames-1241255271 tempest-ServersTestFqdnHostnames-1241255271-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2194.891102] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-317308dd-2758-42ac-bbae-e2a9e96c4abf {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2194.899609] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bceada0c-2301-4a7a-987f-407589a3bf06 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2195.219264] env[63279]: DEBUG oslo_concurrency.lockutils [None req-d37d85e0-dc3a-4353-a13a-f209b1ba7eb7 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.382s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2195.219896] env[63279]: DEBUG nova.compute.manager [None req-d37d85e0-dc3a-4353-a13a-f209b1ba7eb7 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] [instance: 14c698c8-7459-4843-bb19-f915742e3e53] Start building networks asynchronously for instance. 
{{(pid=63279) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 2195.223194] env[63279]: DEBUG oslo_concurrency.lockutils [None req-f5adef54-fbc0-48f2-bee2-87da33052f2d tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 8.069s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2195.223641] env[63279]: DEBUG nova.objects.instance [None req-f5adef54-fbc0-48f2-bee2-87da33052f2d tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Lazy-loading 'resources' on Instance uuid f2a68d73-49d6-4b38-aff1-c2eb850f2ca6 {{(pid=63279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2195.272297] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-f1b5e7c2-edce-4a9b-b5a8-d1c8f7da6eb4 tempest-ServersTestMultiNic-465204663 tempest-ServersTestMultiNic-465204663-project-member] [instance: 75f5b49f-14e7-4a8e-a0cb-b955edc13dd5] Unregistered the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2195.272297] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-f1b5e7c2-edce-4a9b-b5a8-d1c8f7da6eb4 tempest-ServersTestMultiNic-465204663 tempest-ServersTestMultiNic-465204663-project-member] [instance: 75f5b49f-14e7-4a8e-a0cb-b955edc13dd5] Deleting contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2195.272297] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-f1b5e7c2-edce-4a9b-b5a8-d1c8f7da6eb4 tempest-ServersTestMultiNic-465204663 tempest-ServersTestMultiNic-465204663-project-member] Deleting the datastore file [datastore1] 75f5b49f-14e7-4a8e-a0cb-b955edc13dd5 {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2195.272471] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-952ea042-f0b8-43e4-8ac1-66ae19db9887 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2195.280440] env[63279]: DEBUG oslo_vmware.api [None req-f1b5e7c2-edce-4a9b-b5a8-d1c8f7da6eb4 tempest-ServersTestMultiNic-465204663 tempest-ServersTestMultiNic-465204663-project-member] Waiting for the task: (returnval){ [ 2195.280440] env[63279]: value = "task-2087733" [ 2195.280440] env[63279]: _type = "Task" [ 2195.280440] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2195.288890] env[63279]: DEBUG oslo_vmware.api [None req-f1b5e7c2-edce-4a9b-b5a8-d1c8f7da6eb4 tempest-ServersTestMultiNic-465204663 tempest-ServersTestMultiNic-465204663-project-member] Task: {'id': task-2087733, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2195.325552] env[63279]: DEBUG oslo_vmware.api [None req-208396b7-55ea-4701-90ef-c6d3ea22cae9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Task: {'id': task-2087732, 'name': Destroy_Task, 'duration_secs': 1.509486} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2195.325917] env[63279]: INFO nova.virt.vmwareapi.vm_util [None req-208396b7-55ea-4701-90ef-c6d3ea22cae9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] [instance: a15141bc-424d-48ca-a6d5-c859a3639a0b] Destroyed the VM [ 2195.326186] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-208396b7-55ea-4701-90ef-c6d3ea22cae9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] [instance: a15141bc-424d-48ca-a6d5-c859a3639a0b] Deleting Snapshot of the VM instance {{(pid=63279) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 2195.326449] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-c0c5e438-503a-42db-b8cf-4a8106ce35c8 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2195.332476] env[63279]: DEBUG oslo_vmware.api [None req-208396b7-55ea-4701-90ef-c6d3ea22cae9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Waiting for the task: (returnval){ [ 2195.332476] env[63279]: value = "task-2087734" [ 2195.332476] env[63279]: _type = "Task" [ 2195.332476] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2195.341952] env[63279]: DEBUG oslo_vmware.api [None req-208396b7-55ea-4701-90ef-c6d3ea22cae9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Task: {'id': task-2087734, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2195.730831] env[63279]: DEBUG nova.compute.utils [None req-d37d85e0-dc3a-4353-a13a-f209b1ba7eb7 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Using /dev/sd instead of None {{(pid=63279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2195.732650] env[63279]: DEBUG nova.compute.manager [None req-d37d85e0-dc3a-4353-a13a-f209b1ba7eb7 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] [instance: 14c698c8-7459-4843-bb19-f915742e3e53] Allocating IP information in the background. 
{{(pid=63279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 2195.733112] env[63279]: DEBUG nova.network.neutron [None req-d37d85e0-dc3a-4353-a13a-f209b1ba7eb7 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] [instance: 14c698c8-7459-4843-bb19-f915742e3e53] allocate_for_instance() {{(pid=63279) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2195.780044] env[63279]: DEBUG nova.policy [None req-d37d85e0-dc3a-4353-a13a-f209b1ba7eb7 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '17fbc0127a5944ac933232873f282980', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '77ee2145dda94e2b85eeb7379ed98e26', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63279) authorize /opt/stack/nova/nova/policy.py:192}} [ 2195.793178] env[63279]: DEBUG oslo_vmware.api [None req-f1b5e7c2-edce-4a9b-b5a8-d1c8f7da6eb4 tempest-ServersTestMultiNic-465204663 tempest-ServersTestMultiNic-465204663-project-member] Task: {'id': task-2087733, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.240474} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2195.795478] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-f1b5e7c2-edce-4a9b-b5a8-d1c8f7da6eb4 tempest-ServersTestMultiNic-465204663 tempest-ServersTestMultiNic-465204663-project-member] Deleted the datastore file {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2195.795680] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-f1b5e7c2-edce-4a9b-b5a8-d1c8f7da6eb4 tempest-ServersTestMultiNic-465204663 tempest-ServersTestMultiNic-465204663-project-member] [instance: 75f5b49f-14e7-4a8e-a0cb-b955edc13dd5] Deleted contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2195.796010] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-f1b5e7c2-edce-4a9b-b5a8-d1c8f7da6eb4 tempest-ServersTestMultiNic-465204663 tempest-ServersTestMultiNic-465204663-project-member] [instance: 75f5b49f-14e7-4a8e-a0cb-b955edc13dd5] Instance destroyed {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2195.796677] env[63279]: INFO nova.compute.manager [None req-f1b5e7c2-edce-4a9b-b5a8-d1c8f7da6eb4 tempest-ServersTestMultiNic-465204663 tempest-ServersTestMultiNic-465204663-project-member] [instance: 75f5b49f-14e7-4a8e-a0cb-b955edc13dd5] Took 4.97 seconds to destroy the instance on the hypervisor. [ 2195.796949] env[63279]: DEBUG oslo.service.loopingcall [None req-f1b5e7c2-edce-4a9b-b5a8-d1c8f7da6eb4 tempest-ServersTestMultiNic-465204663 tempest-ServersTestMultiNic-465204663-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2195.798242] env[63279]: DEBUG nova.compute.manager [-] [instance: 75f5b49f-14e7-4a8e-a0cb-b955edc13dd5] Deallocating network for instance {{(pid=63279) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2195.798242] env[63279]: DEBUG nova.network.neutron [-] [instance: 75f5b49f-14e7-4a8e-a0cb-b955edc13dd5] deallocate_for_instance() {{(pid=63279) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2195.846095] env[63279]: DEBUG oslo_vmware.api [None req-208396b7-55ea-4701-90ef-c6d3ea22cae9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Task: {'id': task-2087734, 'name': RemoveSnapshot_Task} progress is 100%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2195.959143] env[63279]: DEBUG oslo_concurrency.lockutils [None req-deafd1a3-1917-46b5-a734-532a815b3d5e tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Acquiring lock "736ab3ac-b9e0-4f9e-885b-765ca7a92ed0" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2195.959421] env[63279]: DEBUG oslo_concurrency.lockutils [None req-deafd1a3-1917-46b5-a734-532a815b3d5e tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Lock "736ab3ac-b9e0-4f9e-885b-765ca7a92ed0" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2195.959605] env[63279]: DEBUG nova.compute.manager [None req-deafd1a3-1917-46b5-a734-532a815b3d5e tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: 736ab3ac-b9e0-4f9e-885b-765ca7a92ed0] Going to confirm migration 4 {{(pid=63279) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:5250}} [ 2196.002645] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4aaaccca-fc03-478f-9027-23947e861fe9 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2196.012308] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4d7a27b-9422-40f3-acd5-ab944fcf957a {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2196.049292] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc262b57-d4ff-439e-83b3-1ea33276ff0a {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2196.056726] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4b46988-d89c-4d2a-be2c-e1939a8ed4cb {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2196.070070] env[63279]: DEBUG nova.compute.provider_tree [None req-f5adef54-fbc0-48f2-bee2-87da33052f2d tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Inventory has not changed in ProviderTree for provider: 
0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2196.243065] env[63279]: DEBUG nova.compute.manager [None req-d37d85e0-dc3a-4353-a13a-f209b1ba7eb7 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] [instance: 14c698c8-7459-4843-bb19-f915742e3e53] Start building block device mappings for instance. {{(pid=63279) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 2196.294022] env[63279]: DEBUG nova.network.neutron [None req-d37d85e0-dc3a-4353-a13a-f209b1ba7eb7 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] [instance: 14c698c8-7459-4843-bb19-f915742e3e53] Successfully created port: 41a761e0-23d6-4305-8d20-d999d3f5b902 {{(pid=63279) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2196.343669] env[63279]: DEBUG oslo_vmware.api [None req-208396b7-55ea-4701-90ef-c6d3ea22cae9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Task: {'id': task-2087734, 'name': RemoveSnapshot_Task, 'duration_secs': 0.833828} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2196.343888] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-208396b7-55ea-4701-90ef-c6d3ea22cae9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] [instance: a15141bc-424d-48ca-a6d5-c859a3639a0b] Deleted Snapshot of the VM instance {{(pid=63279) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 2196.344209] env[63279]: DEBUG nova.compute.manager [None req-208396b7-55ea-4701-90ef-c6d3ea22cae9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] [instance: a15141bc-424d-48ca-a6d5-c859a3639a0b] Checking state {{(pid=63279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2196.345183] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27136df8-5449-4705-9692-32b8fe8cdadd {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2196.551804] env[63279]: DEBUG oslo_concurrency.lockutils [None req-deafd1a3-1917-46b5-a734-532a815b3d5e tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Acquiring lock "refresh_cache-736ab3ac-b9e0-4f9e-885b-765ca7a92ed0" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2196.552198] env[63279]: DEBUG oslo_concurrency.lockutils [None req-deafd1a3-1917-46b5-a734-532a815b3d5e tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Acquired lock "refresh_cache-736ab3ac-b9e0-4f9e-885b-765ca7a92ed0" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2196.552509] env[63279]: DEBUG nova.network.neutron [None req-deafd1a3-1917-46b5-a734-532a815b3d5e tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: 736ab3ac-b9e0-4f9e-885b-765ca7a92ed0] Building network info cache for instance {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2196.552828] env[63279]: DEBUG nova.objects.instance [None req-deafd1a3-1917-46b5-a734-532a815b3d5e 
tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Lazy-loading 'info_cache' on Instance uuid 736ab3ac-b9e0-4f9e-885b-765ca7a92ed0 {{(pid=63279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2196.576417] env[63279]: DEBUG nova.scheduler.client.report [None req-f5adef54-fbc0-48f2-bee2-87da33052f2d tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Inventory has not changed for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2196.649749] env[63279]: DEBUG nova.compute.manager [req-63874426-a6f0-4f39-beee-0689f3204721 req-6a50eb65-ad35-4e9c-8222-fed85ccb0924 service nova] [instance: 75f5b49f-14e7-4a8e-a0cb-b955edc13dd5] Received event network-vif-deleted-2053aee1-c71d-4f3d-99d3-989823b04e65 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2196.650333] env[63279]: INFO nova.compute.manager [req-63874426-a6f0-4f39-beee-0689f3204721 req-6a50eb65-ad35-4e9c-8222-fed85ccb0924 service nova] [instance: 75f5b49f-14e7-4a8e-a0cb-b955edc13dd5] Neutron deleted interface 2053aee1-c71d-4f3d-99d3-989823b04e65; detaching it from the instance and deleting it from the info cache [ 2196.650333] env[63279]: DEBUG nova.network.neutron [req-63874426-a6f0-4f39-beee-0689f3204721 req-6a50eb65-ad35-4e9c-8222-fed85ccb0924 service nova] [instance: 75f5b49f-14e7-4a8e-a0cb-b955edc13dd5] Updating instance_info_cache with network_info: [{"id": "f5f17197-3d49-4ae2-a895-9bab23e07301", "address": "fa:16:3e:02:12:0c", "network": {"id": "c70e9607-e52f-42ab-baea-1bc752ec674b", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-438199453", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.242", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "161d1caf722349c188ca8fc647989c05", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f67a2790-f2b0-4d03-b606-0bfaee7a4229", "external-id": "nsx-vlan-transportzone-187", "segmentation_id": 187, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf5f17197-3d", "ovs_interfaceid": "f5f17197-3d49-4ae2-a895-9bab23e07301", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "06151b64-defb-4dca-a89e-f4d66bcbeb62", "address": "fa:16:3e:51:a2:9a", "network": {"id": "c70e9607-e52f-42ab-baea-1bc752ec674b", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-438199453", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.46", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], 
"routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "161d1caf722349c188ca8fc647989c05", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f67a2790-f2b0-4d03-b606-0bfaee7a4229", "external-id": "nsx-vlan-transportzone-187", "segmentation_id": 187, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap06151b64-de", "ovs_interfaceid": "06151b64-defb-4dca-a89e-f4d66bcbeb62", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2196.838444] env[63279]: DEBUG nova.compute.manager [req-37917900-7fff-4663-b499-f3b4641f6637 req-85ec4175-4c83-4b1a-b96c-1531f6b37bb1 service nova] [instance: 27e2917d-3cd0-4ad3-ab65-f85f7d97225f] Received event network-vif-plugged-0f4606f9-e916-43fe-8ad4-57247bfb98a2 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2196.838444] env[63279]: DEBUG oslo_concurrency.lockutils [req-37917900-7fff-4663-b499-f3b4641f6637 req-85ec4175-4c83-4b1a-b96c-1531f6b37bb1 service nova] Acquiring lock "27e2917d-3cd0-4ad3-ab65-f85f7d97225f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2196.838444] env[63279]: DEBUG oslo_concurrency.lockutils [req-37917900-7fff-4663-b499-f3b4641f6637 req-85ec4175-4c83-4b1a-b96c-1531f6b37bb1 service nova] Lock "27e2917d-3cd0-4ad3-ab65-f85f7d97225f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2196.839040] env[63279]: DEBUG oslo_concurrency.lockutils [req-37917900-7fff-4663-b499-f3b4641f6637 req-85ec4175-4c83-4b1a-b96c-1531f6b37bb1 service nova] Lock "27e2917d-3cd0-4ad3-ab65-f85f7d97225f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2196.839040] env[63279]: DEBUG nova.compute.manager [req-37917900-7fff-4663-b499-f3b4641f6637 req-85ec4175-4c83-4b1a-b96c-1531f6b37bb1 service nova] [instance: 27e2917d-3cd0-4ad3-ab65-f85f7d97225f] No waiting events found dispatching network-vif-plugged-0f4606f9-e916-43fe-8ad4-57247bfb98a2 {{(pid=63279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 2196.839040] env[63279]: WARNING nova.compute.manager [req-37917900-7fff-4663-b499-f3b4641f6637 req-85ec4175-4c83-4b1a-b96c-1531f6b37bb1 service nova] [instance: 27e2917d-3cd0-4ad3-ab65-f85f7d97225f] Received unexpected event network-vif-plugged-0f4606f9-e916-43fe-8ad4-57247bfb98a2 for instance with vm_state building and task_state spawning. 
[ 2196.863197] env[63279]: INFO nova.compute.manager [None req-208396b7-55ea-4701-90ef-c6d3ea22cae9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] [instance: a15141bc-424d-48ca-a6d5-c859a3639a0b] Shelve offloading [ 2196.931608] env[63279]: DEBUG nova.network.neutron [None req-73bbe86f-3fab-4cd0-9b07-18088e80054f tempest-ServersTestFqdnHostnames-1241255271 tempest-ServersTestFqdnHostnames-1241255271-project-member] [instance: 27e2917d-3cd0-4ad3-ab65-f85f7d97225f] Successfully updated port: 0f4606f9-e916-43fe-8ad4-57247bfb98a2 {{(pid=63279) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2197.045773] env[63279]: DEBUG nova.network.neutron [None req-90a4661b-87b3-4198-ac53-8b946588d520 tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] [instance: 9b98a316-71da-45fb-b895-553f179fe7d9] Successfully updated port: 162a4844-743a-4cba-b137-f35170a3d072 {{(pid=63279) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2197.085751] env[63279]: DEBUG oslo_concurrency.lockutils [None req-f5adef54-fbc0-48f2-bee2-87da33052f2d tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.862s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2197.090427] env[63279]: DEBUG oslo_concurrency.lockutils [None req-c03c3095-5bce-4baa-893b-4ed2dab285cd tempest-ServersTestBootFromVolume-1861685268 tempest-ServersTestBootFromVolume-1861685268-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 9.160s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2197.091400] env[63279]: DEBUG nova.objects.instance [None req-c03c3095-5bce-4baa-893b-4ed2dab285cd tempest-ServersTestBootFromVolume-1861685268 tempest-ServersTestBootFromVolume-1861685268-project-member] Lazy-loading 'resources' on Instance uuid 861e4118-6134-40cf-91cb-865b6ee9f347 {{(pid=63279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2197.111224] env[63279]: INFO nova.scheduler.client.report [None req-f5adef54-fbc0-48f2-bee2-87da33052f2d tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Deleted allocations for instance f2a68d73-49d6-4b38-aff1-c2eb850f2ca6 [ 2197.154849] env[63279]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e192c02f-832c-4600-99ee-4218268c75d4 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2197.166910] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2845c354-07bc-4d7b-9732-a890130c7241 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2197.194658] env[63279]: DEBUG nova.compute.manager [req-63874426-a6f0-4f39-beee-0689f3204721 req-6a50eb65-ad35-4e9c-8222-fed85ccb0924 service nova] [instance: 75f5b49f-14e7-4a8e-a0cb-b955edc13dd5] Detach interface failed, port_id=2053aee1-c71d-4f3d-99d3-989823b04e65, reason: Instance 75f5b49f-14e7-4a8e-a0cb-b955edc13dd5 could not be found. 
{{(pid=63279) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11484}} [ 2197.254780] env[63279]: DEBUG nova.compute.manager [None req-d37d85e0-dc3a-4353-a13a-f209b1ba7eb7 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] [instance: 14c698c8-7459-4843-bb19-f915742e3e53] Start spawning the instance on the hypervisor. {{(pid=63279) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 2197.279050] env[63279]: DEBUG nova.virt.hardware [None req-d37d85e0-dc3a-4353-a13a-f209b1ba7eb7 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-13T17:30:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-13T17:30:01Z,direct_url=,disk_format='vmdk',id=30887889-e45b-4f67-8b3c-16216e594a90,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a935e86eee0a4c38adfe0367d2097a61',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-13T17:30:02Z,virtual_size=,visibility=), allow threads: False {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2197.279298] env[63279]: DEBUG nova.virt.hardware [None req-d37d85e0-dc3a-4353-a13a-f209b1ba7eb7 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Flavor limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2197.279451] env[63279]: DEBUG nova.virt.hardware [None req-d37d85e0-dc3a-4353-a13a-f209b1ba7eb7 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Image limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2197.279702] env[63279]: DEBUG nova.virt.hardware [None req-d37d85e0-dc3a-4353-a13a-f209b1ba7eb7 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Flavor pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2197.279877] env[63279]: DEBUG nova.virt.hardware [None req-d37d85e0-dc3a-4353-a13a-f209b1ba7eb7 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Image pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2197.280044] env[63279]: DEBUG nova.virt.hardware [None req-d37d85e0-dc3a-4353-a13a-f209b1ba7eb7 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2197.280260] env[63279]: DEBUG nova.virt.hardware [None req-d37d85e0-dc3a-4353-a13a-f209b1ba7eb7 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 2197.280421] env[63279]: DEBUG nova.virt.hardware [None req-d37d85e0-dc3a-4353-a13a-f209b1ba7eb7 
tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 2197.280597] env[63279]: DEBUG nova.virt.hardware [None req-d37d85e0-dc3a-4353-a13a-f209b1ba7eb7 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Got 1 possible topologies {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2197.280867] env[63279]: DEBUG nova.virt.hardware [None req-d37d85e0-dc3a-4353-a13a-f209b1ba7eb7 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2197.281122] env[63279]: DEBUG nova.virt.hardware [None req-d37d85e0-dc3a-4353-a13a-f209b1ba7eb7 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2197.281969] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1ba68f9-85dc-42d7-90dc-b95d25ea6d62 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2197.292891] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4287426b-93ec-44a4-8eac-3ec3b910fef5 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2197.370024] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-208396b7-55ea-4701-90ef-c6d3ea22cae9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] [instance: a15141bc-424d-48ca-a6d5-c859a3639a0b] Powering off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2197.370281] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a591e6db-f5af-486e-9a61-14fcfbb23ba6 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2197.377839] env[63279]: DEBUG oslo_vmware.api [None req-208396b7-55ea-4701-90ef-c6d3ea22cae9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Waiting for the task: (returnval){ [ 2197.377839] env[63279]: value = "task-2087735" [ 2197.377839] env[63279]: _type = "Task" [ 2197.377839] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2197.385535] env[63279]: DEBUG oslo_vmware.api [None req-208396b7-55ea-4701-90ef-c6d3ea22cae9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Task: {'id': task-2087735, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2197.431765] env[63279]: DEBUG oslo_concurrency.lockutils [None req-73bbe86f-3fab-4cd0-9b07-18088e80054f tempest-ServersTestFqdnHostnames-1241255271 tempest-ServersTestFqdnHostnames-1241255271-project-member] Acquiring lock "refresh_cache-27e2917d-3cd0-4ad3-ab65-f85f7d97225f" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2197.431957] env[63279]: DEBUG oslo_concurrency.lockutils [None req-73bbe86f-3fab-4cd0-9b07-18088e80054f tempest-ServersTestFqdnHostnames-1241255271 tempest-ServersTestFqdnHostnames-1241255271-project-member] Acquired lock "refresh_cache-27e2917d-3cd0-4ad3-ab65-f85f7d97225f" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2197.432141] env[63279]: DEBUG nova.network.neutron [None req-73bbe86f-3fab-4cd0-9b07-18088e80054f tempest-ServersTestFqdnHostnames-1241255271 tempest-ServersTestFqdnHostnames-1241255271-project-member] [instance: 27e2917d-3cd0-4ad3-ab65-f85f7d97225f] Building network info cache for instance {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2197.547936] env[63279]: DEBUG oslo_concurrency.lockutils [None req-90a4661b-87b3-4198-ac53-8b946588d520 tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] Acquiring lock "refresh_cache-9b98a316-71da-45fb-b895-553f179fe7d9" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2197.548122] env[63279]: DEBUG oslo_concurrency.lockutils [None req-90a4661b-87b3-4198-ac53-8b946588d520 tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] Acquired lock "refresh_cache-9b98a316-71da-45fb-b895-553f179fe7d9" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2197.548278] env[63279]: DEBUG nova.network.neutron [None req-90a4661b-87b3-4198-ac53-8b946588d520 tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] [instance: 9b98a316-71da-45fb-b895-553f179fe7d9] Building network info cache for instance {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2197.594636] env[63279]: DEBUG nova.network.neutron [-] [instance: 75f5b49f-14e7-4a8e-a0cb-b955edc13dd5] Updating instance_info_cache with network_info: [] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2197.620789] env[63279]: DEBUG oslo_concurrency.lockutils [None req-f5adef54-fbc0-48f2-bee2-87da33052f2d tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Lock "f2a68d73-49d6-4b38-aff1-c2eb850f2ca6" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 14.254s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2197.815078] env[63279]: DEBUG nova.network.neutron [None req-deafd1a3-1917-46b5-a734-532a815b3d5e tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: 736ab3ac-b9e0-4f9e-885b-765ca7a92ed0] Updating instance_info_cache with network_info: [{"id": "5a61eb6f-d235-4f00-a65b-76f20c2c3d8d", "address": "fa:16:3e:b2:2a:f4", "network": {"id": "d673eabe-0173-4c69-9163-88f4d673ca51", "bridge": "br-int", "label": 
"tempest-ServerDiskConfigTestJSON-1798523777-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7c98de1240f14b058e58f6a707096ef1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a555680e-4721-4509-97e4-ced9dc17c13e", "external-id": "nsx-vlan-transportzone-4", "segmentation_id": 4, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5a61eb6f-d2", "ovs_interfaceid": "5a61eb6f-d235-4f00-a65b-76f20c2c3d8d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2197.824178] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-afda14ef-f489-4fe7-a95a-10f53f210a49 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2197.833529] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5076fff-ba80-422e-97a3-07acbf164ea4 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2197.864479] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78aa8b37-fce0-4b0e-83aa-5388e727d978 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2197.876496] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-895de4e4-82f1-42a6-82f0-e95e524f7ada {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2197.896797] env[63279]: DEBUG nova.compute.provider_tree [None req-c03c3095-5bce-4baa-893b-4ed2dab285cd tempest-ServersTestBootFromVolume-1861685268 tempest-ServersTestBootFromVolume-1861685268-project-member] Inventory has not changed in ProviderTree for provider: 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2197.901782] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-208396b7-55ea-4701-90ef-c6d3ea22cae9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] [instance: a15141bc-424d-48ca-a6d5-c859a3639a0b] VM already powered off {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 2197.901991] env[63279]: DEBUG nova.compute.manager [None req-208396b7-55ea-4701-90ef-c6d3ea22cae9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] [instance: a15141bc-424d-48ca-a6d5-c859a3639a0b] Checking state {{(pid=63279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2197.902722] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9370e2bf-d827-4db0-b308-21f21b80b23c {{(pid=63279) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2197.910106] env[63279]: DEBUG oslo_concurrency.lockutils [None req-208396b7-55ea-4701-90ef-c6d3ea22cae9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Acquiring lock "refresh_cache-a15141bc-424d-48ca-a6d5-c859a3639a0b" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2197.910106] env[63279]: DEBUG oslo_concurrency.lockutils [None req-208396b7-55ea-4701-90ef-c6d3ea22cae9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Acquired lock "refresh_cache-a15141bc-424d-48ca-a6d5-c859a3639a0b" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2197.910260] env[63279]: DEBUG nova.network.neutron [None req-208396b7-55ea-4701-90ef-c6d3ea22cae9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] [instance: a15141bc-424d-48ca-a6d5-c859a3639a0b] Building network info cache for instance {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2197.972254] env[63279]: DEBUG nova.network.neutron [None req-73bbe86f-3fab-4cd0-9b07-18088e80054f tempest-ServersTestFqdnHostnames-1241255271 tempest-ServersTestFqdnHostnames-1241255271-project-member] [instance: 27e2917d-3cd0-4ad3-ab65-f85f7d97225f] Instance cache missing network info. {{(pid=63279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2198.095972] env[63279]: DEBUG nova.network.neutron [None req-90a4661b-87b3-4198-ac53-8b946588d520 tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] [instance: 9b98a316-71da-45fb-b895-553f179fe7d9] Instance cache missing network info. {{(pid=63279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2198.099343] env[63279]: INFO nova.compute.manager [-] [instance: 75f5b49f-14e7-4a8e-a0cb-b955edc13dd5] Took 2.30 seconds to deallocate network for instance. 
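(Editor's note, illustrative only.) The recurring "Waiting for the task ... progress is N%" lines (task-2087735, task-2087738, ...) come from a poll loop over vCenter task state. The sketch below shows the general polling pattern only; it is not the oslo.vmware API, and the Task class, poll interval, and fetch_state callable are assumptions for illustration.

import time
from dataclasses import dataclass

@dataclass
class Task:
    id: str
    name: str
    progress: int = 0          # percent complete
    state: str = "running"     # "running", "success" or "error"

def wait_for_task(fetch_state, task_id: str,
                  interval: float = 0.5, timeout: float = 300.0) -> Task:
    """Poll fetch_state(task_id) until the task succeeds, fails or times out."""
    deadline = time.monotonic() + timeout
    while True:
        task = fetch_state(task_id)
        print(f"Task: {{'id': {task.id!r}, 'name': {task.name!r}}} "
              f"progress is {task.progress}%.")
        if task.state == "success":
            return task
        if task.state == "error":
            raise RuntimeError(f"task {task.id} failed")
        if time.monotonic() > deadline:
            raise TimeoutError(f"task {task.id} did not complete within {timeout}s")
        time.sleep(interval)

if __name__ == "__main__":
    # Fake backend: first poll reports 0% running, second poll reports success.
    states = iter([Task("task-2087735", "PowerOffVM_Task", 0),
                   Task("task-2087735", "PowerOffVM_Task", 100, "success")])
    print(wait_for_task(lambda _id: next(states), "task-2087735", interval=0.01))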
[ 2198.210813] env[63279]: DEBUG nova.network.neutron [None req-73bbe86f-3fab-4cd0-9b07-18088e80054f tempest-ServersTestFqdnHostnames-1241255271 tempest-ServersTestFqdnHostnames-1241255271-project-member] [instance: 27e2917d-3cd0-4ad3-ab65-f85f7d97225f] Updating instance_info_cache with network_info: [{"id": "0f4606f9-e916-43fe-8ad4-57247bfb98a2", "address": "fa:16:3e:2b:f5:46", "network": {"id": "5e9abed5-377b-4644-a91c-52b6e1cc7c00", "bridge": "br-int", "label": "tempest-ServersTestFqdnHostnames-235825661-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "421b189f81304db9b91f30c710dc2f30", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a4fc0575-c8e3-4f3c-b2e1-e10ac2d0cc1a", "external-id": "nsx-vlan-transportzone-256", "segmentation_id": 256, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0f4606f9-e9", "ovs_interfaceid": "0f4606f9-e916-43fe-8ad4-57247bfb98a2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2198.285271] env[63279]: DEBUG nova.network.neutron [None req-90a4661b-87b3-4198-ac53-8b946588d520 tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] [instance: 9b98a316-71da-45fb-b895-553f179fe7d9] Updating instance_info_cache with network_info: [{"id": "162a4844-743a-4cba-b137-f35170a3d072", "address": "fa:16:3e:a1:38:39", "network": {"id": "7b4d0149-03c5-4c40-ba16-d705499cd558", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-1026068065-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3d9c0a9c34ba408c829c0b50f3592bb2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aaf1b231-3660-4453-b4f3-44d825b9a5dd", "external-id": "nsx-vlan-transportzone-6", "segmentation_id": 6, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap162a4844-74", "ovs_interfaceid": "162a4844-743a-4cba-b137-f35170a3d072", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2198.318219] env[63279]: DEBUG oslo_concurrency.lockutils [None req-deafd1a3-1917-46b5-a734-532a815b3d5e tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Releasing lock "refresh_cache-736ab3ac-b9e0-4f9e-885b-765ca7a92ed0" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2198.318493] 
env[63279]: DEBUG nova.objects.instance [None req-deafd1a3-1917-46b5-a734-532a815b3d5e tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Lazy-loading 'migration_context' on Instance uuid 736ab3ac-b9e0-4f9e-885b-765ca7a92ed0 {{(pid=63279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2198.335747] env[63279]: DEBUG nova.network.neutron [None req-d37d85e0-dc3a-4353-a13a-f209b1ba7eb7 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] [instance: 14c698c8-7459-4843-bb19-f915742e3e53] Successfully updated port: 41a761e0-23d6-4305-8d20-d999d3f5b902 {{(pid=63279) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2198.403033] env[63279]: DEBUG nova.scheduler.client.report [None req-c03c3095-5bce-4baa-893b-4ed2dab285cd tempest-ServersTestBootFromVolume-1861685268 tempest-ServersTestBootFromVolume-1861685268-project-member] Inventory has not changed for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2198.606364] env[63279]: DEBUG oslo_concurrency.lockutils [None req-f1b5e7c2-edce-4a9b-b5a8-d1c8f7da6eb4 tempest-ServersTestMultiNic-465204663 tempest-ServersTestMultiNic-465204663-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2198.627872] env[63279]: DEBUG nova.network.neutron [None req-208396b7-55ea-4701-90ef-c6d3ea22cae9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] [instance: a15141bc-424d-48ca-a6d5-c859a3639a0b] Updating instance_info_cache with network_info: [{"id": "cd0a8ed6-d1ee-4266-8bde-e866ac2873ce", "address": "fa:16:3e:fe:5c:41", "network": {"id": "4f906777-9da9-42b1-9146-359f04c7c47f", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-923457018-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.193", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "131db3d2daa24712b6e11592cf789b33", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "abe48956-848a-4e1f-b1f1-a27baa5390b9", "external-id": "nsx-vlan-transportzone-238", "segmentation_id": 238, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcd0a8ed6-d1", "ovs_interfaceid": "cd0a8ed6-d1ee-4266-8bde-e866ac2873ce", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 
2198.685211] env[63279]: DEBUG nova.compute.manager [req-66c77b13-6345-49c3-bd20-b568f932d9fc req-07289717-c2c4-44d6-b65e-8b40c0ad30be service nova] [instance: 75f5b49f-14e7-4a8e-a0cb-b955edc13dd5] Received event network-vif-deleted-06151b64-defb-4dca-a89e-f4d66bcbeb62 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2198.685512] env[63279]: DEBUG nova.compute.manager [req-66c77b13-6345-49c3-bd20-b568f932d9fc req-07289717-c2c4-44d6-b65e-8b40c0ad30be service nova] [instance: 75f5b49f-14e7-4a8e-a0cb-b955edc13dd5] Received event network-vif-deleted-f5f17197-3d49-4ae2-a895-9bab23e07301 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2198.685638] env[63279]: DEBUG nova.compute.manager [req-66c77b13-6345-49c3-bd20-b568f932d9fc req-07289717-c2c4-44d6-b65e-8b40c0ad30be service nova] [instance: 14c698c8-7459-4843-bb19-f915742e3e53] Received event network-vif-plugged-41a761e0-23d6-4305-8d20-d999d3f5b902 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2198.685835] env[63279]: DEBUG oslo_concurrency.lockutils [req-66c77b13-6345-49c3-bd20-b568f932d9fc req-07289717-c2c4-44d6-b65e-8b40c0ad30be service nova] Acquiring lock "14c698c8-7459-4843-bb19-f915742e3e53-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2198.686078] env[63279]: DEBUG oslo_concurrency.lockutils [req-66c77b13-6345-49c3-bd20-b568f932d9fc req-07289717-c2c4-44d6-b65e-8b40c0ad30be service nova] Lock "14c698c8-7459-4843-bb19-f915742e3e53-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2198.686266] env[63279]: DEBUG oslo_concurrency.lockutils [req-66c77b13-6345-49c3-bd20-b568f932d9fc req-07289717-c2c4-44d6-b65e-8b40c0ad30be service nova] Lock "14c698c8-7459-4843-bb19-f915742e3e53-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2198.686434] env[63279]: DEBUG nova.compute.manager [req-66c77b13-6345-49c3-bd20-b568f932d9fc req-07289717-c2c4-44d6-b65e-8b40c0ad30be service nova] [instance: 14c698c8-7459-4843-bb19-f915742e3e53] No waiting events found dispatching network-vif-plugged-41a761e0-23d6-4305-8d20-d999d3f5b902 {{(pid=63279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 2198.686602] env[63279]: WARNING nova.compute.manager [req-66c77b13-6345-49c3-bd20-b568f932d9fc req-07289717-c2c4-44d6-b65e-8b40c0ad30be service nova] [instance: 14c698c8-7459-4843-bb19-f915742e3e53] Received unexpected event network-vif-plugged-41a761e0-23d6-4305-8d20-d999d3f5b902 for instance with vm_state building and task_state spawning. 
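(Editor's note, illustrative only.) The "No waiting events found dispatching network-vif-plugged-..." and "Received unexpected event ..." lines above reflect a waiter/dispatch hand-off between the compute manager and Neutron notifications. The class below is a minimal stdlib sketch of that pattern with invented names; Nova's real implementation is nova.compute.manager.InstanceEvents.

import threading
from collections import defaultdict

class InstanceEventWaiters:
    def __init__(self):
        self._lock = threading.Lock()
        # instance_uuid -> {event_name: threading.Event}
        self._waiters = defaultdict(dict)

    def prepare_for_event(self, instance_uuid: str, event_name: str) -> threading.Event:
        """Register interest before starting the operation that triggers the event."""
        waiter = threading.Event()
        with self._lock:
            self._waiters[instance_uuid][event_name] = waiter
        return waiter

    def pop_instance_event(self, instance_uuid: str, event_name: str) -> None:
        """Dispatch an external (Neutron) event to its waiter, if any."""
        with self._lock:
            waiter = self._waiters.get(instance_uuid, {}).pop(event_name, None)
        if waiter is None:
            print(f"No waiting events found dispatching {event_name}; "
                  f"received unexpected event for instance {instance_uuid}")
        else:
            waiter.set()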
[ 2198.686767] env[63279]: DEBUG nova.compute.manager [req-66c77b13-6345-49c3-bd20-b568f932d9fc req-07289717-c2c4-44d6-b65e-8b40c0ad30be service nova] [instance: 14c698c8-7459-4843-bb19-f915742e3e53] Received event network-changed-41a761e0-23d6-4305-8d20-d999d3f5b902 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2198.686922] env[63279]: DEBUG nova.compute.manager [req-66c77b13-6345-49c3-bd20-b568f932d9fc req-07289717-c2c4-44d6-b65e-8b40c0ad30be service nova] [instance: 14c698c8-7459-4843-bb19-f915742e3e53] Refreshing instance network info cache due to event network-changed-41a761e0-23d6-4305-8d20-d999d3f5b902. {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11655}} [ 2198.687167] env[63279]: DEBUG oslo_concurrency.lockutils [req-66c77b13-6345-49c3-bd20-b568f932d9fc req-07289717-c2c4-44d6-b65e-8b40c0ad30be service nova] Acquiring lock "refresh_cache-14c698c8-7459-4843-bb19-f915742e3e53" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2198.687313] env[63279]: DEBUG oslo_concurrency.lockutils [req-66c77b13-6345-49c3-bd20-b568f932d9fc req-07289717-c2c4-44d6-b65e-8b40c0ad30be service nova] Acquired lock "refresh_cache-14c698c8-7459-4843-bb19-f915742e3e53" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2198.687475] env[63279]: DEBUG nova.network.neutron [req-66c77b13-6345-49c3-bd20-b568f932d9fc req-07289717-c2c4-44d6-b65e-8b40c0ad30be service nova] [instance: 14c698c8-7459-4843-bb19-f915742e3e53] Refreshing network info cache for port 41a761e0-23d6-4305-8d20-d999d3f5b902 {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2198.713366] env[63279]: DEBUG oslo_concurrency.lockutils [None req-73bbe86f-3fab-4cd0-9b07-18088e80054f tempest-ServersTestFqdnHostnames-1241255271 tempest-ServersTestFqdnHostnames-1241255271-project-member] Releasing lock "refresh_cache-27e2917d-3cd0-4ad3-ab65-f85f7d97225f" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2198.713668] env[63279]: DEBUG nova.compute.manager [None req-73bbe86f-3fab-4cd0-9b07-18088e80054f tempest-ServersTestFqdnHostnames-1241255271 tempest-ServersTestFqdnHostnames-1241255271-project-member] [instance: 27e2917d-3cd0-4ad3-ab65-f85f7d97225f] Instance network_info: |[{"id": "0f4606f9-e916-43fe-8ad4-57247bfb98a2", "address": "fa:16:3e:2b:f5:46", "network": {"id": "5e9abed5-377b-4644-a91c-52b6e1cc7c00", "bridge": "br-int", "label": "tempest-ServersTestFqdnHostnames-235825661-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "421b189f81304db9b91f30c710dc2f30", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a4fc0575-c8e3-4f3c-b2e1-e10ac2d0cc1a", "external-id": "nsx-vlan-transportzone-256", "segmentation_id": 256, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0f4606f9-e9", "ovs_interfaceid": "0f4606f9-e916-43fe-8ad4-57247bfb98a2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, 
"preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 2198.714069] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-73bbe86f-3fab-4cd0-9b07-18088e80054f tempest-ServersTestFqdnHostnames-1241255271 tempest-ServersTestFqdnHostnames-1241255271-project-member] [instance: 27e2917d-3cd0-4ad3-ab65-f85f7d97225f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:2b:f5:46', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a4fc0575-c8e3-4f3c-b2e1-e10ac2d0cc1a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '0f4606f9-e916-43fe-8ad4-57247bfb98a2', 'vif_model': 'vmxnet3'}] {{(pid=63279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2198.721527] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-73bbe86f-3fab-4cd0-9b07-18088e80054f tempest-ServersTestFqdnHostnames-1241255271 tempest-ServersTestFqdnHostnames-1241255271-project-member] Creating folder: Project (421b189f81304db9b91f30c710dc2f30). Parent ref: group-v427491. {{(pid=63279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 2198.721997] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-347e5cca-0659-4d07-8c42-2694178c4ed3 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2198.733610] env[63279]: INFO nova.virt.vmwareapi.vm_util [None req-73bbe86f-3fab-4cd0-9b07-18088e80054f tempest-ServersTestFqdnHostnames-1241255271 tempest-ServersTestFqdnHostnames-1241255271-project-member] Created folder: Project (421b189f81304db9b91f30c710dc2f30) in parent group-v427491. [ 2198.733800] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-73bbe86f-3fab-4cd0-9b07-18088e80054f tempest-ServersTestFqdnHostnames-1241255271 tempest-ServersTestFqdnHostnames-1241255271-project-member] Creating folder: Instances. Parent ref: group-v427723. {{(pid=63279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 2198.734034] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-352b52c9-9d40-4386-882b-c5cac0b9dbf7 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2198.746098] env[63279]: INFO nova.virt.vmwareapi.vm_util [None req-73bbe86f-3fab-4cd0-9b07-18088e80054f tempest-ServersTestFqdnHostnames-1241255271 tempest-ServersTestFqdnHostnames-1241255271-project-member] Created folder: Instances in parent group-v427723. [ 2198.746347] env[63279]: DEBUG oslo.service.loopingcall [None req-73bbe86f-3fab-4cd0-9b07-18088e80054f tempest-ServersTestFqdnHostnames-1241255271 tempest-ServersTestFqdnHostnames-1241255271-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2198.746534] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 27e2917d-3cd0-4ad3-ab65-f85f7d97225f] Creating VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2198.746739] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e12e8e49-c9de-469b-94f1-fd08521912bc {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2198.765501] env[63279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2198.765501] env[63279]: value = "task-2087738" [ 2198.765501] env[63279]: _type = "Task" [ 2198.765501] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2198.773073] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087738, 'name': CreateVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2198.787640] env[63279]: DEBUG oslo_concurrency.lockutils [None req-90a4661b-87b3-4198-ac53-8b946588d520 tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] Releasing lock "refresh_cache-9b98a316-71da-45fb-b895-553f179fe7d9" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2198.787942] env[63279]: DEBUG nova.compute.manager [None req-90a4661b-87b3-4198-ac53-8b946588d520 tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] [instance: 9b98a316-71da-45fb-b895-553f179fe7d9] Instance network_info: |[{"id": "162a4844-743a-4cba-b137-f35170a3d072", "address": "fa:16:3e:a1:38:39", "network": {"id": "7b4d0149-03c5-4c40-ba16-d705499cd558", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-1026068065-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3d9c0a9c34ba408c829c0b50f3592bb2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aaf1b231-3660-4453-b4f3-44d825b9a5dd", "external-id": "nsx-vlan-transportzone-6", "segmentation_id": 6, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap162a4844-74", "ovs_interfaceid": "162a4844-743a-4cba-b137-f35170a3d072", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 2198.788362] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-90a4661b-87b3-4198-ac53-8b946588d520 tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] [instance: 9b98a316-71da-45fb-b895-553f179fe7d9] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a1:38:39', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'aaf1b231-3660-4453-b4f3-44d825b9a5dd', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 
'162a4844-743a-4cba-b137-f35170a3d072', 'vif_model': 'vmxnet3'}] {{(pid=63279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2198.795804] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-90a4661b-87b3-4198-ac53-8b946588d520 tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] Creating folder: Project (3d9c0a9c34ba408c829c0b50f3592bb2). Parent ref: group-v427491. {{(pid=63279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 2198.796289] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9004ae8a-3f28-498f-afaf-a79f82d1bd34 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2198.806028] env[63279]: INFO nova.virt.vmwareapi.vm_util [None req-90a4661b-87b3-4198-ac53-8b946588d520 tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] Created folder: Project (3d9c0a9c34ba408c829c0b50f3592bb2) in parent group-v427491. [ 2198.806234] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-90a4661b-87b3-4198-ac53-8b946588d520 tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] Creating folder: Instances. Parent ref: group-v427726. {{(pid=63279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 2198.806468] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-788d10c2-b584-48b3-905a-4f3164d85949 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2198.817047] env[63279]: INFO nova.virt.vmwareapi.vm_util [None req-90a4661b-87b3-4198-ac53-8b946588d520 tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] Created folder: Instances in parent group-v427726. [ 2198.817047] env[63279]: DEBUG oslo.service.loopingcall [None req-90a4661b-87b3-4198-ac53-8b946588d520 tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2198.817047] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9b98a316-71da-45fb-b895-553f179fe7d9] Creating VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2198.817047] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-276ae286-5b55-401a-960c-f927c63e2a13 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2198.832593] env[63279]: DEBUG nova.objects.base [None req-deafd1a3-1917-46b5-a734-532a815b3d5e tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Object Instance<736ab3ac-b9e0-4f9e-885b-765ca7a92ed0> lazy-loaded attributes: info_cache,migration_context {{(pid=63279) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 2198.834427] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0664b261-c646-47df-9121-52f64bb19b11 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2198.839828] env[63279]: DEBUG oslo_concurrency.lockutils [None req-d37d85e0-dc3a-4353-a13a-f209b1ba7eb7 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Acquiring lock "refresh_cache-14c698c8-7459-4843-bb19-f915742e3e53" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2198.858492] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6c81ee0c-7d4a-4f3d-b0ef-80e9ab1a0cc1 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2198.861030] env[63279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2198.861030] env[63279]: value = "task-2087741" [ 2198.861030] env[63279]: _type = "Task" [ 2198.861030] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2198.867606] env[63279]: DEBUG oslo_vmware.api [None req-deafd1a3-1917-46b5-a734-532a815b3d5e tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Waiting for the task: (returnval){ [ 2198.867606] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]520c520f-7245-cf35-0f90-a0f1265ce0a5" [ 2198.867606] env[63279]: _type = "Task" [ 2198.867606] env[63279]: } to complete. 
{{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2198.896339] env[63279]: DEBUG nova.compute.manager [req-a14e7f19-5c65-4f58-ad65-01b60e2c48c2 req-381ae6b6-97d1-459e-994e-0bf0d0b599b7 service nova] [instance: 9b98a316-71da-45fb-b895-553f179fe7d9] Received event network-vif-plugged-162a4844-743a-4cba-b137-f35170a3d072 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2198.896339] env[63279]: DEBUG oslo_concurrency.lockutils [req-a14e7f19-5c65-4f58-ad65-01b60e2c48c2 req-381ae6b6-97d1-459e-994e-0bf0d0b599b7 service nova] Acquiring lock "9b98a316-71da-45fb-b895-553f179fe7d9-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2198.896339] env[63279]: DEBUG oslo_concurrency.lockutils [req-a14e7f19-5c65-4f58-ad65-01b60e2c48c2 req-381ae6b6-97d1-459e-994e-0bf0d0b599b7 service nova] Lock "9b98a316-71da-45fb-b895-553f179fe7d9-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2198.896339] env[63279]: DEBUG oslo_concurrency.lockutils [req-a14e7f19-5c65-4f58-ad65-01b60e2c48c2 req-381ae6b6-97d1-459e-994e-0bf0d0b599b7 service nova] Lock "9b98a316-71da-45fb-b895-553f179fe7d9-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2198.896339] env[63279]: DEBUG nova.compute.manager [req-a14e7f19-5c65-4f58-ad65-01b60e2c48c2 req-381ae6b6-97d1-459e-994e-0bf0d0b599b7 service nova] [instance: 9b98a316-71da-45fb-b895-553f179fe7d9] No waiting events found dispatching network-vif-plugged-162a4844-743a-4cba-b137-f35170a3d072 {{(pid=63279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 2198.896339] env[63279]: WARNING nova.compute.manager [req-a14e7f19-5c65-4f58-ad65-01b60e2c48c2 req-381ae6b6-97d1-459e-994e-0bf0d0b599b7 service nova] [instance: 9b98a316-71da-45fb-b895-553f179fe7d9] Received unexpected event network-vif-plugged-162a4844-743a-4cba-b137-f35170a3d072 for instance with vm_state building and task_state spawning. [ 2198.896339] env[63279]: DEBUG nova.compute.manager [req-a14e7f19-5c65-4f58-ad65-01b60e2c48c2 req-381ae6b6-97d1-459e-994e-0bf0d0b599b7 service nova] [instance: 27e2917d-3cd0-4ad3-ab65-f85f7d97225f] Received event network-changed-0f4606f9-e916-43fe-8ad4-57247bfb98a2 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2198.896339] env[63279]: DEBUG nova.compute.manager [req-a14e7f19-5c65-4f58-ad65-01b60e2c48c2 req-381ae6b6-97d1-459e-994e-0bf0d0b599b7 service nova] [instance: 27e2917d-3cd0-4ad3-ab65-f85f7d97225f] Refreshing instance network info cache due to event network-changed-0f4606f9-e916-43fe-8ad4-57247bfb98a2. 
{{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11655}} [ 2198.896339] env[63279]: DEBUG oslo_concurrency.lockutils [req-a14e7f19-5c65-4f58-ad65-01b60e2c48c2 req-381ae6b6-97d1-459e-994e-0bf0d0b599b7 service nova] Acquiring lock "refresh_cache-27e2917d-3cd0-4ad3-ab65-f85f7d97225f" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2198.896339] env[63279]: DEBUG oslo_concurrency.lockutils [req-a14e7f19-5c65-4f58-ad65-01b60e2c48c2 req-381ae6b6-97d1-459e-994e-0bf0d0b599b7 service nova] Acquired lock "refresh_cache-27e2917d-3cd0-4ad3-ab65-f85f7d97225f" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2198.896339] env[63279]: DEBUG nova.network.neutron [req-a14e7f19-5c65-4f58-ad65-01b60e2c48c2 req-381ae6b6-97d1-459e-994e-0bf0d0b599b7 service nova] [instance: 27e2917d-3cd0-4ad3-ab65-f85f7d97225f] Refreshing network info cache for port 0f4606f9-e916-43fe-8ad4-57247bfb98a2 {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2198.896339] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087741, 'name': CreateVM_Task} progress is 15%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2198.896339] env[63279]: DEBUG oslo_vmware.api [None req-deafd1a3-1917-46b5-a734-532a815b3d5e tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]520c520f-7245-cf35-0f90-a0f1265ce0a5, 'name': SearchDatastore_Task, 'duration_secs': 0.006462} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2198.896339] env[63279]: DEBUG oslo_concurrency.lockutils [None req-deafd1a3-1917-46b5-a734-532a815b3d5e tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2198.908624] env[63279]: DEBUG oslo_concurrency.lockutils [None req-c03c3095-5bce-4baa-893b-4ed2dab285cd tempest-ServersTestBootFromVolume-1861685268 tempest-ServersTestBootFromVolume-1861685268-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.818s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2198.911044] env[63279]: DEBUG oslo_concurrency.lockutils [None req-0e777e00-fd9d-4c85-a1de-a0c92a46f747 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 9.206s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2198.912784] env[63279]: INFO nova.compute.claims [None req-0e777e00-fd9d-4c85-a1de-a0c92a46f747 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: f18d3019-bf1f-4519-a824-7ca80458d793] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2198.964702] env[63279]: INFO nova.scheduler.client.report [None req-c03c3095-5bce-4baa-893b-4ed2dab285cd 
tempest-ServersTestBootFromVolume-1861685268 tempest-ServersTestBootFromVolume-1861685268-project-member] Deleted allocations for instance 861e4118-6134-40cf-91cb-865b6ee9f347 [ 2199.131837] env[63279]: DEBUG oslo_concurrency.lockutils [None req-208396b7-55ea-4701-90ef-c6d3ea22cae9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Releasing lock "refresh_cache-a15141bc-424d-48ca-a6d5-c859a3639a0b" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2199.256239] env[63279]: DEBUG nova.network.neutron [req-66c77b13-6345-49c3-bd20-b568f932d9fc req-07289717-c2c4-44d6-b65e-8b40c0ad30be service nova] [instance: 14c698c8-7459-4843-bb19-f915742e3e53] Instance cache missing network info. {{(pid=63279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2199.278942] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087738, 'name': CreateVM_Task, 'duration_secs': 0.379621} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2199.279334] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 27e2917d-3cd0-4ad3-ab65-f85f7d97225f] Created VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2199.280237] env[63279]: DEBUG oslo_concurrency.lockutils [None req-73bbe86f-3fab-4cd0-9b07-18088e80054f tempest-ServersTestFqdnHostnames-1241255271 tempest-ServersTestFqdnHostnames-1241255271-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2199.280448] env[63279]: DEBUG oslo_concurrency.lockutils [None req-73bbe86f-3fab-4cd0-9b07-18088e80054f tempest-ServersTestFqdnHostnames-1241255271 tempest-ServersTestFqdnHostnames-1241255271-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2199.280796] env[63279]: DEBUG oslo_concurrency.lockutils [None req-73bbe86f-3fab-4cd0-9b07-18088e80054f tempest-ServersTestFqdnHostnames-1241255271 tempest-ServersTestFqdnHostnames-1241255271-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2199.281181] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-49f3ea7d-1748-4da6-a152-8ab36f83ff02 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2199.286095] env[63279]: DEBUG oslo_vmware.api [None req-73bbe86f-3fab-4cd0-9b07-18088e80054f tempest-ServersTestFqdnHostnames-1241255271 tempest-ServersTestFqdnHostnames-1241255271-project-member] Waiting for the task: (returnval){ [ 2199.286095] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]529d25d6-2dfa-f15c-0094-a3ab5937bbfc" [ 2199.286095] env[63279]: _type = "Task" [ 2199.286095] env[63279]: } to complete. 
{{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2199.294065] env[63279]: DEBUG oslo_vmware.api [None req-73bbe86f-3fab-4cd0-9b07-18088e80054f tempest-ServersTestFqdnHostnames-1241255271 tempest-ServersTestFqdnHostnames-1241255271-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]529d25d6-2dfa-f15c-0094-a3ab5937bbfc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2199.372530] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087741, 'name': CreateVM_Task, 'duration_secs': 0.344481} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2199.372757] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9b98a316-71da-45fb-b895-553f179fe7d9] Created VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2199.376250] env[63279]: DEBUG oslo_concurrency.lockutils [None req-90a4661b-87b3-4198-ac53-8b946588d520 tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2199.416816] env[63279]: DEBUG nova.network.neutron [req-66c77b13-6345-49c3-bd20-b568f932d9fc req-07289717-c2c4-44d6-b65e-8b40c0ad30be service nova] [instance: 14c698c8-7459-4843-bb19-f915742e3e53] Updating instance_info_cache with network_info: [] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2199.474101] env[63279]: DEBUG oslo_concurrency.lockutils [None req-c03c3095-5bce-4baa-893b-4ed2dab285cd tempest-ServersTestBootFromVolume-1861685268 tempest-ServersTestBootFromVolume-1861685268-project-member] Lock "861e4118-6134-40cf-91cb-865b6ee9f347" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 18.560s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2199.695407] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-208396b7-55ea-4701-90ef-c6d3ea22cae9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] [instance: a15141bc-424d-48ca-a6d5-c859a3639a0b] Destroying instance {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2199.696333] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-135bf0aa-65ac-44f9-818b-73be16b04798 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2199.707367] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-208396b7-55ea-4701-90ef-c6d3ea22cae9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] [instance: a15141bc-424d-48ca-a6d5-c859a3639a0b] Unregistering the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2199.707987] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-16711dc6-1897-42df-91cb-f9fb87e1c6cc {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2199.721498] env[63279]: DEBUG 
nova.network.neutron [req-a14e7f19-5c65-4f58-ad65-01b60e2c48c2 req-381ae6b6-97d1-459e-994e-0bf0d0b599b7 service nova] [instance: 27e2917d-3cd0-4ad3-ab65-f85f7d97225f] Updated VIF entry in instance network info cache for port 0f4606f9-e916-43fe-8ad4-57247bfb98a2. {{(pid=63279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2199.721861] env[63279]: DEBUG nova.network.neutron [req-a14e7f19-5c65-4f58-ad65-01b60e2c48c2 req-381ae6b6-97d1-459e-994e-0bf0d0b599b7 service nova] [instance: 27e2917d-3cd0-4ad3-ab65-f85f7d97225f] Updating instance_info_cache with network_info: [{"id": "0f4606f9-e916-43fe-8ad4-57247bfb98a2", "address": "fa:16:3e:2b:f5:46", "network": {"id": "5e9abed5-377b-4644-a91c-52b6e1cc7c00", "bridge": "br-int", "label": "tempest-ServersTestFqdnHostnames-235825661-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "421b189f81304db9b91f30c710dc2f30", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a4fc0575-c8e3-4f3c-b2e1-e10ac2d0cc1a", "external-id": "nsx-vlan-transportzone-256", "segmentation_id": 256, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0f4606f9-e9", "ovs_interfaceid": "0f4606f9-e916-43fe-8ad4-57247bfb98a2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2199.798399] env[63279]: DEBUG oslo_vmware.api [None req-73bbe86f-3fab-4cd0-9b07-18088e80054f tempest-ServersTestFqdnHostnames-1241255271 tempest-ServersTestFqdnHostnames-1241255271-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]529d25d6-2dfa-f15c-0094-a3ab5937bbfc, 'name': SearchDatastore_Task, 'duration_secs': 0.021322} completed successfully. 
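The instance_info_cache dump above is a list of VIF dicts with nested network, subnet and IP structures. The sketch below trims one entry down to the fields involved (values copied from the cache dump for port 0f4606f9-e916-43fe-8ad4-57247bfb98a2) and shows how fixed IPs can be pulled out of such a list:

    # Trimmed-down network_info entry, shaped like the cache dumps above.
    network_info = [{
        "id": "0f4606f9-e916-43fe-8ad4-57247bfb98a2",
        "address": "fa:16:3e:2b:f5:46",
        "type": "ovs",
        "devname": "tap0f4606f9-e9",
        "network": {
            "bridge": "br-int",
            "subnets": [{
                "cidr": "192.168.128.0/28",
                "ips": [{"address": "192.168.128.10", "type": "fixed"}],
            }],
        },
    }]

    def fixed_ips(network_info):
        """Collect the fixed IPs from a Nova-style network_info list."""
        return [ip["address"]
                for vif in network_info
                for subnet in vif["network"]["subnets"]
                for ip in subnet["ips"]
                if ip.get("type") == "fixed"]

    print(fixed_ips(network_info))   # ['192.168.128.10']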
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2199.798709] env[63279]: DEBUG oslo_concurrency.lockutils [None req-73bbe86f-3fab-4cd0-9b07-18088e80054f tempest-ServersTestFqdnHostnames-1241255271 tempest-ServersTestFqdnHostnames-1241255271-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2199.798950] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-73bbe86f-3fab-4cd0-9b07-18088e80054f tempest-ServersTestFqdnHostnames-1241255271 tempest-ServersTestFqdnHostnames-1241255271-project-member] [instance: 27e2917d-3cd0-4ad3-ab65-f85f7d97225f] Processing image 30887889-e45b-4f67-8b3c-16216e594a90 {{(pid=63279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2199.799216] env[63279]: DEBUG oslo_concurrency.lockutils [None req-73bbe86f-3fab-4cd0-9b07-18088e80054f tempest-ServersTestFqdnHostnames-1241255271 tempest-ServersTestFqdnHostnames-1241255271-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2199.799367] env[63279]: DEBUG oslo_concurrency.lockutils [None req-73bbe86f-3fab-4cd0-9b07-18088e80054f tempest-ServersTestFqdnHostnames-1241255271 tempest-ServersTestFqdnHostnames-1241255271-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2199.799546] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-73bbe86f-3fab-4cd0-9b07-18088e80054f tempest-ServersTestFqdnHostnames-1241255271 tempest-ServersTestFqdnHostnames-1241255271-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2199.799867] env[63279]: DEBUG oslo_concurrency.lockutils [None req-90a4661b-87b3-4198-ac53-8b946588d520 tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2199.800351] env[63279]: DEBUG oslo_concurrency.lockutils [None req-90a4661b-87b3-4198-ac53-8b946588d520 tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2199.800593] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-606469d4-0551-4a15-baf1-f688c1fffe09 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2199.802466] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-150e4b68-07b5-410a-9b99-72c552aa26ae {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2199.807866] env[63279]: 
DEBUG oslo_vmware.api [None req-90a4661b-87b3-4198-ac53-8b946588d520 tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] Waiting for the task: (returnval){ [ 2199.807866] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]527d0e48-4633-e176-d789-ae45d960d879" [ 2199.807866] env[63279]: _type = "Task" [ 2199.807866] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2199.812061] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-73bbe86f-3fab-4cd0-9b07-18088e80054f tempest-ServersTestFqdnHostnames-1241255271 tempest-ServersTestFqdnHostnames-1241255271-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2199.812268] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-73bbe86f-3fab-4cd0-9b07-18088e80054f tempest-ServersTestFqdnHostnames-1241255271 tempest-ServersTestFqdnHostnames-1241255271-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2199.813280] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-68b37560-df39-403d-9b00-81476a29868d {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2199.818375] env[63279]: DEBUG oslo_vmware.api [None req-90a4661b-87b3-4198-ac53-8b946588d520 tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]527d0e48-4633-e176-d789-ae45d960d879, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2199.821490] env[63279]: DEBUG oslo_vmware.api [None req-73bbe86f-3fab-4cd0-9b07-18088e80054f tempest-ServersTestFqdnHostnames-1241255271 tempest-ServersTestFqdnHostnames-1241255271-project-member] Waiting for the task: (returnval){ [ 2199.821490] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]5235096d-a8aa-9008-2b04-15dc63507733" [ 2199.821490] env[63279]: _type = "Task" [ 2199.821490] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2199.828970] env[63279]: DEBUG oslo_vmware.api [None req-73bbe86f-3fab-4cd0-9b07-18088e80054f tempest-ServersTestFqdnHostnames-1241255271 tempest-ServersTestFqdnHostnames-1241255271-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]5235096d-a8aa-9008-2b04-15dc63507733, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2199.923836] env[63279]: DEBUG oslo_concurrency.lockutils [req-66c77b13-6345-49c3-bd20-b568f932d9fc req-07289717-c2c4-44d6-b65e-8b40c0ad30be service nova] Releasing lock "refresh_cache-14c698c8-7459-4843-bb19-f915742e3e53" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2199.931670] env[63279]: DEBUG oslo_concurrency.lockutils [None req-d37d85e0-dc3a-4353-a13a-f209b1ba7eb7 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Acquired lock "refresh_cache-14c698c8-7459-4843-bb19-f915742e3e53" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2199.931670] env[63279]: DEBUG nova.network.neutron [None req-d37d85e0-dc3a-4353-a13a-f209b1ba7eb7 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] [instance: 14c698c8-7459-4843-bb19-f915742e3e53] Building network info cache for instance {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2199.992851] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-208396b7-55ea-4701-90ef-c6d3ea22cae9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] [instance: a15141bc-424d-48ca-a6d5-c859a3639a0b] Unregistered the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2199.993101] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-208396b7-55ea-4701-90ef-c6d3ea22cae9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] [instance: a15141bc-424d-48ca-a6d5-c859a3639a0b] Deleting contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2199.993302] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-208396b7-55ea-4701-90ef-c6d3ea22cae9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Deleting the datastore file [datastore1] a15141bc-424d-48ca-a6d5-c859a3639a0b {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2199.996770] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a24ceb4e-449e-4212-b508-f09a4928a5eb {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2200.002686] env[63279]: DEBUG oslo_vmware.api [None req-208396b7-55ea-4701-90ef-c6d3ea22cae9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Waiting for the task: (returnval){ [ 2200.002686] env[63279]: value = "task-2087743" [ 2200.002686] env[63279]: _type = "Task" [ 2200.002686] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2200.012315] env[63279]: DEBUG oslo_vmware.api [None req-208396b7-55ea-4701-90ef-c6d3ea22cae9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Task: {'id': task-2087743, 'name': DeleteDatastoreFile_Task} progress is 0%. 
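Each "Waiting for the task: (returnval){...} to complete" block above is followed by periodic "progress is N%" polls until the vCenter task reports success. A simplified, self-contained version of that polling loop; fetch_task_info is a hypothetical callable standing in for the TaskInfo lookup, and this is not the oslo.vmware implementation itself:

    import logging
    import time

    LOG = logging.getLogger(__name__)

    def wait_until_done(fetch_task_info, poll_interval=0.5):
        # ``fetch_task_info`` returns an object with ``state`` ('running',
        # 'success' or 'error'), ``progress`` and ``error`` attributes.
        while True:
            info = fetch_task_info()
            if info.state == 'running':
                LOG.debug("progress is %s%%", info.progress or 0)
                time.sleep(poll_interval)
                continue
            if info.state == 'success':
                return info
            raise RuntimeError("task failed: %s" % info.error)

Passing a callable that flips from 'running' to 'success' after a few calls reproduces the progress/completion sequence seen in the log.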
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2200.161781] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25bd463f-dff4-48c8-8661-1fa37221cca9 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2200.169460] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0fa40d89-2a97-40d1-8206-a667b7486f2f {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2200.200650] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f902fcd9-7bd0-4c08-9050-0b366ab062d3 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2200.209066] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b38af63-453b-4580-a9fa-77f8c06fdc82 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2200.223020] env[63279]: DEBUG nova.compute.provider_tree [None req-0e777e00-fd9d-4c85-a1de-a0c92a46f747 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Inventory has not changed in ProviderTree for provider: 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2200.224586] env[63279]: DEBUG oslo_concurrency.lockutils [req-a14e7f19-5c65-4f58-ad65-01b60e2c48c2 req-381ae6b6-97d1-459e-994e-0bf0d0b599b7 service nova] Releasing lock "refresh_cache-27e2917d-3cd0-4ad3-ab65-f85f7d97225f" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2200.224850] env[63279]: DEBUG nova.compute.manager [req-a14e7f19-5c65-4f58-ad65-01b60e2c48c2 req-381ae6b6-97d1-459e-994e-0bf0d0b599b7 service nova] [instance: 9b98a316-71da-45fb-b895-553f179fe7d9] Received event network-changed-162a4844-743a-4cba-b137-f35170a3d072 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2200.225036] env[63279]: DEBUG nova.compute.manager [req-a14e7f19-5c65-4f58-ad65-01b60e2c48c2 req-381ae6b6-97d1-459e-994e-0bf0d0b599b7 service nova] [instance: 9b98a316-71da-45fb-b895-553f179fe7d9] Refreshing instance network info cache due to event network-changed-162a4844-743a-4cba-b137-f35170a3d072. 
{{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11655}} [ 2200.225251] env[63279]: DEBUG oslo_concurrency.lockutils [req-a14e7f19-5c65-4f58-ad65-01b60e2c48c2 req-381ae6b6-97d1-459e-994e-0bf0d0b599b7 service nova] Acquiring lock "refresh_cache-9b98a316-71da-45fb-b895-553f179fe7d9" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2200.225396] env[63279]: DEBUG oslo_concurrency.lockutils [req-a14e7f19-5c65-4f58-ad65-01b60e2c48c2 req-381ae6b6-97d1-459e-994e-0bf0d0b599b7 service nova] Acquired lock "refresh_cache-9b98a316-71da-45fb-b895-553f179fe7d9" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2200.225557] env[63279]: DEBUG nova.network.neutron [req-a14e7f19-5c65-4f58-ad65-01b60e2c48c2 req-381ae6b6-97d1-459e-994e-0bf0d0b599b7 service nova] [instance: 9b98a316-71da-45fb-b895-553f179fe7d9] Refreshing network info cache for port 162a4844-743a-4cba-b137-f35170a3d072 {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2200.319711] env[63279]: DEBUG oslo_vmware.api [None req-90a4661b-87b3-4198-ac53-8b946588d520 tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]527d0e48-4633-e176-d789-ae45d960d879, 'name': SearchDatastore_Task, 'duration_secs': 0.014856} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2200.320040] env[63279]: DEBUG oslo_concurrency.lockutils [None req-90a4661b-87b3-4198-ac53-8b946588d520 tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2200.320279] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-90a4661b-87b3-4198-ac53-8b946588d520 tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] [instance: 9b98a316-71da-45fb-b895-553f179fe7d9] Processing image 30887889-e45b-4f67-8b3c-16216e594a90 {{(pid=63279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2200.320489] env[63279]: DEBUG oslo_concurrency.lockutils [None req-90a4661b-87b3-4198-ac53-8b946588d520 tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2200.330886] env[63279]: DEBUG oslo_vmware.api [None req-73bbe86f-3fab-4cd0-9b07-18088e80054f tempest-ServersTestFqdnHostnames-1241255271 tempest-ServersTestFqdnHostnames-1241255271-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]5235096d-a8aa-9008-2b04-15dc63507733, 'name': SearchDatastore_Task, 'duration_secs': 0.021673} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2200.331612] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1e793d40-762c-4e70-8376-f9d68b2606db {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2200.336452] env[63279]: DEBUG oslo_vmware.api [None req-73bbe86f-3fab-4cd0-9b07-18088e80054f tempest-ServersTestFqdnHostnames-1241255271 tempest-ServersTestFqdnHostnames-1241255271-project-member] Waiting for the task: (returnval){ [ 2200.336452] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]523c8579-08d8-646c-9751-b47bf719a19c" [ 2200.336452] env[63279]: _type = "Task" [ 2200.336452] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2200.343838] env[63279]: DEBUG oslo_vmware.api [None req-73bbe86f-3fab-4cd0-9b07-18088e80054f tempest-ServersTestFqdnHostnames-1241255271 tempest-ServersTestFqdnHostnames-1241255271-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]523c8579-08d8-646c-9751-b47bf719a19c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2200.462602] env[63279]: DEBUG nova.network.neutron [None req-d37d85e0-dc3a-4353-a13a-f209b1ba7eb7 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] [instance: 14c698c8-7459-4843-bb19-f915742e3e53] Instance cache missing network info. {{(pid=63279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2200.512746] env[63279]: DEBUG oslo_vmware.api [None req-208396b7-55ea-4701-90ef-c6d3ea22cae9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Task: {'id': task-2087743, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.292966} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2200.513248] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-208396b7-55ea-4701-90ef-c6d3ea22cae9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Deleted the datastore file {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2200.514094] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-208396b7-55ea-4701-90ef-c6d3ea22cae9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] [instance: a15141bc-424d-48ca-a6d5-c859a3639a0b] Deleted contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2200.514190] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-208396b7-55ea-4701-90ef-c6d3ea22cae9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] [instance: a15141bc-424d-48ca-a6d5-c859a3639a0b] Instance destroyed {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2200.545371] env[63279]: INFO nova.scheduler.client.report [None req-208396b7-55ea-4701-90ef-c6d3ea22cae9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Deleted allocations for instance a15141bc-424d-48ca-a6d5-c859a3639a0b [ 2200.615728] env[63279]: DEBUG nova.network.neutron [None req-d37d85e0-dc3a-4353-a13a-f209b1ba7eb7 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] [instance: 14c698c8-7459-4843-bb19-f915742e3e53] Updating instance_info_cache with network_info: [{"id": "41a761e0-23d6-4305-8d20-d999d3f5b902", "address": "fa:16:3e:82:ad:79", "network": {"id": "6d7c4c21-3389-4688-b373-329b2328f275", "bridge": "br-int", "label": "tempest-ImagesTestJSON-311425692-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "77ee2145dda94e2b85eeb7379ed98e26", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0df968ae-c1ef-4009-a0f4-6f2e799c2fda", "external-id": "nsx-vlan-transportzone-864", "segmentation_id": 864, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap41a761e0-23", "ovs_interfaceid": "41a761e0-23d6-4305-8d20-d999d3f5b902", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2200.712709] env[63279]: DEBUG nova.compute.manager [req-9e6305a8-795f-44ac-9586-15824c70ab2e req-87333b5a-90af-4474-af2c-57c34a048c9f service nova] [instance: a15141bc-424d-48ca-a6d5-c859a3639a0b] Received event network-vif-unplugged-cd0a8ed6-d1ee-4266-8bde-e866ac2873ce {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2200.712964] env[63279]: DEBUG oslo_concurrency.lockutils [req-9e6305a8-795f-44ac-9586-15824c70ab2e req-87333b5a-90af-4474-af2c-57c34a048c9f service nova] Acquiring lock 
"a15141bc-424d-48ca-a6d5-c859a3639a0b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2200.713162] env[63279]: DEBUG oslo_concurrency.lockutils [req-9e6305a8-795f-44ac-9586-15824c70ab2e req-87333b5a-90af-4474-af2c-57c34a048c9f service nova] Lock "a15141bc-424d-48ca-a6d5-c859a3639a0b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2200.713422] env[63279]: DEBUG oslo_concurrency.lockutils [req-9e6305a8-795f-44ac-9586-15824c70ab2e req-87333b5a-90af-4474-af2c-57c34a048c9f service nova] Lock "a15141bc-424d-48ca-a6d5-c859a3639a0b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2200.713497] env[63279]: DEBUG nova.compute.manager [req-9e6305a8-795f-44ac-9586-15824c70ab2e req-87333b5a-90af-4474-af2c-57c34a048c9f service nova] [instance: a15141bc-424d-48ca-a6d5-c859a3639a0b] No waiting events found dispatching network-vif-unplugged-cd0a8ed6-d1ee-4266-8bde-e866ac2873ce {{(pid=63279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 2200.713666] env[63279]: WARNING nova.compute.manager [req-9e6305a8-795f-44ac-9586-15824c70ab2e req-87333b5a-90af-4474-af2c-57c34a048c9f service nova] [instance: a15141bc-424d-48ca-a6d5-c859a3639a0b] Received unexpected event network-vif-unplugged-cd0a8ed6-d1ee-4266-8bde-e866ac2873ce for instance with vm_state shelved_offloaded and task_state None. [ 2200.713827] env[63279]: DEBUG nova.compute.manager [req-9e6305a8-795f-44ac-9586-15824c70ab2e req-87333b5a-90af-4474-af2c-57c34a048c9f service nova] [instance: a15141bc-424d-48ca-a6d5-c859a3639a0b] Received event network-changed-cd0a8ed6-d1ee-4266-8bde-e866ac2873ce {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2200.713981] env[63279]: DEBUG nova.compute.manager [req-9e6305a8-795f-44ac-9586-15824c70ab2e req-87333b5a-90af-4474-af2c-57c34a048c9f service nova] [instance: a15141bc-424d-48ca-a6d5-c859a3639a0b] Refreshing instance network info cache due to event network-changed-cd0a8ed6-d1ee-4266-8bde-e866ac2873ce. 
{{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11655}} [ 2200.714180] env[63279]: DEBUG oslo_concurrency.lockutils [req-9e6305a8-795f-44ac-9586-15824c70ab2e req-87333b5a-90af-4474-af2c-57c34a048c9f service nova] Acquiring lock "refresh_cache-a15141bc-424d-48ca-a6d5-c859a3639a0b" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2200.714323] env[63279]: DEBUG oslo_concurrency.lockutils [req-9e6305a8-795f-44ac-9586-15824c70ab2e req-87333b5a-90af-4474-af2c-57c34a048c9f service nova] Acquired lock "refresh_cache-a15141bc-424d-48ca-a6d5-c859a3639a0b" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2200.714478] env[63279]: DEBUG nova.network.neutron [req-9e6305a8-795f-44ac-9586-15824c70ab2e req-87333b5a-90af-4474-af2c-57c34a048c9f service nova] [instance: a15141bc-424d-48ca-a6d5-c859a3639a0b] Refreshing network info cache for port cd0a8ed6-d1ee-4266-8bde-e866ac2873ce {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2200.728338] env[63279]: DEBUG nova.scheduler.client.report [None req-0e777e00-fd9d-4c85-a1de-a0c92a46f747 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Inventory has not changed for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2200.846435] env[63279]: DEBUG oslo_vmware.api [None req-73bbe86f-3fab-4cd0-9b07-18088e80054f tempest-ServersTestFqdnHostnames-1241255271 tempest-ServersTestFqdnHostnames-1241255271-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]523c8579-08d8-646c-9751-b47bf719a19c, 'name': SearchDatastore_Task, 'duration_secs': 0.009156} completed successfully. 
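The inventory record logged above for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 feeds the placement capacity calculation, where usable capacity per resource class is (total - reserved) * allocation_ratio. A quick check of those numbers:

    # Inventory data as reported in the log above.
    inventory = {
        'VCPU': {'total': 48, 'reserved': 0, 'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB': {'total': 400, 'reserved': 0, 'allocation_ratio': 1.0},
    }

    def effective_capacity(inv):
        """Capacity the scheduler can place against: (total - reserved) * allocation_ratio."""
        return {rc: int((v['total'] - v['reserved']) * v['allocation_ratio'])
                for rc, v in inv.items()}

    print(effective_capacity(inventory))
    # {'VCPU': 192, 'MEMORY_MB': 196078, 'DISK_GB': 400}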
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2200.846716] env[63279]: DEBUG oslo_concurrency.lockutils [None req-73bbe86f-3fab-4cd0-9b07-18088e80054f tempest-ServersTestFqdnHostnames-1241255271 tempest-ServersTestFqdnHostnames-1241255271-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2200.846975] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-73bbe86f-3fab-4cd0-9b07-18088e80054f tempest-ServersTestFqdnHostnames-1241255271 tempest-ServersTestFqdnHostnames-1241255271-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] 27e2917d-3cd0-4ad3-ab65-f85f7d97225f/27e2917d-3cd0-4ad3-ab65-f85f7d97225f.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2200.847270] env[63279]: DEBUG oslo_concurrency.lockutils [None req-90a4661b-87b3-4198-ac53-8b946588d520 tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2200.847457] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-90a4661b-87b3-4198-ac53-8b946588d520 tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2200.847706] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-52ee39fe-c54e-488b-92e5-ccee92547901 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2200.850466] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-804fa46d-bb4f-421c-9ebd-522ac9d5e78e {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2200.856853] env[63279]: DEBUG oslo_vmware.api [None req-73bbe86f-3fab-4cd0-9b07-18088e80054f tempest-ServersTestFqdnHostnames-1241255271 tempest-ServersTestFqdnHostnames-1241255271-project-member] Waiting for the task: (returnval){ [ 2200.856853] env[63279]: value = "task-2087744" [ 2200.856853] env[63279]: _type = "Task" [ 2200.856853] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2200.860656] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-90a4661b-87b3-4198-ac53-8b946588d520 tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2200.860857] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-90a4661b-87b3-4198-ac53-8b946588d520 tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] Folder [datastore1] devstack-image-cache_base created. 
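The MakeDirectory invocations followed by "Created directory" / "Folder ... created." above are an idempotent create-if-missing step: the image-cache folder is (re)created before copying, and a pre-existing folder is not treated as an error. A minimal sketch of that pattern; mkdir is a hypothetical callable wrapping the directory creation, and the real vCenter call raises a FileAlreadyExists-style fault rather than Python's FileExistsError:

    def create_folder_if_missing(mkdir, path="[datastore1] devstack-image-cache_base"):
        try:
            mkdir(path)
        except FileExistsError:
            pass  # another worker created it first; that is fine
        return path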
{{(pid=63279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2200.861824] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e6893994-2ff2-492b-9256-66aed685fe0f {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2200.866834] env[63279]: DEBUG oslo_vmware.api [None req-73bbe86f-3fab-4cd0-9b07-18088e80054f tempest-ServersTestFqdnHostnames-1241255271 tempest-ServersTestFqdnHostnames-1241255271-project-member] Task: {'id': task-2087744, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2200.869671] env[63279]: DEBUG oslo_vmware.api [None req-90a4661b-87b3-4198-ac53-8b946588d520 tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] Waiting for the task: (returnval){ [ 2200.869671] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52c787bc-3c4b-3f61-1936-e99b0613616a" [ 2200.869671] env[63279]: _type = "Task" [ 2200.869671] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2200.877277] env[63279]: DEBUG oslo_vmware.api [None req-90a4661b-87b3-4198-ac53-8b946588d520 tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52c787bc-3c4b-3f61-1936-e99b0613616a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2201.000184] env[63279]: DEBUG nova.network.neutron [req-a14e7f19-5c65-4f58-ad65-01b60e2c48c2 req-381ae6b6-97d1-459e-994e-0bf0d0b599b7 service nova] [instance: 9b98a316-71da-45fb-b895-553f179fe7d9] Updated VIF entry in instance network info cache for port 162a4844-743a-4cba-b137-f35170a3d072. 
{{(pid=63279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2201.000569] env[63279]: DEBUG nova.network.neutron [req-a14e7f19-5c65-4f58-ad65-01b60e2c48c2 req-381ae6b6-97d1-459e-994e-0bf0d0b599b7 service nova] [instance: 9b98a316-71da-45fb-b895-553f179fe7d9] Updating instance_info_cache with network_info: [{"id": "162a4844-743a-4cba-b137-f35170a3d072", "address": "fa:16:3e:a1:38:39", "network": {"id": "7b4d0149-03c5-4c40-ba16-d705499cd558", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-1026068065-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3d9c0a9c34ba408c829c0b50f3592bb2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aaf1b231-3660-4453-b4f3-44d825b9a5dd", "external-id": "nsx-vlan-transportzone-6", "segmentation_id": 6, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap162a4844-74", "ovs_interfaceid": "162a4844-743a-4cba-b137-f35170a3d072", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2201.049790] env[63279]: DEBUG oslo_concurrency.lockutils [None req-208396b7-55ea-4701-90ef-c6d3ea22cae9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2201.119753] env[63279]: DEBUG oslo_concurrency.lockutils [None req-d37d85e0-dc3a-4353-a13a-f209b1ba7eb7 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Releasing lock "refresh_cache-14c698c8-7459-4843-bb19-f915742e3e53" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2201.120152] env[63279]: DEBUG nova.compute.manager [None req-d37d85e0-dc3a-4353-a13a-f209b1ba7eb7 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] [instance: 14c698c8-7459-4843-bb19-f915742e3e53] Instance network_info: |[{"id": "41a761e0-23d6-4305-8d20-d999d3f5b902", "address": "fa:16:3e:82:ad:79", "network": {"id": "6d7c4c21-3389-4688-b373-329b2328f275", "bridge": "br-int", "label": "tempest-ImagesTestJSON-311425692-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "77ee2145dda94e2b85eeb7379ed98e26", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0df968ae-c1ef-4009-a0f4-6f2e799c2fda", "external-id": "nsx-vlan-transportzone-864", "segmentation_id": 864, "bound_drivers": 
{"0": "nsxv3"}}, "devname": "tap41a761e0-23", "ovs_interfaceid": "41a761e0-23d6-4305-8d20-d999d3f5b902", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 2201.120588] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-d37d85e0-dc3a-4353-a13a-f209b1ba7eb7 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] [instance: 14c698c8-7459-4843-bb19-f915742e3e53] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:82:ad:79', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '0df968ae-c1ef-4009-a0f4-6f2e799c2fda', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '41a761e0-23d6-4305-8d20-d999d3f5b902', 'vif_model': 'vmxnet3'}] {{(pid=63279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2201.128538] env[63279]: DEBUG oslo.service.loopingcall [None req-d37d85e0-dc3a-4353-a13a-f209b1ba7eb7 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2201.128817] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 14c698c8-7459-4843-bb19-f915742e3e53] Creating VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2201.129067] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-67749cc4-d0f7-4dd7-a157-04a16959369b {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2201.150025] env[63279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2201.150025] env[63279]: value = "task-2087745" [ 2201.150025] env[63279]: _type = "Task" [ 2201.150025] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2201.158319] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087745, 'name': CreateVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2201.235371] env[63279]: DEBUG oslo_concurrency.lockutils [None req-0e777e00-fd9d-4c85-a1de-a0c92a46f747 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.324s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2201.236376] env[63279]: DEBUG nova.compute.manager [None req-0e777e00-fd9d-4c85-a1de-a0c92a46f747 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: f18d3019-bf1f-4519-a824-7ca80458d793] Start building networks asynchronously for instance. 
{{(pid=63279) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 2201.239466] env[63279]: DEBUG oslo_concurrency.lockutils [None req-0334c2c0-9949-449c-a374-eaaae4ef764a tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 8.881s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2201.240281] env[63279]: INFO nova.compute.claims [None req-0334c2c0-9949-449c-a374-eaaae4ef764a tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] [instance: 9fd3ea14-3e25-47f4-ae84-1b1fb8b1bbf6] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2201.371535] env[63279]: DEBUG oslo_vmware.api [None req-73bbe86f-3fab-4cd0-9b07-18088e80054f tempest-ServersTestFqdnHostnames-1241255271 tempest-ServersTestFqdnHostnames-1241255271-project-member] Task: {'id': task-2087744, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2201.380017] env[63279]: DEBUG oslo_vmware.api [None req-90a4661b-87b3-4198-ac53-8b946588d520 tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52c787bc-3c4b-3f61-1936-e99b0613616a, 'name': SearchDatastore_Task, 'duration_secs': 0.008502} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2201.380815] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e2ef1253-069d-414a-9973-cee346a81ed9 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2201.387975] env[63279]: DEBUG oslo_vmware.api [None req-90a4661b-87b3-4198-ac53-8b946588d520 tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] Waiting for the task: (returnval){ [ 2201.387975] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52bb549d-5b18-5aa6-d5ad-4228c73013fc" [ 2201.387975] env[63279]: _type = "Task" [ 2201.387975] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2201.397472] env[63279]: DEBUG oslo_vmware.api [None req-90a4661b-87b3-4198-ac53-8b946588d520 tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52bb549d-5b18-5aa6-d5ad-4228c73013fc, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2201.503203] env[63279]: DEBUG oslo_concurrency.lockutils [req-a14e7f19-5c65-4f58-ad65-01b60e2c48c2 req-381ae6b6-97d1-459e-994e-0bf0d0b599b7 service nova] Releasing lock "refresh_cache-9b98a316-71da-45fb-b895-553f179fe7d9" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2201.545097] env[63279]: DEBUG nova.network.neutron [req-9e6305a8-795f-44ac-9586-15824c70ab2e req-87333b5a-90af-4474-af2c-57c34a048c9f service nova] [instance: a15141bc-424d-48ca-a6d5-c859a3639a0b] Updated VIF entry in instance network info cache for port cd0a8ed6-d1ee-4266-8bde-e866ac2873ce. {{(pid=63279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2201.545097] env[63279]: DEBUG nova.network.neutron [req-9e6305a8-795f-44ac-9586-15824c70ab2e req-87333b5a-90af-4474-af2c-57c34a048c9f service nova] [instance: a15141bc-424d-48ca-a6d5-c859a3639a0b] Updating instance_info_cache with network_info: [{"id": "cd0a8ed6-d1ee-4266-8bde-e866ac2873ce", "address": "fa:16:3e:fe:5c:41", "network": {"id": "4f906777-9da9-42b1-9146-359f04c7c47f", "bridge": null, "label": "tempest-AttachVolumeShelveTestJSON-923457018-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.193", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "131db3d2daa24712b6e11592cf789b33", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tapcd0a8ed6-d1", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2201.660339] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087745, 'name': CreateVM_Task} progress is 25%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2201.746936] env[63279]: DEBUG nova.compute.utils [None req-0e777e00-fd9d-4c85-a1de-a0c92a46f747 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Using /dev/sd instead of None {{(pid=63279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2201.750920] env[63279]: DEBUG nova.compute.manager [None req-0e777e00-fd9d-4c85-a1de-a0c92a46f747 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: f18d3019-bf1f-4519-a824-7ca80458d793] Allocating IP information in the background. 
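"Start building networks asynchronously" and "Allocating IP information in the background" above describe the build flow: port allocation is kicked off in the background, block device mappings are prepared meanwhile, and the network_info is awaited only when the VM actually needs it. A rough sketch of that ordering with hypothetical callables; Nova itself runs this on eventlet greenthreads rather than a ThreadPoolExecutor:

    from concurrent.futures import ThreadPoolExecutor

    def build_instance(allocate_networks, build_block_devices, create_vm):
        with ThreadPoolExecutor(max_workers=1) as pool:
            nw_future = pool.submit(allocate_networks)   # allocate_for_instance()
            block_devices = build_block_devices()        # proceeds while Neutron works
            network_info = nw_future.result()            # resolved before VM creation
            return create_vm(network_info, block_devices)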
{{(pid=63279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 2201.750920] env[63279]: DEBUG nova.network.neutron [None req-0e777e00-fd9d-4c85-a1de-a0c92a46f747 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: f18d3019-bf1f-4519-a824-7ca80458d793] allocate_for_instance() {{(pid=63279) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2201.798206] env[63279]: DEBUG nova.policy [None req-0e777e00-fd9d-4c85-a1de-a0c92a46f747 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '655d692da88947b89104e1f14f7d71f5', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a5a719a21fe248c49d0d0151d218866b', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63279) authorize /opt/stack/nova/nova/policy.py:192}} [ 2201.869888] env[63279]: DEBUG oslo_vmware.api [None req-73bbe86f-3fab-4cd0-9b07-18088e80054f tempest-ServersTestFqdnHostnames-1241255271 tempest-ServersTestFqdnHostnames-1241255271-project-member] Task: {'id': task-2087744, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.520803} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2201.870206] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-73bbe86f-3fab-4cd0-9b07-18088e80054f tempest-ServersTestFqdnHostnames-1241255271 tempest-ServersTestFqdnHostnames-1241255271-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] 27e2917d-3cd0-4ad3-ab65-f85f7d97225f/27e2917d-3cd0-4ad3-ab65-f85f7d97225f.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2201.870449] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-73bbe86f-3fab-4cd0-9b07-18088e80054f tempest-ServersTestFqdnHostnames-1241255271 tempest-ServersTestFqdnHostnames-1241255271-project-member] [instance: 27e2917d-3cd0-4ad3-ab65-f85f7d97225f] Extending root virtual disk to 1048576 {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2201.870702] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-80eebf0e-1b6b-4b80-aae9-87a377fffb2c {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2201.880436] env[63279]: DEBUG oslo_vmware.api [None req-73bbe86f-3fab-4cd0-9b07-18088e80054f tempest-ServersTestFqdnHostnames-1241255271 tempest-ServersTestFqdnHostnames-1241255271-project-member] Waiting for the task: (returnval){ [ 2201.880436] env[63279]: value = "task-2087746" [ 2201.880436] env[63279]: _type = "Task" [ 2201.880436] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2201.889356] env[63279]: DEBUG oslo_vmware.api [None req-73bbe86f-3fab-4cd0-9b07-18088e80054f tempest-ServersTestFqdnHostnames-1241255271 tempest-ServersTestFqdnHostnames-1241255271-project-member] Task: {'id': task-2087746, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2201.899151] env[63279]: DEBUG oslo_vmware.api [None req-90a4661b-87b3-4198-ac53-8b946588d520 tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52bb549d-5b18-5aa6-d5ad-4228c73013fc, 'name': SearchDatastore_Task, 'duration_secs': 0.060095} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2201.899533] env[63279]: DEBUG oslo_concurrency.lockutils [None req-90a4661b-87b3-4198-ac53-8b946588d520 tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2201.899898] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-90a4661b-87b3-4198-ac53-8b946588d520 tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] 9b98a316-71da-45fb-b895-553f179fe7d9/9b98a316-71da-45fb-b895-553f179fe7d9.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2201.900295] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-332bec54-2e64-4d26-995b-c9f2314fde27 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2201.908090] env[63279]: DEBUG oslo_vmware.api [None req-90a4661b-87b3-4198-ac53-8b946588d520 tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] Waiting for the task: (returnval){ [ 2201.908090] env[63279]: value = "task-2087747" [ 2201.908090] env[63279]: _type = "Task" [ 2201.908090] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2201.915498] env[63279]: DEBUG oslo_vmware.api [None req-90a4661b-87b3-4198-ac53-8b946588d520 tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] Task: {'id': task-2087747, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2202.048443] env[63279]: DEBUG oslo_concurrency.lockutils [req-9e6305a8-795f-44ac-9586-15824c70ab2e req-87333b5a-90af-4474-af2c-57c34a048c9f service nova] Releasing lock "refresh_cache-a15141bc-424d-48ca-a6d5-c859a3639a0b" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2202.161354] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087745, 'name': CreateVM_Task, 'duration_secs': 0.7941} completed successfully. 
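Once the cached VMDK is available, the root disk for a new instance is produced by copying the cache entry into the instance folder (CopyVirtualDisk_Task) and then growing it to the flavor's root size (ExtendVirtualDisk_Task; the "1048576" above is in KB, i.e. a 1 GB root disk). A sketch of that sequence; copy_vmdk and extend_vmdk are hypothetical callables standing in for the two vCenter tasks:

    def provision_root_disk(copy_vmdk, extend_vmdk, image_id, instance_uuid, root_gb,
                            datastore="datastore1"):
        cache = ("[%s] devstack-image-cache_base/%s/%s.vmdk"
                 % (datastore, image_id, image_id))
        target = "[%s] %s/%s.vmdk" % (datastore, instance_uuid, instance_uuid)
        copy_vmdk(cache, target)                     # CopyVirtualDisk_Task
        extend_vmdk(target, root_gb * 1024 * 1024)   # ExtendVirtualDisk_Task, size in KB
        return target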
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2202.161530] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 14c698c8-7459-4843-bb19-f915742e3e53] Created VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2202.162273] env[63279]: DEBUG oslo_concurrency.lockutils [None req-d37d85e0-dc3a-4353-a13a-f209b1ba7eb7 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2202.162447] env[63279]: DEBUG oslo_concurrency.lockutils [None req-d37d85e0-dc3a-4353-a13a-f209b1ba7eb7 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2202.162769] env[63279]: DEBUG oslo_concurrency.lockutils [None req-d37d85e0-dc3a-4353-a13a-f209b1ba7eb7 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2202.163038] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-df30084d-fa75-4b8c-977e-6c063e1b334b {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2202.167842] env[63279]: DEBUG oslo_vmware.api [None req-d37d85e0-dc3a-4353-a13a-f209b1ba7eb7 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Waiting for the task: (returnval){ [ 2202.167842] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]520892b4-5ab8-d160-87af-9e108022d95e" [ 2202.167842] env[63279]: _type = "Task" [ 2202.167842] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2202.176278] env[63279]: DEBUG oslo_vmware.api [None req-d37d85e0-dc3a-4353-a13a-f209b1ba7eb7 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]520892b4-5ab8-d160-87af-9e108022d95e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2202.253340] env[63279]: DEBUG nova.compute.manager [None req-0e777e00-fd9d-4c85-a1de-a0c92a46f747 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: f18d3019-bf1f-4519-a824-7ca80458d793] Start building block device mappings for instance. 
{{(pid=63279) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 2202.335663] env[63279]: DEBUG nova.network.neutron [None req-0e777e00-fd9d-4c85-a1de-a0c92a46f747 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: f18d3019-bf1f-4519-a824-7ca80458d793] Successfully created port: 4556aa7b-156a-4ab0-838d-aed88aa7449f {{(pid=63279) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2202.390207] env[63279]: DEBUG oslo_vmware.api [None req-73bbe86f-3fab-4cd0-9b07-18088e80054f tempest-ServersTestFqdnHostnames-1241255271 tempest-ServersTestFqdnHostnames-1241255271-project-member] Task: {'id': task-2087746, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.079585} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2202.390515] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-73bbe86f-3fab-4cd0-9b07-18088e80054f tempest-ServersTestFqdnHostnames-1241255271 tempest-ServersTestFqdnHostnames-1241255271-project-member] [instance: 27e2917d-3cd0-4ad3-ab65-f85f7d97225f] Extended root virtual disk {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2202.391338] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72cdc563-1262-4d97-8e3d-c9ffad2d27ec {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2202.424892] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-73bbe86f-3fab-4cd0-9b07-18088e80054f tempest-ServersTestFqdnHostnames-1241255271 tempest-ServersTestFqdnHostnames-1241255271-project-member] [instance: 27e2917d-3cd0-4ad3-ab65-f85f7d97225f] Reconfiguring VM instance instance-00000055 to attach disk [datastore1] 27e2917d-3cd0-4ad3-ab65-f85f7d97225f/27e2917d-3cd0-4ad3-ab65-f85f7d97225f.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2202.430954] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b37495bb-7810-43a6-8899-7196bd76d959 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2202.453683] env[63279]: DEBUG oslo_vmware.api [None req-90a4661b-87b3-4198-ac53-8b946588d520 tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] Task: {'id': task-2087747, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2202.456116] env[63279]: DEBUG oslo_vmware.api [None req-73bbe86f-3fab-4cd0-9b07-18088e80054f tempest-ServersTestFqdnHostnames-1241255271 tempest-ServersTestFqdnHostnames-1241255271-project-member] Waiting for the task: (returnval){ [ 2202.456116] env[63279]: value = "task-2087748" [ 2202.456116] env[63279]: _type = "Task" [ 2202.456116] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2202.466311] env[63279]: DEBUG oslo_vmware.api [None req-73bbe86f-3fab-4cd0-9b07-18088e80054f tempest-ServersTestFqdnHostnames-1241255271 tempest-ServersTestFqdnHostnames-1241255271-project-member] Task: {'id': task-2087748, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2202.539271] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa4bf54f-f1ed-492e-9cb3-ce70f07f2b4d {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2202.546738] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe1d4d60-73b1-4f9f-946a-bface9b369a2 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2202.577655] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a419bf8-ff84-4650-9218-9e3491cdd841 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2202.585388] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9171e610-8a80-4e79-9f7b-ad67c2ab03fc {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2202.598476] env[63279]: DEBUG nova.compute.provider_tree [None req-0334c2c0-9949-449c-a374-eaaae4ef764a tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Inventory has not changed in ProviderTree for provider: 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2202.677911] env[63279]: DEBUG oslo_vmware.api [None req-d37d85e0-dc3a-4353-a13a-f209b1ba7eb7 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]520892b4-5ab8-d160-87af-9e108022d95e, 'name': SearchDatastore_Task, 'duration_secs': 0.009373} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2202.678254] env[63279]: DEBUG oslo_concurrency.lockutils [None req-d37d85e0-dc3a-4353-a13a-f209b1ba7eb7 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2202.678496] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-d37d85e0-dc3a-4353-a13a-f209b1ba7eb7 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] [instance: 14c698c8-7459-4843-bb19-f915742e3e53] Processing image 30887889-e45b-4f67-8b3c-16216e594a90 {{(pid=63279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2202.678739] env[63279]: DEBUG oslo_concurrency.lockutils [None req-d37d85e0-dc3a-4353-a13a-f209b1ba7eb7 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2202.678890] env[63279]: DEBUG oslo_concurrency.lockutils [None req-d37d85e0-dc3a-4353-a13a-f209b1ba7eb7 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2202.679086] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-d37d85e0-dc3a-4353-a13a-f209b1ba7eb7 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2202.679358] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4baa64e6-0fca-47d9-b9ed-f7b72d8dd58e {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2202.687867] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-d37d85e0-dc3a-4353-a13a-f209b1ba7eb7 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2202.688058] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-d37d85e0-dc3a-4353-a13a-f209b1ba7eb7 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2202.688765] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0572a1b1-f7f5-4d45-9d5c-51eec0ef2048 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2202.694167] env[63279]: DEBUG oslo_vmware.api [None req-d37d85e0-dc3a-4353-a13a-f209b1ba7eb7 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Waiting for the task: (returnval){ [ 2202.694167] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52c5c363-895e-5909-af9f-639b43580060" [ 2202.694167] env[63279]: _type = "Task" [ 2202.694167] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2202.703542] env[63279]: DEBUG oslo_vmware.api [None req-d37d85e0-dc3a-4353-a13a-f209b1ba7eb7 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52c5c363-895e-5909-af9f-639b43580060, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2202.926542] env[63279]: DEBUG oslo_vmware.api [None req-90a4661b-87b3-4198-ac53-8b946588d520 tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] Task: {'id': task-2087747, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2202.969747] env[63279]: DEBUG oslo_vmware.api [None req-73bbe86f-3fab-4cd0-9b07-18088e80054f tempest-ServersTestFqdnHostnames-1241255271 tempest-ServersTestFqdnHostnames-1241255271-project-member] Task: {'id': task-2087748, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2203.102502] env[63279]: DEBUG nova.scheduler.client.report [None req-0334c2c0-9949-449c-a374-eaaae4ef764a tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Inventory has not changed for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2203.207473] env[63279]: DEBUG oslo_vmware.api [None req-d37d85e0-dc3a-4353-a13a-f209b1ba7eb7 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52c5c363-895e-5909-af9f-639b43580060, 'name': SearchDatastore_Task, 'duration_secs': 0.008694} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2203.208409] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d24e16aa-719a-4ae5-a8a0-11b7f2f10ac5 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2203.217075] env[63279]: DEBUG oslo_vmware.api [None req-d37d85e0-dc3a-4353-a13a-f209b1ba7eb7 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Waiting for the task: (returnval){ [ 2203.217075] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52d13ff4-501a-0280-8b08-7d0d37098abc" [ 2203.217075] env[63279]: _type = "Task" [ 2203.217075] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2203.227284] env[63279]: DEBUG oslo_vmware.api [None req-d37d85e0-dc3a-4353-a13a-f209b1ba7eb7 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52d13ff4-501a-0280-8b08-7d0d37098abc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2203.265484] env[63279]: DEBUG nova.compute.manager [None req-0e777e00-fd9d-4c85-a1de-a0c92a46f747 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: f18d3019-bf1f-4519-a824-7ca80458d793] Start spawning the instance on the hypervisor. {{(pid=63279) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 2203.299943] env[63279]: DEBUG nova.virt.hardware [None req-0e777e00-fd9d-4c85-a1de-a0c92a46f747 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-13T17:30:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-13T17:30:01Z,direct_url=,disk_format='vmdk',id=30887889-e45b-4f67-8b3c-16216e594a90,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a935e86eee0a4c38adfe0367d2097a61',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-13T17:30:02Z,virtual_size=,visibility=), allow threads: False {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2203.300237] env[63279]: DEBUG nova.virt.hardware [None req-0e777e00-fd9d-4c85-a1de-a0c92a46f747 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Flavor limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2203.300463] env[63279]: DEBUG nova.virt.hardware [None req-0e777e00-fd9d-4c85-a1de-a0c92a46f747 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Image limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2203.300659] env[63279]: DEBUG nova.virt.hardware [None req-0e777e00-fd9d-4c85-a1de-a0c92a46f747 tempest-DeleteServersTestJSON-2126661104 
tempest-DeleteServersTestJSON-2126661104-project-member] Flavor pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2203.300816] env[63279]: DEBUG nova.virt.hardware [None req-0e777e00-fd9d-4c85-a1de-a0c92a46f747 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Image pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2203.300969] env[63279]: DEBUG nova.virt.hardware [None req-0e777e00-fd9d-4c85-a1de-a0c92a46f747 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2203.301201] env[63279]: DEBUG nova.virt.hardware [None req-0e777e00-fd9d-4c85-a1de-a0c92a46f747 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 2203.301420] env[63279]: DEBUG nova.virt.hardware [None req-0e777e00-fd9d-4c85-a1de-a0c92a46f747 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 2203.301608] env[63279]: DEBUG nova.virt.hardware [None req-0e777e00-fd9d-4c85-a1de-a0c92a46f747 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Got 1 possible topologies {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2203.301775] env[63279]: DEBUG nova.virt.hardware [None req-0e777e00-fd9d-4c85-a1de-a0c92a46f747 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2203.301959] env[63279]: DEBUG nova.virt.hardware [None req-0e777e00-fd9d-4c85-a1de-a0c92a46f747 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2203.303039] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a09d0c42-e98b-42ce-915e-6f2bba65dbe7 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2203.313921] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ffb64d7-d511-4ae9-a306-b4cc7b1fb599 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2203.386296] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b04333d3-6e55-46a1-b066-6cbcaa49e26d tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Acquiring lock "a15141bc-424d-48ca-a6d5-c859a3639a0b" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" {{(pid=63279) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2203.426997] env[63279]: DEBUG oslo_vmware.api [None req-90a4661b-87b3-4198-ac53-8b946588d520 tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] Task: {'id': task-2087747, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.47957} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2203.427312] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-90a4661b-87b3-4198-ac53-8b946588d520 tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] 9b98a316-71da-45fb-b895-553f179fe7d9/9b98a316-71da-45fb-b895-553f179fe7d9.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2203.427578] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-90a4661b-87b3-4198-ac53-8b946588d520 tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] [instance: 9b98a316-71da-45fb-b895-553f179fe7d9] Extending root virtual disk to 1048576 {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2203.427984] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-38295d3e-20b2-45e7-b814-e6fb016c9bde {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2203.435203] env[63279]: DEBUG oslo_vmware.api [None req-90a4661b-87b3-4198-ac53-8b946588d520 tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] Waiting for the task: (returnval){ [ 2203.435203] env[63279]: value = "task-2087749" [ 2203.435203] env[63279]: _type = "Task" [ 2203.435203] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2203.444157] env[63279]: DEBUG oslo_vmware.api [None req-90a4661b-87b3-4198-ac53-8b946588d520 tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] Task: {'id': task-2087749, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2203.472137] env[63279]: DEBUG oslo_vmware.api [None req-73bbe86f-3fab-4cd0-9b07-18088e80054f tempest-ServersTestFqdnHostnames-1241255271 tempest-ServersTestFqdnHostnames-1241255271-project-member] Task: {'id': task-2087748, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2203.612388] env[63279]: DEBUG oslo_concurrency.lockutils [None req-0334c2c0-9949-449c-a374-eaaae4ef764a tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.374s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2203.613188] env[63279]: DEBUG nova.compute.manager [None req-0334c2c0-9949-449c-a374-eaaae4ef764a tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] [instance: 9fd3ea14-3e25-47f4-ae84-1b1fb8b1bbf6] Start building networks asynchronously for instance. {{(pid=63279) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 2203.615724] env[63279]: DEBUG oslo_concurrency.lockutils [None req-c2199699-eea6-4f35-987c-22394a2587cb tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 8.754s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2203.617147] env[63279]: INFO nova.compute.claims [None req-c2199699-eea6-4f35-987c-22394a2587cb tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] [instance: 69b3269a-2ba3-4f5f-a29c-62518c93da3d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2203.728158] env[63279]: DEBUG oslo_vmware.api [None req-d37d85e0-dc3a-4353-a13a-f209b1ba7eb7 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52d13ff4-501a-0280-8b08-7d0d37098abc, 'name': SearchDatastore_Task, 'duration_secs': 0.060702} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2203.728402] env[63279]: DEBUG oslo_concurrency.lockutils [None req-d37d85e0-dc3a-4353-a13a-f209b1ba7eb7 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2203.728663] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-d37d85e0-dc3a-4353-a13a-f209b1ba7eb7 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] 14c698c8-7459-4843-bb19-f915742e3e53/14c698c8-7459-4843-bb19-f915742e3e53.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2203.728925] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9e0e6dd8-d049-4538-97fc-6eae8d0d58f0 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2203.735742] env[63279]: DEBUG oslo_vmware.api [None req-d37d85e0-dc3a-4353-a13a-f209b1ba7eb7 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Waiting for the task: (returnval){ [ 2203.735742] env[63279]: value = "task-2087750" [ 2203.735742] env[63279]: _type = "Task" [ 2203.735742] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2203.745278] env[63279]: DEBUG oslo_vmware.api [None req-d37d85e0-dc3a-4353-a13a-f209b1ba7eb7 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Task: {'id': task-2087750, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2203.945360] env[63279]: DEBUG oslo_vmware.api [None req-90a4661b-87b3-4198-ac53-8b946588d520 tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] Task: {'id': task-2087749, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.068084} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2203.945651] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-90a4661b-87b3-4198-ac53-8b946588d520 tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] [instance: 9b98a316-71da-45fb-b895-553f179fe7d9] Extended root virtual disk {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2203.948358] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb3ac563-7b33-4f7b-be14-d53080ed22f4 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2203.978793] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-90a4661b-87b3-4198-ac53-8b946588d520 tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] [instance: 9b98a316-71da-45fb-b895-553f179fe7d9] Reconfiguring VM instance instance-00000054 to attach disk [datastore1] 9b98a316-71da-45fb-b895-553f179fe7d9/9b98a316-71da-45fb-b895-553f179fe7d9.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2203.983616] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-12b6eb03-079c-48fc-b671-d09d13d065f5 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2203.999433] env[63279]: DEBUG nova.compute.manager [req-ed4170de-0851-4b32-baed-e70000f778ad req-53a9eec2-5ece-4653-85e4-c1fcd8f2a594 service nova] [instance: f18d3019-bf1f-4519-a824-7ca80458d793] Received event network-vif-plugged-4556aa7b-156a-4ab0-838d-aed88aa7449f {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2203.999698] env[63279]: DEBUG oslo_concurrency.lockutils [req-ed4170de-0851-4b32-baed-e70000f778ad req-53a9eec2-5ece-4653-85e4-c1fcd8f2a594 service nova] Acquiring lock "f18d3019-bf1f-4519-a824-7ca80458d793-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2203.999950] env[63279]: DEBUG oslo_concurrency.lockutils [req-ed4170de-0851-4b32-baed-e70000f778ad req-53a9eec2-5ece-4653-85e4-c1fcd8f2a594 service nova] Lock "f18d3019-bf1f-4519-a824-7ca80458d793-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2204.000133] env[63279]: DEBUG oslo_concurrency.lockutils [req-ed4170de-0851-4b32-baed-e70000f778ad req-53a9eec2-5ece-4653-85e4-c1fcd8f2a594 service nova] Lock "f18d3019-bf1f-4519-a824-7ca80458d793-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2204.000308] env[63279]: DEBUG nova.compute.manager [req-ed4170de-0851-4b32-baed-e70000f778ad req-53a9eec2-5ece-4653-85e4-c1fcd8f2a594 service nova] [instance: f18d3019-bf1f-4519-a824-7ca80458d793] No waiting events found dispatching network-vif-plugged-4556aa7b-156a-4ab0-838d-aed88aa7449f {{(pid=63279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 2204.000509] env[63279]: WARNING nova.compute.manager 
[req-ed4170de-0851-4b32-baed-e70000f778ad req-53a9eec2-5ece-4653-85e4-c1fcd8f2a594 service nova] [instance: f18d3019-bf1f-4519-a824-7ca80458d793] Received unexpected event network-vif-plugged-4556aa7b-156a-4ab0-838d-aed88aa7449f for instance with vm_state building and task_state spawning. [ 2204.007408] env[63279]: DEBUG oslo_vmware.api [None req-73bbe86f-3fab-4cd0-9b07-18088e80054f tempest-ServersTestFqdnHostnames-1241255271 tempest-ServersTestFqdnHostnames-1241255271-project-member] Task: {'id': task-2087748, 'name': ReconfigVM_Task, 'duration_secs': 1.046809} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2204.009557] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-73bbe86f-3fab-4cd0-9b07-18088e80054f tempest-ServersTestFqdnHostnames-1241255271 tempest-ServersTestFqdnHostnames-1241255271-project-member] [instance: 27e2917d-3cd0-4ad3-ab65-f85f7d97225f] Reconfigured VM instance instance-00000055 to attach disk [datastore1] 27e2917d-3cd0-4ad3-ab65-f85f7d97225f/27e2917d-3cd0-4ad3-ab65-f85f7d97225f.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2204.010382] env[63279]: DEBUG oslo_vmware.api [None req-90a4661b-87b3-4198-ac53-8b946588d520 tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] Waiting for the task: (returnval){ [ 2204.010382] env[63279]: value = "task-2087751" [ 2204.010382] env[63279]: _type = "Task" [ 2204.010382] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2204.010608] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-99e21c5c-0b90-49ff-9b86-7db3435c03bb {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2204.022834] env[63279]: DEBUG oslo_vmware.api [None req-90a4661b-87b3-4198-ac53-8b946588d520 tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] Task: {'id': task-2087751, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2204.024253] env[63279]: DEBUG oslo_vmware.api [None req-73bbe86f-3fab-4cd0-9b07-18088e80054f tempest-ServersTestFqdnHostnames-1241255271 tempest-ServersTestFqdnHostnames-1241255271-project-member] Waiting for the task: (returnval){ [ 2204.024253] env[63279]: value = "task-2087752" [ 2204.024253] env[63279]: _type = "Task" [ 2204.024253] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2204.032699] env[63279]: DEBUG oslo_vmware.api [None req-73bbe86f-3fab-4cd0-9b07-18088e80054f tempest-ServersTestFqdnHostnames-1241255271 tempest-ServersTestFqdnHostnames-1241255271-project-member] Task: {'id': task-2087752, 'name': Rename_Task} progress is 5%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2204.078906] env[63279]: DEBUG nova.network.neutron [None req-0e777e00-fd9d-4c85-a1de-a0c92a46f747 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: f18d3019-bf1f-4519-a824-7ca80458d793] Successfully updated port: 4556aa7b-156a-4ab0-838d-aed88aa7449f {{(pid=63279) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2204.123812] env[63279]: DEBUG nova.compute.utils [None req-0334c2c0-9949-449c-a374-eaaae4ef764a tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Using /dev/sd instead of None {{(pid=63279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2204.125775] env[63279]: DEBUG nova.compute.manager [None req-0334c2c0-9949-449c-a374-eaaae4ef764a tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] [instance: 9fd3ea14-3e25-47f4-ae84-1b1fb8b1bbf6] Allocating IP information in the background. {{(pid=63279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 2204.126029] env[63279]: DEBUG nova.network.neutron [None req-0334c2c0-9949-449c-a374-eaaae4ef764a tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] [instance: 9fd3ea14-3e25-47f4-ae84-1b1fb8b1bbf6] allocate_for_instance() {{(pid=63279) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2204.182180] env[63279]: DEBUG nova.policy [None req-0334c2c0-9949-449c-a374-eaaae4ef764a tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6343b207f7294f5fa2a8111940083fb0', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b5b21bc5072e4945a19a782dd9561709', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63279) authorize /opt/stack/nova/nova/policy.py:192}} [ 2204.248414] env[63279]: DEBUG oslo_vmware.api [None req-d37d85e0-dc3a-4353-a13a-f209b1ba7eb7 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Task: {'id': task-2087750, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2204.493344] env[63279]: DEBUG nova.network.neutron [None req-0334c2c0-9949-449c-a374-eaaae4ef764a tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] [instance: 9fd3ea14-3e25-47f4-ae84-1b1fb8b1bbf6] Successfully created port: 0bd52488-d960-42c9-8077-fc8fe9eda956 {{(pid=63279) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2204.527792] env[63279]: DEBUG oslo_vmware.api [None req-90a4661b-87b3-4198-ac53-8b946588d520 tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] Task: {'id': task-2087751, 'name': ReconfigVM_Task, 'duration_secs': 0.480597} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2204.531523] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-90a4661b-87b3-4198-ac53-8b946588d520 tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] [instance: 9b98a316-71da-45fb-b895-553f179fe7d9] Reconfigured VM instance instance-00000054 to attach disk [datastore1] 9b98a316-71da-45fb-b895-553f179fe7d9/9b98a316-71da-45fb-b895-553f179fe7d9.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2204.532177] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-49f677ba-ada3-4493-a8ad-2de0683f1075 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2204.542591] env[63279]: DEBUG oslo_vmware.api [None req-90a4661b-87b3-4198-ac53-8b946588d520 tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] Waiting for the task: (returnval){ [ 2204.542591] env[63279]: value = "task-2087753" [ 2204.542591] env[63279]: _type = "Task" [ 2204.542591] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2204.542837] env[63279]: DEBUG oslo_vmware.api [None req-73bbe86f-3fab-4cd0-9b07-18088e80054f tempest-ServersTestFqdnHostnames-1241255271 tempest-ServersTestFqdnHostnames-1241255271-project-member] Task: {'id': task-2087752, 'name': Rename_Task, 'duration_secs': 0.376711} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2204.543185] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-73bbe86f-3fab-4cd0-9b07-18088e80054f tempest-ServersTestFqdnHostnames-1241255271 tempest-ServersTestFqdnHostnames-1241255271-project-member] [instance: 27e2917d-3cd0-4ad3-ab65-f85f7d97225f] Powering on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2204.546176] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-31e3a2f5-70d8-4b87-8f6b-a0e9937c8cb3 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2204.555805] env[63279]: DEBUG oslo_vmware.api [None req-90a4661b-87b3-4198-ac53-8b946588d520 tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] Task: {'id': task-2087753, 'name': Rename_Task} progress is 10%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2204.557101] env[63279]: DEBUG oslo_vmware.api [None req-73bbe86f-3fab-4cd0-9b07-18088e80054f tempest-ServersTestFqdnHostnames-1241255271 tempest-ServersTestFqdnHostnames-1241255271-project-member] Waiting for the task: (returnval){ [ 2204.557101] env[63279]: value = "task-2087754" [ 2204.557101] env[63279]: _type = "Task" [ 2204.557101] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2204.565367] env[63279]: DEBUG oslo_vmware.api [None req-73bbe86f-3fab-4cd0-9b07-18088e80054f tempest-ServersTestFqdnHostnames-1241255271 tempest-ServersTestFqdnHostnames-1241255271-project-member] Task: {'id': task-2087754, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2204.581329] env[63279]: DEBUG oslo_concurrency.lockutils [None req-0e777e00-fd9d-4c85-a1de-a0c92a46f747 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Acquiring lock "refresh_cache-f18d3019-bf1f-4519-a824-7ca80458d793" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2204.581496] env[63279]: DEBUG oslo_concurrency.lockutils [None req-0e777e00-fd9d-4c85-a1de-a0c92a46f747 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Acquired lock "refresh_cache-f18d3019-bf1f-4519-a824-7ca80458d793" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2204.581658] env[63279]: DEBUG nova.network.neutron [None req-0e777e00-fd9d-4c85-a1de-a0c92a46f747 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: f18d3019-bf1f-4519-a824-7ca80458d793] Building network info cache for instance {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2204.633343] env[63279]: DEBUG nova.compute.manager [None req-0334c2c0-9949-449c-a374-eaaae4ef764a tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] [instance: 9fd3ea14-3e25-47f4-ae84-1b1fb8b1bbf6] Start building block device mappings for instance. {{(pid=63279) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 2204.751924] env[63279]: DEBUG oslo_vmware.api [None req-d37d85e0-dc3a-4353-a13a-f209b1ba7eb7 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Task: {'id': task-2087750, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.684026} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2204.752552] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-d37d85e0-dc3a-4353-a13a-f209b1ba7eb7 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] 14c698c8-7459-4843-bb19-f915742e3e53/14c698c8-7459-4843-bb19-f915742e3e53.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2204.752552] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-d37d85e0-dc3a-4353-a13a-f209b1ba7eb7 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] [instance: 14c698c8-7459-4843-bb19-f915742e3e53] Extending root virtual disk to 1048576 {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2204.752749] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-511f6850-1334-427b-a547-a5d6da41d462 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2204.760367] env[63279]: DEBUG oslo_vmware.api [None req-d37d85e0-dc3a-4353-a13a-f209b1ba7eb7 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Waiting for the task: (returnval){ [ 2204.760367] env[63279]: value = "task-2087755" [ 2204.760367] env[63279]: _type = "Task" [ 2204.760367] env[63279]: } to complete. 
{{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2204.771037] env[63279]: DEBUG oslo_vmware.api [None req-d37d85e0-dc3a-4353-a13a-f209b1ba7eb7 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Task: {'id': task-2087755, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2204.896839] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8173228-739f-4cdb-9ad7-348827768924 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2204.907305] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b9c980f-b711-4344-94cf-af68ef0caa8d {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2204.944035] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0288689-4190-499e-bc0e-960aab728ab4 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2204.952724] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b95eb0af-9c83-4ca0-8fc0-bac1f3aaa24c {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2204.967650] env[63279]: DEBUG nova.compute.provider_tree [None req-c2199699-eea6-4f35-987c-22394a2587cb tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Inventory has not changed in ProviderTree for provider: 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2205.055118] env[63279]: DEBUG oslo_vmware.api [None req-90a4661b-87b3-4198-ac53-8b946588d520 tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] Task: {'id': task-2087753, 'name': Rename_Task, 'duration_secs': 0.139475} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2205.055430] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-90a4661b-87b3-4198-ac53-8b946588d520 tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] [instance: 9b98a316-71da-45fb-b895-553f179fe7d9] Powering on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2205.055680] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9ef7a1c2-1991-4d1f-a0cb-6714121e95c3 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2205.063570] env[63279]: DEBUG oslo_vmware.api [None req-90a4661b-87b3-4198-ac53-8b946588d520 tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] Waiting for the task: (returnval){ [ 2205.063570] env[63279]: value = "task-2087756" [ 2205.063570] env[63279]: _type = "Task" [ 2205.063570] env[63279]: } to complete. 
{{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2205.066756] env[63279]: DEBUG oslo_vmware.api [None req-73bbe86f-3fab-4cd0-9b07-18088e80054f tempest-ServersTestFqdnHostnames-1241255271 tempest-ServersTestFqdnHostnames-1241255271-project-member] Task: {'id': task-2087754, 'name': PowerOnVM_Task, 'duration_secs': 0.462094} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2205.069801] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-73bbe86f-3fab-4cd0-9b07-18088e80054f tempest-ServersTestFqdnHostnames-1241255271 tempest-ServersTestFqdnHostnames-1241255271-project-member] [instance: 27e2917d-3cd0-4ad3-ab65-f85f7d97225f] Powered on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2205.070029] env[63279]: INFO nova.compute.manager [None req-73bbe86f-3fab-4cd0-9b07-18088e80054f tempest-ServersTestFqdnHostnames-1241255271 tempest-ServersTestFqdnHostnames-1241255271-project-member] [instance: 27e2917d-3cd0-4ad3-ab65-f85f7d97225f] Took 10.21 seconds to spawn the instance on the hypervisor. [ 2205.071059] env[63279]: DEBUG nova.compute.manager [None req-73bbe86f-3fab-4cd0-9b07-18088e80054f tempest-ServersTestFqdnHostnames-1241255271 tempest-ServersTestFqdnHostnames-1241255271-project-member] [instance: 27e2917d-3cd0-4ad3-ab65-f85f7d97225f] Checking state {{(pid=63279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2205.071138] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b82d37d3-6042-4249-905c-c924ce5499ff {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2205.085445] env[63279]: DEBUG oslo_vmware.api [None req-90a4661b-87b3-4198-ac53-8b946588d520 tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] Task: {'id': task-2087756, 'name': PowerOnVM_Task} progress is 33%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2205.117695] env[63279]: DEBUG nova.network.neutron [None req-0e777e00-fd9d-4c85-a1de-a0c92a46f747 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: f18d3019-bf1f-4519-a824-7ca80458d793] Instance cache missing network info. {{(pid=63279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2205.270606] env[63279]: DEBUG oslo_vmware.api [None req-d37d85e0-dc3a-4353-a13a-f209b1ba7eb7 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Task: {'id': task-2087755, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.068423} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2205.271260] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-d37d85e0-dc3a-4353-a13a-f209b1ba7eb7 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] [instance: 14c698c8-7459-4843-bb19-f915742e3e53] Extended root virtual disk {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2205.271753] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-746ff2b8-f167-4aba-9edf-9df4e548c36a {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2205.295264] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-d37d85e0-dc3a-4353-a13a-f209b1ba7eb7 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] [instance: 14c698c8-7459-4843-bb19-f915742e3e53] Reconfiguring VM instance instance-00000056 to attach disk [datastore1] 14c698c8-7459-4843-bb19-f915742e3e53/14c698c8-7459-4843-bb19-f915742e3e53.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2205.296428] env[63279]: DEBUG nova.network.neutron [None req-0e777e00-fd9d-4c85-a1de-a0c92a46f747 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: f18d3019-bf1f-4519-a824-7ca80458d793] Updating instance_info_cache with network_info: [{"id": "4556aa7b-156a-4ab0-838d-aed88aa7449f", "address": "fa:16:3e:c4:58:31", "network": {"id": "e2728625-1c28-407c-946b-97923d57b409", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1735124510-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a5a719a21fe248c49d0d0151d218866b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a9abd00f-2cea-40f8-9804-a56b6431192d", "external-id": "nsx-vlan-transportzone-639", "segmentation_id": 639, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4556aa7b-15", "ovs_interfaceid": "4556aa7b-156a-4ab0-838d-aed88aa7449f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2205.298041] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6a12d984-f4be-4288-a9b8-99c7567fc1f0 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2205.316078] env[63279]: DEBUG oslo_concurrency.lockutils [None req-0e777e00-fd9d-4c85-a1de-a0c92a46f747 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Releasing lock "refresh_cache-f18d3019-bf1f-4519-a824-7ca80458d793" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2205.316078] env[63279]: DEBUG nova.compute.manager [None req-0e777e00-fd9d-4c85-a1de-a0c92a46f747 
tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: f18d3019-bf1f-4519-a824-7ca80458d793] Instance network_info: |[{"id": "4556aa7b-156a-4ab0-838d-aed88aa7449f", "address": "fa:16:3e:c4:58:31", "network": {"id": "e2728625-1c28-407c-946b-97923d57b409", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1735124510-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a5a719a21fe248c49d0d0151d218866b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a9abd00f-2cea-40f8-9804-a56b6431192d", "external-id": "nsx-vlan-transportzone-639", "segmentation_id": 639, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4556aa7b-15", "ovs_interfaceid": "4556aa7b-156a-4ab0-838d-aed88aa7449f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 2205.316078] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-0e777e00-fd9d-4c85-a1de-a0c92a46f747 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: f18d3019-bf1f-4519-a824-7ca80458d793] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c4:58:31', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a9abd00f-2cea-40f8-9804-a56b6431192d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '4556aa7b-156a-4ab0-838d-aed88aa7449f', 'vif_model': 'vmxnet3'}] {{(pid=63279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2205.323246] env[63279]: DEBUG oslo.service.loopingcall [None req-0e777e00-fd9d-4c85-a1de-a0c92a46f747 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2205.324236] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f18d3019-bf1f-4519-a824-7ca80458d793] Creating VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2205.325058] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5e1965a2-c3fb-467d-b317-15348d03ae0a {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2205.343184] env[63279]: DEBUG oslo_vmware.api [None req-d37d85e0-dc3a-4353-a13a-f209b1ba7eb7 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Waiting for the task: (returnval){ [ 2205.343184] env[63279]: value = "task-2087757" [ 2205.343184] env[63279]: _type = "Task" [ 2205.343184] env[63279]: } to complete. 
{{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2205.348328] env[63279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2205.348328] env[63279]: value = "task-2087758" [ 2205.348328] env[63279]: _type = "Task" [ 2205.348328] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2205.355840] env[63279]: DEBUG oslo_vmware.api [None req-d37d85e0-dc3a-4353-a13a-f209b1ba7eb7 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Task: {'id': task-2087757, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2205.362733] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087758, 'name': CreateVM_Task} progress is 5%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2205.390925] env[63279]: DEBUG oslo_concurrency.lockutils [None req-9be47d77-4fd9-4f15-a0d1-1a230778846d tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] Acquiring lock "59ad6180-e561-41e3-86e4-37fc20819578" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2205.391193] env[63279]: DEBUG oslo_concurrency.lockutils [None req-9be47d77-4fd9-4f15-a0d1-1a230778846d tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] Lock "59ad6180-e561-41e3-86e4-37fc20819578" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2205.472756] env[63279]: DEBUG nova.scheduler.client.report [None req-c2199699-eea6-4f35-987c-22394a2587cb tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Inventory has not changed for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2205.576637] env[63279]: DEBUG oslo_vmware.api [None req-90a4661b-87b3-4198-ac53-8b946588d520 tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] Task: {'id': task-2087756, 'name': PowerOnVM_Task} progress is 100%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2205.594710] env[63279]: INFO nova.compute.manager [None req-73bbe86f-3fab-4cd0-9b07-18088e80054f tempest-ServersTestFqdnHostnames-1241255271 tempest-ServersTestFqdnHostnames-1241255271-project-member] [instance: 27e2917d-3cd0-4ad3-ab65-f85f7d97225f] Took 21.84 seconds to build instance. 
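The "Acquiring lock" / "Acquired lock" / "Releasing lock" lines above (for example around the [datastore1] devstack-image-cache_base entries) are emitted by oslo.concurrency's lockutils, which Nova uses to serialize access to the datastore image cache while it searches for and copies the cached VMDK. A minimal sketch of that pattern follows; the lock name and the helper function are illustrative, not Nova's actual code:

    # Illustrative only: mirrors the lockutils pattern visible in the log above.
    from oslo_concurrency import lockutils

    def fetch_image_if_missing(image_id):
        # lockutils.lock() is a context manager. Entering it produces the
        # "Acquiring lock" / "Acquired lock" debug lines and leaving it produces
        # "Releasing lock", matching the lockutils.py line numbers in the log.
        cache_lock = '[datastore1] devstack-image-cache_base/%s' % image_id
        with lockutils.lock(cache_lock):
            # ... check the datastore cache and copy the image VMDK if absent ...
            pass

The decorator form, lockutils.synchronized(name), is what produces the related 'Lock "..." acquired by "..." :: waited' and '"released" by "..." :: held' lines that bracket operations such as instance_claim elsewhere in the log.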
[ 2205.645892] env[63279]: DEBUG nova.compute.manager [None req-0334c2c0-9949-449c-a374-eaaae4ef764a tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] [instance: 9fd3ea14-3e25-47f4-ae84-1b1fb8b1bbf6] Start spawning the instance on the hypervisor. {{(pid=63279) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 2205.658391] env[63279]: DEBUG oslo_concurrency.lockutils [None req-485e178e-d558-4a6b-a8cf-935aa1c4ec1f tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Acquiring lock "fcdd3076-2b53-4850-9730-2f877e2cabfd" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2205.658639] env[63279]: DEBUG oslo_concurrency.lockutils [None req-485e178e-d558-4a6b-a8cf-935aa1c4ec1f tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Lock "fcdd3076-2b53-4850-9730-2f877e2cabfd" acquired by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2205.658900] env[63279]: INFO nova.compute.manager [None req-485e178e-d558-4a6b-a8cf-935aa1c4ec1f tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: fcdd3076-2b53-4850-9730-2f877e2cabfd] Rebooting instance [ 2205.673779] env[63279]: DEBUG nova.virt.hardware [None req-0334c2c0-9949-449c-a374-eaaae4ef764a tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-13T17:30:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-13T17:30:01Z,direct_url=,disk_format='vmdk',id=30887889-e45b-4f67-8b3c-16216e594a90,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a935e86eee0a4c38adfe0367d2097a61',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-13T17:30:02Z,virtual_size=,visibility=), allow threads: False {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2205.674062] env[63279]: DEBUG nova.virt.hardware [None req-0334c2c0-9949-449c-a374-eaaae4ef764a tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Flavor limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2205.674256] env[63279]: DEBUG nova.virt.hardware [None req-0334c2c0-9949-449c-a374-eaaae4ef764a tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Image limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2205.674478] env[63279]: DEBUG nova.virt.hardware [None req-0334c2c0-9949-449c-a374-eaaae4ef764a tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Flavor pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:392}} [ 2205.674672] env[63279]: DEBUG nova.virt.hardware [None req-0334c2c0-9949-449c-a374-eaaae4ef764a tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Image pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2205.674866] env[63279]: DEBUG nova.virt.hardware [None req-0334c2c0-9949-449c-a374-eaaae4ef764a tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2205.678024] env[63279]: DEBUG nova.virt.hardware [None req-0334c2c0-9949-449c-a374-eaaae4ef764a tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 2205.678024] env[63279]: DEBUG nova.virt.hardware [None req-0334c2c0-9949-449c-a374-eaaae4ef764a tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 2205.678024] env[63279]: DEBUG nova.virt.hardware [None req-0334c2c0-9949-449c-a374-eaaae4ef764a tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Got 1 possible topologies {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2205.678024] env[63279]: DEBUG nova.virt.hardware [None req-0334c2c0-9949-449c-a374-eaaae4ef764a tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2205.678024] env[63279]: DEBUG nova.virt.hardware [None req-0334c2c0-9949-449c-a374-eaaae4ef764a tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2205.679398] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-704219fe-75b9-491c-b0c7-91a4102d24a0 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2205.688867] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3b195e9-1a00-4f08-ae0d-4ed5d3bdbaa8 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2205.859597] env[63279]: DEBUG oslo_vmware.api [None req-d37d85e0-dc3a-4353-a13a-f209b1ba7eb7 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Task: {'id': task-2087757, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2205.866669] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087758, 'name': CreateVM_Task} progress is 25%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2205.893394] env[63279]: DEBUG nova.compute.manager [None req-9be47d77-4fd9-4f15-a0d1-1a230778846d tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] [instance: 59ad6180-e561-41e3-86e4-37fc20819578] Starting instance... {{(pid=63279) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 2205.980075] env[63279]: DEBUG oslo_concurrency.lockutils [None req-c2199699-eea6-4f35-987c-22394a2587cb tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.364s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2205.980691] env[63279]: DEBUG nova.compute.manager [None req-c2199699-eea6-4f35-987c-22394a2587cb tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] [instance: 69b3269a-2ba3-4f5f-a29c-62518c93da3d] Start building networks asynchronously for instance. {{(pid=63279) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 2205.986828] env[63279]: DEBUG oslo_concurrency.lockutils [None req-f1b5e7c2-edce-4a9b-b5a8-d1c8f7da6eb4 tempest-ServersTestMultiNic-465204663 tempest-ServersTestMultiNic-465204663-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 7.380s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2205.987077] env[63279]: DEBUG nova.objects.instance [None req-f1b5e7c2-edce-4a9b-b5a8-d1c8f7da6eb4 tempest-ServersTestMultiNic-465204663 tempest-ServersTestMultiNic-465204663-project-member] Lazy-loading 'resources' on Instance uuid 75f5b49f-14e7-4a8e-a0cb-b955edc13dd5 {{(pid=63279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2206.085021] env[63279]: DEBUG oslo_vmware.api [None req-90a4661b-87b3-4198-ac53-8b946588d520 tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] Task: {'id': task-2087756, 'name': PowerOnVM_Task, 'duration_secs': 0.530962} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2206.085021] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-90a4661b-87b3-4198-ac53-8b946588d520 tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] [instance: 9b98a316-71da-45fb-b895-553f179fe7d9] Powered on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2206.085021] env[63279]: INFO nova.compute.manager [None req-90a4661b-87b3-4198-ac53-8b946588d520 tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] [instance: 9b98a316-71da-45fb-b895-553f179fe7d9] Took 13.66 seconds to spawn the instance on the hypervisor. 
[ 2206.085021] env[63279]: DEBUG nova.compute.manager [None req-90a4661b-87b3-4198-ac53-8b946588d520 tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] [instance: 9b98a316-71da-45fb-b895-553f179fe7d9] Checking state {{(pid=63279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2206.085021] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4367bbe-0be3-4f39-991d-3380af1d2c73 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2206.097168] env[63279]: DEBUG oslo_concurrency.lockutils [None req-73bbe86f-3fab-4cd0-9b07-18088e80054f tempest-ServersTestFqdnHostnames-1241255271 tempest-ServersTestFqdnHostnames-1241255271-project-member] Lock "27e2917d-3cd0-4ad3-ab65-f85f7d97225f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 23.350s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2206.123836] env[63279]: DEBUG nova.compute.manager [req-f0757c33-1d68-47d0-a899-31e9f548e91a req-828c2043-f33f-4491-bc0f-a4198fb6162c service nova] [instance: f18d3019-bf1f-4519-a824-7ca80458d793] Received event network-changed-4556aa7b-156a-4ab0-838d-aed88aa7449f {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2206.123836] env[63279]: DEBUG nova.compute.manager [req-f0757c33-1d68-47d0-a899-31e9f548e91a req-828c2043-f33f-4491-bc0f-a4198fb6162c service nova] [instance: f18d3019-bf1f-4519-a824-7ca80458d793] Refreshing instance network info cache due to event network-changed-4556aa7b-156a-4ab0-838d-aed88aa7449f. {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11655}} [ 2206.123836] env[63279]: DEBUG oslo_concurrency.lockutils [req-f0757c33-1d68-47d0-a899-31e9f548e91a req-828c2043-f33f-4491-bc0f-a4198fb6162c service nova] Acquiring lock "refresh_cache-f18d3019-bf1f-4519-a824-7ca80458d793" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2206.126332] env[63279]: DEBUG oslo_concurrency.lockutils [req-f0757c33-1d68-47d0-a899-31e9f548e91a req-828c2043-f33f-4491-bc0f-a4198fb6162c service nova] Acquired lock "refresh_cache-f18d3019-bf1f-4519-a824-7ca80458d793" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2206.126332] env[63279]: DEBUG nova.network.neutron [req-f0757c33-1d68-47d0-a899-31e9f548e91a req-828c2043-f33f-4491-bc0f-a4198fb6162c service nova] [instance: f18d3019-bf1f-4519-a824-7ca80458d793] Refreshing network info cache for port 4556aa7b-156a-4ab0-838d-aed88aa7449f {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2206.186659] env[63279]: DEBUG oslo_concurrency.lockutils [None req-485e178e-d558-4a6b-a8cf-935aa1c4ec1f tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Acquiring lock "refresh_cache-fcdd3076-2b53-4850-9730-2f877e2cabfd" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2206.186659] env[63279]: DEBUG oslo_concurrency.lockutils [None req-485e178e-d558-4a6b-a8cf-935aa1c4ec1f tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Acquired lock "refresh_cache-fcdd3076-2b53-4850-9730-2f877e2cabfd" {{(pid=63279) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2206.186659] env[63279]: DEBUG nova.network.neutron [None req-485e178e-d558-4a6b-a8cf-935aa1c4ec1f tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: fcdd3076-2b53-4850-9730-2f877e2cabfd] Building network info cache for instance {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2206.300539] env[63279]: DEBUG nova.network.neutron [None req-0334c2c0-9949-449c-a374-eaaae4ef764a tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] [instance: 9fd3ea14-3e25-47f4-ae84-1b1fb8b1bbf6] Successfully updated port: 0bd52488-d960-42c9-8077-fc8fe9eda956 {{(pid=63279) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2206.354096] env[63279]: DEBUG oslo_vmware.api [None req-d37d85e0-dc3a-4353-a13a-f209b1ba7eb7 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Task: {'id': task-2087757, 'name': ReconfigVM_Task, 'duration_secs': 0.679393} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2206.357071] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-d37d85e0-dc3a-4353-a13a-f209b1ba7eb7 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] [instance: 14c698c8-7459-4843-bb19-f915742e3e53] Reconfigured VM instance instance-00000056 to attach disk [datastore1] 14c698c8-7459-4843-bb19-f915742e3e53/14c698c8-7459-4843-bb19-f915742e3e53.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2206.357694] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-adf04921-70de-42d0-87ef-224b2b3484d4 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2206.364096] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087758, 'name': CreateVM_Task, 'duration_secs': 0.72685} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2206.365153] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f18d3019-bf1f-4519-a824-7ca80458d793] Created VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2206.365491] env[63279]: DEBUG oslo_vmware.api [None req-d37d85e0-dc3a-4353-a13a-f209b1ba7eb7 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Waiting for the task: (returnval){ [ 2206.365491] env[63279]: value = "task-2087759" [ 2206.365491] env[63279]: _type = "Task" [ 2206.365491] env[63279]: } to complete. 
{{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2206.366171] env[63279]: DEBUG oslo_concurrency.lockutils [None req-0e777e00-fd9d-4c85-a1de-a0c92a46f747 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2206.366376] env[63279]: DEBUG oslo_concurrency.lockutils [None req-0e777e00-fd9d-4c85-a1de-a0c92a46f747 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2206.366694] env[63279]: DEBUG oslo_concurrency.lockutils [None req-0e777e00-fd9d-4c85-a1de-a0c92a46f747 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2206.366996] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3f76b2a5-cfe5-456b-b61f-1fce84881fc6 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2206.377956] env[63279]: DEBUG oslo_vmware.api [None req-d37d85e0-dc3a-4353-a13a-f209b1ba7eb7 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Task: {'id': task-2087759, 'name': Rename_Task} progress is 6%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2206.378262] env[63279]: DEBUG oslo_vmware.api [None req-0e777e00-fd9d-4c85-a1de-a0c92a46f747 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Waiting for the task: (returnval){ [ 2206.378262] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]5293e12e-6937-8dca-d8a6-64d1cf9f617a" [ 2206.378262] env[63279]: _type = "Task" [ 2206.378262] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2206.385522] env[63279]: DEBUG oslo_vmware.api [None req-0e777e00-fd9d-4c85-a1de-a0c92a46f747 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]5293e12e-6937-8dca-d8a6-64d1cf9f617a, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2206.418632] env[63279]: DEBUG oslo_concurrency.lockutils [None req-9be47d77-4fd9-4f15-a0d1-1a230778846d tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2206.490460] env[63279]: DEBUG nova.compute.utils [None req-c2199699-eea6-4f35-987c-22394a2587cb tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Using /dev/sd instead of None {{(pid=63279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2206.492903] env[63279]: DEBUG nova.compute.manager [None req-c2199699-eea6-4f35-987c-22394a2587cb tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] [instance: 69b3269a-2ba3-4f5f-a29c-62518c93da3d] Allocating IP information in the background. {{(pid=63279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 2206.493153] env[63279]: DEBUG nova.network.neutron [None req-c2199699-eea6-4f35-987c-22394a2587cb tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] [instance: 69b3269a-2ba3-4f5f-a29c-62518c93da3d] allocate_for_instance() {{(pid=63279) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2206.575091] env[63279]: DEBUG nova.policy [None req-c2199699-eea6-4f35-987c-22394a2587cb tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '55199c771de04067a936194078ef99f6', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4fcef39e334249afb9636455802059c5', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63279) authorize /opt/stack/nova/nova/policy.py:192}} [ 2206.605745] env[63279]: INFO nova.compute.manager [None req-90a4661b-87b3-4198-ac53-8b946588d520 tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] [instance: 9b98a316-71da-45fb-b895-553f179fe7d9] Took 23.92 seconds to build instance. 
[ 2206.742434] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c03614e-d328-474a-a980-c743e7037c8b {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2206.750146] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6d6f494-f062-4b18-ba4d-12886efe3302 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2206.780948] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa84f37f-25cc-44a6-94ea-fa9e593ca0b8 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2206.788157] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7155683-a969-4486-b01a-d2f331aa807b {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2206.804801] env[63279]: DEBUG oslo_concurrency.lockutils [None req-0334c2c0-9949-449c-a374-eaaae4ef764a tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Acquiring lock "refresh_cache-9fd3ea14-3e25-47f4-ae84-1b1fb8b1bbf6" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2206.804972] env[63279]: DEBUG oslo_concurrency.lockutils [None req-0334c2c0-9949-449c-a374-eaaae4ef764a tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Acquired lock "refresh_cache-9fd3ea14-3e25-47f4-ae84-1b1fb8b1bbf6" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2206.805119] env[63279]: DEBUG nova.network.neutron [None req-0334c2c0-9949-449c-a374-eaaae4ef764a tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] [instance: 9fd3ea14-3e25-47f4-ae84-1b1fb8b1bbf6] Building network info cache for instance {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2206.806769] env[63279]: DEBUG nova.compute.provider_tree [None req-f1b5e7c2-edce-4a9b-b5a8-d1c8f7da6eb4 tempest-ServersTestMultiNic-465204663 tempest-ServersTestMultiNic-465204663-project-member] Inventory has not changed in ProviderTree for provider: 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2206.889210] env[63279]: DEBUG oslo_vmware.api [None req-d37d85e0-dc3a-4353-a13a-f209b1ba7eb7 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Task: {'id': task-2087759, 'name': Rename_Task, 'duration_secs': 0.184315} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2206.890062] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-d37d85e0-dc3a-4353-a13a-f209b1ba7eb7 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] [instance: 14c698c8-7459-4843-bb19-f915742e3e53] Powering on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2206.890833] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-300b67f7-bd11-4ab5-9a29-b2c7536a7790 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2206.898723] env[63279]: DEBUG oslo_vmware.api [None req-0e777e00-fd9d-4c85-a1de-a0c92a46f747 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]5293e12e-6937-8dca-d8a6-64d1cf9f617a, 'name': SearchDatastore_Task, 'duration_secs': 0.015127} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2206.899587] env[63279]: DEBUG oslo_concurrency.lockutils [None req-0e777e00-fd9d-4c85-a1de-a0c92a46f747 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2206.899955] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-0e777e00-fd9d-4c85-a1de-a0c92a46f747 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: f18d3019-bf1f-4519-a824-7ca80458d793] Processing image 30887889-e45b-4f67-8b3c-16216e594a90 {{(pid=63279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2206.900355] env[63279]: DEBUG oslo_concurrency.lockutils [None req-0e777e00-fd9d-4c85-a1de-a0c92a46f747 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2206.900635] env[63279]: DEBUG oslo_concurrency.lockutils [None req-0e777e00-fd9d-4c85-a1de-a0c92a46f747 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2206.900944] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-0e777e00-fd9d-4c85-a1de-a0c92a46f747 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2206.901334] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6b31c35f-4e6c-42ac-abca-a555b8e38e02 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2206.905430] env[63279]: DEBUG oslo_vmware.api [None 
req-d37d85e0-dc3a-4353-a13a-f209b1ba7eb7 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Waiting for the task: (returnval){ [ 2206.905430] env[63279]: value = "task-2087760" [ 2206.905430] env[63279]: _type = "Task" [ 2206.905430] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2206.913472] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-0e777e00-fd9d-4c85-a1de-a0c92a46f747 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2206.913656] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-0e777e00-fd9d-4c85-a1de-a0c92a46f747 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2206.919144] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a3adc9a1-a271-40a8-82c7-051a8d852eac {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2206.919211] env[63279]: DEBUG oslo_vmware.api [None req-d37d85e0-dc3a-4353-a13a-f209b1ba7eb7 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Task: {'id': task-2087760, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2206.924345] env[63279]: DEBUG oslo_vmware.api [None req-0e777e00-fd9d-4c85-a1de-a0c92a46f747 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Waiting for the task: (returnval){ [ 2206.924345] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52d9aff1-010d-8185-68fe-40b1fd4ccb91" [ 2206.924345] env[63279]: _type = "Task" [ 2206.924345] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2206.932297] env[63279]: DEBUG oslo_vmware.api [None req-0e777e00-fd9d-4c85-a1de-a0c92a46f747 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52d9aff1-010d-8185-68fe-40b1fd4ccb91, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2206.957759] env[63279]: DEBUG nova.network.neutron [None req-485e178e-d558-4a6b-a8cf-935aa1c4ec1f tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: fcdd3076-2b53-4850-9730-2f877e2cabfd] Updating instance_info_cache with network_info: [{"id": "7baa1106-63c3-49cd-8d80-54c763d1b2b3", "address": "fa:16:3e:cc:16:0a", "network": {"id": "82ca068b-acea-4f60-b0b3-68ea1e08aebe", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-571699353-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.165", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f39174e9ff5649e0ade4391da383dfb2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "57d4be17-536f-4a81-bea9-6547bd50f4a3", "external-id": "nsx-vlan-transportzone-163", "segmentation_id": 163, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7baa1106-63", "ovs_interfaceid": "7baa1106-63c3-49cd-8d80-54c763d1b2b3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2206.996795] env[63279]: DEBUG nova.compute.manager [None req-c2199699-eea6-4f35-987c-22394a2587cb tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] [instance: 69b3269a-2ba3-4f5f-a29c-62518c93da3d] Start building block device mappings for instance. {{(pid=63279) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 2207.107827] env[63279]: DEBUG oslo_concurrency.lockutils [None req-90a4661b-87b3-4198-ac53-8b946588d520 tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] Lock "9b98a316-71da-45fb-b895-553f179fe7d9" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 25.429s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2207.180116] env[63279]: DEBUG nova.network.neutron [req-f0757c33-1d68-47d0-a899-31e9f548e91a req-828c2043-f33f-4491-bc0f-a4198fb6162c service nova] [instance: f18d3019-bf1f-4519-a824-7ca80458d793] Updated VIF entry in instance network info cache for port 4556aa7b-156a-4ab0-838d-aed88aa7449f. 
{{(pid=63279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2207.180494] env[63279]: DEBUG nova.network.neutron [req-f0757c33-1d68-47d0-a899-31e9f548e91a req-828c2043-f33f-4491-bc0f-a4198fb6162c service nova] [instance: f18d3019-bf1f-4519-a824-7ca80458d793] Updating instance_info_cache with network_info: [{"id": "4556aa7b-156a-4ab0-838d-aed88aa7449f", "address": "fa:16:3e:c4:58:31", "network": {"id": "e2728625-1c28-407c-946b-97923d57b409", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1735124510-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a5a719a21fe248c49d0d0151d218866b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a9abd00f-2cea-40f8-9804-a56b6431192d", "external-id": "nsx-vlan-transportzone-639", "segmentation_id": 639, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4556aa7b-15", "ovs_interfaceid": "4556aa7b-156a-4ab0-838d-aed88aa7449f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2207.273227] env[63279]: DEBUG nova.network.neutron [None req-c2199699-eea6-4f35-987c-22394a2587cb tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] [instance: 69b3269a-2ba3-4f5f-a29c-62518c93da3d] Successfully created port: 482c0084-ab15-4c03-beb3-585f9e66c683 {{(pid=63279) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2207.312579] env[63279]: DEBUG nova.scheduler.client.report [None req-f1b5e7c2-edce-4a9b-b5a8-d1c8f7da6eb4 tempest-ServersTestMultiNic-465204663 tempest-ServersTestMultiNic-465204663-project-member] Inventory has not changed for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2207.388541] env[63279]: DEBUG nova.network.neutron [None req-0334c2c0-9949-449c-a374-eaaae4ef764a tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] [instance: 9fd3ea14-3e25-47f4-ae84-1b1fb8b1bbf6] Instance cache missing network info. {{(pid=63279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2207.424050] env[63279]: DEBUG oslo_vmware.api [None req-d37d85e0-dc3a-4353-a13a-f209b1ba7eb7 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Task: {'id': task-2087760, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2207.440894] env[63279]: DEBUG oslo_vmware.api [None req-0e777e00-fd9d-4c85-a1de-a0c92a46f747 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52d9aff1-010d-8185-68fe-40b1fd4ccb91, 'name': SearchDatastore_Task, 'duration_secs': 0.011911} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2207.440894] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-eb2f85ae-619f-457a-8795-b26d76907cdf {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2207.445321] env[63279]: DEBUG oslo_vmware.api [None req-0e777e00-fd9d-4c85-a1de-a0c92a46f747 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Waiting for the task: (returnval){ [ 2207.445321] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52ca69af-eccb-5260-1c96-030bcf7ed34f" [ 2207.445321] env[63279]: _type = "Task" [ 2207.445321] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2207.454266] env[63279]: DEBUG oslo_vmware.api [None req-0e777e00-fd9d-4c85-a1de-a0c92a46f747 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52ca69af-eccb-5260-1c96-030bcf7ed34f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2207.463412] env[63279]: DEBUG oslo_concurrency.lockutils [None req-485e178e-d558-4a6b-a8cf-935aa1c4ec1f tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Releasing lock "refresh_cache-fcdd3076-2b53-4850-9730-2f877e2cabfd" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2207.683971] env[63279]: DEBUG oslo_concurrency.lockutils [req-f0757c33-1d68-47d0-a899-31e9f548e91a req-828c2043-f33f-4491-bc0f-a4198fb6162c service nova] Releasing lock "refresh_cache-f18d3019-bf1f-4519-a824-7ca80458d793" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2207.709092] env[63279]: DEBUG nova.network.neutron [None req-0334c2c0-9949-449c-a374-eaaae4ef764a tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] [instance: 9fd3ea14-3e25-47f4-ae84-1b1fb8b1bbf6] Updating instance_info_cache with network_info: [{"id": "0bd52488-d960-42c9-8077-fc8fe9eda956", "address": "fa:16:3e:16:45:3c", "network": {"id": "e0e614b7-de4b-485e-8824-582faae8febd", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1102025073-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b5b21bc5072e4945a19a782dd9561709", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": 
{"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "41278529-8bd2-44a1-97c8-03967faa3ff7", "external-id": "nsx-vlan-transportzone-749", "segmentation_id": 749, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0bd52488-d9", "ovs_interfaceid": "0bd52488-d960-42c9-8077-fc8fe9eda956", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2207.818367] env[63279]: DEBUG oslo_concurrency.lockutils [None req-f1b5e7c2-edce-4a9b-b5a8-d1c8f7da6eb4 tempest-ServersTestMultiNic-465204663 tempest-ServersTestMultiNic-465204663-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.831s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2207.823539] env[63279]: DEBUG oslo_concurrency.lockutils [None req-deafd1a3-1917-46b5-a734-532a815b3d5e tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 8.937s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2207.852709] env[63279]: INFO nova.scheduler.client.report [None req-f1b5e7c2-edce-4a9b-b5a8-d1c8f7da6eb4 tempest-ServersTestMultiNic-465204663 tempest-ServersTestMultiNic-465204663-project-member] Deleted allocations for instance 75f5b49f-14e7-4a8e-a0cb-b955edc13dd5 [ 2207.922951] env[63279]: DEBUG oslo_vmware.api [None req-d37d85e0-dc3a-4353-a13a-f209b1ba7eb7 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Task: {'id': task-2087760, 'name': PowerOnVM_Task, 'duration_secs': 0.788836} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2207.923310] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-d37d85e0-dc3a-4353-a13a-f209b1ba7eb7 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] [instance: 14c698c8-7459-4843-bb19-f915742e3e53] Powered on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2207.923520] env[63279]: INFO nova.compute.manager [None req-d37d85e0-dc3a-4353-a13a-f209b1ba7eb7 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] [instance: 14c698c8-7459-4843-bb19-f915742e3e53] Took 10.67 seconds to spawn the instance on the hypervisor. 
[ 2207.923732] env[63279]: DEBUG nova.compute.manager [None req-d37d85e0-dc3a-4353-a13a-f209b1ba7eb7 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] [instance: 14c698c8-7459-4843-bb19-f915742e3e53] Checking state {{(pid=63279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2207.924573] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0ee0606-4c5a-411f-a0c4-f070e67491ac {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2207.961146] env[63279]: DEBUG oslo_vmware.api [None req-0e777e00-fd9d-4c85-a1de-a0c92a46f747 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52ca69af-eccb-5260-1c96-030bcf7ed34f, 'name': SearchDatastore_Task, 'duration_secs': 0.013852} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2207.961430] env[63279]: DEBUG oslo_concurrency.lockutils [None req-0e777e00-fd9d-4c85-a1de-a0c92a46f747 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2207.961745] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-0e777e00-fd9d-4c85-a1de-a0c92a46f747 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] f18d3019-bf1f-4519-a824-7ca80458d793/f18d3019-bf1f-4519-a824-7ca80458d793.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2207.961939] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c4adedd3-2aec-4788-949f-0652de5b6af8 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2207.968482] env[63279]: DEBUG nova.compute.manager [None req-485e178e-d558-4a6b-a8cf-935aa1c4ec1f tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: fcdd3076-2b53-4850-9730-2f877e2cabfd] Checking state {{(pid=63279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2207.969329] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f43e006-30ec-4c62-adf3-ffa724bb5299 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2207.975982] env[63279]: DEBUG oslo_vmware.api [None req-0e777e00-fd9d-4c85-a1de-a0c92a46f747 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Waiting for the task: (returnval){ [ 2207.975982] env[63279]: value = "task-2087761" [ 2207.975982] env[63279]: _type = "Task" [ 2207.975982] env[63279]: } to complete. 
{{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2207.990208] env[63279]: DEBUG oslo_vmware.api [None req-0e777e00-fd9d-4c85-a1de-a0c92a46f747 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Task: {'id': task-2087761, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2207.994028] env[63279]: DEBUG nova.compute.manager [req-5e381dc6-caac-469e-9315-31be6ee2e63d req-220cbb1d-7e3b-462d-87bf-7506febdc68f service nova] [instance: 9b98a316-71da-45fb-b895-553f179fe7d9] Received event network-changed-162a4844-743a-4cba-b137-f35170a3d072 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2207.994271] env[63279]: DEBUG nova.compute.manager [req-5e381dc6-caac-469e-9315-31be6ee2e63d req-220cbb1d-7e3b-462d-87bf-7506febdc68f service nova] [instance: 9b98a316-71da-45fb-b895-553f179fe7d9] Refreshing instance network info cache due to event network-changed-162a4844-743a-4cba-b137-f35170a3d072. {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11655}} [ 2207.994568] env[63279]: DEBUG oslo_concurrency.lockutils [req-5e381dc6-caac-469e-9315-31be6ee2e63d req-220cbb1d-7e3b-462d-87bf-7506febdc68f service nova] Acquiring lock "refresh_cache-9b98a316-71da-45fb-b895-553f179fe7d9" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2207.994759] env[63279]: DEBUG oslo_concurrency.lockutils [req-5e381dc6-caac-469e-9315-31be6ee2e63d req-220cbb1d-7e3b-462d-87bf-7506febdc68f service nova] Acquired lock "refresh_cache-9b98a316-71da-45fb-b895-553f179fe7d9" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2207.994970] env[63279]: DEBUG nova.network.neutron [req-5e381dc6-caac-469e-9315-31be6ee2e63d req-220cbb1d-7e3b-462d-87bf-7506febdc68f service nova] [instance: 9b98a316-71da-45fb-b895-553f179fe7d9] Refreshing network info cache for port 162a4844-743a-4cba-b137-f35170a3d072 {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2208.011439] env[63279]: DEBUG nova.compute.manager [None req-c2199699-eea6-4f35-987c-22394a2587cb tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] [instance: 69b3269a-2ba3-4f5f-a29c-62518c93da3d] Start spawning the instance on the hypervisor. 
{{(pid=63279) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 2208.041034] env[63279]: DEBUG nova.virt.hardware [None req-c2199699-eea6-4f35-987c-22394a2587cb tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-13T17:30:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-13T17:30:01Z,direct_url=,disk_format='vmdk',id=30887889-e45b-4f67-8b3c-16216e594a90,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a935e86eee0a4c38adfe0367d2097a61',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-13T17:30:02Z,virtual_size=,visibility=), allow threads: False {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2208.041335] env[63279]: DEBUG nova.virt.hardware [None req-c2199699-eea6-4f35-987c-22394a2587cb tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Flavor limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2208.041514] env[63279]: DEBUG nova.virt.hardware [None req-c2199699-eea6-4f35-987c-22394a2587cb tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Image limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2208.041706] env[63279]: DEBUG nova.virt.hardware [None req-c2199699-eea6-4f35-987c-22394a2587cb tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Flavor pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2208.041872] env[63279]: DEBUG nova.virt.hardware [None req-c2199699-eea6-4f35-987c-22394a2587cb tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Image pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2208.042039] env[63279]: DEBUG nova.virt.hardware [None req-c2199699-eea6-4f35-987c-22394a2587cb tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2208.042312] env[63279]: DEBUG nova.virt.hardware [None req-c2199699-eea6-4f35-987c-22394a2587cb tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 2208.042603] env[63279]: DEBUG nova.virt.hardware [None req-c2199699-eea6-4f35-987c-22394a2587cb tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 2208.042874] env[63279]: DEBUG nova.virt.hardware [None req-c2199699-eea6-4f35-987c-22394a2587cb tempest-ServersTestJSON-1420611653 
tempest-ServersTestJSON-1420611653-project-member] Got 1 possible topologies {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2208.043223] env[63279]: DEBUG nova.virt.hardware [None req-c2199699-eea6-4f35-987c-22394a2587cb tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2208.043564] env[63279]: DEBUG nova.virt.hardware [None req-c2199699-eea6-4f35-987c-22394a2587cb tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2208.044584] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1baf1f32-e3da-464b-8a45-00d083a89ac0 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2208.053130] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0b97aa4-ab24-4fe3-b8cc-0599e6cdb201 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2208.212498] env[63279]: DEBUG oslo_concurrency.lockutils [None req-0334c2c0-9949-449c-a374-eaaae4ef764a tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Releasing lock "refresh_cache-9fd3ea14-3e25-47f4-ae84-1b1fb8b1bbf6" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2208.216511] env[63279]: DEBUG nova.compute.manager [None req-0334c2c0-9949-449c-a374-eaaae4ef764a tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] [instance: 9fd3ea14-3e25-47f4-ae84-1b1fb8b1bbf6] Instance network_info: |[{"id": "0bd52488-d960-42c9-8077-fc8fe9eda956", "address": "fa:16:3e:16:45:3c", "network": {"id": "e0e614b7-de4b-485e-8824-582faae8febd", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1102025073-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b5b21bc5072e4945a19a782dd9561709", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "41278529-8bd2-44a1-97c8-03967faa3ff7", "external-id": "nsx-vlan-transportzone-749", "segmentation_id": 749, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0bd52488-d9", "ovs_interfaceid": "0bd52488-d960-42c9-8077-fc8fe9eda956", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 2208.216511] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-0334c2c0-9949-449c-a374-eaaae4ef764a tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] [instance: 
9fd3ea14-3e25-47f4-ae84-1b1fb8b1bbf6] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:16:45:3c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '41278529-8bd2-44a1-97c8-03967faa3ff7', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '0bd52488-d960-42c9-8077-fc8fe9eda956', 'vif_model': 'vmxnet3'}] {{(pid=63279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2208.224129] env[63279]: DEBUG oslo.service.loopingcall [None req-0334c2c0-9949-449c-a374-eaaae4ef764a tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2208.224445] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9fd3ea14-3e25-47f4-ae84-1b1fb8b1bbf6] Creating VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2208.224777] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-bc3e9bbe-f55f-41af-8484-91bf95d75b08 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2208.247893] env[63279]: DEBUG nova.compute.manager [req-85b6acef-b5c9-4688-b402-95a09c3294ce req-8e4345f9-e8ef-4526-9bbf-f1cdf3aaa454 service nova] [instance: 9fd3ea14-3e25-47f4-ae84-1b1fb8b1bbf6] Received event network-vif-plugged-0bd52488-d960-42c9-8077-fc8fe9eda956 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2208.248130] env[63279]: DEBUG oslo_concurrency.lockutils [req-85b6acef-b5c9-4688-b402-95a09c3294ce req-8e4345f9-e8ef-4526-9bbf-f1cdf3aaa454 service nova] Acquiring lock "9fd3ea14-3e25-47f4-ae84-1b1fb8b1bbf6-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2208.248342] env[63279]: DEBUG oslo_concurrency.lockutils [req-85b6acef-b5c9-4688-b402-95a09c3294ce req-8e4345f9-e8ef-4526-9bbf-f1cdf3aaa454 service nova] Lock "9fd3ea14-3e25-47f4-ae84-1b1fb8b1bbf6-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2208.248508] env[63279]: DEBUG oslo_concurrency.lockutils [req-85b6acef-b5c9-4688-b402-95a09c3294ce req-8e4345f9-e8ef-4526-9bbf-f1cdf3aaa454 service nova] Lock "9fd3ea14-3e25-47f4-ae84-1b1fb8b1bbf6-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2208.248735] env[63279]: DEBUG nova.compute.manager [req-85b6acef-b5c9-4688-b402-95a09c3294ce req-8e4345f9-e8ef-4526-9bbf-f1cdf3aaa454 service nova] [instance: 9fd3ea14-3e25-47f4-ae84-1b1fb8b1bbf6] No waiting events found dispatching network-vif-plugged-0bd52488-d960-42c9-8077-fc8fe9eda956 {{(pid=63279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 2208.248831] env[63279]: WARNING nova.compute.manager [req-85b6acef-b5c9-4688-b402-95a09c3294ce req-8e4345f9-e8ef-4526-9bbf-f1cdf3aaa454 service nova] [instance: 9fd3ea14-3e25-47f4-ae84-1b1fb8b1bbf6] Received unexpected event network-vif-plugged-0bd52488-d960-42c9-8077-fc8fe9eda956 for instance with 
vm_state building and task_state spawning. [ 2208.248988] env[63279]: DEBUG nova.compute.manager [req-85b6acef-b5c9-4688-b402-95a09c3294ce req-8e4345f9-e8ef-4526-9bbf-f1cdf3aaa454 service nova] [instance: 9fd3ea14-3e25-47f4-ae84-1b1fb8b1bbf6] Received event network-changed-0bd52488-d960-42c9-8077-fc8fe9eda956 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2208.249157] env[63279]: DEBUG nova.compute.manager [req-85b6acef-b5c9-4688-b402-95a09c3294ce req-8e4345f9-e8ef-4526-9bbf-f1cdf3aaa454 service nova] [instance: 9fd3ea14-3e25-47f4-ae84-1b1fb8b1bbf6] Refreshing instance network info cache due to event network-changed-0bd52488-d960-42c9-8077-fc8fe9eda956. {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11655}} [ 2208.249450] env[63279]: DEBUG oslo_concurrency.lockutils [req-85b6acef-b5c9-4688-b402-95a09c3294ce req-8e4345f9-e8ef-4526-9bbf-f1cdf3aaa454 service nova] Acquiring lock "refresh_cache-9fd3ea14-3e25-47f4-ae84-1b1fb8b1bbf6" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2208.249525] env[63279]: DEBUG oslo_concurrency.lockutils [req-85b6acef-b5c9-4688-b402-95a09c3294ce req-8e4345f9-e8ef-4526-9bbf-f1cdf3aaa454 service nova] Acquired lock "refresh_cache-9fd3ea14-3e25-47f4-ae84-1b1fb8b1bbf6" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2208.249711] env[63279]: DEBUG nova.network.neutron [req-85b6acef-b5c9-4688-b402-95a09c3294ce req-8e4345f9-e8ef-4526-9bbf-f1cdf3aaa454 service nova] [instance: 9fd3ea14-3e25-47f4-ae84-1b1fb8b1bbf6] Refreshing network info cache for port 0bd52488-d960-42c9-8077-fc8fe9eda956 {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2208.253918] env[63279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2208.253918] env[63279]: value = "task-2087762" [ 2208.253918] env[63279]: _type = "Task" [ 2208.253918] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2208.266349] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087762, 'name': CreateVM_Task} progress is 5%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2208.360846] env[63279]: DEBUG oslo_concurrency.lockutils [None req-f1b5e7c2-edce-4a9b-b5a8-d1c8f7da6eb4 tempest-ServersTestMultiNic-465204663 tempest-ServersTestMultiNic-465204663-project-member] Lock "75f5b49f-14e7-4a8e-a0cb-b955edc13dd5" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 18.050s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2208.449584] env[63279]: INFO nova.compute.manager [None req-d37d85e0-dc3a-4353-a13a-f209b1ba7eb7 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] [instance: 14c698c8-7459-4843-bb19-f915742e3e53] Took 23.21 seconds to build instance. [ 2208.494952] env[63279]: DEBUG oslo_vmware.api [None req-0e777e00-fd9d-4c85-a1de-a0c92a46f747 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Task: {'id': task-2087761, 'name': CopyVirtualDisk_Task} progress is 89%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2208.620337] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0a5f417-310e-425f-b75b-1e3452b64c2a {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2208.630196] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6870849-4ca2-4128-9e85-6f655a838d70 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2208.666249] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1ca7d66-50d3-4fce-b7a3-f504eaccd767 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2208.678284] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b29f325f-565f-4fe3-ad18-d1b97a37430d {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2208.695771] env[63279]: DEBUG nova.compute.provider_tree [None req-deafd1a3-1917-46b5-a734-532a815b3d5e tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Inventory has not changed in ProviderTree for provider: 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2208.763314] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087762, 'name': CreateVM_Task} progress is 99%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2208.788366] env[63279]: DEBUG nova.network.neutron [req-5e381dc6-caac-469e-9315-31be6ee2e63d req-220cbb1d-7e3b-462d-87bf-7506febdc68f service nova] [instance: 9b98a316-71da-45fb-b895-553f179fe7d9] Updated VIF entry in instance network info cache for port 162a4844-743a-4cba-b137-f35170a3d072. 
{{(pid=63279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2208.788882] env[63279]: DEBUG nova.network.neutron [req-5e381dc6-caac-469e-9315-31be6ee2e63d req-220cbb1d-7e3b-462d-87bf-7506febdc68f service nova] [instance: 9b98a316-71da-45fb-b895-553f179fe7d9] Updating instance_info_cache with network_info: [{"id": "162a4844-743a-4cba-b137-f35170a3d072", "address": "fa:16:3e:a1:38:39", "network": {"id": "7b4d0149-03c5-4c40-ba16-d705499cd558", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-1026068065-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.211", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3d9c0a9c34ba408c829c0b50f3592bb2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aaf1b231-3660-4453-b4f3-44d825b9a5dd", "external-id": "nsx-vlan-transportzone-6", "segmentation_id": 6, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap162a4844-74", "ovs_interfaceid": "162a4844-743a-4cba-b137-f35170a3d072", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2208.951516] env[63279]: DEBUG oslo_concurrency.lockutils [None req-d37d85e0-dc3a-4353-a13a-f209b1ba7eb7 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Lock "14c698c8-7459-4843-bb19-f915742e3e53" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 24.753s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2208.991191] env[63279]: DEBUG oslo_vmware.api [None req-0e777e00-fd9d-4c85-a1de-a0c92a46f747 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Task: {'id': task-2087761, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.527122} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2208.991914] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-0e777e00-fd9d-4c85-a1de-a0c92a46f747 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] f18d3019-bf1f-4519-a824-7ca80458d793/f18d3019-bf1f-4519-a824-7ca80458d793.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2208.991914] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-0e777e00-fd9d-4c85-a1de-a0c92a46f747 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: f18d3019-bf1f-4519-a824-7ca80458d793] Extending root virtual disk to 1048576 {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2208.992145] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-4e6e13bf-a5ef-4dd9-a8a8-e58900d71f63 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2209.000238] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dbda0f7f-c75e-4cb5-9b1a-8a1cd343a530 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2209.003967] env[63279]: DEBUG oslo_vmware.api [None req-0e777e00-fd9d-4c85-a1de-a0c92a46f747 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Waiting for the task: (returnval){ [ 2209.003967] env[63279]: value = "task-2087763" [ 2209.003967] env[63279]: _type = "Task" [ 2209.003967] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2209.011991] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-485e178e-d558-4a6b-a8cf-935aa1c4ec1f tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: fcdd3076-2b53-4850-9730-2f877e2cabfd] Doing hard reboot of VM {{(pid=63279) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1063}} [ 2209.012776] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ResetVM_Task with opID=oslo.vmware-8d9516bd-4417-415a-834f-18b1d8c139e9 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2209.018711] env[63279]: DEBUG oslo_vmware.api [None req-0e777e00-fd9d-4c85-a1de-a0c92a46f747 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Task: {'id': task-2087763, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2209.023985] env[63279]: DEBUG oslo_vmware.api [None req-485e178e-d558-4a6b-a8cf-935aa1c4ec1f tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Waiting for the task: (returnval){ [ 2209.023985] env[63279]: value = "task-2087764" [ 2209.023985] env[63279]: _type = "Task" [ 2209.023985] env[63279]: } to complete. 
{{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2209.032524] env[63279]: DEBUG oslo_vmware.api [None req-485e178e-d558-4a6b-a8cf-935aa1c4ec1f tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Task: {'id': task-2087764, 'name': ResetVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2209.080577] env[63279]: DEBUG nova.network.neutron [req-85b6acef-b5c9-4688-b402-95a09c3294ce req-8e4345f9-e8ef-4526-9bbf-f1cdf3aaa454 service nova] [instance: 9fd3ea14-3e25-47f4-ae84-1b1fb8b1bbf6] Updated VIF entry in instance network info cache for port 0bd52488-d960-42c9-8077-fc8fe9eda956. {{(pid=63279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2209.081038] env[63279]: DEBUG nova.network.neutron [req-85b6acef-b5c9-4688-b402-95a09c3294ce req-8e4345f9-e8ef-4526-9bbf-f1cdf3aaa454 service nova] [instance: 9fd3ea14-3e25-47f4-ae84-1b1fb8b1bbf6] Updating instance_info_cache with network_info: [{"id": "0bd52488-d960-42c9-8077-fc8fe9eda956", "address": "fa:16:3e:16:45:3c", "network": {"id": "e0e614b7-de4b-485e-8824-582faae8febd", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1102025073-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b5b21bc5072e4945a19a782dd9561709", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "41278529-8bd2-44a1-97c8-03967faa3ff7", "external-id": "nsx-vlan-transportzone-749", "segmentation_id": 749, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0bd52488-d9", "ovs_interfaceid": "0bd52488-d960-42c9-8077-fc8fe9eda956", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2209.199763] env[63279]: DEBUG nova.scheduler.client.report [None req-deafd1a3-1917-46b5-a734-532a815b3d5e tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Inventory has not changed for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2209.270508] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087762, 'name': CreateVM_Task, 'duration_secs': 0.697707} completed successfully. 
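Each of the Task blocks in this log (CreateVM_Task, CopyVirtualDisk_Task, and so on) is polled until vCenter reports success; the "progress is N%" and "duration_secs" fields come from that polling loop. Below is a minimal sketch of such a loop. The session object and its get_task_info() helper are assumptions standing in for oslo.vmware's real property-collector plumbing, not its actual internals:

    import time

    class TaskError(Exception):
        """Raised when vCenter reports the task in an error state (illustrative)."""

    def wait_for_task(session, task_ref, poll_interval=0.5):
        # Poll the task's info until it reaches a terminal state.
        while True:
            info = session.get_task_info(task_ref)   # hypothetical helper
            if info['state'] == 'success':
                return info.get('result')
            if info['state'] == 'error':
                raise TaskError(info.get('error'))
            # Still queued/running: report progress and try again, as the
            # DEBUG "_poll_task" lines above do.
            print("Task %s progress is %s%%" % (task_ref, info.get('progress', 0)))
            time.sleep(poll_interval)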
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2209.270697] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9fd3ea14-3e25-47f4-ae84-1b1fb8b1bbf6] Created VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2209.271641] env[63279]: DEBUG oslo_concurrency.lockutils [None req-0334c2c0-9949-449c-a374-eaaae4ef764a tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2209.272286] env[63279]: DEBUG oslo_concurrency.lockutils [None req-0334c2c0-9949-449c-a374-eaaae4ef764a tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2209.272626] env[63279]: DEBUG oslo_concurrency.lockutils [None req-0334c2c0-9949-449c-a374-eaaae4ef764a tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2209.272900] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d7e66769-dab9-49cb-9600-747b4092e55d {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2209.278310] env[63279]: DEBUG oslo_vmware.api [None req-0334c2c0-9949-449c-a374-eaaae4ef764a tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Waiting for the task: (returnval){ [ 2209.278310] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]5285ae4d-7bcc-ee08-51aa-1351533d216c" [ 2209.278310] env[63279]: _type = "Task" [ 2209.278310] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2209.288889] env[63279]: DEBUG oslo_vmware.api [None req-0334c2c0-9949-449c-a374-eaaae4ef764a tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]5285ae4d-7bcc-ee08-51aa-1351533d216c, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2209.291515] env[63279]: DEBUG oslo_concurrency.lockutils [req-5e381dc6-caac-469e-9315-31be6ee2e63d req-220cbb1d-7e3b-462d-87bf-7506febdc68f service nova] Releasing lock "refresh_cache-9b98a316-71da-45fb-b895-553f179fe7d9" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2209.454079] env[63279]: DEBUG nova.network.neutron [None req-c2199699-eea6-4f35-987c-22394a2587cb tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] [instance: 69b3269a-2ba3-4f5f-a29c-62518c93da3d] Successfully updated port: 482c0084-ab15-4c03-beb3-585f9e66c683 {{(pid=63279) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2209.517475] env[63279]: DEBUG oslo_vmware.api [None req-0e777e00-fd9d-4c85-a1de-a0c92a46f747 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Task: {'id': task-2087763, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.077588} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2209.517868] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-0e777e00-fd9d-4c85-a1de-a0c92a46f747 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: f18d3019-bf1f-4519-a824-7ca80458d793] Extended root virtual disk {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2209.518673] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f05875b-1a7d-473c-9391-7b87f6c2ef74 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2209.545035] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-0e777e00-fd9d-4c85-a1de-a0c92a46f747 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: f18d3019-bf1f-4519-a824-7ca80458d793] Reconfiguring VM instance instance-00000057 to attach disk [datastore1] f18d3019-bf1f-4519-a824-7ca80458d793/f18d3019-bf1f-4519-a824-7ca80458d793.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2209.548201] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-335114ff-07ed-4ea4-8ce5-b12cad16fe0b {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2209.568277] env[63279]: DEBUG oslo_vmware.api [None req-485e178e-d558-4a6b-a8cf-935aa1c4ec1f tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Task: {'id': task-2087764, 'name': ResetVM_Task, 'duration_secs': 0.102418} completed successfully. 
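The recurring "Acquiring lock … acquired … waited 0.000s … released … held N.NNNs" triples, including the per-image "[datastore1] devstack-image-cache_base" lock above, are emitted by oslo.concurrency's lockutils wrappers. A minimal sketch using only the public lockutils.lock() context manager and synchronized() decorator; the guarded operations and lock names are illustrative stand-ins patterned on the names seen in the log:

    from oslo_concurrency import lockutils

    def refresh_network_cache(instance_uuid, refresh_fn):
        # Serialize per-instance cache refreshes, mirroring the
        # "refresh_cache-<uuid>" lock names in the log.
        with lockutils.lock('refresh_cache-%s' % instance_uuid):
            return refresh_fn(instance_uuid)

    @lockutils.synchronized('compute_resources')
    def update_usage(tracker, instance):
        # Stand-in for resource-tracker updates guarded by the
        # "compute_resources" lock seen elsewhere in the log.
        tracker.record(instance)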
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2209.569449] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-485e178e-d558-4a6b-a8cf-935aa1c4ec1f tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: fcdd3076-2b53-4850-9730-2f877e2cabfd] Did hard reboot of VM {{(pid=63279) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1067}} [ 2209.569656] env[63279]: DEBUG nova.compute.manager [None req-485e178e-d558-4a6b-a8cf-935aa1c4ec1f tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: fcdd3076-2b53-4850-9730-2f877e2cabfd] Checking state {{(pid=63279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2209.569999] env[63279]: DEBUG oslo_vmware.api [None req-0e777e00-fd9d-4c85-a1de-a0c92a46f747 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Waiting for the task: (returnval){ [ 2209.569999] env[63279]: value = "task-2087765" [ 2209.569999] env[63279]: _type = "Task" [ 2209.569999] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2209.570681] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7578fbdc-9697-48c7-9088-5324b0f53851 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2209.584554] env[63279]: DEBUG oslo_vmware.api [None req-0e777e00-fd9d-4c85-a1de-a0c92a46f747 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Task: {'id': task-2087765, 'name': ReconfigVM_Task} progress is 10%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2209.587763] env[63279]: DEBUG oslo_concurrency.lockutils [req-85b6acef-b5c9-4688-b402-95a09c3294ce req-8e4345f9-e8ef-4526-9bbf-f1cdf3aaa454 service nova] Releasing lock "refresh_cache-9fd3ea14-3e25-47f4-ae84-1b1fb8b1bbf6" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2209.588061] env[63279]: DEBUG nova.compute.manager [req-85b6acef-b5c9-4688-b402-95a09c3294ce req-8e4345f9-e8ef-4526-9bbf-f1cdf3aaa454 service nova] [instance: 27e2917d-3cd0-4ad3-ab65-f85f7d97225f] Received event network-changed-0f4606f9-e916-43fe-8ad4-57247bfb98a2 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2209.588242] env[63279]: DEBUG nova.compute.manager [req-85b6acef-b5c9-4688-b402-95a09c3294ce req-8e4345f9-e8ef-4526-9bbf-f1cdf3aaa454 service nova] [instance: 27e2917d-3cd0-4ad3-ab65-f85f7d97225f] Refreshing instance network info cache due to event network-changed-0f4606f9-e916-43fe-8ad4-57247bfb98a2. 
{{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11655}} [ 2209.588479] env[63279]: DEBUG oslo_concurrency.lockutils [req-85b6acef-b5c9-4688-b402-95a09c3294ce req-8e4345f9-e8ef-4526-9bbf-f1cdf3aaa454 service nova] Acquiring lock "refresh_cache-27e2917d-3cd0-4ad3-ab65-f85f7d97225f" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2209.588651] env[63279]: DEBUG oslo_concurrency.lockutils [req-85b6acef-b5c9-4688-b402-95a09c3294ce req-8e4345f9-e8ef-4526-9bbf-f1cdf3aaa454 service nova] Acquired lock "refresh_cache-27e2917d-3cd0-4ad3-ab65-f85f7d97225f" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2209.588848] env[63279]: DEBUG nova.network.neutron [req-85b6acef-b5c9-4688-b402-95a09c3294ce req-8e4345f9-e8ef-4526-9bbf-f1cdf3aaa454 service nova] [instance: 27e2917d-3cd0-4ad3-ab65-f85f7d97225f] Refreshing network info cache for port 0f4606f9-e916-43fe-8ad4-57247bfb98a2 {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2209.747106] env[63279]: DEBUG nova.compute.manager [None req-ac2052a2-4ae2-4d31-a9a2-3f92b912b650 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] [instance: 14c698c8-7459-4843-bb19-f915742e3e53] Checking state {{(pid=63279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2209.748081] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68136773-cee1-4f6a-98e7-19bbc4f3d6bb {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2209.788437] env[63279]: DEBUG oslo_vmware.api [None req-0334c2c0-9949-449c-a374-eaaae4ef764a tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]5285ae4d-7bcc-ee08-51aa-1351533d216c, 'name': SearchDatastore_Task, 'duration_secs': 0.031806} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2209.788758] env[63279]: DEBUG oslo_concurrency.lockutils [None req-0334c2c0-9949-449c-a374-eaaae4ef764a tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2209.789326] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-0334c2c0-9949-449c-a374-eaaae4ef764a tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] [instance: 9fd3ea14-3e25-47f4-ae84-1b1fb8b1bbf6] Processing image 30887889-e45b-4f67-8b3c-16216e594a90 {{(pid=63279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2209.789326] env[63279]: DEBUG oslo_concurrency.lockutils [None req-0334c2c0-9949-449c-a374-eaaae4ef764a tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2209.789417] env[63279]: DEBUG oslo_concurrency.lockutils [None req-0334c2c0-9949-449c-a374-eaaae4ef764a tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2209.789560] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-0334c2c0-9949-449c-a374-eaaae4ef764a tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2209.789848] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f62d6272-cac1-41fb-b647-68d5a230ea37 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2209.803740] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-0334c2c0-9949-449c-a374-eaaae4ef764a tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2209.803949] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-0334c2c0-9949-449c-a374-eaaae4ef764a tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2209.804778] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b91b7e8b-8220-42bb-9e3d-73d7c7c195c9 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2209.811320] env[63279]: DEBUG oslo_vmware.api [None req-0334c2c0-9949-449c-a374-eaaae4ef764a tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Waiting for the task: (returnval){ [ 2209.811320] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]5288755a-56bd-6f75-4b6c-d8e64031e22a" [ 2209.811320] env[63279]: _type = "Task" [ 2209.811320] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2209.819328] env[63279]: DEBUG oslo_vmware.api [None req-0334c2c0-9949-449c-a374-eaaae4ef764a tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]5288755a-56bd-6f75-4b6c-d8e64031e22a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2209.956492] env[63279]: DEBUG oslo_concurrency.lockutils [None req-c2199699-eea6-4f35-987c-22394a2587cb tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Acquiring lock "refresh_cache-69b3269a-2ba3-4f5f-a29c-62518c93da3d" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2209.956649] env[63279]: DEBUG oslo_concurrency.lockutils [None req-c2199699-eea6-4f35-987c-22394a2587cb tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Acquired lock "refresh_cache-69b3269a-2ba3-4f5f-a29c-62518c93da3d" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2209.956806] env[63279]: DEBUG nova.network.neutron [None req-c2199699-eea6-4f35-987c-22394a2587cb tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] [instance: 69b3269a-2ba3-4f5f-a29c-62518c93da3d] Building network info cache for instance {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2210.083066] env[63279]: DEBUG oslo_vmware.api [None req-0e777e00-fd9d-4c85-a1de-a0c92a46f747 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Task: {'id': task-2087765, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2210.088629] env[63279]: DEBUG oslo_concurrency.lockutils [None req-485e178e-d558-4a6b-a8cf-935aa1c4ec1f tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Lock "fcdd3076-2b53-4850-9730-2f877e2cabfd" "released" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: held 4.430s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2210.210692] env[63279]: DEBUG oslo_concurrency.lockutils [None req-deafd1a3-1917-46b5-a734-532a815b3d5e tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.387s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2210.213825] env[63279]: DEBUG oslo_concurrency.lockutils [None req-208396b7-55ea-4701-90ef-c6d3ea22cae9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 9.164s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2210.214094] env[63279]: DEBUG nova.objects.instance [None req-208396b7-55ea-4701-90ef-c6d3ea22cae9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Lazy-loading 'resources' on Instance uuid a15141bc-424d-48ca-a6d5-c859a3639a0b {{(pid=63279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2210.262741] env[63279]: INFO nova.compute.manager [None req-ac2052a2-4ae2-4d31-a9a2-3f92b912b650 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] [instance: 14c698c8-7459-4843-bb19-f915742e3e53] instance snapshotting [ 2210.268324] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2df15827-be80-4765-9ee2-cb953ddb5d9e {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2210.290148] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e71917c1-8cdb-408f-a8c4-dd607c38e174 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2210.293650] env[63279]: DEBUG nova.compute.manager [req-a552e21e-2faf-40fd-b684-53284caebafa req-8a9fdee6-1224-4b4c-80d9-cce731eb2a01 service nova] [instance: 69b3269a-2ba3-4f5f-a29c-62518c93da3d] Received event network-vif-plugged-482c0084-ab15-4c03-beb3-585f9e66c683 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2210.293867] env[63279]: DEBUG oslo_concurrency.lockutils [req-a552e21e-2faf-40fd-b684-53284caebafa req-8a9fdee6-1224-4b4c-80d9-cce731eb2a01 service nova] Acquiring lock "69b3269a-2ba3-4f5f-a29c-62518c93da3d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2210.294228] env[63279]: DEBUG oslo_concurrency.lockutils [req-a552e21e-2faf-40fd-b684-53284caebafa req-8a9fdee6-1224-4b4c-80d9-cce731eb2a01 service nova] Lock 
"69b3269a-2ba3-4f5f-a29c-62518c93da3d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2210.294268] env[63279]: DEBUG oslo_concurrency.lockutils [req-a552e21e-2faf-40fd-b684-53284caebafa req-8a9fdee6-1224-4b4c-80d9-cce731eb2a01 service nova] Lock "69b3269a-2ba3-4f5f-a29c-62518c93da3d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2210.294419] env[63279]: DEBUG nova.compute.manager [req-a552e21e-2faf-40fd-b684-53284caebafa req-8a9fdee6-1224-4b4c-80d9-cce731eb2a01 service nova] [instance: 69b3269a-2ba3-4f5f-a29c-62518c93da3d] No waiting events found dispatching network-vif-plugged-482c0084-ab15-4c03-beb3-585f9e66c683 {{(pid=63279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 2210.294584] env[63279]: WARNING nova.compute.manager [req-a552e21e-2faf-40fd-b684-53284caebafa req-8a9fdee6-1224-4b4c-80d9-cce731eb2a01 service nova] [instance: 69b3269a-2ba3-4f5f-a29c-62518c93da3d] Received unexpected event network-vif-plugged-482c0084-ab15-4c03-beb3-585f9e66c683 for instance with vm_state building and task_state spawning. [ 2210.294746] env[63279]: DEBUG nova.compute.manager [req-a552e21e-2faf-40fd-b684-53284caebafa req-8a9fdee6-1224-4b4c-80d9-cce731eb2a01 service nova] [instance: 69b3269a-2ba3-4f5f-a29c-62518c93da3d] Received event network-changed-482c0084-ab15-4c03-beb3-585f9e66c683 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2210.294901] env[63279]: DEBUG nova.compute.manager [req-a552e21e-2faf-40fd-b684-53284caebafa req-8a9fdee6-1224-4b4c-80d9-cce731eb2a01 service nova] [instance: 69b3269a-2ba3-4f5f-a29c-62518c93da3d] Refreshing instance network info cache due to event network-changed-482c0084-ab15-4c03-beb3-585f9e66c683. {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11655}} [ 2210.295078] env[63279]: DEBUG oslo_concurrency.lockutils [req-a552e21e-2faf-40fd-b684-53284caebafa req-8a9fdee6-1224-4b4c-80d9-cce731eb2a01 service nova] Acquiring lock "refresh_cache-69b3269a-2ba3-4f5f-a29c-62518c93da3d" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2210.321438] env[63279]: DEBUG oslo_vmware.api [None req-0334c2c0-9949-449c-a374-eaaae4ef764a tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]5288755a-56bd-6f75-4b6c-d8e64031e22a, 'name': SearchDatastore_Task, 'duration_secs': 0.043863} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2210.322251] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-907be7ca-1deb-4c61-84b3-af1d123d710d {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2210.327293] env[63279]: DEBUG oslo_vmware.api [None req-0334c2c0-9949-449c-a374-eaaae4ef764a tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Waiting for the task: (returnval){ [ 2210.327293] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52ca16ac-31f4-6822-c925-3d0a2f7056e5" [ 2210.327293] env[63279]: _type = "Task" [ 2210.327293] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2210.335077] env[63279]: DEBUG oslo_vmware.api [None req-0334c2c0-9949-449c-a374-eaaae4ef764a tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52ca16ac-31f4-6822-c925-3d0a2f7056e5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2210.346669] env[63279]: DEBUG nova.network.neutron [req-85b6acef-b5c9-4688-b402-95a09c3294ce req-8e4345f9-e8ef-4526-9bbf-f1cdf3aaa454 service nova] [instance: 27e2917d-3cd0-4ad3-ab65-f85f7d97225f] Updated VIF entry in instance network info cache for port 0f4606f9-e916-43fe-8ad4-57247bfb98a2. {{(pid=63279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2210.347017] env[63279]: DEBUG nova.network.neutron [req-85b6acef-b5c9-4688-b402-95a09c3294ce req-8e4345f9-e8ef-4526-9bbf-f1cdf3aaa454 service nova] [instance: 27e2917d-3cd0-4ad3-ab65-f85f7d97225f] Updating instance_info_cache with network_info: [{"id": "0f4606f9-e916-43fe-8ad4-57247bfb98a2", "address": "fa:16:3e:2b:f5:46", "network": {"id": "5e9abed5-377b-4644-a91c-52b6e1cc7c00", "bridge": "br-int", "label": "tempest-ServersTestFqdnHostnames-235825661-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.244", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "421b189f81304db9b91f30c710dc2f30", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a4fc0575-c8e3-4f3c-b2e1-e10ac2d0cc1a", "external-id": "nsx-vlan-transportzone-256", "segmentation_id": 256, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0f4606f9-e9", "ovs_interfaceid": "0f4606f9-e916-43fe-8ad4-57247bfb98a2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2210.502216] env[63279]: DEBUG nova.network.neutron [None req-c2199699-eea6-4f35-987c-22394a2587cb tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] [instance: 
69b3269a-2ba3-4f5f-a29c-62518c93da3d] Instance cache missing network info. {{(pid=63279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2210.583847] env[63279]: DEBUG oslo_vmware.api [None req-0e777e00-fd9d-4c85-a1de-a0c92a46f747 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Task: {'id': task-2087765, 'name': ReconfigVM_Task, 'duration_secs': 0.715207} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2210.584302] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-0e777e00-fd9d-4c85-a1de-a0c92a46f747 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: f18d3019-bf1f-4519-a824-7ca80458d793] Reconfigured VM instance instance-00000057 to attach disk [datastore1] f18d3019-bf1f-4519-a824-7ca80458d793/f18d3019-bf1f-4519-a824-7ca80458d793.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2210.584945] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-72e30048-bb90-4c05-8db0-6b086eaa49b9 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2210.591701] env[63279]: DEBUG oslo_vmware.api [None req-0e777e00-fd9d-4c85-a1de-a0c92a46f747 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Waiting for the task: (returnval){ [ 2210.591701] env[63279]: value = "task-2087766" [ 2210.591701] env[63279]: _type = "Task" [ 2210.591701] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2210.602469] env[63279]: DEBUG oslo_vmware.api [None req-0e777e00-fd9d-4c85-a1de-a0c92a46f747 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Task: {'id': task-2087766, 'name': Rename_Task} progress is 5%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2210.701239] env[63279]: DEBUG nova.network.neutron [None req-c2199699-eea6-4f35-987c-22394a2587cb tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] [instance: 69b3269a-2ba3-4f5f-a29c-62518c93da3d] Updating instance_info_cache with network_info: [{"id": "482c0084-ab15-4c03-beb3-585f9e66c683", "address": "fa:16:3e:41:26:b2", "network": {"id": "2caeac4f-4d6f-49f6-ad75-055171bad9b0", "bridge": "br-int", "label": "tempest-ServersTestJSON-1264030443-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4fcef39e334249afb9636455802059c5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9fa933df-d66f-485e-8cf9-eda7f1a7f283", "external-id": "nsx-vlan-transportzone-87", "segmentation_id": 87, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap482c0084-ab", "ovs_interfaceid": "482c0084-ab15-4c03-beb3-585f9e66c683", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2210.720360] env[63279]: DEBUG nova.objects.instance [None req-208396b7-55ea-4701-90ef-c6d3ea22cae9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Lazy-loading 'numa_topology' on Instance uuid a15141bc-424d-48ca-a6d5-c859a3639a0b {{(pid=63279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2210.781320] env[63279]: INFO nova.scheduler.client.report [None req-deafd1a3-1917-46b5-a734-532a815b3d5e tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Deleted allocation for migration 90100f45-a8e0-4d84-adb1-bbecb55989d8 [ 2210.803683] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-ac2052a2-4ae2-4d31-a9a2-3f92b912b650 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] [instance: 14c698c8-7459-4843-bb19-f915742e3e53] Creating Snapshot of the VM instance {{(pid=63279) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 2210.804188] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-6a7030f5-d89f-460e-904a-6c25db3db0b2 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2210.811125] env[63279]: DEBUG oslo_vmware.api [None req-ac2052a2-4ae2-4d31-a9a2-3f92b912b650 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Waiting for the task: (returnval){ [ 2210.811125] env[63279]: value = "task-2087767" [ 2210.811125] env[63279]: _type = "Task" [ 2210.811125] env[63279]: } to complete. 
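The instance_info_cache payloads above are ordinary list-of-dict structures, one entry per VIF, so extracting fixed IPs or the tap device name is plain dictionary traversal. A small sketch over the same shape, using a record trimmed down from the 482c0084 VIF shown above:

    # Trimmed-down VIF record with the same shape as the cached network_info above.
    network_info = [{
        "id": "482c0084-ab15-4c03-beb3-585f9e66c683",
        "address": "fa:16:3e:41:26:b2",
        "devname": "tap482c0084-ab",
        "network": {
            "bridge": "br-int",
            "subnets": [{
                "cidr": "192.168.128.0/28",
                "ips": [{"address": "192.168.128.13", "type": "fixed",
                         "floating_ips": []}],
            }],
        },
    }]

    def fixed_ips(nw_info):
        # Collect every fixed IP across all VIFs and subnets.
        return [ip["address"]
                for vif in nw_info
                for subnet in vif["network"]["subnets"]
                for ip in subnet["ips"]
                if ip.get("type") == "fixed"]

    print(fixed_ips(network_info))   # ['192.168.128.13']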
{{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2210.819274] env[63279]: DEBUG oslo_vmware.api [None req-ac2052a2-4ae2-4d31-a9a2-3f92b912b650 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Task: {'id': task-2087767, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2210.836974] env[63279]: DEBUG oslo_vmware.api [None req-0334c2c0-9949-449c-a374-eaaae4ef764a tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52ca16ac-31f4-6822-c925-3d0a2f7056e5, 'name': SearchDatastore_Task, 'duration_secs': 0.011901} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2210.837282] env[63279]: DEBUG oslo_concurrency.lockutils [None req-0334c2c0-9949-449c-a374-eaaae4ef764a tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2210.837545] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-0334c2c0-9949-449c-a374-eaaae4ef764a tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] 9fd3ea14-3e25-47f4-ae84-1b1fb8b1bbf6/9fd3ea14-3e25-47f4-ae84-1b1fb8b1bbf6.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2210.837944] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0a733047-06ac-4bab-a388-ea8e0d17d597 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2210.843830] env[63279]: DEBUG oslo_vmware.api [None req-0334c2c0-9949-449c-a374-eaaae4ef764a tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Waiting for the task: (returnval){ [ 2210.843830] env[63279]: value = "task-2087768" [ 2210.843830] env[63279]: _type = "Task" [ 2210.843830] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2210.852189] env[63279]: DEBUG oslo_concurrency.lockutils [req-85b6acef-b5c9-4688-b402-95a09c3294ce req-8e4345f9-e8ef-4526-9bbf-f1cdf3aaa454 service nova] Releasing lock "refresh_cache-27e2917d-3cd0-4ad3-ab65-f85f7d97225f" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2210.852614] env[63279]: DEBUG oslo_vmware.api [None req-0334c2c0-9949-449c-a374-eaaae4ef764a tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Task: {'id': task-2087768, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2211.101893] env[63279]: DEBUG oslo_vmware.api [None req-0e777e00-fd9d-4c85-a1de-a0c92a46f747 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Task: {'id': task-2087766, 'name': Rename_Task, 'duration_secs': 0.175358} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2211.102196] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-0e777e00-fd9d-4c85-a1de-a0c92a46f747 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: f18d3019-bf1f-4519-a824-7ca80458d793] Powering on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2211.102445] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-03887174-347a-4ff5-a43e-e33b31ffa52b {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2211.108304] env[63279]: DEBUG oslo_vmware.api [None req-0e777e00-fd9d-4c85-a1de-a0c92a46f747 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Waiting for the task: (returnval){ [ 2211.108304] env[63279]: value = "task-2087769" [ 2211.108304] env[63279]: _type = "Task" [ 2211.108304] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2211.116126] env[63279]: DEBUG oslo_vmware.api [None req-0e777e00-fd9d-4c85-a1de-a0c92a46f747 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Task: {'id': task-2087769, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2211.206434] env[63279]: DEBUG oslo_concurrency.lockutils [None req-c2199699-eea6-4f35-987c-22394a2587cb tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Releasing lock "refresh_cache-69b3269a-2ba3-4f5f-a29c-62518c93da3d" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2211.206734] env[63279]: DEBUG nova.compute.manager [None req-c2199699-eea6-4f35-987c-22394a2587cb tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] [instance: 69b3269a-2ba3-4f5f-a29c-62518c93da3d] Instance network_info: |[{"id": "482c0084-ab15-4c03-beb3-585f9e66c683", "address": "fa:16:3e:41:26:b2", "network": {"id": "2caeac4f-4d6f-49f6-ad75-055171bad9b0", "bridge": "br-int", "label": "tempest-ServersTestJSON-1264030443-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4fcef39e334249afb9636455802059c5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9fa933df-d66f-485e-8cf9-eda7f1a7f283", "external-id": "nsx-vlan-transportzone-87", "segmentation_id": 87, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap482c0084-ab", "ovs_interfaceid": "482c0084-ab15-4c03-beb3-585f9e66c683", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 2211.207051] env[63279]: DEBUG oslo_concurrency.lockutils [req-a552e21e-2faf-40fd-b684-53284caebafa req-8a9fdee6-1224-4b4c-80d9-cce731eb2a01 service nova] Acquired lock "refresh_cache-69b3269a-2ba3-4f5f-a29c-62518c93da3d" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2211.207243] env[63279]: DEBUG nova.network.neutron [req-a552e21e-2faf-40fd-b684-53284caebafa req-8a9fdee6-1224-4b4c-80d9-cce731eb2a01 service nova] [instance: 69b3269a-2ba3-4f5f-a29c-62518c93da3d] Refreshing network info cache for port 482c0084-ab15-4c03-beb3-585f9e66c683 {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2211.208450] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-c2199699-eea6-4f35-987c-22394a2587cb tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] [instance: 69b3269a-2ba3-4f5f-a29c-62518c93da3d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:41:26:b2', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '9fa933df-d66f-485e-8cf9-eda7f1a7f283', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '482c0084-ab15-4c03-beb3-585f9e66c683', 'vif_model': 'vmxnet3'}] {{(pid=63279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2211.216207] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-c2199699-eea6-4f35-987c-22394a2587cb tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Creating folder: Project 
(4fcef39e334249afb9636455802059c5). Parent ref: group-v427491. {{(pid=63279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 2211.220071] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-61486f14-b063-4ee8-881f-7f4a8c5df676 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2211.222458] env[63279]: DEBUG nova.objects.base [None req-208396b7-55ea-4701-90ef-c6d3ea22cae9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Object Instance lazy-loaded attributes: resources,numa_topology {{(pid=63279) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 2211.234704] env[63279]: INFO nova.virt.vmwareapi.vm_util [None req-c2199699-eea6-4f35-987c-22394a2587cb tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Created folder: Project (4fcef39e334249afb9636455802059c5) in parent group-v427491. [ 2211.234976] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-c2199699-eea6-4f35-987c-22394a2587cb tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Creating folder: Instances. Parent ref: group-v427732. {{(pid=63279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 2211.235260] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-cc8df4d5-cfd4-4fae-b502-e62f1bbae132 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2211.249884] env[63279]: INFO nova.virt.vmwareapi.vm_util [None req-c2199699-eea6-4f35-987c-22394a2587cb tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Created folder: Instances in parent group-v427732. [ 2211.250497] env[63279]: DEBUG oslo.service.loopingcall [None req-c2199699-eea6-4f35-987c-22394a2587cb tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2211.250719] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 69b3269a-2ba3-4f5f-a29c-62518c93da3d] Creating VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2211.250953] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-069e86df-a1a6-49fc-b0e2-d7a4764b198d {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2211.275660] env[63279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2211.275660] env[63279]: value = "task-2087772" [ 2211.275660] env[63279]: _type = "Task" [ 2211.275660] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2211.286280] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087772, 'name': CreateVM_Task} progress is 0%. 
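The "Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return" lines above come from oslo.service's looping-call machinery, which re-invokes a function on a fixed interval until it signals completion by raising LoopingCallDone. A minimal, self-contained use of the public FixedIntervalLoopingCall API; the polled condition here is a made-up stand-in for the vCenter task check:

    from oslo_service import loopingcall

    state = {'polls': 0}

    def _poll():
        # Stand-in condition: pretend the backing task finishes on the third poll.
        state['polls'] += 1
        if state['polls'] >= 3:
            raise loopingcall.LoopingCallDone(retvalue='task finished')

    timer = loopingcall.FixedIntervalLoopingCall(_poll)
    print(timer.start(interval=0.1).wait())      # -> 'task finished'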
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2211.289394] env[63279]: DEBUG oslo_concurrency.lockutils [None req-deafd1a3-1917-46b5-a734-532a815b3d5e tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Lock "736ab3ac-b9e0-4f9e-885b-765ca7a92ed0" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 15.330s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2211.324754] env[63279]: DEBUG oslo_vmware.api [None req-ac2052a2-4ae2-4d31-a9a2-3f92b912b650 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Task: {'id': task-2087767, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2211.356787] env[63279]: DEBUG oslo_vmware.api [None req-0334c2c0-9949-449c-a374-eaaae4ef764a tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Task: {'id': task-2087768, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2211.494728] env[63279]: DEBUG nova.network.neutron [req-a552e21e-2faf-40fd-b684-53284caebafa req-8a9fdee6-1224-4b4c-80d9-cce731eb2a01 service nova] [instance: 69b3269a-2ba3-4f5f-a29c-62518c93da3d] Updated VIF entry in instance network info cache for port 482c0084-ab15-4c03-beb3-585f9e66c683. {{(pid=63279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2211.495698] env[63279]: DEBUG nova.network.neutron [req-a552e21e-2faf-40fd-b684-53284caebafa req-8a9fdee6-1224-4b4c-80d9-cce731eb2a01 service nova] [instance: 69b3269a-2ba3-4f5f-a29c-62518c93da3d] Updating instance_info_cache with network_info: [{"id": "482c0084-ab15-4c03-beb3-585f9e66c683", "address": "fa:16:3e:41:26:b2", "network": {"id": "2caeac4f-4d6f-49f6-ad75-055171bad9b0", "bridge": "br-int", "label": "tempest-ServersTestJSON-1264030443-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4fcef39e334249afb9636455802059c5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9fa933df-d66f-485e-8cf9-eda7f1a7f283", "external-id": "nsx-vlan-transportzone-87", "segmentation_id": 87, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap482c0084-ab", "ovs_interfaceid": "482c0084-ab15-4c03-beb3-585f9e66c683", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2211.506594] env[63279]: DEBUG oslo_concurrency.lockutils [None req-140984b1-6c87-4255-9ab0-f54ef3dd0bda tempest-ServersTestMultiNic-465204663 tempest-ServersTestMultiNic-465204663-project-member] Acquiring lock "f418ff3d-8a5f-4d78-994a-e40b62cfcdd6" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63279) 
inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2211.506594] env[63279]: DEBUG oslo_concurrency.lockutils [None req-140984b1-6c87-4255-9ab0-f54ef3dd0bda tempest-ServersTestMultiNic-465204663 tempest-ServersTestMultiNic-465204663-project-member] Lock "f418ff3d-8a5f-4d78-994a-e40b62cfcdd6" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2211.509918] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca0fbc23-1ea9-4a28-9d3a-bf66e0fcbd90 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2211.523129] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54022b62-cc13-44de-a947-ed54af02cd1b {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2211.561309] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-306f29f8-ab6f-4cfd-9f3c-fac162f00581 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2211.572925] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb36cd27-5b9f-488a-b212-015591558fe6 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2211.591331] env[63279]: DEBUG nova.compute.provider_tree [None req-208396b7-55ea-4701-90ef-c6d3ea22cae9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Updating inventory in ProviderTree for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2211.620081] env[63279]: DEBUG oslo_vmware.api [None req-0e777e00-fd9d-4c85-a1de-a0c92a46f747 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Task: {'id': task-2087769, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2211.642033] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2211.642284] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2211.664885] env[63279]: DEBUG oslo_concurrency.lockutils [None req-1d286e34-6589-438c-9cdb-cfc4a6911ea0 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Acquiring lock "736ab3ac-b9e0-4f9e-885b-765ca7a92ed0" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2211.664885] env[63279]: DEBUG oslo_concurrency.lockutils [None req-1d286e34-6589-438c-9cdb-cfc4a6911ea0 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Lock "736ab3ac-b9e0-4f9e-885b-765ca7a92ed0" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2211.664885] env[63279]: DEBUG oslo_concurrency.lockutils [None req-1d286e34-6589-438c-9cdb-cfc4a6911ea0 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Acquiring lock "736ab3ac-b9e0-4f9e-885b-765ca7a92ed0-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2211.664885] env[63279]: DEBUG oslo_concurrency.lockutils [None req-1d286e34-6589-438c-9cdb-cfc4a6911ea0 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Lock "736ab3ac-b9e0-4f9e-885b-765ca7a92ed0-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2211.664885] env[63279]: DEBUG oslo_concurrency.lockutils [None req-1d286e34-6589-438c-9cdb-cfc4a6911ea0 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Lock "736ab3ac-b9e0-4f9e-885b-765ca7a92ed0-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2211.666093] env[63279]: INFO nova.compute.manager [None req-1d286e34-6589-438c-9cdb-cfc4a6911ea0 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: 736ab3ac-b9e0-4f9e-885b-765ca7a92ed0] Terminating instance [ 2211.785368] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087772, 'name': CreateVM_Task} progress is 25%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2211.823324] env[63279]: DEBUG oslo_vmware.api [None req-ac2052a2-4ae2-4d31-a9a2-3f92b912b650 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Task: {'id': task-2087767, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2211.853445] env[63279]: DEBUG oslo_vmware.api [None req-0334c2c0-9949-449c-a374-eaaae4ef764a tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Task: {'id': task-2087768, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.884314} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2211.853726] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-0334c2c0-9949-449c-a374-eaaae4ef764a tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] 9fd3ea14-3e25-47f4-ae84-1b1fb8b1bbf6/9fd3ea14-3e25-47f4-ae84-1b1fb8b1bbf6.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2211.853938] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-0334c2c0-9949-449c-a374-eaaae4ef764a tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] [instance: 9fd3ea14-3e25-47f4-ae84-1b1fb8b1bbf6] Extending root virtual disk to 1048576 {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2211.854212] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f2ba0745-5695-4c3c-a1d4-59afe29d2a3d {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2211.860570] env[63279]: DEBUG oslo_vmware.api [None req-0334c2c0-9949-449c-a374-eaaae4ef764a tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Waiting for the task: (returnval){ [ 2211.860570] env[63279]: value = "task-2087773" [ 2211.860570] env[63279]: _type = "Task" [ 2211.860570] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2211.868880] env[63279]: DEBUG oslo_vmware.api [None req-0334c2c0-9949-449c-a374-eaaae4ef764a tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Task: {'id': task-2087773, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2211.999449] env[63279]: DEBUG oslo_concurrency.lockutils [req-a552e21e-2faf-40fd-b684-53284caebafa req-8a9fdee6-1224-4b4c-80d9-cce731eb2a01 service nova] Releasing lock "refresh_cache-69b3269a-2ba3-4f5f-a29c-62518c93da3d" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2212.014510] env[63279]: DEBUG nova.compute.manager [None req-140984b1-6c87-4255-9ab0-f54ef3dd0bda tempest-ServersTestMultiNic-465204663 tempest-ServersTestMultiNic-465204663-project-member] [instance: f418ff3d-8a5f-4d78-994a-e40b62cfcdd6] Starting instance... 
{{(pid=63279) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 2212.113487] env[63279]: ERROR nova.scheduler.client.report [None req-208396b7-55ea-4701-90ef-c6d3ea22cae9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] [req-dd45b4fc-7498-47e4-b317-6a5e517f32d2] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 0ba7c625-a0fc-4d3c-b804-196d00f00137. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-dd45b4fc-7498-47e4-b317-6a5e517f32d2"}]} [ 2212.130065] env[63279]: DEBUG oslo_vmware.api [None req-0e777e00-fd9d-4c85-a1de-a0c92a46f747 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Task: {'id': task-2087769, 'name': PowerOnVM_Task, 'duration_secs': 0.819418} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2212.130433] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-0e777e00-fd9d-4c85-a1de-a0c92a46f747 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: f18d3019-bf1f-4519-a824-7ca80458d793] Powered on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2212.130645] env[63279]: INFO nova.compute.manager [None req-0e777e00-fd9d-4c85-a1de-a0c92a46f747 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: f18d3019-bf1f-4519-a824-7ca80458d793] Took 8.86 seconds to spawn the instance on the hypervisor. 
[ 2212.130865] env[63279]: DEBUG nova.compute.manager [None req-0e777e00-fd9d-4c85-a1de-a0c92a46f747 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: f18d3019-bf1f-4519-a824-7ca80458d793] Checking state {{(pid=63279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2212.131750] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7977106-975a-43b8-87ad-917321c1083c {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2212.134963] env[63279]: DEBUG nova.scheduler.client.report [None req-208396b7-55ea-4701-90ef-c6d3ea22cae9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Refreshing inventories for resource provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 2212.152098] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2212.156054] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Starting heal instance info cache {{(pid=63279) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10447}} [ 2212.156054] env[63279]: DEBUG nova.scheduler.client.report [None req-208396b7-55ea-4701-90ef-c6d3ea22cae9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Updating ProviderTree inventory for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 2212.156054] env[63279]: DEBUG nova.compute.provider_tree [None req-208396b7-55ea-4701-90ef-c6d3ea22cae9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Updating inventory in ProviderTree for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2212.165893] env[63279]: DEBUG oslo_concurrency.lockutils [None req-66cd7ef7-f0f8-471c-a26b-5d90abc10497 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Acquiring lock "9a6abf10-ed12-49c6-aa16-f428da9f9f9d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 
2212.166142] env[63279]: DEBUG oslo_concurrency.lockutils [None req-66cd7ef7-f0f8-471c-a26b-5d90abc10497 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Lock "9a6abf10-ed12-49c6-aa16-f428da9f9f9d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2212.168649] env[63279]: DEBUG nova.scheduler.client.report [None req-208396b7-55ea-4701-90ef-c6d3ea22cae9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Refreshing aggregate associations for resource provider 0ba7c625-a0fc-4d3c-b804-196d00f00137, aggregates: None {{(pid=63279) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 2212.171270] env[63279]: DEBUG nova.compute.manager [None req-1d286e34-6589-438c-9cdb-cfc4a6911ea0 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: 736ab3ac-b9e0-4f9e-885b-765ca7a92ed0] Start destroying the instance on the hypervisor. {{(pid=63279) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2212.171469] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-1d286e34-6589-438c-9cdb-cfc4a6911ea0 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: 736ab3ac-b9e0-4f9e-885b-765ca7a92ed0] Destroying instance {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2212.172346] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ad8dde1-3c13-4914-8486-6ab9a4478177 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2212.180709] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-1d286e34-6589-438c-9cdb-cfc4a6911ea0 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: 736ab3ac-b9e0-4f9e-885b-765ca7a92ed0] Powering off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2212.181267] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ed23c658-2898-4199-b1dc-a5697dd20b2b {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2212.187458] env[63279]: DEBUG oslo_vmware.api [None req-1d286e34-6589-438c-9cdb-cfc4a6911ea0 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Waiting for the task: (returnval){ [ 2212.187458] env[63279]: value = "task-2087774" [ 2212.187458] env[63279]: _type = "Task" [ 2212.187458] env[63279]: } to complete. 
{{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2212.194546] env[63279]: DEBUG nova.scheduler.client.report [None req-208396b7-55ea-4701-90ef-c6d3ea22cae9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Refreshing trait associations for resource provider 0ba7c625-a0fc-4d3c-b804-196d00f00137, traits: COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK {{(pid=63279) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 2212.203033] env[63279]: DEBUG oslo_vmware.api [None req-1d286e34-6589-438c-9cdb-cfc4a6911ea0 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Task: {'id': task-2087774, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2212.286504] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087772, 'name': CreateVM_Task} progress is 25%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2212.324755] env[63279]: DEBUG oslo_vmware.api [None req-ac2052a2-4ae2-4d31-a9a2-3f92b912b650 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Task: {'id': task-2087767, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2212.370978] env[63279]: DEBUG oslo_vmware.api [None req-0334c2c0-9949-449c-a374-eaaae4ef764a tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Task: {'id': task-2087773, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.068053} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2212.373412] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-0334c2c0-9949-449c-a374-eaaae4ef764a tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] [instance: 9fd3ea14-3e25-47f4-ae84-1b1fb8b1bbf6] Extended root virtual disk {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2212.374366] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6680fbdc-e7ee-4092-a118-be5c580823d8 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2212.396495] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-0334c2c0-9949-449c-a374-eaaae4ef764a tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] [instance: 9fd3ea14-3e25-47f4-ae84-1b1fb8b1bbf6] Reconfiguring VM instance instance-00000058 to attach disk [datastore1] 9fd3ea14-3e25-47f4-ae84-1b1fb8b1bbf6/9fd3ea14-3e25-47f4-ae84-1b1fb8b1bbf6.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2212.400382] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-cd0d9045-6f23-4fb8-8ff5-c395f609f35b {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2212.418201] env[63279]: DEBUG oslo_vmware.api [None req-0334c2c0-9949-449c-a374-eaaae4ef764a tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Waiting for the task: (returnval){ [ 2212.418201] env[63279]: value = "task-2087775" [ 2212.418201] env[63279]: _type = "Task" [ 2212.418201] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2212.428026] env[63279]: DEBUG oslo_vmware.api [None req-0334c2c0-9949-449c-a374-eaaae4ef764a tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Task: {'id': task-2087775, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2212.438607] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e75e7fb-074f-4e47-a5fa-1cd5559a68e4 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2212.446057] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a199efd-2b11-48bf-a64a-abd977bcfb8c {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2212.477387] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f539fdac-7970-4d92-ba48-c1c4739ff872 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2212.484705] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e597d477-7089-49fe-bcb7-7d983dfab177 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2212.499611] env[63279]: DEBUG nova.compute.provider_tree [None req-208396b7-55ea-4701-90ef-c6d3ea22cae9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Inventory has not changed in ProviderTree for provider: 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2212.535566] env[63279]: DEBUG oslo_concurrency.lockutils [None req-140984b1-6c87-4255-9ab0-f54ef3dd0bda tempest-ServersTestMultiNic-465204663 tempest-ServersTestMultiNic-465204663-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2212.652914] env[63279]: INFO nova.compute.manager [None req-0e777e00-fd9d-4c85-a1de-a0c92a46f747 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: f18d3019-bf1f-4519-a824-7ca80458d793] Took 22.97 seconds to build instance. [ 2212.668381] env[63279]: DEBUG nova.compute.manager [None req-66cd7ef7-f0f8-471c-a26b-5d90abc10497 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: 9a6abf10-ed12-49c6-aa16-f428da9f9f9d] Starting instance... {{(pid=63279) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 2212.698953] env[63279]: DEBUG oslo_vmware.api [None req-1d286e34-6589-438c-9cdb-cfc4a6911ea0 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Task: {'id': task-2087774, 'name': PowerOffVM_Task, 'duration_secs': 0.332379} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2212.699939] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-1d286e34-6589-438c-9cdb-cfc4a6911ea0 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: 736ab3ac-b9e0-4f9e-885b-765ca7a92ed0] Powered off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2212.699939] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-1d286e34-6589-438c-9cdb-cfc4a6911ea0 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: 736ab3ac-b9e0-4f9e-885b-765ca7a92ed0] Unregistering the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2212.700131] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9737b308-680e-499c-96a5-851bf35a0e1e {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2212.787554] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087772, 'name': CreateVM_Task} progress is 25%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2212.824159] env[63279]: DEBUG oslo_vmware.api [None req-ac2052a2-4ae2-4d31-a9a2-3f92b912b650 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Task: {'id': task-2087767, 'name': CreateSnapshot_Task, 'duration_secs': 1.763947} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2212.824439] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-ac2052a2-4ae2-4d31-a9a2-3f92b912b650 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] [instance: 14c698c8-7459-4843-bb19-f915742e3e53] Created Snapshot of the VM instance {{(pid=63279) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 2212.825188] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-effcf8a0-8de9-414a-9892-c5212135ef90 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2212.929648] env[63279]: DEBUG oslo_vmware.api [None req-0334c2c0-9949-449c-a374-eaaae4ef764a tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Task: {'id': task-2087775, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2213.002733] env[63279]: DEBUG nova.scheduler.client.report [None req-208396b7-55ea-4701-90ef-c6d3ea22cae9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Inventory has not changed for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2213.155337] env[63279]: DEBUG oslo_concurrency.lockutils [None req-0e777e00-fd9d-4c85-a1de-a0c92a46f747 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Lock "f18d3019-bf1f-4519-a824-7ca80458d793" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 24.482s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2213.186288] env[63279]: DEBUG oslo_concurrency.lockutils [None req-66cd7ef7-f0f8-471c-a26b-5d90abc10497 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2213.292788] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087772, 'name': CreateVM_Task} progress is 25%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2213.347052] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-ac2052a2-4ae2-4d31-a9a2-3f92b912b650 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] [instance: 14c698c8-7459-4843-bb19-f915742e3e53] Creating linked-clone VM from snapshot {{(pid=63279) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 2213.347505] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-f21553be-7acd-433b-b2fa-39b304e44733 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2213.363579] env[63279]: DEBUG oslo_vmware.api [None req-ac2052a2-4ae2-4d31-a9a2-3f92b912b650 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Waiting for the task: (returnval){ [ 2213.363579] env[63279]: value = "task-2087777" [ 2213.363579] env[63279]: _type = "Task" [ 2213.363579] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2213.374609] env[63279]: DEBUG oslo_vmware.api [None req-ac2052a2-4ae2-4d31-a9a2-3f92b912b650 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Task: {'id': task-2087777, 'name': CloneVM_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2213.428985] env[63279]: DEBUG oslo_vmware.api [None req-0334c2c0-9949-449c-a374-eaaae4ef764a tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Task: {'id': task-2087775, 'name': ReconfigVM_Task, 'duration_secs': 0.948521} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2213.429137] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-0334c2c0-9949-449c-a374-eaaae4ef764a tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] [instance: 9fd3ea14-3e25-47f4-ae84-1b1fb8b1bbf6] Reconfigured VM instance instance-00000058 to attach disk [datastore1] 9fd3ea14-3e25-47f4-ae84-1b1fb8b1bbf6/9fd3ea14-3e25-47f4-ae84-1b1fb8b1bbf6.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2213.429771] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d9594e07-7322-4a36-ace9-9799926aee1d {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2213.435253] env[63279]: DEBUG oslo_vmware.api [None req-0334c2c0-9949-449c-a374-eaaae4ef764a tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Waiting for the task: (returnval){ [ 2213.435253] env[63279]: value = "task-2087778" [ 2213.435253] env[63279]: _type = "Task" [ 2213.435253] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2213.443729] env[63279]: DEBUG oslo_vmware.api [None req-0334c2c0-9949-449c-a374-eaaae4ef764a tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Task: {'id': task-2087778, 'name': Rename_Task} progress is 5%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2213.507414] env[63279]: DEBUG oslo_concurrency.lockutils [None req-208396b7-55ea-4701-90ef-c6d3ea22cae9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 3.293s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2213.510654] env[63279]: DEBUG oslo_concurrency.lockutils [None req-9be47d77-4fd9-4f15-a0d1-1a230778846d tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 7.092s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2213.512284] env[63279]: INFO nova.compute.claims [None req-9be47d77-4fd9-4f15-a0d1-1a230778846d tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] [instance: 59ad6180-e561-41e3-86e4-37fc20819578] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2213.726863] env[63279]: DEBUG oslo_concurrency.lockutils [None req-eb4c271b-566b-45e0-a5b3-e42f43986536 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Acquiring lock "f18d3019-bf1f-4519-a824-7ca80458d793" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2213.727204] env[63279]: DEBUG oslo_concurrency.lockutils [None req-eb4c271b-566b-45e0-a5b3-e42f43986536 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Lock "f18d3019-bf1f-4519-a824-7ca80458d793" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2213.727400] env[63279]: DEBUG nova.compute.manager [None req-eb4c271b-566b-45e0-a5b3-e42f43986536 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: f18d3019-bf1f-4519-a824-7ca80458d793] Checking state {{(pid=63279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2213.728884] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3e8d3bc-f016-4b7c-ae9c-beee6eb9dae5 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2213.735495] env[63279]: DEBUG nova.compute.manager [None req-eb4c271b-566b-45e0-a5b3-e42f43986536 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: f18d3019-bf1f-4519-a824-7ca80458d793] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=63279) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 2213.736149] env[63279]: DEBUG nova.objects.instance [None req-eb4c271b-566b-45e0-a5b3-e42f43986536 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Lazy-loading 'flavor' on Instance uuid f18d3019-bf1f-4519-a824-7ca80458d793 {{(pid=63279) obj_load_attr 
/opt/stack/nova/nova/objects/instance.py:1141}} [ 2213.792898] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087772, 'name': CreateVM_Task} progress is 25%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2213.874677] env[63279]: DEBUG oslo_vmware.api [None req-ac2052a2-4ae2-4d31-a9a2-3f92b912b650 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Task: {'id': task-2087777, 'name': CloneVM_Task} progress is 94%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2213.946115] env[63279]: DEBUG oslo_vmware.api [None req-0334c2c0-9949-449c-a374-eaaae4ef764a tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Task: {'id': task-2087778, 'name': Rename_Task, 'duration_secs': 0.309648} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2213.946461] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-0334c2c0-9949-449c-a374-eaaae4ef764a tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] [instance: 9fd3ea14-3e25-47f4-ae84-1b1fb8b1bbf6] Powering on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2213.946648] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4d71a341-584d-4c46-b63e-28fd20ca321e {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2213.953603] env[63279]: DEBUG oslo_vmware.api [None req-0334c2c0-9949-449c-a374-eaaae4ef764a tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Waiting for the task: (returnval){ [ 2213.953603] env[63279]: value = "task-2087779" [ 2213.953603] env[63279]: _type = "Task" [ 2213.953603] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2213.964120] env[63279]: DEBUG oslo_vmware.api [None req-0334c2c0-9949-449c-a374-eaaae4ef764a tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Task: {'id': task-2087779, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2214.021212] env[63279]: DEBUG oslo_concurrency.lockutils [None req-208396b7-55ea-4701-90ef-c6d3ea22cae9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Lock "a15141bc-424d-48ca-a6d5-c859a3639a0b" "released" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: held 34.710s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2214.022101] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b04333d3-6e55-46a1-b066-6cbcaa49e26d tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Lock "a15141bc-424d-48ca-a6d5-c859a3639a0b" acquired by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: waited 10.636s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2214.022304] env[63279]: INFO nova.compute.manager [None req-b04333d3-6e55-46a1-b066-6cbcaa49e26d tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] [instance: a15141bc-424d-48ca-a6d5-c859a3639a0b] Unshelving [ 2214.293460] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087772, 'name': CreateVM_Task} progress is 25%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2214.377044] env[63279]: DEBUG oslo_vmware.api [None req-ac2052a2-4ae2-4d31-a9a2-3f92b912b650 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Task: {'id': task-2087777, 'name': CloneVM_Task} progress is 94%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2214.464357] env[63279]: DEBUG oslo_vmware.api [None req-0334c2c0-9949-449c-a374-eaaae4ef764a tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Task: {'id': task-2087779, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2214.666517] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Didn't find any instances for network info cache update. 
{{(pid=63279) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10533}} [ 2214.666795] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2214.666992] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2214.667196] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2214.667361] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2214.667508] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2214.667657] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2214.667874] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=63279) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11066}} [ 2214.668037] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager.update_available_resource {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2214.746219] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-eb4c271b-566b-45e0-a5b3-e42f43986536 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: f18d3019-bf1f-4519-a824-7ca80458d793] Powering off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2214.746833] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ba3cdcd2-9d5f-47a7-a7a2-329b39ca8094 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2214.754525] env[63279]: DEBUG oslo_vmware.api [None req-eb4c271b-566b-45e0-a5b3-e42f43986536 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Waiting for the task: (returnval){ [ 2214.754525] env[63279]: value = "task-2087780" [ 2214.754525] env[63279]: _type = "Task" [ 2214.754525] env[63279]: } to complete. 
{{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2214.767937] env[63279]: DEBUG oslo_vmware.api [None req-eb4c271b-566b-45e0-a5b3-e42f43986536 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Task: {'id': task-2087780, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2214.798265] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087772, 'name': CreateVM_Task} progress is 25%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2214.808845] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-008fe972-d35c-493c-84e9-f70e66ddd213 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2214.816579] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-845504ee-841e-4d42-8a4f-b9e1f9ab068e {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2214.853362] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91b2c479-1770-47e5-a722-3db57d364253 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2214.861852] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a90fa9cd-d48e-46df-b71f-e9d6b1ac2795 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2214.883733] env[63279]: DEBUG oslo_vmware.api [None req-ac2052a2-4ae2-4d31-a9a2-3f92b912b650 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Task: {'id': task-2087777, 'name': CloneVM_Task} progress is 94%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2214.884312] env[63279]: DEBUG nova.compute.provider_tree [None req-9be47d77-4fd9-4f15-a0d1-1a230778846d tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] Inventory has not changed in ProviderTree for provider: 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2214.965486] env[63279]: DEBUG oslo_vmware.api [None req-0334c2c0-9949-449c-a374-eaaae4ef764a tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Task: {'id': task-2087779, 'name': PowerOnVM_Task, 'duration_secs': 0.946085} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2214.965840] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-0334c2c0-9949-449c-a374-eaaae4ef764a tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] [instance: 9fd3ea14-3e25-47f4-ae84-1b1fb8b1bbf6] Powered on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2214.966121] env[63279]: INFO nova.compute.manager [None req-0334c2c0-9949-449c-a374-eaaae4ef764a tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] [instance: 9fd3ea14-3e25-47f4-ae84-1b1fb8b1bbf6] Took 9.32 seconds to spawn the instance on the hypervisor. [ 2214.966318] env[63279]: DEBUG nova.compute.manager [None req-0334c2c0-9949-449c-a374-eaaae4ef764a tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] [instance: 9fd3ea14-3e25-47f4-ae84-1b1fb8b1bbf6] Checking state {{(pid=63279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2214.967133] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c534871f-a576-49a8-9a51-cf2b8bfdd1ea {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2215.036277] env[63279]: DEBUG nova.compute.utils [None req-b04333d3-6e55-46a1-b066-6cbcaa49e26d tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Using /dev/sd instead of None {{(pid=63279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2215.172258] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2215.264736] env[63279]: DEBUG oslo_vmware.api [None req-eb4c271b-566b-45e0-a5b3-e42f43986536 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Task: {'id': task-2087780, 'name': PowerOffVM_Task} progress is 100%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2215.294563] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087772, 'name': CreateVM_Task} progress is 25%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2215.376352] env[63279]: DEBUG oslo_vmware.api [None req-ac2052a2-4ae2-4d31-a9a2-3f92b912b650 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Task: {'id': task-2087777, 'name': CloneVM_Task} progress is 94%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2215.387504] env[63279]: DEBUG nova.scheduler.client.report [None req-9be47d77-4fd9-4f15-a0d1-1a230778846d tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] Inventory has not changed for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2215.487954] env[63279]: INFO nova.compute.manager [None req-0334c2c0-9949-449c-a374-eaaae4ef764a tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] [instance: 9fd3ea14-3e25-47f4-ae84-1b1fb8b1bbf6] Took 23.17 seconds to build instance. [ 2215.542026] env[63279]: INFO nova.virt.block_device [None req-b04333d3-6e55-46a1-b066-6cbcaa49e26d tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] [instance: a15141bc-424d-48ca-a6d5-c859a3639a0b] Booting with volume d14dfff0-b332-4aa4-94bb-62b564929c56 at /dev/sdb [ 2215.586028] env[63279]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d8a944d9-830d-4000-af02-cc4e92d54e90 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2215.595403] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2e8d247-b9eb-4323-92e9-050fc5369bd3 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2215.627531] env[63279]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f832f427-30f5-4b7c-b00f-bceccbf3c609 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2215.635908] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-281f6f2c-bdb5-49f2-8008-667fb4851ee6 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2215.669039] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50616c80-06f0-44bf-bca7-e596e5f6aa52 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2215.675470] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c82ad84d-5ba3-41b2-9d49-41c944a14444 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2215.689368] env[63279]: DEBUG nova.virt.block_device [None req-b04333d3-6e55-46a1-b066-6cbcaa49e26d tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] [instance: a15141bc-424d-48ca-a6d5-c859a3639a0b] Updating existing volume attachment record: 599569f0-2ce4-4f28-8a2d-6c3522d3dd3a {{(pid=63279) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 2215.765112] env[63279]: DEBUG oslo_vmware.api [None 
req-eb4c271b-566b-45e0-a5b3-e42f43986536 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Task: {'id': task-2087780, 'name': PowerOffVM_Task, 'duration_secs': 0.576298} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2215.765407] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-eb4c271b-566b-45e0-a5b3-e42f43986536 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: f18d3019-bf1f-4519-a824-7ca80458d793] Powered off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2215.765609] env[63279]: DEBUG nova.compute.manager [None req-eb4c271b-566b-45e0-a5b3-e42f43986536 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: f18d3019-bf1f-4519-a824-7ca80458d793] Checking state {{(pid=63279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2215.766385] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c8a6379-d675-4787-b055-00f0b7d92ff4 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2215.794914] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087772, 'name': CreateVM_Task} progress is 25%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2215.876736] env[63279]: DEBUG oslo_vmware.api [None req-ac2052a2-4ae2-4d31-a9a2-3f92b912b650 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Task: {'id': task-2087777, 'name': CloneVM_Task} progress is 94%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2215.893804] env[63279]: DEBUG oslo_concurrency.lockutils [None req-9be47d77-4fd9-4f15-a0d1-1a230778846d tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.383s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2215.894383] env[63279]: DEBUG nova.compute.manager [None req-9be47d77-4fd9-4f15-a0d1-1a230778846d tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] [instance: 59ad6180-e561-41e3-86e4-37fc20819578] Start building networks asynchronously for instance. 
{{(pid=63279) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 2215.897116] env[63279]: DEBUG oslo_concurrency.lockutils [None req-140984b1-6c87-4255-9ab0-f54ef3dd0bda tempest-ServersTestMultiNic-465204663 tempest-ServersTestMultiNic-465204663-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 3.362s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2215.899053] env[63279]: INFO nova.compute.claims [None req-140984b1-6c87-4255-9ab0-f54ef3dd0bda tempest-ServersTestMultiNic-465204663 tempest-ServersTestMultiNic-465204663-project-member] [instance: f418ff3d-8a5f-4d78-994a-e40b62cfcdd6] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2215.989532] env[63279]: DEBUG oslo_concurrency.lockutils [None req-0334c2c0-9949-449c-a374-eaaae4ef764a tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Lock "9fd3ea14-3e25-47f4-ae84-1b1fb8b1bbf6" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 24.683s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2216.278069] env[63279]: DEBUG oslo_concurrency.lockutils [None req-eb4c271b-566b-45e0-a5b3-e42f43986536 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Lock "f18d3019-bf1f-4519-a824-7ca80458d793" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.551s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2216.295784] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087772, 'name': CreateVM_Task} progress is 25%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2216.377926] env[63279]: DEBUG oslo_vmware.api [None req-ac2052a2-4ae2-4d31-a9a2-3f92b912b650 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Task: {'id': task-2087777, 'name': CloneVM_Task} progress is 94%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2216.405136] env[63279]: DEBUG nova.compute.utils [None req-9be47d77-4fd9-4f15-a0d1-1a230778846d tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] Using /dev/sd instead of None {{(pid=63279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2216.410669] env[63279]: DEBUG nova.compute.manager [None req-9be47d77-4fd9-4f15-a0d1-1a230778846d tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] [instance: 59ad6180-e561-41e3-86e4-37fc20819578] Allocating IP information in the background. 
{{(pid=63279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 2216.410669] env[63279]: DEBUG nova.network.neutron [None req-9be47d77-4fd9-4f15-a0d1-1a230778846d tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] [instance: 59ad6180-e561-41e3-86e4-37fc20819578] allocate_for_instance() {{(pid=63279) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2216.454314] env[63279]: DEBUG nova.policy [None req-9be47d77-4fd9-4f15-a0d1-1a230778846d tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6615b44501ff4c23a373c60aaf443f5b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'afe3ab970f3249719809afa9f9b11c1b', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63279) authorize /opt/stack/nova/nova/policy.py:192}} [ 2216.458928] env[63279]: DEBUG nova.compute.manager [req-b9c37a21-f291-4b0c-bda4-8cdc0d37fb7c req-3a227be8-3fa7-4197-9883-7396862961ca service nova] [instance: 9fd3ea14-3e25-47f4-ae84-1b1fb8b1bbf6] Received event network-changed-0bd52488-d960-42c9-8077-fc8fe9eda956 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2216.459149] env[63279]: DEBUG nova.compute.manager [req-b9c37a21-f291-4b0c-bda4-8cdc0d37fb7c req-3a227be8-3fa7-4197-9883-7396862961ca service nova] [instance: 9fd3ea14-3e25-47f4-ae84-1b1fb8b1bbf6] Refreshing instance network info cache due to event network-changed-0bd52488-d960-42c9-8077-fc8fe9eda956. {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11655}} [ 2216.459371] env[63279]: DEBUG oslo_concurrency.lockutils [req-b9c37a21-f291-4b0c-bda4-8cdc0d37fb7c req-3a227be8-3fa7-4197-9883-7396862961ca service nova] Acquiring lock "refresh_cache-9fd3ea14-3e25-47f4-ae84-1b1fb8b1bbf6" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2216.459517] env[63279]: DEBUG oslo_concurrency.lockutils [req-b9c37a21-f291-4b0c-bda4-8cdc0d37fb7c req-3a227be8-3fa7-4197-9883-7396862961ca service nova] Acquired lock "refresh_cache-9fd3ea14-3e25-47f4-ae84-1b1fb8b1bbf6" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2216.459824] env[63279]: DEBUG nova.network.neutron [req-b9c37a21-f291-4b0c-bda4-8cdc0d37fb7c req-3a227be8-3fa7-4197-9883-7396862961ca service nova] [instance: 9fd3ea14-3e25-47f4-ae84-1b1fb8b1bbf6] Refreshing network info cache for port 0bd52488-d960-42c9-8077-fc8fe9eda956 {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2216.797708] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087772, 'name': CreateVM_Task} progress is 25%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2216.823409] env[63279]: DEBUG nova.network.neutron [None req-9be47d77-4fd9-4f15-a0d1-1a230778846d tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] [instance: 59ad6180-e561-41e3-86e4-37fc20819578] Successfully created port: c8b3dd94-c21a-424e-85d2-ea499e510f55 {{(pid=63279) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2216.881076] env[63279]: DEBUG oslo_vmware.api [None req-ac2052a2-4ae2-4d31-a9a2-3f92b912b650 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Task: {'id': task-2087777, 'name': CloneVM_Task} progress is 94%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2216.912991] env[63279]: DEBUG nova.compute.manager [None req-9be47d77-4fd9-4f15-a0d1-1a230778846d tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] [instance: 59ad6180-e561-41e3-86e4-37fc20819578] Start building block device mappings for instance. {{(pid=63279) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 2217.040636] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-1d286e34-6589-438c-9cdb-cfc4a6911ea0 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: 736ab3ac-b9e0-4f9e-885b-765ca7a92ed0] Unregistered the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2217.040741] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-1d286e34-6589-438c-9cdb-cfc4a6911ea0 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: 736ab3ac-b9e0-4f9e-885b-765ca7a92ed0] Deleting contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2217.040919] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-1d286e34-6589-438c-9cdb-cfc4a6911ea0 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Deleting the datastore file [datastore1] 736ab3ac-b9e0-4f9e-885b-765ca7a92ed0 {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2217.042423] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-151172a6-db35-4333-9b2b-9c853db7cb5f {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2217.053308] env[63279]: DEBUG oslo_vmware.api [None req-1d286e34-6589-438c-9cdb-cfc4a6911ea0 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Waiting for the task: (returnval){ [ 2217.053308] env[63279]: value = "task-2087784" [ 2217.053308] env[63279]: _type = "Task" [ 2217.053308] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2217.068027] env[63279]: DEBUG oslo_vmware.api [None req-1d286e34-6589-438c-9cdb-cfc4a6911ea0 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Task: {'id': task-2087784, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2217.227498] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b22b24b2-d121-4c30-a719-fe6f45443e8a {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2217.235833] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4725f2e1-f315-4f00-9e28-32b4fb1f1d82 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2217.265838] env[63279]: DEBUG oslo_concurrency.lockutils [None req-8c4cc515-3d7d-44c5-b1c5-7f29579d7178 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Acquiring lock "f18d3019-bf1f-4519-a824-7ca80458d793" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2217.266130] env[63279]: DEBUG oslo_concurrency.lockutils [None req-8c4cc515-3d7d-44c5-b1c5-7f29579d7178 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Lock "f18d3019-bf1f-4519-a824-7ca80458d793" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2217.266372] env[63279]: DEBUG oslo_concurrency.lockutils [None req-8c4cc515-3d7d-44c5-b1c5-7f29579d7178 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Acquiring lock "f18d3019-bf1f-4519-a824-7ca80458d793-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2217.266579] env[63279]: DEBUG oslo_concurrency.lockutils [None req-8c4cc515-3d7d-44c5-b1c5-7f29579d7178 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Lock "f18d3019-bf1f-4519-a824-7ca80458d793-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2217.266793] env[63279]: DEBUG oslo_concurrency.lockutils [None req-8c4cc515-3d7d-44c5-b1c5-7f29579d7178 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Lock "f18d3019-bf1f-4519-a824-7ca80458d793-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2217.271381] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ebef78d-4ccb-459f-a4e1-87dfcd3d9aa4 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2217.274096] env[63279]: INFO nova.compute.manager [None req-8c4cc515-3d7d-44c5-b1c5-7f29579d7178 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: f18d3019-bf1f-4519-a824-7ca80458d793] Terminating instance [ 2217.281652] env[63279]: DEBUG oslo_vmware.service [-] 
Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c6ce370-62d3-4979-a419-add2fadf54a6 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2217.298766] env[63279]: DEBUG nova.compute.provider_tree [None req-140984b1-6c87-4255-9ab0-f54ef3dd0bda tempest-ServersTestMultiNic-465204663 tempest-ServersTestMultiNic-465204663-project-member] Inventory has not changed in ProviderTree for provider: 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2217.306046] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087772, 'name': CreateVM_Task, 'duration_secs': 5.97684} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2217.306046] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 69b3269a-2ba3-4f5f-a29c-62518c93da3d] Created VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2217.306046] env[63279]: DEBUG oslo_concurrency.lockutils [None req-c2199699-eea6-4f35-987c-22394a2587cb tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2217.306238] env[63279]: DEBUG oslo_concurrency.lockutils [None req-c2199699-eea6-4f35-987c-22394a2587cb tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2217.306437] env[63279]: DEBUG oslo_concurrency.lockutils [None req-c2199699-eea6-4f35-987c-22394a2587cb tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2217.307188] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-758a41e4-69d5-46d6-9188-0b090ec6daf4 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2217.312328] env[63279]: DEBUG oslo_vmware.api [None req-c2199699-eea6-4f35-987c-22394a2587cb tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Waiting for the task: (returnval){ [ 2217.312328] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52d54c65-39b5-6f4b-ecd0-2c03e8ee8840" [ 2217.312328] env[63279]: _type = "Task" [ 2217.312328] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2217.313184] env[63279]: DEBUG nova.network.neutron [req-b9c37a21-f291-4b0c-bda4-8cdc0d37fb7c req-3a227be8-3fa7-4197-9883-7396862961ca service nova] [instance: 9fd3ea14-3e25-47f4-ae84-1b1fb8b1bbf6] Updated VIF entry in instance network info cache for port 0bd52488-d960-42c9-8077-fc8fe9eda956. 
{{(pid=63279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2217.313542] env[63279]: DEBUG nova.network.neutron [req-b9c37a21-f291-4b0c-bda4-8cdc0d37fb7c req-3a227be8-3fa7-4197-9883-7396862961ca service nova] [instance: 9fd3ea14-3e25-47f4-ae84-1b1fb8b1bbf6] Updating instance_info_cache with network_info: [{"id": "0bd52488-d960-42c9-8077-fc8fe9eda956", "address": "fa:16:3e:16:45:3c", "network": {"id": "e0e614b7-de4b-485e-8824-582faae8febd", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1102025073-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.241", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b5b21bc5072e4945a19a782dd9561709", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "41278529-8bd2-44a1-97c8-03967faa3ff7", "external-id": "nsx-vlan-transportzone-749", "segmentation_id": 749, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0bd52488-d9", "ovs_interfaceid": "0bd52488-d960-42c9-8077-fc8fe9eda956", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2217.323285] env[63279]: DEBUG oslo_vmware.api [None req-c2199699-eea6-4f35-987c-22394a2587cb tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52d54c65-39b5-6f4b-ecd0-2c03e8ee8840, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2217.378455] env[63279]: DEBUG oslo_vmware.api [None req-ac2052a2-4ae2-4d31-a9a2-3f92b912b650 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Task: {'id': task-2087777, 'name': CloneVM_Task} progress is 94%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2217.565387] env[63279]: DEBUG oslo_vmware.api [None req-1d286e34-6589-438c-9cdb-cfc4a6911ea0 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Task: {'id': task-2087784, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.159639} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2217.565953] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-1d286e34-6589-438c-9cdb-cfc4a6911ea0 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Deleted the datastore file {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2217.565953] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-1d286e34-6589-438c-9cdb-cfc4a6911ea0 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: 736ab3ac-b9e0-4f9e-885b-765ca7a92ed0] Deleted contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2217.566175] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-1d286e34-6589-438c-9cdb-cfc4a6911ea0 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: 736ab3ac-b9e0-4f9e-885b-765ca7a92ed0] Instance destroyed {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2217.566387] env[63279]: INFO nova.compute.manager [None req-1d286e34-6589-438c-9cdb-cfc4a6911ea0 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: 736ab3ac-b9e0-4f9e-885b-765ca7a92ed0] Took 5.39 seconds to destroy the instance on the hypervisor. [ 2217.566662] env[63279]: DEBUG oslo.service.loopingcall [None req-1d286e34-6589-438c-9cdb-cfc4a6911ea0 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2217.566902] env[63279]: DEBUG nova.compute.manager [-] [instance: 736ab3ac-b9e0-4f9e-885b-765ca7a92ed0] Deallocating network for instance {{(pid=63279) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2217.567020] env[63279]: DEBUG nova.network.neutron [-] [instance: 736ab3ac-b9e0-4f9e-885b-765ca7a92ed0] deallocate_for_instance() {{(pid=63279) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2217.778430] env[63279]: DEBUG nova.compute.manager [None req-8c4cc515-3d7d-44c5-b1c5-7f29579d7178 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: f18d3019-bf1f-4519-a824-7ca80458d793] Start destroying the instance on the hypervisor. 
{{(pid=63279) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2217.778650] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-8c4cc515-3d7d-44c5-b1c5-7f29579d7178 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: f18d3019-bf1f-4519-a824-7ca80458d793] Destroying instance {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2217.779575] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e740227-cb67-4e44-8351-2641021da946 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2217.788118] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-8c4cc515-3d7d-44c5-b1c5-7f29579d7178 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: f18d3019-bf1f-4519-a824-7ca80458d793] Unregistering the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2217.788118] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c57dcab8-d5ec-40ac-b0a5-0c12ff8d5ca7 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2217.802020] env[63279]: DEBUG nova.scheduler.client.report [None req-140984b1-6c87-4255-9ab0-f54ef3dd0bda tempest-ServersTestMultiNic-465204663 tempest-ServersTestMultiNic-465204663-project-member] Inventory has not changed for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2217.819248] env[63279]: DEBUG oslo_concurrency.lockutils [req-b9c37a21-f291-4b0c-bda4-8cdc0d37fb7c req-3a227be8-3fa7-4197-9883-7396862961ca service nova] Releasing lock "refresh_cache-9fd3ea14-3e25-47f4-ae84-1b1fb8b1bbf6" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2217.825585] env[63279]: DEBUG oslo_vmware.api [None req-c2199699-eea6-4f35-987c-22394a2587cb tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52d54c65-39b5-6f4b-ecd0-2c03e8ee8840, 'name': SearchDatastore_Task, 'duration_secs': 0.011035} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2217.825880] env[63279]: DEBUG oslo_concurrency.lockutils [None req-c2199699-eea6-4f35-987c-22394a2587cb tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2217.826130] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-c2199699-eea6-4f35-987c-22394a2587cb tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] [instance: 69b3269a-2ba3-4f5f-a29c-62518c93da3d] Processing image 30887889-e45b-4f67-8b3c-16216e594a90 {{(pid=63279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2217.826368] env[63279]: DEBUG oslo_concurrency.lockutils [None req-c2199699-eea6-4f35-987c-22394a2587cb tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2217.826518] env[63279]: DEBUG oslo_concurrency.lockutils [None req-c2199699-eea6-4f35-987c-22394a2587cb tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2217.826696] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-c2199699-eea6-4f35-987c-22394a2587cb tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2217.826962] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6592b119-8277-4803-9253-6675849390ca {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2217.835917] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-c2199699-eea6-4f35-987c-22394a2587cb tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2217.836134] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-c2199699-eea6-4f35-987c-22394a2587cb tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Folder [datastore1] devstack-image-cache_base created. 
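The records above show the image-cache handling being serialized under locks named after the cached image and its VMDK path ("[datastore1] devstack-image-cache_base/30887889-...") before _fetch_image_if_missing processes the image and ds_util creates the cache folder. A minimal sketch of that locking pattern follows, using oslo_concurrency.lockutils directly; it is not Nova's actual _fetch_image_if_missing, and the in-memory set is only a stand-in for checking whether the VMDK already exists on the datastore.

from oslo_concurrency import lockutils

_CACHE = set()  # stand-in for "the cached VMDK already exists on the datastore"


def fetch_image_if_missing(datastore, image_id):
    """Serialize cache population per image, as in the lock records above."""
    cache_vmdk = "[%s] devstack-image-cache_base/%s/%s.vmdk" % (
        datastore, image_id, image_id)
    # lockutils emits the "Acquiring lock ..." / "Lock ... acquired" /
    # "Releasing lock ..." DEBUG lines; using the cache path as the lock
    # name means only one request populates a given image at a time.
    with lockutils.lock(cache_vmdk):
        if cache_vmdk not in _CACHE:
            _CACHE.add(cache_vmdk)  # stand-in for the actual image download
        return cache_vmdk


print(fetch_image_if_missing("datastore1", "30887889-e45b-4f67-8b3c-16216e594a90"))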
{{(pid=63279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2217.836864] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0876376f-949d-4985-8049-bd8b81263627 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2217.844203] env[63279]: DEBUG oslo_vmware.api [None req-c2199699-eea6-4f35-987c-22394a2587cb tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Waiting for the task: (returnval){ [ 2217.844203] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]522ca633-6e92-5710-d8ea-438d12277d5c" [ 2217.844203] env[63279]: _type = "Task" [ 2217.844203] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2217.852019] env[63279]: DEBUG oslo_vmware.api [None req-c2199699-eea6-4f35-987c-22394a2587cb tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]522ca633-6e92-5710-d8ea-438d12277d5c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2217.879485] env[63279]: DEBUG oslo_vmware.api [None req-ac2052a2-4ae2-4d31-a9a2-3f92b912b650 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Task: {'id': task-2087777, 'name': CloneVM_Task} progress is 95%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2217.921818] env[63279]: DEBUG nova.compute.manager [None req-9be47d77-4fd9-4f15-a0d1-1a230778846d tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] [instance: 59ad6180-e561-41e3-86e4-37fc20819578] Start spawning the instance on the hypervisor. 
{{(pid=63279) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 2217.947614] env[63279]: DEBUG nova.virt.hardware [None req-9be47d77-4fd9-4f15-a0d1-1a230778846d tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-13T17:30:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-13T17:30:01Z,direct_url=,disk_format='vmdk',id=30887889-e45b-4f67-8b3c-16216e594a90,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a935e86eee0a4c38adfe0367d2097a61',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-13T17:30:02Z,virtual_size=,visibility=), allow threads: False {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2217.947907] env[63279]: DEBUG nova.virt.hardware [None req-9be47d77-4fd9-4f15-a0d1-1a230778846d tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] Flavor limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2217.948090] env[63279]: DEBUG nova.virt.hardware [None req-9be47d77-4fd9-4f15-a0d1-1a230778846d tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] Image limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2217.948282] env[63279]: DEBUG nova.virt.hardware [None req-9be47d77-4fd9-4f15-a0d1-1a230778846d tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] Flavor pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2217.948433] env[63279]: DEBUG nova.virt.hardware [None req-9be47d77-4fd9-4f15-a0d1-1a230778846d tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] Image pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2217.948581] env[63279]: DEBUG nova.virt.hardware [None req-9be47d77-4fd9-4f15-a0d1-1a230778846d tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2217.948796] env[63279]: DEBUG nova.virt.hardware [None req-9be47d77-4fd9-4f15-a0d1-1a230778846d tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 2217.948961] env[63279]: DEBUG nova.virt.hardware [None req-9be47d77-4fd9-4f15-a0d1-1a230778846d tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 2217.949159] env[63279]: DEBUG nova.virt.hardware [None 
req-9be47d77-4fd9-4f15-a0d1-1a230778846d tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] Got 1 possible topologies {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2217.949328] env[63279]: DEBUG nova.virt.hardware [None req-9be47d77-4fd9-4f15-a0d1-1a230778846d tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2217.949505] env[63279]: DEBUG nova.virt.hardware [None req-9be47d77-4fd9-4f15-a0d1-1a230778846d tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2217.950425] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2af115ce-d9e4-4c9f-89d1-02a232122206 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2217.959278] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae0417ae-5957-4b9b-955c-4fcdc50c58f7 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2218.163731] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-8c4cc515-3d7d-44c5-b1c5-7f29579d7178 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: f18d3019-bf1f-4519-a824-7ca80458d793] Unregistered the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2218.164079] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-8c4cc515-3d7d-44c5-b1c5-7f29579d7178 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: f18d3019-bf1f-4519-a824-7ca80458d793] Deleting contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2218.164350] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-8c4cc515-3d7d-44c5-b1c5-7f29579d7178 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Deleting the datastore file [datastore1] f18d3019-bf1f-4519-a824-7ca80458d793 {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2218.164683] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-bc063507-ab7b-47dd-94b2-a5c7b178e5ec {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2218.171883] env[63279]: DEBUG oslo_vmware.api [None req-8c4cc515-3d7d-44c5-b1c5-7f29579d7178 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Waiting for the task: (returnval){ [ 2218.171883] env[63279]: value = "task-2087786" [ 2218.171883] env[63279]: _type = "Task" [ 2218.171883] env[63279]: } to complete. 
{{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2218.182702] env[63279]: DEBUG oslo_vmware.api [None req-8c4cc515-3d7d-44c5-b1c5-7f29579d7178 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Task: {'id': task-2087786, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2218.300361] env[63279]: DEBUG nova.compute.manager [req-6866c4a9-41cc-4987-833b-5ef55bf5f894 req-e05c182e-19b1-4ee8-9be2-d369f149155d service nova] [instance: 59ad6180-e561-41e3-86e4-37fc20819578] Received event network-vif-plugged-c8b3dd94-c21a-424e-85d2-ea499e510f55 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2218.300757] env[63279]: DEBUG oslo_concurrency.lockutils [req-6866c4a9-41cc-4987-833b-5ef55bf5f894 req-e05c182e-19b1-4ee8-9be2-d369f149155d service nova] Acquiring lock "59ad6180-e561-41e3-86e4-37fc20819578-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2218.300942] env[63279]: DEBUG oslo_concurrency.lockutils [req-6866c4a9-41cc-4987-833b-5ef55bf5f894 req-e05c182e-19b1-4ee8-9be2-d369f149155d service nova] Lock "59ad6180-e561-41e3-86e4-37fc20819578-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2218.301298] env[63279]: DEBUG oslo_concurrency.lockutils [req-6866c4a9-41cc-4987-833b-5ef55bf5f894 req-e05c182e-19b1-4ee8-9be2-d369f149155d service nova] Lock "59ad6180-e561-41e3-86e4-37fc20819578-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2218.301547] env[63279]: DEBUG nova.compute.manager [req-6866c4a9-41cc-4987-833b-5ef55bf5f894 req-e05c182e-19b1-4ee8-9be2-d369f149155d service nova] [instance: 59ad6180-e561-41e3-86e4-37fc20819578] No waiting events found dispatching network-vif-plugged-c8b3dd94-c21a-424e-85d2-ea499e510f55 {{(pid=63279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 2218.301751] env[63279]: WARNING nova.compute.manager [req-6866c4a9-41cc-4987-833b-5ef55bf5f894 req-e05c182e-19b1-4ee8-9be2-d369f149155d service nova] [instance: 59ad6180-e561-41e3-86e4-37fc20819578] Received unexpected event network-vif-plugged-c8b3dd94-c21a-424e-85d2-ea499e510f55 for instance with vm_state building and task_state spawning. [ 2218.306397] env[63279]: DEBUG oslo_concurrency.lockutils [None req-140984b1-6c87-4255-9ab0-f54ef3dd0bda tempest-ServersTestMultiNic-465204663 tempest-ServersTestMultiNic-465204663-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.409s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2218.306981] env[63279]: DEBUG nova.compute.manager [None req-140984b1-6c87-4255-9ab0-f54ef3dd0bda tempest-ServersTestMultiNic-465204663 tempest-ServersTestMultiNic-465204663-project-member] [instance: f418ff3d-8a5f-4d78-994a-e40b62cfcdd6] Start building networks asynchronously for instance. 
{{(pid=63279) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 2218.309877] env[63279]: DEBUG oslo_concurrency.lockutils [None req-66cd7ef7-f0f8-471c-a26b-5d90abc10497 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 5.124s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2218.311604] env[63279]: INFO nova.compute.claims [None req-66cd7ef7-f0f8-471c-a26b-5d90abc10497 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: 9a6abf10-ed12-49c6-aa16-f428da9f9f9d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2218.341496] env[63279]: DEBUG nova.network.neutron [-] [instance: 736ab3ac-b9e0-4f9e-885b-765ca7a92ed0] Updating instance_info_cache with network_info: [] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2218.356594] env[63279]: DEBUG oslo_vmware.api [None req-c2199699-eea6-4f35-987c-22394a2587cb tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]522ca633-6e92-5710-d8ea-438d12277d5c, 'name': SearchDatastore_Task, 'duration_secs': 0.010896} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2218.357455] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0a179b7e-9fe1-4507-b3e0-0acbcb66c78d {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2218.364367] env[63279]: DEBUG oslo_vmware.api [None req-c2199699-eea6-4f35-987c-22394a2587cb tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Waiting for the task: (returnval){ [ 2218.364367] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]525dbf5a-ccf4-38d1-3846-3fed6ceddf14" [ 2218.364367] env[63279]: _type = "Task" [ 2218.364367] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2218.373222] env[63279]: DEBUG oslo_vmware.api [None req-c2199699-eea6-4f35-987c-22394a2587cb tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]525dbf5a-ccf4-38d1-3846-3fed6ceddf14, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2218.381189] env[63279]: DEBUG oslo_vmware.api [None req-ac2052a2-4ae2-4d31-a9a2-3f92b912b650 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Task: {'id': task-2087777, 'name': CloneVM_Task, 'duration_secs': 4.918178} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2218.381472] env[63279]: INFO nova.virt.vmwareapi.vmops [None req-ac2052a2-4ae2-4d31-a9a2-3f92b912b650 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] [instance: 14c698c8-7459-4843-bb19-f915742e3e53] Created linked-clone VM from snapshot [ 2218.382232] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c7031f7-ed5f-4767-b8f7-b998b8efb1de {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2218.389612] env[63279]: DEBUG nova.virt.vmwareapi.images [None req-ac2052a2-4ae2-4d31-a9a2-3f92b912b650 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] [instance: 14c698c8-7459-4843-bb19-f915742e3e53] Uploading image 36f26273-1ab5-494f-b233-e1bd72926c8f {{(pid=63279) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 2218.393233] env[63279]: DEBUG nova.network.neutron [None req-9be47d77-4fd9-4f15-a0d1-1a230778846d tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] [instance: 59ad6180-e561-41e3-86e4-37fc20819578] Successfully updated port: c8b3dd94-c21a-424e-85d2-ea499e510f55 {{(pid=63279) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2218.400708] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-ac2052a2-4ae2-4d31-a9a2-3f92b912b650 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] [instance: 14c698c8-7459-4843-bb19-f915742e3e53] Destroying the VM {{(pid=63279) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 2218.401060] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-32a27186-c06e-4dfd-bd7c-466c80b08ecd {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2218.408695] env[63279]: DEBUG oslo_vmware.api [None req-ac2052a2-4ae2-4d31-a9a2-3f92b912b650 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Waiting for the task: (returnval){ [ 2218.408695] env[63279]: value = "task-2087788" [ 2218.408695] env[63279]: _type = "Task" [ 2218.408695] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2218.418322] env[63279]: DEBUG oslo_vmware.api [None req-ac2052a2-4ae2-4d31-a9a2-3f92b912b650 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Task: {'id': task-2087788, 'name': Destroy_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2218.484263] env[63279]: DEBUG nova.compute.manager [req-ab0991a1-81d0-4a14-8d23-50fd836f51dd req-60bb1aca-1ae7-4f3f-b15e-539e75bdf434 service nova] [instance: 736ab3ac-b9e0-4f9e-885b-765ca7a92ed0] Received event network-vif-deleted-5a61eb6f-d235-4f00-a65b-76f20c2c3d8d {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2218.681944] env[63279]: DEBUG oslo_vmware.api [None req-8c4cc515-3d7d-44c5-b1c5-7f29579d7178 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Task: {'id': task-2087786, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.209431} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2218.682937] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-8c4cc515-3d7d-44c5-b1c5-7f29579d7178 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Deleted the datastore file {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2218.682937] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-8c4cc515-3d7d-44c5-b1c5-7f29579d7178 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: f18d3019-bf1f-4519-a824-7ca80458d793] Deleted contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2218.682937] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-8c4cc515-3d7d-44c5-b1c5-7f29579d7178 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: f18d3019-bf1f-4519-a824-7ca80458d793] Instance destroyed {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2218.682937] env[63279]: INFO nova.compute.manager [None req-8c4cc515-3d7d-44c5-b1c5-7f29579d7178 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: f18d3019-bf1f-4519-a824-7ca80458d793] Took 0.90 seconds to destroy the instance on the hypervisor. [ 2218.683140] env[63279]: DEBUG oslo.service.loopingcall [None req-8c4cc515-3d7d-44c5-b1c5-7f29579d7178 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2218.683215] env[63279]: DEBUG nova.compute.manager [-] [instance: f18d3019-bf1f-4519-a824-7ca80458d793] Deallocating network for instance {{(pid=63279) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2218.683313] env[63279]: DEBUG nova.network.neutron [-] [instance: f18d3019-bf1f-4519-a824-7ca80458d793] deallocate_for_instance() {{(pid=63279) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2218.816329] env[63279]: DEBUG nova.compute.utils [None req-140984b1-6c87-4255-9ab0-f54ef3dd0bda tempest-ServersTestMultiNic-465204663 tempest-ServersTestMultiNic-465204663-project-member] Using /dev/sd instead of None {{(pid=63279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2218.818034] env[63279]: DEBUG nova.compute.manager [None req-140984b1-6c87-4255-9ab0-f54ef3dd0bda tempest-ServersTestMultiNic-465204663 tempest-ServersTestMultiNic-465204663-project-member] [instance: f418ff3d-8a5f-4d78-994a-e40b62cfcdd6] Allocating IP information in the background. {{(pid=63279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 2218.818154] env[63279]: DEBUG nova.network.neutron [None req-140984b1-6c87-4255-9ab0-f54ef3dd0bda tempest-ServersTestMultiNic-465204663 tempest-ServersTestMultiNic-465204663-project-member] [instance: f418ff3d-8a5f-4d78-994a-e40b62cfcdd6] allocate_for_instance() {{(pid=63279) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2218.844112] env[63279]: INFO nova.compute.manager [-] [instance: 736ab3ac-b9e0-4f9e-885b-765ca7a92ed0] Took 1.28 seconds to deallocate network for instance. 
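Both teardown paths above hand network deallocation to an oslo.service looping call ("Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return") so the Neutron cleanup can be retried before the "Took N seconds to deallocate network" message is logged. A rough sketch of that retry shape is below, assuming a FixedIntervalLoopingCall and a stub deallocate function; Nova's real helper differs in detail.

from oslo_service import loopingcall


def deallocate_network_with_retries(deallocate, max_attempts=3):
    attempts = {"n": 0}

    def _try():
        attempts["n"] += 1
        try:
            deallocate()
        except Exception:
            if attempts["n"] >= max_attempts:
                raise  # give up; the waiter below sees the failure
            return     # let the looping call fire again after the interval
        # Success: stop the loop and unblock the .wait() below.
        raise loopingcall.LoopingCallDone()

    timer = loopingcall.FixedIntervalLoopingCall(_try)
    timer.start(interval=2).wait()


deallocate_network_with_retries(lambda: None)  # stub deallocation, succeeds immediately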
[ 2218.861559] env[63279]: DEBUG nova.policy [None req-140984b1-6c87-4255-9ab0-f54ef3dd0bda tempest-ServersTestMultiNic-465204663 tempest-ServersTestMultiNic-465204663-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '7d9d348f73904de5adc5d521c18bfb6c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '161d1caf722349c188ca8fc647989c05', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63279) authorize /opt/stack/nova/nova/policy.py:192}} [ 2218.875324] env[63279]: DEBUG oslo_vmware.api [None req-c2199699-eea6-4f35-987c-22394a2587cb tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]525dbf5a-ccf4-38d1-3846-3fed6ceddf14, 'name': SearchDatastore_Task, 'duration_secs': 0.01049} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2218.875658] env[63279]: DEBUG oslo_concurrency.lockutils [None req-c2199699-eea6-4f35-987c-22394a2587cb tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2218.876211] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-c2199699-eea6-4f35-987c-22394a2587cb tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] 69b3269a-2ba3-4f5f-a29c-62518c93da3d/69b3269a-2ba3-4f5f-a29c-62518c93da3d.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2218.876211] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-73fad483-f3d4-4758-8dd0-e30895d9badd {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2218.884278] env[63279]: DEBUG oslo_vmware.api [None req-c2199699-eea6-4f35-987c-22394a2587cb tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Waiting for the task: (returnval){ [ 2218.884278] env[63279]: value = "task-2087789" [ 2218.884278] env[63279]: _type = "Task" [ 2218.884278] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2218.892753] env[63279]: DEBUG oslo_vmware.api [None req-c2199699-eea6-4f35-987c-22394a2587cb tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Task: {'id': task-2087789, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2218.895390] env[63279]: DEBUG oslo_concurrency.lockutils [None req-9be47d77-4fd9-4f15-a0d1-1a230778846d tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] Acquiring lock "refresh_cache-59ad6180-e561-41e3-86e4-37fc20819578" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2218.895518] env[63279]: DEBUG oslo_concurrency.lockutils [None req-9be47d77-4fd9-4f15-a0d1-1a230778846d tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] Acquired lock "refresh_cache-59ad6180-e561-41e3-86e4-37fc20819578" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2218.895666] env[63279]: DEBUG nova.network.neutron [None req-9be47d77-4fd9-4f15-a0d1-1a230778846d tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] [instance: 59ad6180-e561-41e3-86e4-37fc20819578] Building network info cache for instance {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2218.920669] env[63279]: DEBUG oslo_vmware.api [None req-ac2052a2-4ae2-4d31-a9a2-3f92b912b650 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Task: {'id': task-2087788, 'name': Destroy_Task} progress is 33%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2219.162922] env[63279]: DEBUG nova.network.neutron [None req-140984b1-6c87-4255-9ab0-f54ef3dd0bda tempest-ServersTestMultiNic-465204663 tempest-ServersTestMultiNic-465204663-project-member] [instance: f418ff3d-8a5f-4d78-994a-e40b62cfcdd6] Successfully created port: 536b76b2-9f06-4910-aba3-8d499c43d065 {{(pid=63279) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2219.322930] env[63279]: DEBUG nova.compute.manager [None req-140984b1-6c87-4255-9ab0-f54ef3dd0bda tempest-ServersTestMultiNic-465204663 tempest-ServersTestMultiNic-465204663-project-member] [instance: f418ff3d-8a5f-4d78-994a-e40b62cfcdd6] Start building block device mappings for instance. {{(pid=63279) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 2219.353652] env[63279]: DEBUG oslo_concurrency.lockutils [None req-1d286e34-6589-438c-9cdb-cfc4a6911ea0 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2219.401925] env[63279]: DEBUG oslo_vmware.api [None req-c2199699-eea6-4f35-987c-22394a2587cb tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Task: {'id': task-2087789, 'name': CopyVirtualDisk_Task} progress is 25%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2219.409120] env[63279]: DEBUG nova.network.neutron [-] [instance: f18d3019-bf1f-4519-a824-7ca80458d793] Updating instance_info_cache with network_info: [] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2219.422063] env[63279]: DEBUG oslo_vmware.api [None req-ac2052a2-4ae2-4d31-a9a2-3f92b912b650 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Task: {'id': task-2087788, 'name': Destroy_Task, 'duration_secs': 0.753519} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2219.425227] env[63279]: INFO nova.virt.vmwareapi.vm_util [None req-ac2052a2-4ae2-4d31-a9a2-3f92b912b650 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] [instance: 14c698c8-7459-4843-bb19-f915742e3e53] Destroyed the VM [ 2219.425689] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-ac2052a2-4ae2-4d31-a9a2-3f92b912b650 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] [instance: 14c698c8-7459-4843-bb19-f915742e3e53] Deleting Snapshot of the VM instance {{(pid=63279) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 2219.428059] env[63279]: DEBUG nova.network.neutron [None req-140984b1-6c87-4255-9ab0-f54ef3dd0bda tempest-ServersTestMultiNic-465204663 tempest-ServersTestMultiNic-465204663-project-member] [instance: f418ff3d-8a5f-4d78-994a-e40b62cfcdd6] Successfully created port: 7d3d5ab9-d32b-45e9-a056-b6aa86e777ed {{(pid=63279) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2219.430080] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-c6be6f02-ccbb-444a-8527-a04a172772d1 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2219.438477] env[63279]: DEBUG oslo_vmware.api [None req-ac2052a2-4ae2-4d31-a9a2-3f92b912b650 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Waiting for the task: (returnval){ [ 2219.438477] env[63279]: value = "task-2087790" [ 2219.438477] env[63279]: _type = "Task" [ 2219.438477] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2219.451744] env[63279]: DEBUG oslo_vmware.api [None req-ac2052a2-4ae2-4d31-a9a2-3f92b912b650 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Task: {'id': task-2087790, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2219.475023] env[63279]: DEBUG nova.network.neutron [None req-9be47d77-4fd9-4f15-a0d1-1a230778846d tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] [instance: 59ad6180-e561-41e3-86e4-37fc20819578] Instance cache missing network info. 
{{(pid=63279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2219.618900] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7bcb1398-3646-4acc-87f3-bceced06d15c {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2219.631054] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-366f3246-be62-41fb-98d3-545fad8c025b {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2219.661364] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8aa64287-f7f6-4e18-b502-7fcf93f5199f {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2219.664820] env[63279]: DEBUG nova.network.neutron [None req-9be47d77-4fd9-4f15-a0d1-1a230778846d tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] [instance: 59ad6180-e561-41e3-86e4-37fc20819578] Updating instance_info_cache with network_info: [{"id": "c8b3dd94-c21a-424e-85d2-ea499e510f55", "address": "fa:16:3e:76:a5:9e", "network": {"id": "1ebc2797-412f-4da1-ba28-8b54789f9203", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-665948294-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "afe3ab970f3249719809afa9f9b11c1b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "496faa4d-d874-449b-905e-328ddd60b31b", "external-id": "nsx-vlan-transportzone-391", "segmentation_id": 391, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc8b3dd94-c2", "ovs_interfaceid": "c8b3dd94-c21a-424e-85d2-ea499e510f55", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2219.671085] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a52a5fe-2855-44b4-bfdb-14752365dc61 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2219.686369] env[63279]: DEBUG nova.compute.provider_tree [None req-66cd7ef7-f0f8-471c-a26b-5d90abc10497 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Inventory has not changed in ProviderTree for provider: 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2219.895932] env[63279]: DEBUG oslo_vmware.api [None req-c2199699-eea6-4f35-987c-22394a2587cb tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Task: {'id': task-2087789, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.73977} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2219.896059] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-c2199699-eea6-4f35-987c-22394a2587cb tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] 69b3269a-2ba3-4f5f-a29c-62518c93da3d/69b3269a-2ba3-4f5f-a29c-62518c93da3d.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2219.896253] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-c2199699-eea6-4f35-987c-22394a2587cb tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] [instance: 69b3269a-2ba3-4f5f-a29c-62518c93da3d] Extending root virtual disk to 1048576 {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2219.896460] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b8ae3e7f-b3d2-4750-9cd4-9740b6bf97b1 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2219.904301] env[63279]: DEBUG oslo_vmware.api [None req-c2199699-eea6-4f35-987c-22394a2587cb tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Waiting for the task: (returnval){ [ 2219.904301] env[63279]: value = "task-2087791" [ 2219.904301] env[63279]: _type = "Task" [ 2219.904301] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2219.913508] env[63279]: INFO nova.compute.manager [-] [instance: f18d3019-bf1f-4519-a824-7ca80458d793] Took 1.23 seconds to deallocate network for instance. [ 2219.913917] env[63279]: DEBUG oslo_vmware.api [None req-c2199699-eea6-4f35-987c-22394a2587cb tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Task: {'id': task-2087791, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2219.947739] env[63279]: DEBUG oslo_vmware.api [None req-ac2052a2-4ae2-4d31-a9a2-3f92b912b650 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Task: {'id': task-2087790, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2220.168686] env[63279]: DEBUG oslo_concurrency.lockutils [None req-9be47d77-4fd9-4f15-a0d1-1a230778846d tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] Releasing lock "refresh_cache-59ad6180-e561-41e3-86e4-37fc20819578" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2220.168686] env[63279]: DEBUG nova.compute.manager [None req-9be47d77-4fd9-4f15-a0d1-1a230778846d tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] [instance: 59ad6180-e561-41e3-86e4-37fc20819578] Instance network_info: |[{"id": "c8b3dd94-c21a-424e-85d2-ea499e510f55", "address": "fa:16:3e:76:a5:9e", "network": {"id": "1ebc2797-412f-4da1-ba28-8b54789f9203", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-665948294-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "afe3ab970f3249719809afa9f9b11c1b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "496faa4d-d874-449b-905e-328ddd60b31b", "external-id": "nsx-vlan-transportzone-391", "segmentation_id": 391, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc8b3dd94-c2", "ovs_interfaceid": "c8b3dd94-c21a-424e-85d2-ea499e510f55", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 2220.168686] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-9be47d77-4fd9-4f15-a0d1-1a230778846d tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] [instance: 59ad6180-e561-41e3-86e4-37fc20819578] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:76:a5:9e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '496faa4d-d874-449b-905e-328ddd60b31b', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c8b3dd94-c21a-424e-85d2-ea499e510f55', 'vif_model': 'vmxnet3'}] {{(pid=63279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2220.175761] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-9be47d77-4fd9-4f15-a0d1-1a230778846d tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] Creating folder: Project (afe3ab970f3249719809afa9f9b11c1b). Parent ref: group-v427491. {{(pid=63279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 2220.176108] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9ab01a56-9cf3-4e9f-bcea-30febef9c1be {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2220.187327] env[63279]: INFO nova.virt.vmwareapi.vm_util [None req-9be47d77-4fd9-4f15-a0d1-1a230778846d tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] Created folder: Project (afe3ab970f3249719809afa9f9b11c1b) in parent group-v427491. 
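The records above follow the invoke-and-poll pattern that runs through this whole log: a vSphere method (Folder.CreateFolder, Folder.CreateVM_Task, VirtualDiskManager.ExtendVirtualDisk_Task, ...) is invoked through the oslo.vmware session, and methods that return a Task are then polled until completion, which is what produces the repeated "Waiting for the task ... to complete" and "progress is N%" lines. A minimal sketch of that pattern, assuming placeholder credentials and pre-resolved managed-object references (folder_ref, resource_pool_ref, vm_config are hypothetical):

    from oslo_vmware import api as vmware_api

    # Placeholder connection settings; the real values come from nova.conf.
    session = vmware_api.VMwareAPISession(
        'vcenter.example.org', 'user', 'secret',
        api_retry_count=10, task_poll_interval=0.5)

    # Non-task call: CreateFolder returns the new folder moref directly.
    instances_folder = session.invoke_api(
        session.vim, 'CreateFolder', folder_ref, name='Instances')

    # Task call: CreateVM_Task returns a Task moref, and wait_for_task
    # polls it until success -- the source of the "progress is N%" records.
    task = session.invoke_api(
        session.vim, 'CreateVM_Task', instances_folder,
        config=vm_config, pool=resource_pool_ref)
    session.wait_for_task(task)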
[ 2220.187531] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-9be47d77-4fd9-4f15-a0d1-1a230778846d tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] Creating folder: Instances. Parent ref: group-v427739. {{(pid=63279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 2220.187843] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-db87b5a1-2b51-49d0-a3ff-db4e90741eaf {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2220.190236] env[63279]: DEBUG nova.scheduler.client.report [None req-66cd7ef7-f0f8-471c-a26b-5d90abc10497 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Inventory has not changed for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2220.202066] env[63279]: INFO nova.virt.vmwareapi.vm_util [None req-9be47d77-4fd9-4f15-a0d1-1a230778846d tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] Created folder: Instances in parent group-v427739. [ 2220.202417] env[63279]: DEBUG oslo.service.loopingcall [None req-9be47d77-4fd9-4f15-a0d1-1a230778846d tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2220.202693] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 59ad6180-e561-41e3-86e4-37fc20819578] Creating VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2220.202995] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d2c90e8e-990b-48c3-be2c-c3efbf23c7cf {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2220.233586] env[63279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2220.233586] env[63279]: value = "task-2087794" [ 2220.233586] env[63279]: _type = "Task" [ 2220.233586] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2220.244956] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087794, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2220.326636] env[63279]: DEBUG nova.compute.manager [req-f40d7add-8a32-4804-b1fa-965019150f59 req-c4ea7f23-ed27-4eb7-a84c-83a661f1744d service nova] [instance: 59ad6180-e561-41e3-86e4-37fc20819578] Received event network-changed-c8b3dd94-c21a-424e-85d2-ea499e510f55 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2220.326873] env[63279]: DEBUG nova.compute.manager [req-f40d7add-8a32-4804-b1fa-965019150f59 req-c4ea7f23-ed27-4eb7-a84c-83a661f1744d service nova] [instance: 59ad6180-e561-41e3-86e4-37fc20819578] Refreshing instance network info cache due to event network-changed-c8b3dd94-c21a-424e-85d2-ea499e510f55. {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11655}} [ 2220.327135] env[63279]: DEBUG oslo_concurrency.lockutils [req-f40d7add-8a32-4804-b1fa-965019150f59 req-c4ea7f23-ed27-4eb7-a84c-83a661f1744d service nova] Acquiring lock "refresh_cache-59ad6180-e561-41e3-86e4-37fc20819578" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2220.327289] env[63279]: DEBUG oslo_concurrency.lockutils [req-f40d7add-8a32-4804-b1fa-965019150f59 req-c4ea7f23-ed27-4eb7-a84c-83a661f1744d service nova] Acquired lock "refresh_cache-59ad6180-e561-41e3-86e4-37fc20819578" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2220.327452] env[63279]: DEBUG nova.network.neutron [req-f40d7add-8a32-4804-b1fa-965019150f59 req-c4ea7f23-ed27-4eb7-a84c-83a661f1744d service nova] [instance: 59ad6180-e561-41e3-86e4-37fc20819578] Refreshing network info cache for port c8b3dd94-c21a-424e-85d2-ea499e510f55 {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2220.335985] env[63279]: DEBUG nova.compute.manager [None req-140984b1-6c87-4255-9ab0-f54ef3dd0bda tempest-ServersTestMultiNic-465204663 tempest-ServersTestMultiNic-465204663-project-member] [instance: f418ff3d-8a5f-4d78-994a-e40b62cfcdd6] Start spawning the instance on the hypervisor. 
{{(pid=63279) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 2220.360694] env[63279]: DEBUG nova.virt.hardware [None req-140984b1-6c87-4255-9ab0-f54ef3dd0bda tempest-ServersTestMultiNic-465204663 tempest-ServersTestMultiNic-465204663-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-13T17:30:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-13T17:30:01Z,direct_url=,disk_format='vmdk',id=30887889-e45b-4f67-8b3c-16216e594a90,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a935e86eee0a4c38adfe0367d2097a61',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-13T17:30:02Z,virtual_size=,visibility=), allow threads: False {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2220.361021] env[63279]: DEBUG nova.virt.hardware [None req-140984b1-6c87-4255-9ab0-f54ef3dd0bda tempest-ServersTestMultiNic-465204663 tempest-ServersTestMultiNic-465204663-project-member] Flavor limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2220.361205] env[63279]: DEBUG nova.virt.hardware [None req-140984b1-6c87-4255-9ab0-f54ef3dd0bda tempest-ServersTestMultiNic-465204663 tempest-ServersTestMultiNic-465204663-project-member] Image limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2220.361399] env[63279]: DEBUG nova.virt.hardware [None req-140984b1-6c87-4255-9ab0-f54ef3dd0bda tempest-ServersTestMultiNic-465204663 tempest-ServersTestMultiNic-465204663-project-member] Flavor pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2220.361551] env[63279]: DEBUG nova.virt.hardware [None req-140984b1-6c87-4255-9ab0-f54ef3dd0bda tempest-ServersTestMultiNic-465204663 tempest-ServersTestMultiNic-465204663-project-member] Image pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2220.361703] env[63279]: DEBUG nova.virt.hardware [None req-140984b1-6c87-4255-9ab0-f54ef3dd0bda tempest-ServersTestMultiNic-465204663 tempest-ServersTestMultiNic-465204663-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2220.361914] env[63279]: DEBUG nova.virt.hardware [None req-140984b1-6c87-4255-9ab0-f54ef3dd0bda tempest-ServersTestMultiNic-465204663 tempest-ServersTestMultiNic-465204663-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 2220.362115] env[63279]: DEBUG nova.virt.hardware [None req-140984b1-6c87-4255-9ab0-f54ef3dd0bda tempest-ServersTestMultiNic-465204663 tempest-ServersTestMultiNic-465204663-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 2220.362307] env[63279]: DEBUG nova.virt.hardware [None req-140984b1-6c87-4255-9ab0-f54ef3dd0bda 
tempest-ServersTestMultiNic-465204663 tempest-ServersTestMultiNic-465204663-project-member] Got 1 possible topologies {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2220.362478] env[63279]: DEBUG nova.virt.hardware [None req-140984b1-6c87-4255-9ab0-f54ef3dd0bda tempest-ServersTestMultiNic-465204663 tempest-ServersTestMultiNic-465204663-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2220.362656] env[63279]: DEBUG nova.virt.hardware [None req-140984b1-6c87-4255-9ab0-f54ef3dd0bda tempest-ServersTestMultiNic-465204663 tempest-ServersTestMultiNic-465204663-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2220.363514] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a502eb72-aaa6-45c6-8781-9cc939d844f8 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2220.371692] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-436ba7b7-da0a-4f1f-ad25-b2f05ff8f624 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2220.413955] env[63279]: DEBUG oslo_vmware.api [None req-c2199699-eea6-4f35-987c-22394a2587cb tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Task: {'id': task-2087791, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.060995} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2220.414228] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-c2199699-eea6-4f35-987c-22394a2587cb tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] [instance: 69b3269a-2ba3-4f5f-a29c-62518c93da3d] Extended root virtual disk {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2220.414936] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ebe23389-15a9-46cf-ab79-7288ae5c45b9 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2220.428324] env[63279]: DEBUG oslo_concurrency.lockutils [None req-8c4cc515-3d7d-44c5-b1c5-7f29579d7178 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2220.436661] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-c2199699-eea6-4f35-987c-22394a2587cb tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] [instance: 69b3269a-2ba3-4f5f-a29c-62518c93da3d] Reconfiguring VM instance instance-00000059 to attach disk [datastore1] 69b3269a-2ba3-4f5f-a29c-62518c93da3d/69b3269a-2ba3-4f5f-a29c-62518c93da3d.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2220.436892] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fd9ea70b-6659-4a65-9a23-2d052f502a43 
{{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2220.458138] env[63279]: DEBUG oslo_vmware.api [None req-ac2052a2-4ae2-4d31-a9a2-3f92b912b650 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Task: {'id': task-2087790, 'name': RemoveSnapshot_Task} progress is 97%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2220.459290] env[63279]: DEBUG oslo_vmware.api [None req-c2199699-eea6-4f35-987c-22394a2587cb tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Waiting for the task: (returnval){ [ 2220.459290] env[63279]: value = "task-2087795" [ 2220.459290] env[63279]: _type = "Task" [ 2220.459290] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2220.466758] env[63279]: DEBUG oslo_vmware.api [None req-c2199699-eea6-4f35-987c-22394a2587cb tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Task: {'id': task-2087795, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2220.695729] env[63279]: DEBUG oslo_concurrency.lockutils [None req-66cd7ef7-f0f8-471c-a26b-5d90abc10497 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.386s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2220.696305] env[63279]: DEBUG nova.compute.manager [None req-66cd7ef7-f0f8-471c-a26b-5d90abc10497 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: 9a6abf10-ed12-49c6-aa16-f428da9f9f9d] Start building networks asynchronously for instance. 
{{(pid=63279) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 2220.699228] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 5.527s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2220.699409] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2220.699597] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=63279) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2220.699945] env[63279]: DEBUG oslo_concurrency.lockutils [None req-1d286e34-6589-438c-9cdb-cfc4a6911ea0 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.347s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2220.700165] env[63279]: DEBUG oslo_concurrency.lockutils [None req-1d286e34-6589-438c-9cdb-cfc4a6911ea0 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2220.702468] env[63279]: DEBUG oslo_concurrency.lockutils [None req-8c4cc515-3d7d-44c5-b1c5-7f29579d7178 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.274s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2220.702694] env[63279]: DEBUG nova.objects.instance [None req-8c4cc515-3d7d-44c5-b1c5-7f29579d7178 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Lazy-loading 'resources' on Instance uuid f18d3019-bf1f-4519-a824-7ca80458d793 {{(pid=63279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2220.706159] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c383ffd-579a-42ae-9082-104c45a24e18 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2220.715645] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a0ad4fc-52f8-45c8-a519-e1e8165db552 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2220.742512] env[63279]: INFO nova.scheduler.client.report [None req-1d286e34-6589-438c-9cdb-cfc4a6911ea0 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Deleted allocations for instance 
736ab3ac-b9e0-4f9e-885b-765ca7a92ed0 [ 2220.748944] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-657967ca-ca9c-46ce-8791-fdc38d31ae89 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2220.770908] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087794, 'name': CreateVM_Task, 'duration_secs': 0.427652} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2220.771290] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 59ad6180-e561-41e3-86e4-37fc20819578] Created VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2220.772840] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08f20c2e-c112-4830-96b6-4268517a5fe2 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2220.778455] env[63279]: DEBUG oslo_concurrency.lockutils [None req-9be47d77-4fd9-4f15-a0d1-1a230778846d tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2220.778644] env[63279]: DEBUG oslo_concurrency.lockutils [None req-9be47d77-4fd9-4f15-a0d1-1a230778846d tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2220.778984] env[63279]: DEBUG oslo_concurrency.lockutils [None req-9be47d77-4fd9-4f15-a0d1-1a230778846d tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2220.779498] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2c800d66-3d5c-42f3-98ba-9b9aef44ad0c {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2220.784579] env[63279]: DEBUG oslo_vmware.api [None req-9be47d77-4fd9-4f15-a0d1-1a230778846d tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] Waiting for the task: (returnval){ [ 2220.784579] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]520bae25-5e3d-06ed-55cd-39915a2dadad" [ 2220.784579] env[63279]: _type = "Task" [ 2220.784579] env[63279]: } to complete. 
{{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2220.811328] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=179624MB free_disk=169GB free_vcpus=48 pci_devices=None {{(pid=63279) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2220.811488] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2220.823284] env[63279]: DEBUG oslo_vmware.api [None req-9be47d77-4fd9-4f15-a0d1-1a230778846d tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]520bae25-5e3d-06ed-55cd-39915a2dadad, 'name': SearchDatastore_Task, 'duration_secs': 0.027467} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2220.824126] env[63279]: DEBUG oslo_concurrency.lockutils [None req-9be47d77-4fd9-4f15-a0d1-1a230778846d tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2220.824126] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-9be47d77-4fd9-4f15-a0d1-1a230778846d tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] [instance: 59ad6180-e561-41e3-86e4-37fc20819578] Processing image 30887889-e45b-4f67-8b3c-16216e594a90 {{(pid=63279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2220.824126] env[63279]: DEBUG oslo_concurrency.lockutils [None req-9be47d77-4fd9-4f15-a0d1-1a230778846d tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2220.824278] env[63279]: DEBUG oslo_concurrency.lockutils [None req-9be47d77-4fd9-4f15-a0d1-1a230778846d tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2220.824341] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-9be47d77-4fd9-4f15-a0d1-1a230778846d tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2220.824803] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2556cff4-d8ae-4872-9ac8-ddea9acd4044 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} 
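The "Acquiring lock", "acquired ... waited", and "released ... held" lines with their timings come from oslo.concurrency's lockutils helpers, which serialise work on shared names such as "compute_resources" or a datastore image-cache path. A minimal sketch of the two forms these records reflect, with hypothetical names (update_usage, image_cache_path) standing in for the real call sites:

    from oslo_concurrency import lockutils

    # Decorator form: callers serialise on the named lock, and acquire,
    # wait and hold times are logged much like the records above.
    @lockutils.synchronized('compute_resources')
    def update_usage(tracker, instance):
        tracker.claim(instance)  # hypothetical work done under the lock

    # Context-manager form, e.g. around a shared image-cache location.
    def refresh_image_cache(image_cache_path):
        with lockutils.lock(image_cache_path):
            pass  # fetch or verify the cached image while holding the lock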
[ 2220.834057] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-9be47d77-4fd9-4f15-a0d1-1a230778846d tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2220.834240] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-9be47d77-4fd9-4f15-a0d1-1a230778846d tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2220.834918] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5abe8f85-a597-4c35-a211-de344b7da685 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2220.840463] env[63279]: DEBUG oslo_vmware.api [None req-9be47d77-4fd9-4f15-a0d1-1a230778846d tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] Waiting for the task: (returnval){ [ 2220.840463] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52462db6-52f5-23f0-951c-9835d2a2020c" [ 2220.840463] env[63279]: _type = "Task" [ 2220.840463] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2220.847949] env[63279]: DEBUG oslo_vmware.api [None req-9be47d77-4fd9-4f15-a0d1-1a230778846d tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52462db6-52f5-23f0-951c-9835d2a2020c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2220.962438] env[63279]: DEBUG oslo_vmware.api [None req-ac2052a2-4ae2-4d31-a9a2-3f92b912b650 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Task: {'id': task-2087790, 'name': RemoveSnapshot_Task} progress is 97%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2220.974629] env[63279]: DEBUG oslo_vmware.api [None req-c2199699-eea6-4f35-987c-22394a2587cb tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Task: {'id': task-2087795, 'name': ReconfigVM_Task, 'duration_secs': 0.281872} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2220.974917] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-c2199699-eea6-4f35-987c-22394a2587cb tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] [instance: 69b3269a-2ba3-4f5f-a29c-62518c93da3d] Reconfigured VM instance instance-00000059 to attach disk [datastore1] 69b3269a-2ba3-4f5f-a29c-62518c93da3d/69b3269a-2ba3-4f5f-a29c-62518c93da3d.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2220.975681] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-59ca8b8d-3ecd-4fd5-ac5a-eabbb65290b7 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2220.981680] env[63279]: DEBUG oslo_vmware.api [None req-c2199699-eea6-4f35-987c-22394a2587cb tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Waiting for the task: (returnval){ [ 2220.981680] env[63279]: value = "task-2087796" [ 2220.981680] env[63279]: _type = "Task" [ 2220.981680] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2220.992919] env[63279]: DEBUG oslo_vmware.api [None req-c2199699-eea6-4f35-987c-22394a2587cb tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Task: {'id': task-2087796, 'name': Rename_Task} progress is 5%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2221.023481] env[63279]: DEBUG nova.compute.manager [req-3754304c-0997-4f84-8162-c022c1503abe req-e1e7af00-6da6-4cce-b0aa-983ddfd1e8b2 service nova] [instance: f418ff3d-8a5f-4d78-994a-e40b62cfcdd6] Received event network-vif-plugged-536b76b2-9f06-4910-aba3-8d499c43d065 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2221.023717] env[63279]: DEBUG oslo_concurrency.lockutils [req-3754304c-0997-4f84-8162-c022c1503abe req-e1e7af00-6da6-4cce-b0aa-983ddfd1e8b2 service nova] Acquiring lock "f418ff3d-8a5f-4d78-994a-e40b62cfcdd6-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2221.023920] env[63279]: DEBUG oslo_concurrency.lockutils [req-3754304c-0997-4f84-8162-c022c1503abe req-e1e7af00-6da6-4cce-b0aa-983ddfd1e8b2 service nova] Lock "f418ff3d-8a5f-4d78-994a-e40b62cfcdd6-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2221.024112] env[63279]: DEBUG oslo_concurrency.lockutils [req-3754304c-0997-4f84-8162-c022c1503abe req-e1e7af00-6da6-4cce-b0aa-983ddfd1e8b2 service nova] Lock "f418ff3d-8a5f-4d78-994a-e40b62cfcdd6-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2221.024288] env[63279]: DEBUG nova.compute.manager [req-3754304c-0997-4f84-8162-c022c1503abe req-e1e7af00-6da6-4cce-b0aa-983ddfd1e8b2 service nova] [instance: f418ff3d-8a5f-4d78-994a-e40b62cfcdd6] No waiting events found dispatching network-vif-plugged-536b76b2-9f06-4910-aba3-8d499c43d065 
{{(pid=63279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 2221.024446] env[63279]: WARNING nova.compute.manager [req-3754304c-0997-4f84-8162-c022c1503abe req-e1e7af00-6da6-4cce-b0aa-983ddfd1e8b2 service nova] [instance: f418ff3d-8a5f-4d78-994a-e40b62cfcdd6] Received unexpected event network-vif-plugged-536b76b2-9f06-4910-aba3-8d499c43d065 for instance with vm_state building and task_state spawning. [ 2221.059939] env[63279]: DEBUG nova.network.neutron [req-f40d7add-8a32-4804-b1fa-965019150f59 req-c4ea7f23-ed27-4eb7-a84c-83a661f1744d service nova] [instance: 59ad6180-e561-41e3-86e4-37fc20819578] Updated VIF entry in instance network info cache for port c8b3dd94-c21a-424e-85d2-ea499e510f55. {{(pid=63279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2221.060383] env[63279]: DEBUG nova.network.neutron [req-f40d7add-8a32-4804-b1fa-965019150f59 req-c4ea7f23-ed27-4eb7-a84c-83a661f1744d service nova] [instance: 59ad6180-e561-41e3-86e4-37fc20819578] Updating instance_info_cache with network_info: [{"id": "c8b3dd94-c21a-424e-85d2-ea499e510f55", "address": "fa:16:3e:76:a5:9e", "network": {"id": "1ebc2797-412f-4da1-ba28-8b54789f9203", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-665948294-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "afe3ab970f3249719809afa9f9b11c1b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "496faa4d-d874-449b-905e-328ddd60b31b", "external-id": "nsx-vlan-transportzone-391", "segmentation_id": 391, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc8b3dd94-c2", "ovs_interfaceid": "c8b3dd94-c21a-424e-85d2-ea499e510f55", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2221.113460] env[63279]: DEBUG nova.network.neutron [None req-140984b1-6c87-4255-9ab0-f54ef3dd0bda tempest-ServersTestMultiNic-465204663 tempest-ServersTestMultiNic-465204663-project-member] [instance: f418ff3d-8a5f-4d78-994a-e40b62cfcdd6] Successfully updated port: 536b76b2-9f06-4910-aba3-8d499c43d065 {{(pid=63279) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2221.206159] env[63279]: DEBUG nova.compute.utils [None req-66cd7ef7-f0f8-471c-a26b-5d90abc10497 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Using /dev/sd instead of None {{(pid=63279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2221.207955] env[63279]: DEBUG nova.compute.manager [None req-66cd7ef7-f0f8-471c-a26b-5d90abc10497 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: 9a6abf10-ed12-49c6-aa16-f428da9f9f9d] Allocating IP information in the background. 
{{(pid=63279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 2221.208130] env[63279]: DEBUG nova.network.neutron [None req-66cd7ef7-f0f8-471c-a26b-5d90abc10497 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: 9a6abf10-ed12-49c6-aa16-f428da9f9f9d] allocate_for_instance() {{(pid=63279) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2221.248403] env[63279]: DEBUG nova.policy [None req-66cd7ef7-f0f8-471c-a26b-5d90abc10497 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ab1648b445ab458d92404e3a5ddb8619', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7c98de1240f14b058e58f6a707096ef1', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63279) authorize /opt/stack/nova/nova/policy.py:192}} [ 2221.262799] env[63279]: DEBUG oslo_concurrency.lockutils [None req-1d286e34-6589-438c-9cdb-cfc4a6911ea0 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Lock "736ab3ac-b9e0-4f9e-885b-765ca7a92ed0" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 9.600s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2221.327843] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b04333d3-6e55-46a1-b066-6cbcaa49e26d tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2221.353611] env[63279]: DEBUG oslo_vmware.api [None req-9be47d77-4fd9-4f15-a0d1-1a230778846d tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52462db6-52f5-23f0-951c-9835d2a2020c, 'name': SearchDatastore_Task, 'duration_secs': 0.057898} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2221.356705] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d3f44c21-bf0c-4f8e-8f88-664a6ed90148 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2221.361959] env[63279]: DEBUG oslo_vmware.api [None req-9be47d77-4fd9-4f15-a0d1-1a230778846d tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] Waiting for the task: (returnval){ [ 2221.361959] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52e6c732-57d8-1489-c90a-44f14cf7f0c3" [ 2221.361959] env[63279]: _type = "Task" [ 2221.361959] env[63279]: } to complete. 
{{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2221.369914] env[63279]: DEBUG oslo_vmware.api [None req-9be47d77-4fd9-4f15-a0d1-1a230778846d tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52e6c732-57d8-1489-c90a-44f14cf7f0c3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2221.413694] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2bc66363-bf70-48c5-8895-cb057a8f533b {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2221.422035] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11c47ed0-a2b5-4c66-a77e-a509afb18be3 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2221.452081] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b18a085b-fe98-4402-9ad3-bee32940e91f {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2221.464690] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-378b69de-961d-4a30-b59d-44dd539e3351 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2221.468753] env[63279]: DEBUG oslo_vmware.api [None req-ac2052a2-4ae2-4d31-a9a2-3f92b912b650 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Task: {'id': task-2087790, 'name': RemoveSnapshot_Task, 'duration_secs': 1.558729} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2221.469090] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-ac2052a2-4ae2-4d31-a9a2-3f92b912b650 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] [instance: 14c698c8-7459-4843-bb19-f915742e3e53] Deleted Snapshot of the VM instance {{(pid=63279) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 2221.481544] env[63279]: DEBUG nova.compute.provider_tree [None req-8c4cc515-3d7d-44c5-b1c5-7f29579d7178 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Updating inventory in ProviderTree for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2221.492718] env[63279]: DEBUG oslo_vmware.api [None req-c2199699-eea6-4f35-987c-22394a2587cb tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Task: {'id': task-2087796, 'name': Rename_Task, 'duration_secs': 0.186234} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2221.493079] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-c2199699-eea6-4f35-987c-22394a2587cb tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] [instance: 69b3269a-2ba3-4f5f-a29c-62518c93da3d] Powering on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2221.493351] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-50b5fb8a-1eeb-4f11-84c3-1dc76e7fda17 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2221.500285] env[63279]: DEBUG oslo_vmware.api [None req-c2199699-eea6-4f35-987c-22394a2587cb tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Waiting for the task: (returnval){ [ 2221.500285] env[63279]: value = "task-2087797" [ 2221.500285] env[63279]: _type = "Task" [ 2221.500285] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2221.509985] env[63279]: DEBUG oslo_vmware.api [None req-c2199699-eea6-4f35-987c-22394a2587cb tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Task: {'id': task-2087797, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2221.541996] env[63279]: DEBUG nova.network.neutron [None req-66cd7ef7-f0f8-471c-a26b-5d90abc10497 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: 9a6abf10-ed12-49c6-aa16-f428da9f9f9d] Successfully created port: 7707bb8d-19ea-4514-a533-b6a5829b6231 {{(pid=63279) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2221.562994] env[63279]: DEBUG oslo_concurrency.lockutils [req-f40d7add-8a32-4804-b1fa-965019150f59 req-c4ea7f23-ed27-4eb7-a84c-83a661f1744d service nova] Releasing lock "refresh_cache-59ad6180-e561-41e3-86e4-37fc20819578" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2221.563274] env[63279]: DEBUG nova.compute.manager [req-f40d7add-8a32-4804-b1fa-965019150f59 req-c4ea7f23-ed27-4eb7-a84c-83a661f1744d service nova] [instance: f18d3019-bf1f-4519-a824-7ca80458d793] Received event network-vif-deleted-4556aa7b-156a-4ab0-838d-aed88aa7449f {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2221.710859] env[63279]: DEBUG nova.compute.manager [None req-66cd7ef7-f0f8-471c-a26b-5d90abc10497 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: 9a6abf10-ed12-49c6-aa16-f428da9f9f9d] Start building block device mappings for instance. {{(pid=63279) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 2221.874182] env[63279]: DEBUG oslo_vmware.api [None req-9be47d77-4fd9-4f15-a0d1-1a230778846d tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52e6c732-57d8-1489-c90a-44f14cf7f0c3, 'name': SearchDatastore_Task, 'duration_secs': 0.03209} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2221.874438] env[63279]: DEBUG oslo_concurrency.lockutils [None req-9be47d77-4fd9-4f15-a0d1-1a230778846d tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2221.874696] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-9be47d77-4fd9-4f15-a0d1-1a230778846d tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] 59ad6180-e561-41e3-86e4-37fc20819578/59ad6180-e561-41e3-86e4-37fc20819578.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2221.875802] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2c313e52-7f48-4364-9092-0721addc70ef {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2221.882460] env[63279]: DEBUG oslo_vmware.api [None req-9be47d77-4fd9-4f15-a0d1-1a230778846d tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] Waiting for the task: (returnval){ [ 2221.882460] env[63279]: value = "task-2087798" [ 2221.882460] env[63279]: _type = "Task" [ 2221.882460] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2221.894066] env[63279]: DEBUG oslo_vmware.api [None req-9be47d77-4fd9-4f15-a0d1-1a230778846d tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] Task: {'id': task-2087798, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2221.974163] env[63279]: WARNING nova.compute.manager [None req-ac2052a2-4ae2-4d31-a9a2-3f92b912b650 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] [instance: 14c698c8-7459-4843-bb19-f915742e3e53] Image not found during snapshot: nova.exception.ImageNotFound: Image 36f26273-1ab5-494f-b233-e1bd72926c8f could not be found. [ 2222.013825] env[63279]: DEBUG oslo_vmware.api [None req-c2199699-eea6-4f35-987c-22394a2587cb tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Task: {'id': task-2087797, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2222.019923] env[63279]: DEBUG nova.scheduler.client.report [None req-8c4cc515-3d7d-44c5-b1c5-7f29579d7178 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Updated inventory for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 with generation 126 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 2222.020238] env[63279]: DEBUG nova.compute.provider_tree [None req-8c4cc515-3d7d-44c5-b1c5-7f29579d7178 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Updating resource provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 generation from 126 to 127 during operation: update_inventory {{(pid=63279) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 2222.020476] env[63279]: DEBUG nova.compute.provider_tree [None req-8c4cc515-3d7d-44c5-b1c5-7f29579d7178 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Updating inventory in ProviderTree for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2222.392722] env[63279]: DEBUG oslo_vmware.api [None req-9be47d77-4fd9-4f15-a0d1-1a230778846d tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] Task: {'id': task-2087798, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2222.513632] env[63279]: DEBUG oslo_vmware.api [None req-c2199699-eea6-4f35-987c-22394a2587cb tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Task: {'id': task-2087797, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2222.526110] env[63279]: DEBUG oslo_concurrency.lockutils [None req-8c4cc515-3d7d-44c5-b1c5-7f29579d7178 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.823s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2222.529377] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 1.718s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2222.552050] env[63279]: INFO nova.scheduler.client.report [None req-8c4cc515-3d7d-44c5-b1c5-7f29579d7178 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Deleted allocations for instance f18d3019-bf1f-4519-a824-7ca80458d793 [ 2222.605654] env[63279]: DEBUG oslo_concurrency.lockutils [None req-d395c1d6-e874-4b9b-bf47-a88c46bb86f6 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Acquiring lock "14c698c8-7459-4843-bb19-f915742e3e53" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2222.605943] env[63279]: DEBUG oslo_concurrency.lockutils [None req-d395c1d6-e874-4b9b-bf47-a88c46bb86f6 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Lock "14c698c8-7459-4843-bb19-f915742e3e53" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2222.606187] env[63279]: DEBUG oslo_concurrency.lockutils [None req-d395c1d6-e874-4b9b-bf47-a88c46bb86f6 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Acquiring lock "14c698c8-7459-4843-bb19-f915742e3e53-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2222.606396] env[63279]: DEBUG oslo_concurrency.lockutils [None req-d395c1d6-e874-4b9b-bf47-a88c46bb86f6 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Lock "14c698c8-7459-4843-bb19-f915742e3e53-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2222.606568] env[63279]: DEBUG oslo_concurrency.lockutils [None req-d395c1d6-e874-4b9b-bf47-a88c46bb86f6 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Lock "14c698c8-7459-4843-bb19-f915742e3e53-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2222.608858] env[63279]: INFO nova.compute.manager [None req-d395c1d6-e874-4b9b-bf47-a88c46bb86f6 tempest-ImagesTestJSON-604336972 
tempest-ImagesTestJSON-604336972-project-member] [instance: 14c698c8-7459-4843-bb19-f915742e3e53] Terminating instance [ 2222.722619] env[63279]: DEBUG nova.compute.manager [None req-66cd7ef7-f0f8-471c-a26b-5d90abc10497 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: 9a6abf10-ed12-49c6-aa16-f428da9f9f9d] Start spawning the instance on the hypervisor. {{(pid=63279) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 2222.747542] env[63279]: DEBUG nova.virt.hardware [None req-66cd7ef7-f0f8-471c-a26b-5d90abc10497 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-13T17:30:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-13T17:30:01Z,direct_url=,disk_format='vmdk',id=30887889-e45b-4f67-8b3c-16216e594a90,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a935e86eee0a4c38adfe0367d2097a61',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-13T17:30:02Z,virtual_size=,visibility=), allow threads: False {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2222.747949] env[63279]: DEBUG nova.virt.hardware [None req-66cd7ef7-f0f8-471c-a26b-5d90abc10497 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Flavor limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2222.748201] env[63279]: DEBUG nova.virt.hardware [None req-66cd7ef7-f0f8-471c-a26b-5d90abc10497 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Image limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2222.748409] env[63279]: DEBUG nova.virt.hardware [None req-66cd7ef7-f0f8-471c-a26b-5d90abc10497 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Flavor pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2222.748564] env[63279]: DEBUG nova.virt.hardware [None req-66cd7ef7-f0f8-471c-a26b-5d90abc10497 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Image pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2222.748719] env[63279]: DEBUG nova.virt.hardware [None req-66cd7ef7-f0f8-471c-a26b-5d90abc10497 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2222.748939] env[63279]: DEBUG nova.virt.hardware [None req-66cd7ef7-f0f8-471c-a26b-5d90abc10497 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63279) 
_get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 2222.749120] env[63279]: DEBUG nova.virt.hardware [None req-66cd7ef7-f0f8-471c-a26b-5d90abc10497 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 2222.749300] env[63279]: DEBUG nova.virt.hardware [None req-66cd7ef7-f0f8-471c-a26b-5d90abc10497 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Got 1 possible topologies {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2222.749469] env[63279]: DEBUG nova.virt.hardware [None req-66cd7ef7-f0f8-471c-a26b-5d90abc10497 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2222.749647] env[63279]: DEBUG nova.virt.hardware [None req-66cd7ef7-f0f8-471c-a26b-5d90abc10497 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2222.750931] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65bae321-b4cb-4b06-b573-73ee33733608 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2222.759326] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f636fa12-9e99-45d3-b244-cfdcd420e5d0 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2222.893136] env[63279]: DEBUG oslo_vmware.api [None req-9be47d77-4fd9-4f15-a0d1-1a230778846d tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] Task: {'id': task-2087798, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.735318} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2222.893404] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-9be47d77-4fd9-4f15-a0d1-1a230778846d tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] 59ad6180-e561-41e3-86e4-37fc20819578/59ad6180-e561-41e3-86e4-37fc20819578.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2222.893621] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-9be47d77-4fd9-4f15-a0d1-1a230778846d tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] [instance: 59ad6180-e561-41e3-86e4-37fc20819578] Extending root virtual disk to 1048576 {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2222.893874] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-54f87d92-e467-48c3-b9ca-59214221e684 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2222.900891] env[63279]: DEBUG oslo_vmware.api [None req-9be47d77-4fd9-4f15-a0d1-1a230778846d tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] Waiting for the task: (returnval){ [ 2222.900891] env[63279]: value = "task-2087799" [ 2222.900891] env[63279]: _type = "Task" [ 2222.900891] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2222.909219] env[63279]: DEBUG oslo_vmware.api [None req-9be47d77-4fd9-4f15-a0d1-1a230778846d tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] Task: {'id': task-2087799, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2223.013603] env[63279]: DEBUG oslo_vmware.api [None req-c2199699-eea6-4f35-987c-22394a2587cb tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Task: {'id': task-2087797, 'name': PowerOnVM_Task, 'duration_secs': 1.139977} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2223.013871] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-c2199699-eea6-4f35-987c-22394a2587cb tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] [instance: 69b3269a-2ba3-4f5f-a29c-62518c93da3d] Powered on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2223.017266] env[63279]: INFO nova.compute.manager [None req-c2199699-eea6-4f35-987c-22394a2587cb tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] [instance: 69b3269a-2ba3-4f5f-a29c-62518c93da3d] Took 15.00 seconds to spawn the instance on the hypervisor. 
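
The inventory payloads reported to Placement above (VCPU, MEMORY_MB and DISK_GB, each with total, reserved, min_unit, max_unit, step_size and allocation_ratio) follow the standard Placement inventory model, where the usable capacity per resource class is (total - reserved) * allocation_ratio. A minimal sketch using the exact values from the log; the effective_capacity helper is illustrative only and is not part of Nova or Placement.

```python
# Illustrative sketch: recompute effective capacity from the inventory payload
# that scheduler/client/report.py logs above. The helper is not Nova code; the
# formula (total - reserved) * allocation_ratio is how Placement derives the
# capacity it can allocate from an inventory record.

inventory = {
    "VCPU": {"total": 48, "reserved": 0, "min_unit": 1, "max_unit": 16,
             "step_size": 1, "allocation_ratio": 4.0},
    "MEMORY_MB": {"total": 196590, "reserved": 512, "min_unit": 1,
                  "max_unit": 65530, "step_size": 1, "allocation_ratio": 1.0},
    "DISK_GB": {"total": 400, "reserved": 0, "min_unit": 1, "max_unit": 169,
                "step_size": 1, "allocation_ratio": 1.0},
}

def effective_capacity(inv: dict) -> dict:
    """Capacity Placement can allocate, per resource class."""
    return {rc: (v["total"] - v["reserved"]) * v["allocation_ratio"]
            for rc, v in inv.items()}

print(effective_capacity(inventory))
# {'VCPU': 192.0, 'MEMORY_MB': 196078.0, 'DISK_GB': 400.0}
```
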
[ 2223.017510] env[63279]: DEBUG nova.compute.manager [None req-c2199699-eea6-4f35-987c-22394a2587cb tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] [instance: 69b3269a-2ba3-4f5f-a29c-62518c93da3d] Checking state {{(pid=63279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2223.018298] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26ef6bd2-e216-4604-9e32-85f47f8008de {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2223.061897] env[63279]: DEBUG oslo_concurrency.lockutils [None req-8c4cc515-3d7d-44c5-b1c5-7f29579d7178 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Lock "f18d3019-bf1f-4519-a824-7ca80458d793" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.796s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2223.112454] env[63279]: DEBUG nova.compute.manager [None req-d395c1d6-e874-4b9b-bf47-a88c46bb86f6 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] [instance: 14c698c8-7459-4843-bb19-f915742e3e53] Start destroying the instance on the hypervisor. {{(pid=63279) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2223.112670] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-d395c1d6-e874-4b9b-bf47-a88c46bb86f6 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] [instance: 14c698c8-7459-4843-bb19-f915742e3e53] Destroying instance {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2223.113583] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b83b4a3-3e88-4e1b-9abf-5dcfd554c30b {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2223.118822] env[63279]: DEBUG nova.compute.manager [req-d92e5c52-34a7-4f68-a6f5-5f109439ae17 req-2122e51f-30ff-4060-8149-b1c2de2f7e7b service nova] [instance: f418ff3d-8a5f-4d78-994a-e40b62cfcdd6] Received event network-changed-536b76b2-9f06-4910-aba3-8d499c43d065 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2223.119009] env[63279]: DEBUG nova.compute.manager [req-d92e5c52-34a7-4f68-a6f5-5f109439ae17 req-2122e51f-30ff-4060-8149-b1c2de2f7e7b service nova] [instance: f418ff3d-8a5f-4d78-994a-e40b62cfcdd6] Refreshing instance network info cache due to event network-changed-536b76b2-9f06-4910-aba3-8d499c43d065. 
{{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11655}} [ 2223.119450] env[63279]: DEBUG oslo_concurrency.lockutils [req-d92e5c52-34a7-4f68-a6f5-5f109439ae17 req-2122e51f-30ff-4060-8149-b1c2de2f7e7b service nova] Acquiring lock "refresh_cache-f418ff3d-8a5f-4d78-994a-e40b62cfcdd6" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2223.119450] env[63279]: DEBUG oslo_concurrency.lockutils [req-d92e5c52-34a7-4f68-a6f5-5f109439ae17 req-2122e51f-30ff-4060-8149-b1c2de2f7e7b service nova] Acquired lock "refresh_cache-f418ff3d-8a5f-4d78-994a-e40b62cfcdd6" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2223.119572] env[63279]: DEBUG nova.network.neutron [req-d92e5c52-34a7-4f68-a6f5-5f109439ae17 req-2122e51f-30ff-4060-8149-b1c2de2f7e7b service nova] [instance: f418ff3d-8a5f-4d78-994a-e40b62cfcdd6] Refreshing network info cache for port 536b76b2-9f06-4910-aba3-8d499c43d065 {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2223.125634] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-d395c1d6-e874-4b9b-bf47-a88c46bb86f6 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] [instance: 14c698c8-7459-4843-bb19-f915742e3e53] Powering off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2223.125895] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-169733d6-7920-4e89-9181-af3ca8d3de94 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2223.132359] env[63279]: DEBUG oslo_vmware.api [None req-d395c1d6-e874-4b9b-bf47-a88c46bb86f6 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Waiting for the task: (returnval){ [ 2223.132359] env[63279]: value = "task-2087800" [ 2223.132359] env[63279]: _type = "Task" [ 2223.132359] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2223.141326] env[63279]: DEBUG oslo_vmware.api [None req-d395c1d6-e874-4b9b-bf47-a88c46bb86f6 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Task: {'id': task-2087800, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2223.158721] env[63279]: DEBUG nova.compute.manager [req-7c5a94b6-0596-49a6-a341-69730c37c734 req-b6feaf0a-cf79-4798-ae82-5d16b323c6b0 service nova] [instance: f418ff3d-8a5f-4d78-994a-e40b62cfcdd6] Received event network-vif-plugged-7d3d5ab9-d32b-45e9-a056-b6aa86e777ed {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2223.158976] env[63279]: DEBUG oslo_concurrency.lockutils [req-7c5a94b6-0596-49a6-a341-69730c37c734 req-b6feaf0a-cf79-4798-ae82-5d16b323c6b0 service nova] Acquiring lock "f418ff3d-8a5f-4d78-994a-e40b62cfcdd6-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2223.159240] env[63279]: DEBUG oslo_concurrency.lockutils [req-7c5a94b6-0596-49a6-a341-69730c37c734 req-b6feaf0a-cf79-4798-ae82-5d16b323c6b0 service nova] Lock "f418ff3d-8a5f-4d78-994a-e40b62cfcdd6-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2223.159433] env[63279]: DEBUG oslo_concurrency.lockutils [req-7c5a94b6-0596-49a6-a341-69730c37c734 req-b6feaf0a-cf79-4798-ae82-5d16b323c6b0 service nova] Lock "f418ff3d-8a5f-4d78-994a-e40b62cfcdd6-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2223.159624] env[63279]: DEBUG nova.compute.manager [req-7c5a94b6-0596-49a6-a341-69730c37c734 req-b6feaf0a-cf79-4798-ae82-5d16b323c6b0 service nova] [instance: f418ff3d-8a5f-4d78-994a-e40b62cfcdd6] No waiting events found dispatching network-vif-plugged-7d3d5ab9-d32b-45e9-a056-b6aa86e777ed {{(pid=63279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 2223.159825] env[63279]: WARNING nova.compute.manager [req-7c5a94b6-0596-49a6-a341-69730c37c734 req-b6feaf0a-cf79-4798-ae82-5d16b323c6b0 service nova] [instance: f418ff3d-8a5f-4d78-994a-e40b62cfcdd6] Received unexpected event network-vif-plugged-7d3d5ab9-d32b-45e9-a056-b6aa86e777ed for instance with vm_state building and task_state spawning. [ 2223.166241] env[63279]: DEBUG nova.network.neutron [None req-66cd7ef7-f0f8-471c-a26b-5d90abc10497 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: 9a6abf10-ed12-49c6-aa16-f428da9f9f9d] Successfully updated port: 7707bb8d-19ea-4514-a533-b6a5829b6231 {{(pid=63279) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2223.319571] env[63279]: DEBUG nova.network.neutron [None req-140984b1-6c87-4255-9ab0-f54ef3dd0bda tempest-ServersTestMultiNic-465204663 tempest-ServersTestMultiNic-465204663-project-member] [instance: f418ff3d-8a5f-4d78-994a-e40b62cfcdd6] Successfully updated port: 7d3d5ab9-d32b-45e9-a056-b6aa86e777ed {{(pid=63279) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2223.410741] env[63279]: DEBUG oslo_vmware.api [None req-9be47d77-4fd9-4f15-a0d1-1a230778846d tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] Task: {'id': task-2087799, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.092153} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2223.411034] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-9be47d77-4fd9-4f15-a0d1-1a230778846d tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] [instance: 59ad6180-e561-41e3-86e4-37fc20819578] Extended root virtual disk {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2223.411797] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1704cda-3a8d-4007-b8ee-d46899436ce2 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2223.433982] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-9be47d77-4fd9-4f15-a0d1-1a230778846d tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] [instance: 59ad6180-e561-41e3-86e4-37fc20819578] Reconfiguring VM instance instance-0000005a to attach disk [datastore1] 59ad6180-e561-41e3-86e4-37fc20819578/59ad6180-e561-41e3-86e4-37fc20819578.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2223.434625] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a103044e-38c8-432d-b48c-d7b4300d798a {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2223.454199] env[63279]: DEBUG oslo_vmware.api [None req-9be47d77-4fd9-4f15-a0d1-1a230778846d tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] Waiting for the task: (returnval){ [ 2223.454199] env[63279]: value = "task-2087801" [ 2223.454199] env[63279]: _type = "Task" [ 2223.454199] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2223.463514] env[63279]: DEBUG oslo_vmware.api [None req-9be47d77-4fd9-4f15-a0d1-1a230778846d tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] Task: {'id': task-2087801, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2223.539848] env[63279]: INFO nova.compute.manager [None req-c2199699-eea6-4f35-987c-22394a2587cb tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] [instance: 69b3269a-2ba3-4f5f-a29c-62518c93da3d] Took 28.70 seconds to build instance. [ 2223.571539] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Instance 0e12ab9b-a701-4e0f-9d96-939090f50494 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 2, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2223.571742] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Instance fcdd3076-2b53-4850-9730-2f877e2cabfd actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2223.571901] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Instance 4ce17bdb-4bed-4e06-af13-e4097b55e17d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2223.572071] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Instance a6758131-030e-4b33-a2c9-8864055a5bec actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2223.572222] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Instance 686a7ce2-2d07-411e-91d6-0471c55c3728 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2223.572377] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Instance 2fba60ba-7ef0-4e61-9f7d-147cad8ab7ed actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2223.572599] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Instance 9b98a316-71da-45fb-b895-553f179fe7d9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2223.572667] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Instance 27e2917d-3cd0-4ad3-ab65-f85f7d97225f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2223.572786] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Instance 14c698c8-7459-4843-bb19-f915742e3e53 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2223.572928] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Instance 9fd3ea14-3e25-47f4-ae84-1b1fb8b1bbf6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2223.573089] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Instance 69b3269a-2ba3-4f5f-a29c-62518c93da3d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2223.642503] env[63279]: DEBUG oslo_vmware.api [None req-d395c1d6-e874-4b9b-bf47-a88c46bb86f6 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Task: {'id': task-2087800, 'name': PowerOffVM_Task, 'duration_secs': 0.168494} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2223.642834] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-d395c1d6-e874-4b9b-bf47-a88c46bb86f6 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] [instance: 14c698c8-7459-4843-bb19-f915742e3e53] Powered off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2223.643042] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-d395c1d6-e874-4b9b-bf47-a88c46bb86f6 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] [instance: 14c698c8-7459-4843-bb19-f915742e3e53] Unregistering the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2223.643307] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-27958ee2-9f4f-4ced-84b2-81c8b090861e {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2223.654418] env[63279]: DEBUG nova.network.neutron [req-d92e5c52-34a7-4f68-a6f5-5f109439ae17 req-2122e51f-30ff-4060-8149-b1c2de2f7e7b service nova] [instance: f418ff3d-8a5f-4d78-994a-e40b62cfcdd6] Instance cache missing network info. 
{{(pid=63279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2223.668282] env[63279]: DEBUG oslo_concurrency.lockutils [None req-66cd7ef7-f0f8-471c-a26b-5d90abc10497 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Acquiring lock "refresh_cache-9a6abf10-ed12-49c6-aa16-f428da9f9f9d" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2223.668424] env[63279]: DEBUG oslo_concurrency.lockutils [None req-66cd7ef7-f0f8-471c-a26b-5d90abc10497 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Acquired lock "refresh_cache-9a6abf10-ed12-49c6-aa16-f428da9f9f9d" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2223.668628] env[63279]: DEBUG nova.network.neutron [None req-66cd7ef7-f0f8-471c-a26b-5d90abc10497 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: 9a6abf10-ed12-49c6-aa16-f428da9f9f9d] Building network info cache for instance {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2223.744911] env[63279]: DEBUG nova.network.neutron [req-d92e5c52-34a7-4f68-a6f5-5f109439ae17 req-2122e51f-30ff-4060-8149-b1c2de2f7e7b service nova] [instance: f418ff3d-8a5f-4d78-994a-e40b62cfcdd6] Updating instance_info_cache with network_info: [] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2223.796243] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-d395c1d6-e874-4b9b-bf47-a88c46bb86f6 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] [instance: 14c698c8-7459-4843-bb19-f915742e3e53] Unregistered the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2223.797027] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-d395c1d6-e874-4b9b-bf47-a88c46bb86f6 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] [instance: 14c698c8-7459-4843-bb19-f915742e3e53] Deleting contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2223.797027] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-d395c1d6-e874-4b9b-bf47-a88c46bb86f6 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Deleting the datastore file [datastore1] 14c698c8-7459-4843-bb19-f915742e3e53 {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2223.797027] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c27b58a9-9525-414c-b418-e93ee7c8cb68 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2223.803636] env[63279]: DEBUG oslo_vmware.api [None req-d395c1d6-e874-4b9b-bf47-a88c46bb86f6 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Waiting for the task: (returnval){ [ 2223.803636] env[63279]: value = "task-2087803" [ 2223.803636] env[63279]: _type = "Task" [ 2223.803636] env[63279]: } to complete. 
{{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2223.811705] env[63279]: DEBUG oslo_vmware.api [None req-d395c1d6-e874-4b9b-bf47-a88c46bb86f6 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Task: {'id': task-2087803, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2223.822771] env[63279]: DEBUG oslo_concurrency.lockutils [None req-140984b1-6c87-4255-9ab0-f54ef3dd0bda tempest-ServersTestMultiNic-465204663 tempest-ServersTestMultiNic-465204663-project-member] Acquiring lock "refresh_cache-f418ff3d-8a5f-4d78-994a-e40b62cfcdd6" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2223.964396] env[63279]: DEBUG oslo_vmware.api [None req-9be47d77-4fd9-4f15-a0d1-1a230778846d tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] Task: {'id': task-2087801, 'name': ReconfigVM_Task, 'duration_secs': 0.289922} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2223.964686] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-9be47d77-4fd9-4f15-a0d1-1a230778846d tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] [instance: 59ad6180-e561-41e3-86e4-37fc20819578] Reconfigured VM instance instance-0000005a to attach disk [datastore1] 59ad6180-e561-41e3-86e4-37fc20819578/59ad6180-e561-41e3-86e4-37fc20819578.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2223.965331] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-7dde039f-6694-4a90-bc9b-5f2f76d82362 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2223.973049] env[63279]: DEBUG oslo_vmware.api [None req-9be47d77-4fd9-4f15-a0d1-1a230778846d tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] Waiting for the task: (returnval){ [ 2223.973049] env[63279]: value = "task-2087804" [ 2223.973049] env[63279]: _type = "Task" [ 2223.973049] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2223.981030] env[63279]: DEBUG oslo_vmware.api [None req-9be47d77-4fd9-4f15-a0d1-1a230778846d tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] Task: {'id': task-2087804, 'name': Rename_Task} progress is 5%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2224.041362] env[63279]: DEBUG oslo_concurrency.lockutils [None req-c2199699-eea6-4f35-987c-22394a2587cb tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Lock "69b3269a-2ba3-4f5f-a29c-62518c93da3d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 30.206s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2224.076897] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Instance a15141bc-424d-48ca-a6d5-c859a3639a0b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 2224.076897] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Instance 59ad6180-e561-41e3-86e4-37fc20819578 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2224.076897] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Instance f418ff3d-8a5f-4d78-994a-e40b62cfcdd6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2224.076897] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Instance 9a6abf10-ed12-49c6-aa16-f428da9f9f9d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2224.077132] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Total usable vcpus: 48, total allocated vcpus: 14 {{(pid=63279) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2224.077282] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=3200MB phys_disk=200GB used_disk=15GB total_vcpus=48 used_vcpus=14 pci_stats=[] {{(pid=63279) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2224.216860] env[63279]: DEBUG nova.network.neutron [None req-66cd7ef7-f0f8-471c-a26b-5d90abc10497 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: 9a6abf10-ed12-49c6-aa16-f428da9f9f9d] Instance cache missing network info. 
{{(pid=63279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2224.246597] env[63279]: DEBUG oslo_concurrency.lockutils [req-d92e5c52-34a7-4f68-a6f5-5f109439ae17 req-2122e51f-30ff-4060-8149-b1c2de2f7e7b service nova] Releasing lock "refresh_cache-f418ff3d-8a5f-4d78-994a-e40b62cfcdd6" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2224.246871] env[63279]: DEBUG nova.compute.manager [req-d92e5c52-34a7-4f68-a6f5-5f109439ae17 req-2122e51f-30ff-4060-8149-b1c2de2f7e7b service nova] [instance: 9a6abf10-ed12-49c6-aa16-f428da9f9f9d] Received event network-vif-plugged-7707bb8d-19ea-4514-a533-b6a5829b6231 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2224.247075] env[63279]: DEBUG oslo_concurrency.lockutils [req-d92e5c52-34a7-4f68-a6f5-5f109439ae17 req-2122e51f-30ff-4060-8149-b1c2de2f7e7b service nova] Acquiring lock "9a6abf10-ed12-49c6-aa16-f428da9f9f9d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2224.247281] env[63279]: DEBUG oslo_concurrency.lockutils [req-d92e5c52-34a7-4f68-a6f5-5f109439ae17 req-2122e51f-30ff-4060-8149-b1c2de2f7e7b service nova] Lock "9a6abf10-ed12-49c6-aa16-f428da9f9f9d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2224.248028] env[63279]: DEBUG oslo_concurrency.lockutils [req-d92e5c52-34a7-4f68-a6f5-5f109439ae17 req-2122e51f-30ff-4060-8149-b1c2de2f7e7b service nova] Lock "9a6abf10-ed12-49c6-aa16-f428da9f9f9d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2224.248028] env[63279]: DEBUG nova.compute.manager [req-d92e5c52-34a7-4f68-a6f5-5f109439ae17 req-2122e51f-30ff-4060-8149-b1c2de2f7e7b service nova] [instance: 9a6abf10-ed12-49c6-aa16-f428da9f9f9d] No waiting events found dispatching network-vif-plugged-7707bb8d-19ea-4514-a533-b6a5829b6231 {{(pid=63279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 2224.248028] env[63279]: WARNING nova.compute.manager [req-d92e5c52-34a7-4f68-a6f5-5f109439ae17 req-2122e51f-30ff-4060-8149-b1c2de2f7e7b service nova] [instance: 9a6abf10-ed12-49c6-aa16-f428da9f9f9d] Received unexpected event network-vif-plugged-7707bb8d-19ea-4514-a533-b6a5829b6231 for instance with vm_state building and task_state spawning. 
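
The recurring "Acquiring lock … / Lock … acquired … waited N s / … released … held N s" triplets around compute_resources, the per-instance event locks and the refresh_cache locks come from oslo.concurrency's lockutils wrappers, which time how long each caller waited for and then held a named lock. Below is a minimal, stdlib-only analogue of that pattern, assuming nothing about the real lockutils internals; timed_lock and its messages are illustrative only.

```python
# Stdlib-only analogue of the "acquired ... waited / released ... held"
# messages emitted via oslo_concurrency.lockutils in the log. This illustrates
# the pattern; it is not the oslo.concurrency implementation.
import threading
import time
from contextlib import contextmanager

_locks: dict[str, threading.Lock] = {}
_registry_guard = threading.Lock()

@contextmanager
def timed_lock(name: str, owner: str):
    with _registry_guard:
        lock = _locks.setdefault(name, threading.Lock())
    print(f'Acquiring lock "{name}" by "{owner}"')
    t0 = time.monotonic()
    lock.acquire()
    waited = time.monotonic() - t0
    print(f'Lock "{name}" acquired by "{owner}" :: waited {waited:.3f}s')
    t1 = time.monotonic()
    try:
        yield
    finally:
        lock.release()
        held = time.monotonic() - t1
        print(f'Lock "{name}" "released" by "{owner}" :: held {held:.3f}s')

# Usage mirroring the resource tracker entries above:
with timed_lock("compute_resources", "ResourceTracker.update_usage"):
    time.sleep(0.01)  # critical section
```
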
[ 2224.248185] env[63279]: DEBUG oslo_concurrency.lockutils [None req-140984b1-6c87-4255-9ab0-f54ef3dd0bda tempest-ServersTestMultiNic-465204663 tempest-ServersTestMultiNic-465204663-project-member] Acquired lock "refresh_cache-f418ff3d-8a5f-4d78-994a-e40b62cfcdd6" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2224.248268] env[63279]: DEBUG nova.network.neutron [None req-140984b1-6c87-4255-9ab0-f54ef3dd0bda tempest-ServersTestMultiNic-465204663 tempest-ServersTestMultiNic-465204663-project-member] [instance: f418ff3d-8a5f-4d78-994a-e40b62cfcdd6] Building network info cache for instance {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2224.285007] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-024bc375-2bb6-4f5a-bcb7-707055d7c351 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2224.295821] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4446af86-7d43-4f51-a657-49dadaa33db4 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2224.328281] env[63279]: DEBUG oslo_concurrency.lockutils [None req-2000fa86-5fc9-463b-916e-e234ef84002b tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Acquiring lock "48794f65-355c-4cad-a83f-6b8cc327dd4d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2224.328488] env[63279]: DEBUG oslo_concurrency.lockutils [None req-2000fa86-5fc9-463b-916e-e234ef84002b tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Lock "48794f65-355c-4cad-a83f-6b8cc327dd4d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2224.333076] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54293c10-570e-4d3d-b077-ba49637f89f7 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2224.343844] env[63279]: DEBUG oslo_vmware.api [None req-d395c1d6-e874-4b9b-bf47-a88c46bb86f6 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Task: {'id': task-2087803, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2224.347493] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37fa3abd-9549-49d2-a605-f42843156996 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2224.361857] env[63279]: DEBUG nova.compute.provider_tree [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Updating inventory in ProviderTree for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2224.375762] env[63279]: DEBUG nova.network.neutron [None req-66cd7ef7-f0f8-471c-a26b-5d90abc10497 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: 9a6abf10-ed12-49c6-aa16-f428da9f9f9d] Updating instance_info_cache with network_info: [{"id": "7707bb8d-19ea-4514-a533-b6a5829b6231", "address": "fa:16:3e:40:f5:35", "network": {"id": "d673eabe-0173-4c69-9163-88f4d673ca51", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1798523777-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7c98de1240f14b058e58f6a707096ef1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a555680e-4721-4509-97e4-ced9dc17c13e", "external-id": "nsx-vlan-transportzone-4", "segmentation_id": 4, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7707bb8d-19", "ovs_interfaceid": "7707bb8d-19ea-4514-a533-b6a5829b6231", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2224.482708] env[63279]: DEBUG oslo_vmware.api [None req-9be47d77-4fd9-4f15-a0d1-1a230778846d tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] Task: {'id': task-2087804, 'name': Rename_Task, 'duration_secs': 0.147231} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2224.483055] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-9be47d77-4fd9-4f15-a0d1-1a230778846d tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] [instance: 59ad6180-e561-41e3-86e4-37fc20819578] Powering on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2224.484127] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-cebb71b1-2c05-47fd-9620-94bd4c28b6c6 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2224.489398] env[63279]: DEBUG oslo_vmware.api [None req-9be47d77-4fd9-4f15-a0d1-1a230778846d tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] Waiting for the task: (returnval){ [ 2224.489398] env[63279]: value = "task-2087805" [ 2224.489398] env[63279]: _type = "Task" [ 2224.489398] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2224.497586] env[63279]: DEBUG oslo_vmware.api [None req-9be47d77-4fd9-4f15-a0d1-1a230778846d tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] Task: {'id': task-2087805, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2224.784484] env[63279]: DEBUG nova.network.neutron [None req-140984b1-6c87-4255-9ab0-f54ef3dd0bda tempest-ServersTestMultiNic-465204663 tempest-ServersTestMultiNic-465204663-project-member] [instance: f418ff3d-8a5f-4d78-994a-e40b62cfcdd6] Instance cache missing network info. {{(pid=63279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2224.839229] env[63279]: DEBUG nova.compute.manager [None req-2000fa86-5fc9-463b-916e-e234ef84002b tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: 48794f65-355c-4cad-a83f-6b8cc327dd4d] Starting instance... {{(pid=63279) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 2224.841572] env[63279]: DEBUG oslo_vmware.api [None req-d395c1d6-e874-4b9b-bf47-a88c46bb86f6 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Task: {'id': task-2087803, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.543661} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2224.841811] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-d395c1d6-e874-4b9b-bf47-a88c46bb86f6 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Deleted the datastore file {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2224.841998] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-d395c1d6-e874-4b9b-bf47-a88c46bb86f6 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] [instance: 14c698c8-7459-4843-bb19-f915742e3e53] Deleted contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2224.842191] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-d395c1d6-e874-4b9b-bf47-a88c46bb86f6 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] [instance: 14c698c8-7459-4843-bb19-f915742e3e53] Instance destroyed {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2224.842363] env[63279]: INFO nova.compute.manager [None req-d395c1d6-e874-4b9b-bf47-a88c46bb86f6 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] [instance: 14c698c8-7459-4843-bb19-f915742e3e53] Took 1.73 seconds to destroy the instance on the hypervisor. [ 2224.842599] env[63279]: DEBUG oslo.service.loopingcall [None req-d395c1d6-e874-4b9b-bf47-a88c46bb86f6 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2224.842790] env[63279]: DEBUG nova.compute.manager [-] [instance: 14c698c8-7459-4843-bb19-f915742e3e53] Deallocating network for instance {{(pid=63279) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2224.842886] env[63279]: DEBUG nova.network.neutron [-] [instance: 14c698c8-7459-4843-bb19-f915742e3e53] deallocate_for_instance() {{(pid=63279) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2224.879117] env[63279]: DEBUG oslo_concurrency.lockutils [None req-66cd7ef7-f0f8-471c-a26b-5d90abc10497 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Releasing lock "refresh_cache-9a6abf10-ed12-49c6-aa16-f428da9f9f9d" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2224.879416] env[63279]: DEBUG nova.compute.manager [None req-66cd7ef7-f0f8-471c-a26b-5d90abc10497 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: 9a6abf10-ed12-49c6-aa16-f428da9f9f9d] Instance network_info: |[{"id": "7707bb8d-19ea-4514-a533-b6a5829b6231", "address": "fa:16:3e:40:f5:35", "network": {"id": "d673eabe-0173-4c69-9163-88f4d673ca51", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1798523777-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7c98de1240f14b058e58f6a707096ef1", "mtu": 8950, 
"physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a555680e-4721-4509-97e4-ced9dc17c13e", "external-id": "nsx-vlan-transportzone-4", "segmentation_id": 4, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7707bb8d-19", "ovs_interfaceid": "7707bb8d-19ea-4514-a533-b6a5829b6231", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 2224.880117] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-66cd7ef7-f0f8-471c-a26b-5d90abc10497 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: 9a6abf10-ed12-49c6-aa16-f428da9f9f9d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:40:f5:35', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a555680e-4721-4509-97e4-ced9dc17c13e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '7707bb8d-19ea-4514-a533-b6a5829b6231', 'vif_model': 'vmxnet3'}] {{(pid=63279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2224.887551] env[63279]: DEBUG oslo.service.loopingcall [None req-66cd7ef7-f0f8-471c-a26b-5d90abc10497 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2224.888780] env[63279]: ERROR nova.scheduler.client.report [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] [req-88265bcd-db15-4af0-aa2c-85b7046eb499] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 0ba7c625-a0fc-4d3c-b804-196d00f00137. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-88265bcd-db15-4af0-aa2c-85b7046eb499"}]} [ 2224.889395] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9a6abf10-ed12-49c6-aa16-f428da9f9f9d] Creating VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2224.892182] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-4743e77d-e724-4147-885c-4fb2c45c7dd0 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2224.908566] env[63279]: DEBUG nova.scheduler.client.report [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Refreshing inventories for resource provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 2224.917123] env[63279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2224.917123] env[63279]: value = "task-2087806" [ 2224.917123] env[63279]: _type = "Task" [ 2224.917123] env[63279]: } to complete. 
{{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2224.921069] env[63279]: DEBUG nova.scheduler.client.report [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Updating ProviderTree inventory for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 2224.921254] env[63279]: DEBUG nova.compute.provider_tree [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Updating inventory in ProviderTree for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2224.928377] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087806, 'name': CreateVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2224.932623] env[63279]: DEBUG nova.scheduler.client.report [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Refreshing aggregate associations for resource provider 0ba7c625-a0fc-4d3c-b804-196d00f00137, aggregates: None {{(pid=63279) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 2224.951228] env[63279]: DEBUG nova.scheduler.client.report [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Refreshing trait associations for resource provider 0ba7c625-a0fc-4d3c-b804-196d00f00137, traits: COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK {{(pid=63279) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 2225.003861] env[63279]: DEBUG oslo_vmware.api [None req-9be47d77-4fd9-4f15-a0d1-1a230778846d tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] Task: {'id': task-2087805, 'name': PowerOnVM_Task, 'duration_secs': 0.494936} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2225.004341] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-9be47d77-4fd9-4f15-a0d1-1a230778846d tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] [instance: 59ad6180-e561-41e3-86e4-37fc20819578] Powered on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2225.004603] env[63279]: INFO nova.compute.manager [None req-9be47d77-4fd9-4f15-a0d1-1a230778846d tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] [instance: 59ad6180-e561-41e3-86e4-37fc20819578] Took 7.08 seconds to spawn the instance on the hypervisor. 
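
The 409 "placement.concurrent_update" error followed immediately by "Refreshing inventories for resource provider …" shows Placement's generation-based optimistic concurrency: each write carries the provider generation the writer last saw (126 -> 127 earlier in this section), and a stale generation is rejected so the writer re-reads the provider and retries. A hypothetical sketch of that retry loop; get_provider and put_inventory are stand-ins, not the real Placement client API.

```python
# Hypothetical sketch of generation-based optimistic concurrency as seen in
# the log: PUT the inventory with the last-known provider generation, and on a
# 409 "placement.concurrent_update" re-read the provider to pick up the new
# generation and retry. get_provider()/put_inventory() are illustrative
# stand-ins supplied by the caller.

class ConflictError(Exception):
    """Raised by put_inventory() when the generation is stale (HTTP 409)."""

def set_inventory_with_retry(get_provider, put_inventory, inventory, retries=3):
    for _ in range(retries):
        provider = get_provider()  # e.g. {'uuid': ..., 'generation': 126}
        try:
            # On success the service bumps the generation (126 -> 127 above).
            return put_inventory(provider["generation"], inventory)
        except ConflictError:
            # Another writer updated the provider first; refresh and retry.
            continue
    raise RuntimeError("gave up after repeated generation conflicts")
```
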
[ 2225.004821] env[63279]: DEBUG nova.compute.manager [None req-9be47d77-4fd9-4f15-a0d1-1a230778846d tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] [instance: 59ad6180-e561-41e3-86e4-37fc20819578] Checking state {{(pid=63279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2225.010141] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bcb44a11-7f04-4f40-b2bd-02cbe49908ae {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2225.108161] env[63279]: DEBUG nova.network.neutron [None req-140984b1-6c87-4255-9ab0-f54ef3dd0bda tempest-ServersTestMultiNic-465204663 tempest-ServersTestMultiNic-465204663-project-member] [instance: f418ff3d-8a5f-4d78-994a-e40b62cfcdd6] Updating instance_info_cache with network_info: [{"id": "536b76b2-9f06-4910-aba3-8d499c43d065", "address": "fa:16:3e:2e:7e:86", "network": {"id": "d76c06b8-5a99-4b66-af3c-c670c113e1a3", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1198986387", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.63", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "161d1caf722349c188ca8fc647989c05", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e59b364d-b7f6-499d-b7dc-82b8a819aa12", "external-id": "nsx-vlan-transportzone-731", "segmentation_id": 731, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap536b76b2-9f", "ovs_interfaceid": "536b76b2-9f06-4910-aba3-8d499c43d065", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "7d3d5ab9-d32b-45e9-a056-b6aa86e777ed", "address": "fa:16:3e:c5:5b:75", "network": {"id": "618c79a5-04b9-4756-ae4e-0f1e187dfa64", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1745455961", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.251", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "161d1caf722349c188ca8fc647989c05", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8a31c4b8-5b72-4f32-aab3-c4e963e684dd", "external-id": "nsx-vlan-transportzone-805", "segmentation_id": 805, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7d3d5ab9-d3", "ovs_interfaceid": "7d3d5ab9-d32b-45e9-a056-b6aa86e777ed", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2225.121409] env[63279]: DEBUG oslo_concurrency.lockutils [None req-17cc2163-e46e-46a7-8e0b-8af430363191 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Acquiring lock 
"6b4a94f9-1cd9-466f-844f-8d692b732abc" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2225.121663] env[63279]: DEBUG oslo_concurrency.lockutils [None req-17cc2163-e46e-46a7-8e0b-8af430363191 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Lock "6b4a94f9-1cd9-466f-844f-8d692b732abc" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2225.145524] env[63279]: DEBUG nova.compute.manager [req-47cf93ad-135e-42f1-ac66-809fa08ccfa5 req-00990d46-d271-436c-a98c-280334e5ddee service nova] [instance: 9a6abf10-ed12-49c6-aa16-f428da9f9f9d] Received event network-changed-7707bb8d-19ea-4514-a533-b6a5829b6231 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2225.145767] env[63279]: DEBUG nova.compute.manager [req-47cf93ad-135e-42f1-ac66-809fa08ccfa5 req-00990d46-d271-436c-a98c-280334e5ddee service nova] [instance: 9a6abf10-ed12-49c6-aa16-f428da9f9f9d] Refreshing instance network info cache due to event network-changed-7707bb8d-19ea-4514-a533-b6a5829b6231. {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11655}} [ 2225.145957] env[63279]: DEBUG oslo_concurrency.lockutils [req-47cf93ad-135e-42f1-ac66-809fa08ccfa5 req-00990d46-d271-436c-a98c-280334e5ddee service nova] Acquiring lock "refresh_cache-9a6abf10-ed12-49c6-aa16-f428da9f9f9d" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2225.146161] env[63279]: DEBUG oslo_concurrency.lockutils [req-47cf93ad-135e-42f1-ac66-809fa08ccfa5 req-00990d46-d271-436c-a98c-280334e5ddee service nova] Acquired lock "refresh_cache-9a6abf10-ed12-49c6-aa16-f428da9f9f9d" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2225.146345] env[63279]: DEBUG nova.network.neutron [req-47cf93ad-135e-42f1-ac66-809fa08ccfa5 req-00990d46-d271-436c-a98c-280334e5ddee service nova] [instance: 9a6abf10-ed12-49c6-aa16-f428da9f9f9d] Refreshing network info cache for port 7707bb8d-19ea-4514-a533-b6a5829b6231 {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2225.185493] env[63279]: DEBUG nova.compute.manager [req-0c5a5464-8fcb-4038-b9b3-19e8e8e61c43 req-9fc7ae1c-617c-4bb7-aa9f-46e7d8d5ea6a service nova] [instance: f418ff3d-8a5f-4d78-994a-e40b62cfcdd6] Received event network-changed-7d3d5ab9-d32b-45e9-a056-b6aa86e777ed {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2225.185700] env[63279]: DEBUG nova.compute.manager [req-0c5a5464-8fcb-4038-b9b3-19e8e8e61c43 req-9fc7ae1c-617c-4bb7-aa9f-46e7d8d5ea6a service nova] [instance: f418ff3d-8a5f-4d78-994a-e40b62cfcdd6] Refreshing instance network info cache due to event network-changed-7d3d5ab9-d32b-45e9-a056-b6aa86e777ed. 
{{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11655}} [ 2225.185902] env[63279]: DEBUG oslo_concurrency.lockutils [req-0c5a5464-8fcb-4038-b9b3-19e8e8e61c43 req-9fc7ae1c-617c-4bb7-aa9f-46e7d8d5ea6a service nova] Acquiring lock "refresh_cache-f418ff3d-8a5f-4d78-994a-e40b62cfcdd6" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2225.210420] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b92e3120-fdf8-4297-bbe3-2dfcf8c71e82 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2225.218213] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82efa320-7409-46de-9c9c-b8c2cb678e61 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2225.250235] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a85e81f-7271-44c4-b3d4-d1c61f658b6b {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2225.260038] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba6ae8cd-4e50-4c73-9350-87ba712649a0 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2225.272223] env[63279]: DEBUG nova.compute.provider_tree [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Updating inventory in ProviderTree for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2225.364524] env[63279]: DEBUG oslo_concurrency.lockutils [None req-2000fa86-5fc9-463b-916e-e234ef84002b tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2225.427661] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087806, 'name': CreateVM_Task} progress is 99%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2225.530028] env[63279]: INFO nova.compute.manager [None req-9be47d77-4fd9-4f15-a0d1-1a230778846d tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] [instance: 59ad6180-e561-41e3-86e4-37fc20819578] Took 19.13 seconds to build instance. 
[ 2225.587704] env[63279]: DEBUG nova.network.neutron [-] [instance: 14c698c8-7459-4843-bb19-f915742e3e53] Updating instance_info_cache with network_info: [] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2225.613305] env[63279]: DEBUG oslo_concurrency.lockutils [None req-140984b1-6c87-4255-9ab0-f54ef3dd0bda tempest-ServersTestMultiNic-465204663 tempest-ServersTestMultiNic-465204663-project-member] Releasing lock "refresh_cache-f418ff3d-8a5f-4d78-994a-e40b62cfcdd6" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2225.613572] env[63279]: DEBUG nova.compute.manager [None req-140984b1-6c87-4255-9ab0-f54ef3dd0bda tempest-ServersTestMultiNic-465204663 tempest-ServersTestMultiNic-465204663-project-member] [instance: f418ff3d-8a5f-4d78-994a-e40b62cfcdd6] Instance network_info: |[{"id": "536b76b2-9f06-4910-aba3-8d499c43d065", "address": "fa:16:3e:2e:7e:86", "network": {"id": "d76c06b8-5a99-4b66-af3c-c670c113e1a3", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1198986387", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.63", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "161d1caf722349c188ca8fc647989c05", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e59b364d-b7f6-499d-b7dc-82b8a819aa12", "external-id": "nsx-vlan-transportzone-731", "segmentation_id": 731, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap536b76b2-9f", "ovs_interfaceid": "536b76b2-9f06-4910-aba3-8d499c43d065", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "7d3d5ab9-d32b-45e9-a056-b6aa86e777ed", "address": "fa:16:3e:c5:5b:75", "network": {"id": "618c79a5-04b9-4756-ae4e-0f1e187dfa64", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1745455961", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.251", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "161d1caf722349c188ca8fc647989c05", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8a31c4b8-5b72-4f32-aab3-c4e963e684dd", "external-id": "nsx-vlan-transportzone-805", "segmentation_id": 805, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7d3d5ab9-d3", "ovs_interfaceid": "7d3d5ab9-d32b-45e9-a056-b6aa86e777ed", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 2225.614199] env[63279]: DEBUG oslo_concurrency.lockutils [req-0c5a5464-8fcb-4038-b9b3-19e8e8e61c43 req-9fc7ae1c-617c-4bb7-aa9f-46e7d8d5ea6a service nova] Acquired lock "refresh_cache-f418ff3d-8a5f-4d78-994a-e40b62cfcdd6" 
{{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2225.614387] env[63279]: DEBUG nova.network.neutron [req-0c5a5464-8fcb-4038-b9b3-19e8e8e61c43 req-9fc7ae1c-617c-4bb7-aa9f-46e7d8d5ea6a service nova] [instance: f418ff3d-8a5f-4d78-994a-e40b62cfcdd6] Refreshing network info cache for port 7d3d5ab9-d32b-45e9-a056-b6aa86e777ed {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2225.615695] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-140984b1-6c87-4255-9ab0-f54ef3dd0bda tempest-ServersTestMultiNic-465204663 tempest-ServersTestMultiNic-465204663-project-member] [instance: f418ff3d-8a5f-4d78-994a-e40b62cfcdd6] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:2e:7e:86', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e59b364d-b7f6-499d-b7dc-82b8a819aa12', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '536b76b2-9f06-4910-aba3-8d499c43d065', 'vif_model': 'vmxnet3'}, {'network_name': 'br-int', 'mac_address': 'fa:16:3e:c5:5b:75', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8a31c4b8-5b72-4f32-aab3-c4e963e684dd', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '7d3d5ab9-d32b-45e9-a056-b6aa86e777ed', 'vif_model': 'vmxnet3'}] {{(pid=63279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2225.624987] env[63279]: DEBUG oslo.service.loopingcall [None req-140984b1-6c87-4255-9ab0-f54ef3dd0bda tempest-ServersTestMultiNic-465204663 tempest-ServersTestMultiNic-465204663-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2225.628086] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f418ff3d-8a5f-4d78-994a-e40b62cfcdd6] Creating VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2225.628490] env[63279]: DEBUG nova.compute.manager [None req-17cc2163-e46e-46a7-8e0b-8af430363191 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] [instance: 6b4a94f9-1cd9-466f-844f-8d692b732abc] Starting instance... {{(pid=63279) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 2225.631563] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-65d57922-1c5e-4292-926d-3621fe811987 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2225.656020] env[63279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2225.656020] env[63279]: value = "task-2087807" [ 2225.656020] env[63279]: _type = "Task" [ 2225.656020] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2225.664387] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087807, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2225.812964] env[63279]: DEBUG nova.scheduler.client.report [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Updated inventory for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 with generation 130 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 2225.813224] env[63279]: DEBUG nova.compute.provider_tree [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Updating resource provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 generation from 130 to 131 during operation: update_inventory {{(pid=63279) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 2225.813392] env[63279]: DEBUG nova.compute.provider_tree [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Updating inventory in ProviderTree for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2225.890340] env[63279]: DEBUG nova.network.neutron [req-0c5a5464-8fcb-4038-b9b3-19e8e8e61c43 req-9fc7ae1c-617c-4bb7-aa9f-46e7d8d5ea6a service nova] [instance: f418ff3d-8a5f-4d78-994a-e40b62cfcdd6] Updated VIF entry in instance network info cache for port 7d3d5ab9-d32b-45e9-a056-b6aa86e777ed. 
{{(pid=63279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2225.890688] env[63279]: DEBUG nova.network.neutron [req-0c5a5464-8fcb-4038-b9b3-19e8e8e61c43 req-9fc7ae1c-617c-4bb7-aa9f-46e7d8d5ea6a service nova] [instance: f418ff3d-8a5f-4d78-994a-e40b62cfcdd6] Updating instance_info_cache with network_info: [{"id": "536b76b2-9f06-4910-aba3-8d499c43d065", "address": "fa:16:3e:2e:7e:86", "network": {"id": "d76c06b8-5a99-4b66-af3c-c670c113e1a3", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1198986387", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.63", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "161d1caf722349c188ca8fc647989c05", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e59b364d-b7f6-499d-b7dc-82b8a819aa12", "external-id": "nsx-vlan-transportzone-731", "segmentation_id": 731, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap536b76b2-9f", "ovs_interfaceid": "536b76b2-9f06-4910-aba3-8d499c43d065", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "7d3d5ab9-d32b-45e9-a056-b6aa86e777ed", "address": "fa:16:3e:c5:5b:75", "network": {"id": "618c79a5-04b9-4756-ae4e-0f1e187dfa64", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1745455961", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.251", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "161d1caf722349c188ca8fc647989c05", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8a31c4b8-5b72-4f32-aab3-c4e963e684dd", "external-id": "nsx-vlan-transportzone-805", "segmentation_id": 805, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7d3d5ab9-d3", "ovs_interfaceid": "7d3d5ab9-d32b-45e9-a056-b6aa86e777ed", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2225.899603] env[63279]: INFO nova.compute.manager [None req-d606847b-9abe-4529-9d05-e6cd26793b39 tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] [instance: 59ad6180-e561-41e3-86e4-37fc20819578] Rescuing [ 2225.899603] env[63279]: DEBUG oslo_concurrency.lockutils [None req-d606847b-9abe-4529-9d05-e6cd26793b39 tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] Acquiring lock "refresh_cache-59ad6180-e561-41e3-86e4-37fc20819578" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2225.899603] env[63279]: DEBUG oslo_concurrency.lockutils [None req-d606847b-9abe-4529-9d05-e6cd26793b39 tempest-ServerRescueTestJSON-987826405 
tempest-ServerRescueTestJSON-987826405-project-member] Acquired lock "refresh_cache-59ad6180-e561-41e3-86e4-37fc20819578" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2225.899603] env[63279]: DEBUG nova.network.neutron [None req-d606847b-9abe-4529-9d05-e6cd26793b39 tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] [instance: 59ad6180-e561-41e3-86e4-37fc20819578] Building network info cache for instance {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2225.928528] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087806, 'name': CreateVM_Task, 'duration_secs': 0.591987} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2225.928714] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9a6abf10-ed12-49c6-aa16-f428da9f9f9d] Created VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2225.929400] env[63279]: DEBUG oslo_concurrency.lockutils [None req-66cd7ef7-f0f8-471c-a26b-5d90abc10497 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2225.929558] env[63279]: DEBUG oslo_concurrency.lockutils [None req-66cd7ef7-f0f8-471c-a26b-5d90abc10497 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2225.929901] env[63279]: DEBUG oslo_concurrency.lockutils [None req-66cd7ef7-f0f8-471c-a26b-5d90abc10497 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2225.930182] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fece9eae-42d3-4ec7-993a-88a091598389 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2225.935622] env[63279]: DEBUG oslo_vmware.api [None req-66cd7ef7-f0f8-471c-a26b-5d90abc10497 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Waiting for the task: (returnval){ [ 2225.935622] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52d21f01-e2c9-2a6d-08e6-4287e5837c45" [ 2225.935622] env[63279]: _type = "Task" [ 2225.935622] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2225.943776] env[63279]: DEBUG oslo_vmware.api [None req-66cd7ef7-f0f8-471c-a26b-5d90abc10497 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52d21f01-e2c9-2a6d-08e6-4287e5837c45, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2225.964236] env[63279]: DEBUG nova.network.neutron [req-47cf93ad-135e-42f1-ac66-809fa08ccfa5 req-00990d46-d271-436c-a98c-280334e5ddee service nova] [instance: 9a6abf10-ed12-49c6-aa16-f428da9f9f9d] Updated VIF entry in instance network info cache for port 7707bb8d-19ea-4514-a533-b6a5829b6231. {{(pid=63279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2225.964598] env[63279]: DEBUG nova.network.neutron [req-47cf93ad-135e-42f1-ac66-809fa08ccfa5 req-00990d46-d271-436c-a98c-280334e5ddee service nova] [instance: 9a6abf10-ed12-49c6-aa16-f428da9f9f9d] Updating instance_info_cache with network_info: [{"id": "7707bb8d-19ea-4514-a533-b6a5829b6231", "address": "fa:16:3e:40:f5:35", "network": {"id": "d673eabe-0173-4c69-9163-88f4d673ca51", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1798523777-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7c98de1240f14b058e58f6a707096ef1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a555680e-4721-4509-97e4-ced9dc17c13e", "external-id": "nsx-vlan-transportzone-4", "segmentation_id": 4, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7707bb8d-19", "ovs_interfaceid": "7707bb8d-19ea-4514-a533-b6a5829b6231", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2226.032115] env[63279]: DEBUG oslo_concurrency.lockutils [None req-9be47d77-4fd9-4f15-a0d1-1a230778846d tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] Lock "59ad6180-e561-41e3-86e4-37fc20819578" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 20.641s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2226.090412] env[63279]: INFO nova.compute.manager [-] [instance: 14c698c8-7459-4843-bb19-f915742e3e53] Took 1.25 seconds to deallocate network for instance. [ 2226.165125] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087807, 'name': CreateVM_Task} progress is 25%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2226.168161] env[63279]: DEBUG oslo_concurrency.lockutils [None req-17cc2163-e46e-46a7-8e0b-8af430363191 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2226.318845] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=63279) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2226.319146] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 3.790s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2226.319434] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b04333d3-6e55-46a1-b066-6cbcaa49e26d tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 4.992s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2226.319655] env[63279]: DEBUG nova.objects.instance [None req-b04333d3-6e55-46a1-b066-6cbcaa49e26d tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Lazy-loading 'pci_requests' on Instance uuid a15141bc-424d-48ca-a6d5-c859a3639a0b {{(pid=63279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2226.396099] env[63279]: DEBUG oslo_concurrency.lockutils [req-0c5a5464-8fcb-4038-b9b3-19e8e8e61c43 req-9fc7ae1c-617c-4bb7-aa9f-46e7d8d5ea6a service nova] Releasing lock "refresh_cache-f418ff3d-8a5f-4d78-994a-e40b62cfcdd6" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2226.446703] env[63279]: DEBUG oslo_vmware.api [None req-66cd7ef7-f0f8-471c-a26b-5d90abc10497 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52d21f01-e2c9-2a6d-08e6-4287e5837c45, 'name': SearchDatastore_Task, 'duration_secs': 0.022422} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2226.446970] env[63279]: DEBUG oslo_concurrency.lockutils [None req-66cd7ef7-f0f8-471c-a26b-5d90abc10497 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2226.447232] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-66cd7ef7-f0f8-471c-a26b-5d90abc10497 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: 9a6abf10-ed12-49c6-aa16-f428da9f9f9d] Processing image 30887889-e45b-4f67-8b3c-16216e594a90 {{(pid=63279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2226.447478] env[63279]: DEBUG oslo_concurrency.lockutils [None req-66cd7ef7-f0f8-471c-a26b-5d90abc10497 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2226.447674] env[63279]: DEBUG oslo_concurrency.lockutils [None req-66cd7ef7-f0f8-471c-a26b-5d90abc10497 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2226.447843] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-66cd7ef7-f0f8-471c-a26b-5d90abc10497 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2226.448141] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-895e35fc-5bdd-491b-8754-4efcc21239ad {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2226.456569] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-66cd7ef7-f0f8-471c-a26b-5d90abc10497 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2226.456760] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-66cd7ef7-f0f8-471c-a26b-5d90abc10497 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2226.457499] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5d190427-f561-422f-8c05-6f5c19b32a74 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2226.463763] env[63279]: DEBUG oslo_vmware.api [None req-66cd7ef7-f0f8-471c-a26b-5d90abc10497 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Waiting for the task: (returnval){ [ 2226.463763] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52b0c12c-5fdb-ba18-40e4-b51a8dec1eb9" [ 2226.463763] env[63279]: _type = "Task" [ 2226.463763] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2226.467413] env[63279]: DEBUG oslo_concurrency.lockutils [req-47cf93ad-135e-42f1-ac66-809fa08ccfa5 req-00990d46-d271-436c-a98c-280334e5ddee service nova] Releasing lock "refresh_cache-9a6abf10-ed12-49c6-aa16-f428da9f9f9d" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2226.467704] env[63279]: DEBUG nova.compute.manager [req-47cf93ad-135e-42f1-ac66-809fa08ccfa5 req-00990d46-d271-436c-a98c-280334e5ddee service nova] [instance: 14c698c8-7459-4843-bb19-f915742e3e53] Received event network-vif-deleted-41a761e0-23d6-4305-8d20-d999d3f5b902 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2226.467936] env[63279]: INFO nova.compute.manager [req-47cf93ad-135e-42f1-ac66-809fa08ccfa5 req-00990d46-d271-436c-a98c-280334e5ddee service nova] [instance: 14c698c8-7459-4843-bb19-f915742e3e53] Neutron deleted interface 41a761e0-23d6-4305-8d20-d999d3f5b902; detaching it from the instance and deleting it from the info cache [ 2226.468141] env[63279]: DEBUG nova.network.neutron [req-47cf93ad-135e-42f1-ac66-809fa08ccfa5 req-00990d46-d271-436c-a98c-280334e5ddee service nova] [instance: 14c698c8-7459-4843-bb19-f915742e3e53] Updating instance_info_cache with network_info: [] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2226.474025] env[63279]: DEBUG oslo_vmware.api [None req-66cd7ef7-f0f8-471c-a26b-5d90abc10497 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52b0c12c-5fdb-ba18-40e4-b51a8dec1eb9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2226.597396] env[63279]: DEBUG oslo_concurrency.lockutils [None req-d395c1d6-e874-4b9b-bf47-a88c46bb86f6 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2226.667550] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087807, 'name': CreateVM_Task, 'duration_secs': 0.882443} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2226.668367] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f418ff3d-8a5f-4d78-994a-e40b62cfcdd6] Created VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2226.669163] env[63279]: DEBUG oslo_concurrency.lockutils [None req-140984b1-6c87-4255-9ab0-f54ef3dd0bda tempest-ServersTestMultiNic-465204663 tempest-ServersTestMultiNic-465204663-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2226.669473] env[63279]: DEBUG oslo_concurrency.lockutils [None req-140984b1-6c87-4255-9ab0-f54ef3dd0bda tempest-ServersTestMultiNic-465204663 tempest-ServersTestMultiNic-465204663-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2226.669646] env[63279]: DEBUG oslo_concurrency.lockutils [None req-140984b1-6c87-4255-9ab0-f54ef3dd0bda tempest-ServersTestMultiNic-465204663 tempest-ServersTestMultiNic-465204663-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2226.670017] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-686cdd0c-fd85-4281-84c5-b63b59046079 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2226.675055] env[63279]: DEBUG oslo_vmware.api [None req-140984b1-6c87-4255-9ab0-f54ef3dd0bda tempest-ServersTestMultiNic-465204663 tempest-ServersTestMultiNic-465204663-project-member] Waiting for the task: (returnval){ [ 2226.675055] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52985214-18e9-b7f5-10ec-d93d2a9f0504" [ 2226.675055] env[63279]: _type = "Task" [ 2226.675055] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2226.683397] env[63279]: DEBUG oslo_vmware.api [None req-140984b1-6c87-4255-9ab0-f54ef3dd0bda tempest-ServersTestMultiNic-465204663 tempest-ServersTestMultiNic-465204663-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52985214-18e9-b7f5-10ec-d93d2a9f0504, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2226.688010] env[63279]: DEBUG nova.network.neutron [None req-d606847b-9abe-4529-9d05-e6cd26793b39 tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] [instance: 59ad6180-e561-41e3-86e4-37fc20819578] Updating instance_info_cache with network_info: [{"id": "c8b3dd94-c21a-424e-85d2-ea499e510f55", "address": "fa:16:3e:76:a5:9e", "network": {"id": "1ebc2797-412f-4da1-ba28-8b54789f9203", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-665948294-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "afe3ab970f3249719809afa9f9b11c1b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "496faa4d-d874-449b-905e-328ddd60b31b", "external-id": "nsx-vlan-transportzone-391", "segmentation_id": 391, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc8b3dd94-c2", "ovs_interfaceid": "c8b3dd94-c21a-424e-85d2-ea499e510f55", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2226.823489] env[63279]: DEBUG nova.objects.instance [None req-b04333d3-6e55-46a1-b066-6cbcaa49e26d tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Lazy-loading 'numa_topology' on Instance uuid a15141bc-424d-48ca-a6d5-c859a3639a0b {{(pid=63279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2226.970572] env[63279]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-39bde040-770f-4817-b3a1-4545b49fdafa {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2226.978777] env[63279]: DEBUG oslo_vmware.api [None req-66cd7ef7-f0f8-471c-a26b-5d90abc10497 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52b0c12c-5fdb-ba18-40e4-b51a8dec1eb9, 'name': SearchDatastore_Task, 'duration_secs': 0.010269} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2226.979871] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a6c0fc30-4bfc-4697-bdaa-2e0814ccd06b {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2226.986467] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a2a219e-d5fe-477c-aeb3-0a02d5dc02f1 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2226.997838] env[63279]: DEBUG oslo_vmware.api [None req-66cd7ef7-f0f8-471c-a26b-5d90abc10497 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Waiting for the task: (returnval){ [ 2226.997838] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]5273d876-8580-babc-1a1e-7178a79fcb5f" [ 2226.997838] env[63279]: _type = "Task" [ 2226.997838] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2227.005933] env[63279]: DEBUG oslo_vmware.api [None req-66cd7ef7-f0f8-471c-a26b-5d90abc10497 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]5273d876-8580-babc-1a1e-7178a79fcb5f, 'name': SearchDatastore_Task, 'duration_secs': 0.016486} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2227.006998] env[63279]: DEBUG oslo_concurrency.lockutils [None req-66cd7ef7-f0f8-471c-a26b-5d90abc10497 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2227.007322] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-66cd7ef7-f0f8-471c-a26b-5d90abc10497 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] 9a6abf10-ed12-49c6-aa16-f428da9f9f9d/9a6abf10-ed12-49c6-aa16-f428da9f9f9d.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2227.007563] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-04700270-7cd3-471b-9168-2f770a0760aa {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2227.020357] env[63279]: DEBUG nova.compute.manager [req-47cf93ad-135e-42f1-ac66-809fa08ccfa5 req-00990d46-d271-436c-a98c-280334e5ddee service nova] [instance: 14c698c8-7459-4843-bb19-f915742e3e53] Detach interface failed, port_id=41a761e0-23d6-4305-8d20-d999d3f5b902, reason: Instance 14c698c8-7459-4843-bb19-f915742e3e53 could not be found. 
{{(pid=63279) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11484}} [ 2227.025318] env[63279]: DEBUG oslo_vmware.api [None req-66cd7ef7-f0f8-471c-a26b-5d90abc10497 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Waiting for the task: (returnval){ [ 2227.025318] env[63279]: value = "task-2087808" [ 2227.025318] env[63279]: _type = "Task" [ 2227.025318] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2227.032654] env[63279]: DEBUG oslo_vmware.api [None req-66cd7ef7-f0f8-471c-a26b-5d90abc10497 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Task: {'id': task-2087808, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2227.185043] env[63279]: DEBUG oslo_vmware.api [None req-140984b1-6c87-4255-9ab0-f54ef3dd0bda tempest-ServersTestMultiNic-465204663 tempest-ServersTestMultiNic-465204663-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52985214-18e9-b7f5-10ec-d93d2a9f0504, 'name': SearchDatastore_Task, 'duration_secs': 0.01032} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2227.185379] env[63279]: DEBUG oslo_concurrency.lockutils [None req-140984b1-6c87-4255-9ab0-f54ef3dd0bda tempest-ServersTestMultiNic-465204663 tempest-ServersTestMultiNic-465204663-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2227.185618] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-140984b1-6c87-4255-9ab0-f54ef3dd0bda tempest-ServersTestMultiNic-465204663 tempest-ServersTestMultiNic-465204663-project-member] [instance: f418ff3d-8a5f-4d78-994a-e40b62cfcdd6] Processing image 30887889-e45b-4f67-8b3c-16216e594a90 {{(pid=63279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2227.185856] env[63279]: DEBUG oslo_concurrency.lockutils [None req-140984b1-6c87-4255-9ab0-f54ef3dd0bda tempest-ServersTestMultiNic-465204663 tempest-ServersTestMultiNic-465204663-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2227.186018] env[63279]: DEBUG oslo_concurrency.lockutils [None req-140984b1-6c87-4255-9ab0-f54ef3dd0bda tempest-ServersTestMultiNic-465204663 tempest-ServersTestMultiNic-465204663-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2227.186207] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-140984b1-6c87-4255-9ab0-f54ef3dd0bda tempest-ServersTestMultiNic-465204663 tempest-ServersTestMultiNic-465204663-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2227.186559] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with 
opID=oslo.vmware-f3d1d736-813b-4a9f-8198-c375d13eba9e {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2227.191148] env[63279]: DEBUG oslo_concurrency.lockutils [None req-d606847b-9abe-4529-9d05-e6cd26793b39 tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] Releasing lock "refresh_cache-59ad6180-e561-41e3-86e4-37fc20819578" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2227.194289] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-140984b1-6c87-4255-9ab0-f54ef3dd0bda tempest-ServersTestMultiNic-465204663 tempest-ServersTestMultiNic-465204663-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2227.194464] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-140984b1-6c87-4255-9ab0-f54ef3dd0bda tempest-ServersTestMultiNic-465204663 tempest-ServersTestMultiNic-465204663-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2227.195161] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f6c0d7ae-7f40-4184-ad44-a0fbfe7e6936 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2227.202735] env[63279]: DEBUG oslo_vmware.api [None req-140984b1-6c87-4255-9ab0-f54ef3dd0bda tempest-ServersTestMultiNic-465204663 tempest-ServersTestMultiNic-465204663-project-member] Waiting for the task: (returnval){ [ 2227.202735] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52c2c763-582a-0f02-20d7-b54f9f0b0758" [ 2227.202735] env[63279]: _type = "Task" [ 2227.202735] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2227.212226] env[63279]: DEBUG oslo_vmware.api [None req-140984b1-6c87-4255-9ab0-f54ef3dd0bda tempest-ServersTestMultiNic-465204663 tempest-ServersTestMultiNic-465204663-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52c2c763-582a-0f02-20d7-b54f9f0b0758, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2227.325706] env[63279]: INFO nova.compute.claims [None req-b04333d3-6e55-46a1-b066-6cbcaa49e26d tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] [instance: a15141bc-424d-48ca-a6d5-c859a3639a0b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2227.536447] env[63279]: DEBUG oslo_vmware.api [None req-66cd7ef7-f0f8-471c-a26b-5d90abc10497 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Task: {'id': task-2087808, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.48414} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2227.536656] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-66cd7ef7-f0f8-471c-a26b-5d90abc10497 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] 9a6abf10-ed12-49c6-aa16-f428da9f9f9d/9a6abf10-ed12-49c6-aa16-f428da9f9f9d.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2227.536868] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-66cd7ef7-f0f8-471c-a26b-5d90abc10497 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: 9a6abf10-ed12-49c6-aa16-f428da9f9f9d] Extending root virtual disk to 1048576 {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2227.537138] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-9385fe7b-e191-4770-a7be-5f77c10503b0 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2227.543026] env[63279]: DEBUG oslo_vmware.api [None req-66cd7ef7-f0f8-471c-a26b-5d90abc10497 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Waiting for the task: (returnval){ [ 2227.543026] env[63279]: value = "task-2087809" [ 2227.543026] env[63279]: _type = "Task" [ 2227.543026] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2227.550259] env[63279]: DEBUG oslo_vmware.api [None req-66cd7ef7-f0f8-471c-a26b-5d90abc10497 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Task: {'id': task-2087809, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2227.713712] env[63279]: DEBUG oslo_vmware.api [None req-140984b1-6c87-4255-9ab0-f54ef3dd0bda tempest-ServersTestMultiNic-465204663 tempest-ServersTestMultiNic-465204663-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52c2c763-582a-0f02-20d7-b54f9f0b0758, 'name': SearchDatastore_Task, 'duration_secs': 0.064686} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2227.714601] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a1d2698b-b5c5-434a-8278-1b53608dcff3 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2227.720020] env[63279]: DEBUG oslo_vmware.api [None req-140984b1-6c87-4255-9ab0-f54ef3dd0bda tempest-ServersTestMultiNic-465204663 tempest-ServersTestMultiNic-465204663-project-member] Waiting for the task: (returnval){ [ 2227.720020] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]527d5604-b09d-b4a3-a102-a58b77831bfa" [ 2227.720020] env[63279]: _type = "Task" [ 2227.720020] env[63279]: } to complete. 
{{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2227.729140] env[63279]: DEBUG oslo_vmware.api [None req-140984b1-6c87-4255-9ab0-f54ef3dd0bda tempest-ServersTestMultiNic-465204663 tempest-ServersTestMultiNic-465204663-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]527d5604-b09d-b4a3-a102-a58b77831bfa, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2228.053026] env[63279]: DEBUG oslo_vmware.api [None req-66cd7ef7-f0f8-471c-a26b-5d90abc10497 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Task: {'id': task-2087809, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.06109} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2228.053341] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-66cd7ef7-f0f8-471c-a26b-5d90abc10497 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: 9a6abf10-ed12-49c6-aa16-f428da9f9f9d] Extended root virtual disk {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2228.054114] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9168f477-8fe5-4cd4-bf8e-4f0f3d373f6f {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2228.075907] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-66cd7ef7-f0f8-471c-a26b-5d90abc10497 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: 9a6abf10-ed12-49c6-aa16-f428da9f9f9d] Reconfiguring VM instance instance-0000005c to attach disk [datastore1] 9a6abf10-ed12-49c6-aa16-f428da9f9f9d/9a6abf10-ed12-49c6-aa16-f428da9f9f9d.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2228.076209] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-21805b13-f34e-40bb-a7ea-d3e969d06a63 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2228.096659] env[63279]: DEBUG oslo_vmware.api [None req-66cd7ef7-f0f8-471c-a26b-5d90abc10497 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Waiting for the task: (returnval){ [ 2228.096659] env[63279]: value = "task-2087810" [ 2228.096659] env[63279]: _type = "Task" [ 2228.096659] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2228.105042] env[63279]: DEBUG oslo_vmware.api [None req-66cd7ef7-f0f8-471c-a26b-5d90abc10497 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Task: {'id': task-2087810, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2228.229851] env[63279]: DEBUG oslo_vmware.api [None req-140984b1-6c87-4255-9ab0-f54ef3dd0bda tempest-ServersTestMultiNic-465204663 tempest-ServersTestMultiNic-465204663-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]527d5604-b09d-b4a3-a102-a58b77831bfa, 'name': SearchDatastore_Task, 'duration_secs': 0.011064} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2228.231915] env[63279]: DEBUG oslo_concurrency.lockutils [None req-140984b1-6c87-4255-9ab0-f54ef3dd0bda tempest-ServersTestMultiNic-465204663 tempest-ServersTestMultiNic-465204663-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2228.232217] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-140984b1-6c87-4255-9ab0-f54ef3dd0bda tempest-ServersTestMultiNic-465204663 tempest-ServersTestMultiNic-465204663-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] f418ff3d-8a5f-4d78-994a-e40b62cfcdd6/f418ff3d-8a5f-4d78-994a-e40b62cfcdd6.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2228.233325] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d7e8f2df-aa9a-4a72-b552-6ca8f1bd1d6b {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2228.240296] env[63279]: DEBUG oslo_vmware.api [None req-140984b1-6c87-4255-9ab0-f54ef3dd0bda tempest-ServersTestMultiNic-465204663 tempest-ServersTestMultiNic-465204663-project-member] Waiting for the task: (returnval){ [ 2228.240296] env[63279]: value = "task-2087811" [ 2228.240296] env[63279]: _type = "Task" [ 2228.240296] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2228.249161] env[63279]: DEBUG oslo_vmware.api [None req-140984b1-6c87-4255-9ab0-f54ef3dd0bda tempest-ServersTestMultiNic-465204663 tempest-ServersTestMultiNic-465204663-project-member] Task: {'id': task-2087811, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2228.564043] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-805f1b04-af64-45cb-bfef-d6442a51bcac {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2228.574022] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3b8e0f9-2a5e-4812-91e9-fcaaf6f1b50e {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2228.611044] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-461b6f1b-b750-42cb-a291-81d3dc28cafe {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2228.622635] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66904f5b-7e4b-4226-8023-1ff5730dc83d {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2228.627060] env[63279]: DEBUG oslo_vmware.api [None req-66cd7ef7-f0f8-471c-a26b-5d90abc10497 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Task: {'id': task-2087810, 'name': ReconfigVM_Task, 'duration_secs': 0.264805} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2228.627124] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-66cd7ef7-f0f8-471c-a26b-5d90abc10497 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: 9a6abf10-ed12-49c6-aa16-f428da9f9f9d] Reconfigured VM instance instance-0000005c to attach disk [datastore1] 9a6abf10-ed12-49c6-aa16-f428da9f9f9d/9a6abf10-ed12-49c6-aa16-f428da9f9f9d.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2228.628273] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-eb9df100-a0ed-487f-bd91-6a5d264df927 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2228.634412] env[63279]: DEBUG oslo_vmware.api [None req-66cd7ef7-f0f8-471c-a26b-5d90abc10497 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Waiting for the task: (returnval){ [ 2228.634412] env[63279]: value = "task-2087812" [ 2228.634412] env[63279]: _type = "Task" [ 2228.634412] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2228.642575] env[63279]: DEBUG nova.compute.provider_tree [None req-b04333d3-6e55-46a1-b066-6cbcaa49e26d tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Inventory has not changed in ProviderTree for provider: 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2228.654481] env[63279]: DEBUG oslo_vmware.api [None req-66cd7ef7-f0f8-471c-a26b-5d90abc10497 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Task: {'id': task-2087812, 'name': Rename_Task} progress is 10%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2228.737324] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-d606847b-9abe-4529-9d05-e6cd26793b39 tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] [instance: 59ad6180-e561-41e3-86e4-37fc20819578] Powering off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2228.737694] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a4c169cd-28de-4056-b1a1-279362a048fc {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2228.746504] env[63279]: DEBUG oslo_vmware.api [None req-d606847b-9abe-4529-9d05-e6cd26793b39 tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] Waiting for the task: (returnval){ [ 2228.746504] env[63279]: value = "task-2087813" [ 2228.746504] env[63279]: _type = "Task" [ 2228.746504] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2228.752954] env[63279]: DEBUG oslo_vmware.api [None req-140984b1-6c87-4255-9ab0-f54ef3dd0bda tempest-ServersTestMultiNic-465204663 tempest-ServersTestMultiNic-465204663-project-member] Task: {'id': task-2087811, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.490776} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2228.753577] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-140984b1-6c87-4255-9ab0-f54ef3dd0bda tempest-ServersTestMultiNic-465204663 tempest-ServersTestMultiNic-465204663-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] f418ff3d-8a5f-4d78-994a-e40b62cfcdd6/f418ff3d-8a5f-4d78-994a-e40b62cfcdd6.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2228.753832] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-140984b1-6c87-4255-9ab0-f54ef3dd0bda tempest-ServersTestMultiNic-465204663 tempest-ServersTestMultiNic-465204663-project-member] [instance: f418ff3d-8a5f-4d78-994a-e40b62cfcdd6] Extending root virtual disk to 1048576 {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2228.754099] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ffd1ad55-015c-40c7-a2e5-9b3368e54a9b {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2228.758853] env[63279]: DEBUG oslo_vmware.api [None req-d606847b-9abe-4529-9d05-e6cd26793b39 tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] Task: {'id': task-2087813, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2228.763990] env[63279]: DEBUG oslo_vmware.api [None req-140984b1-6c87-4255-9ab0-f54ef3dd0bda tempest-ServersTestMultiNic-465204663 tempest-ServersTestMultiNic-465204663-project-member] Waiting for the task: (returnval){ [ 2228.763990] env[63279]: value = "task-2087814" [ 2228.763990] env[63279]: _type = "Task" [ 2228.763990] env[63279]: } to complete. 
{{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2228.774362] env[63279]: DEBUG oslo_vmware.api [None req-140984b1-6c87-4255-9ab0-f54ef3dd0bda tempest-ServersTestMultiNic-465204663 tempest-ServersTestMultiNic-465204663-project-member] Task: {'id': task-2087814, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2229.148324] env[63279]: DEBUG nova.scheduler.client.report [None req-b04333d3-6e55-46a1-b066-6cbcaa49e26d tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Inventory has not changed for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2229.155912] env[63279]: DEBUG oslo_vmware.api [None req-66cd7ef7-f0f8-471c-a26b-5d90abc10497 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Task: {'id': task-2087812, 'name': Rename_Task, 'duration_secs': 0.203831} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2229.156206] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-66cd7ef7-f0f8-471c-a26b-5d90abc10497 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: 9a6abf10-ed12-49c6-aa16-f428da9f9f9d] Powering on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2229.156451] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-34eb651d-b09a-46e7-93f1-685792578bcd {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2229.163247] env[63279]: DEBUG oslo_vmware.api [None req-66cd7ef7-f0f8-471c-a26b-5d90abc10497 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Waiting for the task: (returnval){ [ 2229.163247] env[63279]: value = "task-2087815" [ 2229.163247] env[63279]: _type = "Task" [ 2229.163247] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2229.172636] env[63279]: DEBUG oslo_vmware.api [None req-66cd7ef7-f0f8-471c-a26b-5d90abc10497 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Task: {'id': task-2087815, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2229.258876] env[63279]: DEBUG oslo_vmware.api [None req-d606847b-9abe-4529-9d05-e6cd26793b39 tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] Task: {'id': task-2087813, 'name': PowerOffVM_Task, 'duration_secs': 0.212279} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2229.259169] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-d606847b-9abe-4529-9d05-e6cd26793b39 tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] [instance: 59ad6180-e561-41e3-86e4-37fc20819578] Powered off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2229.260055] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-198f35ea-f047-4714-bcdb-af269e2d5ee2 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2229.281193] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b556713c-e4d2-4cd3-8561-10e26a78ec36 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2229.288951] env[63279]: DEBUG oslo_vmware.api [None req-140984b1-6c87-4255-9ab0-f54ef3dd0bda tempest-ServersTestMultiNic-465204663 tempest-ServersTestMultiNic-465204663-project-member] Task: {'id': task-2087814, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.069196} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2229.291287] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-140984b1-6c87-4255-9ab0-f54ef3dd0bda tempest-ServersTestMultiNic-465204663 tempest-ServersTestMultiNic-465204663-project-member] [instance: f418ff3d-8a5f-4d78-994a-e40b62cfcdd6] Extended root virtual disk {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2229.293883] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0516d694-8ea3-4c28-aeb2-287937d2ac44 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2229.319391] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-140984b1-6c87-4255-9ab0-f54ef3dd0bda tempest-ServersTestMultiNic-465204663 tempest-ServersTestMultiNic-465204663-project-member] [instance: f418ff3d-8a5f-4d78-994a-e40b62cfcdd6] Reconfiguring VM instance instance-0000005b to attach disk [datastore1] f418ff3d-8a5f-4d78-994a-e40b62cfcdd6/f418ff3d-8a5f-4d78-994a-e40b62cfcdd6.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2229.321555] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c35324ce-9afa-4aa2-ad88-968d131ad8ec {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2229.341933] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-d606847b-9abe-4529-9d05-e6cd26793b39 tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] [instance: 59ad6180-e561-41e3-86e4-37fc20819578] Powering off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2229.342223] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b9c75b2d-8781-482b-8472-1df067d8b81c {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2229.344860] env[63279]: DEBUG oslo_vmware.api [None req-140984b1-6c87-4255-9ab0-f54ef3dd0bda 
tempest-ServersTestMultiNic-465204663 tempest-ServersTestMultiNic-465204663-project-member] Waiting for the task: (returnval){ [ 2229.344860] env[63279]: value = "task-2087816" [ 2229.344860] env[63279]: _type = "Task" [ 2229.344860] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2229.353778] env[63279]: DEBUG oslo_vmware.api [None req-140984b1-6c87-4255-9ab0-f54ef3dd0bda tempest-ServersTestMultiNic-465204663 tempest-ServersTestMultiNic-465204663-project-member] Task: {'id': task-2087816, 'name': ReconfigVM_Task} progress is 10%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2229.354837] env[63279]: DEBUG oslo_vmware.api [None req-d606847b-9abe-4529-9d05-e6cd26793b39 tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] Waiting for the task: (returnval){ [ 2229.354837] env[63279]: value = "task-2087817" [ 2229.354837] env[63279]: _type = "Task" [ 2229.354837] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2229.362406] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-d606847b-9abe-4529-9d05-e6cd26793b39 tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] [instance: 59ad6180-e561-41e3-86e4-37fc20819578] VM already powered off {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 2229.362609] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-d606847b-9abe-4529-9d05-e6cd26793b39 tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] [instance: 59ad6180-e561-41e3-86e4-37fc20819578] Processing image 30887889-e45b-4f67-8b3c-16216e594a90 {{(pid=63279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2229.362857] env[63279]: DEBUG oslo_concurrency.lockutils [None req-d606847b-9abe-4529-9d05-e6cd26793b39 tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2229.363026] env[63279]: DEBUG oslo_concurrency.lockutils [None req-d606847b-9abe-4529-9d05-e6cd26793b39 tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2229.363284] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-d606847b-9abe-4529-9d05-e6cd26793b39 tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2229.363530] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f17be963-f9e8-41fa-b37a-ffb771bb638b {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2229.372845] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-d606847b-9abe-4529-9d05-e6cd26793b39 
tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2229.373034] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-d606847b-9abe-4529-9d05-e6cd26793b39 tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2229.373765] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2c2c9439-ba1e-42f8-98d6-7e15548c0660 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2229.378456] env[63279]: DEBUG oslo_vmware.api [None req-d606847b-9abe-4529-9d05-e6cd26793b39 tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] Waiting for the task: (returnval){ [ 2229.378456] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52dffd0b-0023-ab1a-07cd-221ab40ddff1" [ 2229.378456] env[63279]: _type = "Task" [ 2229.378456] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2229.385426] env[63279]: DEBUG oslo_vmware.api [None req-d606847b-9abe-4529-9d05-e6cd26793b39 tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52dffd0b-0023-ab1a-07cd-221ab40ddff1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2229.654044] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b04333d3-6e55-46a1-b066-6cbcaa49e26d tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.334s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2229.657246] env[63279]: DEBUG oslo_concurrency.lockutils [None req-2000fa86-5fc9-463b-916e-e234ef84002b tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 4.293s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2229.659446] env[63279]: INFO nova.compute.claims [None req-2000fa86-5fc9-463b-916e-e234ef84002b tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: 48794f65-355c-4cad-a83f-6b8cc327dd4d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2229.674356] env[63279]: DEBUG oslo_vmware.api [None req-66cd7ef7-f0f8-471c-a26b-5d90abc10497 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Task: {'id': task-2087815, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2229.699913] env[63279]: INFO nova.network.neutron [None req-b04333d3-6e55-46a1-b066-6cbcaa49e26d tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] [instance: a15141bc-424d-48ca-a6d5-c859a3639a0b] Updating port cd0a8ed6-d1ee-4266-8bde-e866ac2873ce with attributes {'binding:host_id': 'cpu-1', 'device_owner': 'compute:nova'} [ 2229.860108] env[63279]: DEBUG oslo_vmware.api [None req-140984b1-6c87-4255-9ab0-f54ef3dd0bda tempest-ServersTestMultiNic-465204663 tempest-ServersTestMultiNic-465204663-project-member] Task: {'id': task-2087816, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2229.893152] env[63279]: DEBUG oslo_vmware.api [None req-d606847b-9abe-4529-9d05-e6cd26793b39 tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52dffd0b-0023-ab1a-07cd-221ab40ddff1, 'name': SearchDatastore_Task, 'duration_secs': 0.016055} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2229.893152] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f8f39293-6031-47b7-a0a3-5d488dc67db4 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2229.899076] env[63279]: DEBUG oslo_vmware.api [None req-d606847b-9abe-4529-9d05-e6cd26793b39 tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] Waiting for the task: (returnval){ [ 2229.899076] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52a9b1f0-3d4a-f716-6110-821103cf8e9c" [ 2229.899076] env[63279]: _type = "Task" [ 2229.899076] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2229.910179] env[63279]: DEBUG oslo_vmware.api [None req-d606847b-9abe-4529-9d05-e6cd26793b39 tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52a9b1f0-3d4a-f716-6110-821103cf8e9c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2230.181760] env[63279]: DEBUG oslo_vmware.api [None req-66cd7ef7-f0f8-471c-a26b-5d90abc10497 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Task: {'id': task-2087815, 'name': PowerOnVM_Task, 'duration_secs': 0.86807} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2230.182212] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-66cd7ef7-f0f8-471c-a26b-5d90abc10497 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: 9a6abf10-ed12-49c6-aa16-f428da9f9f9d] Powered on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2230.182509] env[63279]: INFO nova.compute.manager [None req-66cd7ef7-f0f8-471c-a26b-5d90abc10497 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: 9a6abf10-ed12-49c6-aa16-f428da9f9f9d] Took 7.46 seconds to spawn the instance on the hypervisor. [ 2230.182789] env[63279]: DEBUG nova.compute.manager [None req-66cd7ef7-f0f8-471c-a26b-5d90abc10497 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: 9a6abf10-ed12-49c6-aa16-f428da9f9f9d] Checking state {{(pid=63279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2230.183953] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5adac199-3f22-4866-9547-2cfc18ee8057 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2230.356687] env[63279]: DEBUG oslo_vmware.api [None req-140984b1-6c87-4255-9ab0-f54ef3dd0bda tempest-ServersTestMultiNic-465204663 tempest-ServersTestMultiNic-465204663-project-member] Task: {'id': task-2087816, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2230.409408] env[63279]: DEBUG oslo_vmware.api [None req-d606847b-9abe-4529-9d05-e6cd26793b39 tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52a9b1f0-3d4a-f716-6110-821103cf8e9c, 'name': SearchDatastore_Task, 'duration_secs': 0.025404} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2230.409682] env[63279]: DEBUG oslo_concurrency.lockutils [None req-d606847b-9abe-4529-9d05-e6cd26793b39 tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2230.409993] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-d606847b-9abe-4529-9d05-e6cd26793b39 tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] Copying virtual disk from [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] 59ad6180-e561-41e3-86e4-37fc20819578/30887889-e45b-4f67-8b3c-16216e594a90-rescue.vmdk. 
{{(pid=63279) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 2230.410273] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f80d9247-9873-4fff-a84a-f738439c1042 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2230.417234] env[63279]: DEBUG oslo_vmware.api [None req-d606847b-9abe-4529-9d05-e6cd26793b39 tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] Waiting for the task: (returnval){ [ 2230.417234] env[63279]: value = "task-2087818" [ 2230.417234] env[63279]: _type = "Task" [ 2230.417234] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2230.425937] env[63279]: DEBUG oslo_vmware.api [None req-d606847b-9abe-4529-9d05-e6cd26793b39 tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] Task: {'id': task-2087818, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2230.710645] env[63279]: INFO nova.compute.manager [None req-66cd7ef7-f0f8-471c-a26b-5d90abc10497 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: 9a6abf10-ed12-49c6-aa16-f428da9f9f9d] Took 17.54 seconds to build instance. [ 2230.859691] env[63279]: DEBUG oslo_vmware.api [None req-140984b1-6c87-4255-9ab0-f54ef3dd0bda tempest-ServersTestMultiNic-465204663 tempest-ServersTestMultiNic-465204663-project-member] Task: {'id': task-2087816, 'name': ReconfigVM_Task, 'duration_secs': 1.225054} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2230.860105] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-140984b1-6c87-4255-9ab0-f54ef3dd0bda tempest-ServersTestMultiNic-465204663 tempest-ServersTestMultiNic-465204663-project-member] [instance: f418ff3d-8a5f-4d78-994a-e40b62cfcdd6] Reconfigured VM instance instance-0000005b to attach disk [datastore1] f418ff3d-8a5f-4d78-994a-e40b62cfcdd6/f418ff3d-8a5f-4d78-994a-e40b62cfcdd6.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2230.860796] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a1355c46-1860-492b-b90c-1df18a8868af {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2230.868091] env[63279]: DEBUG oslo_vmware.api [None req-140984b1-6c87-4255-9ab0-f54ef3dd0bda tempest-ServersTestMultiNic-465204663 tempest-ServersTestMultiNic-465204663-project-member] Waiting for the task: (returnval){ [ 2230.868091] env[63279]: value = "task-2087819" [ 2230.868091] env[63279]: _type = "Task" [ 2230.868091] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2230.881118] env[63279]: DEBUG oslo_vmware.api [None req-140984b1-6c87-4255-9ab0-f54ef3dd0bda tempest-ServersTestMultiNic-465204663 tempest-ServersTestMultiNic-465204663-project-member] Task: {'id': task-2087819, 'name': Rename_Task} progress is 6%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2230.928741] env[63279]: DEBUG oslo_vmware.api [None req-d606847b-9abe-4529-9d05-e6cd26793b39 tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] Task: {'id': task-2087818, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.483984} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2230.932335] env[63279]: INFO nova.virt.vmwareapi.ds_util [None req-d606847b-9abe-4529-9d05-e6cd26793b39 tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] Copied virtual disk from [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] 59ad6180-e561-41e3-86e4-37fc20819578/30887889-e45b-4f67-8b3c-16216e594a90-rescue.vmdk. [ 2230.933370] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33ce3aa0-cdc3-4250-aebd-f5ab5833ee60 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2230.962831] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-d606847b-9abe-4529-9d05-e6cd26793b39 tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] [instance: 59ad6180-e561-41e3-86e4-37fc20819578] Reconfiguring VM instance instance-0000005a to attach disk [datastore1] 59ad6180-e561-41e3-86e4-37fc20819578/30887889-e45b-4f67-8b3c-16216e594a90-rescue.vmdk or device None with type thin {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2230.964161] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c6f56a7b-3d07-4c66-a567-d13e988a28a0 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2230.977538] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5722de9d-0394-4c9f-bb84-0d155c605780 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2230.986171] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44c2c3d2-6a9d-4997-826a-09c736c47e05 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2230.989462] env[63279]: DEBUG oslo_vmware.api [None req-d606847b-9abe-4529-9d05-e6cd26793b39 tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] Waiting for the task: (returnval){ [ 2230.989462] env[63279]: value = "task-2087820" [ 2230.989462] env[63279]: _type = "Task" [ 2230.989462] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2231.024620] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2580e032-8d40-4542-8938-eafff53a6943 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2231.033852] env[63279]: DEBUG oslo_vmware.api [None req-d606847b-9abe-4529-9d05-e6cd26793b39 tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] Task: {'id': task-2087820, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2231.040550] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ce7de27-b323-427a-946e-56de8d392528 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2231.060165] env[63279]: DEBUG nova.compute.provider_tree [None req-2000fa86-5fc9-463b-916e-e234ef84002b tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Updating inventory in ProviderTree for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2231.214901] env[63279]: DEBUG oslo_concurrency.lockutils [None req-66cd7ef7-f0f8-471c-a26b-5d90abc10497 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Lock "9a6abf10-ed12-49c6-aa16-f428da9f9f9d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 19.048s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2231.379931] env[63279]: DEBUG oslo_vmware.api [None req-140984b1-6c87-4255-9ab0-f54ef3dd0bda tempest-ServersTestMultiNic-465204663 tempest-ServersTestMultiNic-465204663-project-member] Task: {'id': task-2087819, 'name': Rename_Task, 'duration_secs': 0.141652} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2231.381600] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-140984b1-6c87-4255-9ab0-f54ef3dd0bda tempest-ServersTestMultiNic-465204663 tempest-ServersTestMultiNic-465204663-project-member] [instance: f418ff3d-8a5f-4d78-994a-e40b62cfcdd6] Powering on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2231.381600] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2ccd23c8-97e1-46b7-9763-be3d8ac74b1f {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2231.386479] env[63279]: DEBUG oslo_vmware.api [None req-140984b1-6c87-4255-9ab0-f54ef3dd0bda tempest-ServersTestMultiNic-465204663 tempest-ServersTestMultiNic-465204663-project-member] Waiting for the task: (returnval){ [ 2231.386479] env[63279]: value = "task-2087821" [ 2231.386479] env[63279]: _type = "Task" [ 2231.386479] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2231.393994] env[63279]: DEBUG oslo_vmware.api [None req-140984b1-6c87-4255-9ab0-f54ef3dd0bda tempest-ServersTestMultiNic-465204663 tempest-ServersTestMultiNic-465204663-project-member] Task: {'id': task-2087821, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2231.499489] env[63279]: DEBUG oslo_vmware.api [None req-d606847b-9abe-4529-9d05-e6cd26793b39 tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] Task: {'id': task-2087820, 'name': ReconfigVM_Task, 'duration_secs': 0.382906} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2231.499797] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-d606847b-9abe-4529-9d05-e6cd26793b39 tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] [instance: 59ad6180-e561-41e3-86e4-37fc20819578] Reconfigured VM instance instance-0000005a to attach disk [datastore1] 59ad6180-e561-41e3-86e4-37fc20819578/30887889-e45b-4f67-8b3c-16216e594a90-rescue.vmdk or device None with type thin {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2231.500678] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00d1912c-31d0-4c6e-8d21-647bb3a6d037 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2231.526343] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a2d858cd-ae85-41e6-9c3e-274de64ba44d {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2231.542723] env[63279]: DEBUG oslo_vmware.api [None req-d606847b-9abe-4529-9d05-e6cd26793b39 tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] Waiting for the task: (returnval){ [ 2231.542723] env[63279]: value = "task-2087822" [ 2231.542723] env[63279]: _type = "Task" [ 2231.542723] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2231.551468] env[63279]: DEBUG oslo_vmware.api [None req-d606847b-9abe-4529-9d05-e6cd26793b39 tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] Task: {'id': task-2087822, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2231.598363] env[63279]: DEBUG nova.scheduler.client.report [None req-2000fa86-5fc9-463b-916e-e234ef84002b tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Updated inventory for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 with generation 131 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 2231.599328] env[63279]: DEBUG nova.compute.provider_tree [None req-2000fa86-5fc9-463b-916e-e234ef84002b tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Updating resource provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 generation from 131 to 132 during operation: update_inventory {{(pid=63279) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 2231.600040] env[63279]: DEBUG nova.compute.provider_tree [None req-2000fa86-5fc9-463b-916e-e234ef84002b tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Updating inventory in ProviderTree for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2231.704999] env[63279]: DEBUG nova.compute.manager [req-d94540b2-8e1f-477a-8d27-2969647a31eb req-084ba72e-59d8-4911-9802-d8f680025aaf service nova] [instance: a15141bc-424d-48ca-a6d5-c859a3639a0b] Received event network-vif-plugged-cd0a8ed6-d1ee-4266-8bde-e866ac2873ce {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2231.705244] env[63279]: DEBUG oslo_concurrency.lockutils [req-d94540b2-8e1f-477a-8d27-2969647a31eb req-084ba72e-59d8-4911-9802-d8f680025aaf service nova] Acquiring lock "a15141bc-424d-48ca-a6d5-c859a3639a0b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2231.705646] env[63279]: DEBUG oslo_concurrency.lockutils [req-d94540b2-8e1f-477a-8d27-2969647a31eb req-084ba72e-59d8-4911-9802-d8f680025aaf service nova] Lock "a15141bc-424d-48ca-a6d5-c859a3639a0b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2231.705888] env[63279]: DEBUG oslo_concurrency.lockutils [req-d94540b2-8e1f-477a-8d27-2969647a31eb req-084ba72e-59d8-4911-9802-d8f680025aaf service nova] Lock "a15141bc-424d-48ca-a6d5-c859a3639a0b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63279) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2231.706136] env[63279]: DEBUG nova.compute.manager [req-d94540b2-8e1f-477a-8d27-2969647a31eb req-084ba72e-59d8-4911-9802-d8f680025aaf service nova] [instance: a15141bc-424d-48ca-a6d5-c859a3639a0b] No waiting events found dispatching network-vif-plugged-cd0a8ed6-d1ee-4266-8bde-e866ac2873ce {{(pid=63279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 2231.706406] env[63279]: WARNING nova.compute.manager [req-d94540b2-8e1f-477a-8d27-2969647a31eb req-084ba72e-59d8-4911-9802-d8f680025aaf service nova] [instance: a15141bc-424d-48ca-a6d5-c859a3639a0b] Received unexpected event network-vif-plugged-cd0a8ed6-d1ee-4266-8bde-e866ac2873ce for instance with vm_state shelved_offloaded and task_state spawning. [ 2231.800840] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b04333d3-6e55-46a1-b066-6cbcaa49e26d tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Acquiring lock "refresh_cache-a15141bc-424d-48ca-a6d5-c859a3639a0b" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2231.801080] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b04333d3-6e55-46a1-b066-6cbcaa49e26d tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Acquired lock "refresh_cache-a15141bc-424d-48ca-a6d5-c859a3639a0b" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2231.801257] env[63279]: DEBUG nova.network.neutron [None req-b04333d3-6e55-46a1-b066-6cbcaa49e26d tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] [instance: a15141bc-424d-48ca-a6d5-c859a3639a0b] Building network info cache for instance {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2231.897123] env[63279]: DEBUG oslo_vmware.api [None req-140984b1-6c87-4255-9ab0-f54ef3dd0bda tempest-ServersTestMultiNic-465204663 tempest-ServersTestMultiNic-465204663-project-member] Task: {'id': task-2087821, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2232.054909] env[63279]: DEBUG oslo_vmware.api [None req-d606847b-9abe-4529-9d05-e6cd26793b39 tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] Task: {'id': task-2087822, 'name': ReconfigVM_Task, 'duration_secs': 0.1595} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2232.054909] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-d606847b-9abe-4529-9d05-e6cd26793b39 tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] [instance: 59ad6180-e561-41e3-86e4-37fc20819578] Powering on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2232.054909] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2cfd1c1e-7936-4517-98de-2c2843cd572c {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2232.061022] env[63279]: DEBUG oslo_vmware.api [None req-d606847b-9abe-4529-9d05-e6cd26793b39 tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] Waiting for the task: (returnval){ [ 2232.061022] env[63279]: value = "task-2087823" [ 2232.061022] env[63279]: _type = "Task" [ 2232.061022] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2232.067904] env[63279]: DEBUG oslo_vmware.api [None req-d606847b-9abe-4529-9d05-e6cd26793b39 tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] Task: {'id': task-2087823, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2232.109390] env[63279]: DEBUG oslo_concurrency.lockutils [None req-2000fa86-5fc9-463b-916e-e234ef84002b tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.449s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2232.109390] env[63279]: DEBUG nova.compute.manager [None req-2000fa86-5fc9-463b-916e-e234ef84002b tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: 48794f65-355c-4cad-a83f-6b8cc327dd4d] Start building networks asynchronously for instance. {{(pid=63279) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 2232.111025] env[63279]: DEBUG oslo_concurrency.lockutils [None req-17cc2163-e46e-46a7-8e0b-8af430363191 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 5.943s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2232.112626] env[63279]: INFO nova.compute.claims [None req-17cc2163-e46e-46a7-8e0b-8af430363191 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] [instance: 6b4a94f9-1cd9-466f-844f-8d692b732abc] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2232.397851] env[63279]: DEBUG oslo_vmware.api [None req-140984b1-6c87-4255-9ab0-f54ef3dd0bda tempest-ServersTestMultiNic-465204663 tempest-ServersTestMultiNic-465204663-project-member] Task: {'id': task-2087821, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2232.569318] env[63279]: DEBUG oslo_vmware.api [None req-d606847b-9abe-4529-9d05-e6cd26793b39 tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] Task: {'id': task-2087823, 'name': PowerOnVM_Task, 'duration_secs': 0.411238} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2232.569637] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-d606847b-9abe-4529-9d05-e6cd26793b39 tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] [instance: 59ad6180-e561-41e3-86e4-37fc20819578] Powered on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2232.572581] env[63279]: DEBUG nova.compute.manager [None req-d606847b-9abe-4529-9d05-e6cd26793b39 tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] [instance: 59ad6180-e561-41e3-86e4-37fc20819578] Checking state {{(pid=63279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2232.573408] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07ff6f45-1849-4af3-8fdc-b52f408215af {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2232.619231] env[63279]: DEBUG nova.compute.utils [None req-2000fa86-5fc9-463b-916e-e234ef84002b tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Using /dev/sd instead of None {{(pid=63279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2232.626321] env[63279]: DEBUG nova.compute.manager [None req-2000fa86-5fc9-463b-916e-e234ef84002b tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: 48794f65-355c-4cad-a83f-6b8cc327dd4d] Allocating IP information in the background. 
{{(pid=63279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 2232.626494] env[63279]: DEBUG nova.network.neutron [None req-2000fa86-5fc9-463b-916e-e234ef84002b tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: 48794f65-355c-4cad-a83f-6b8cc327dd4d] allocate_for_instance() {{(pid=63279) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2232.802963] env[63279]: DEBUG nova.network.neutron [None req-b04333d3-6e55-46a1-b066-6cbcaa49e26d tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] [instance: a15141bc-424d-48ca-a6d5-c859a3639a0b] Updating instance_info_cache with network_info: [{"id": "cd0a8ed6-d1ee-4266-8bde-e866ac2873ce", "address": "fa:16:3e:fe:5c:41", "network": {"id": "4f906777-9da9-42b1-9146-359f04c7c47f", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-923457018-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.193", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "131db3d2daa24712b6e11592cf789b33", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "abe48956-848a-4e1f-b1f1-a27baa5390b9", "external-id": "nsx-vlan-transportzone-238", "segmentation_id": 238, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcd0a8ed6-d1", "ovs_interfaceid": "cd0a8ed6-d1ee-4266-8bde-e866ac2873ce", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2232.833783] env[63279]: DEBUG nova.policy [None req-2000fa86-5fc9-463b-916e-e234ef84002b tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '655d692da88947b89104e1f14f7d71f5', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a5a719a21fe248c49d0d0151d218866b', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63279) authorize /opt/stack/nova/nova/policy.py:192}} [ 2232.906933] env[63279]: DEBUG oslo_vmware.api [None req-140984b1-6c87-4255-9ab0-f54ef3dd0bda tempest-ServersTestMultiNic-465204663 tempest-ServersTestMultiNic-465204663-project-member] Task: {'id': task-2087821, 'name': PowerOnVM_Task, 'duration_secs': 1.38485} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2232.907378] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-140984b1-6c87-4255-9ab0-f54ef3dd0bda tempest-ServersTestMultiNic-465204663 tempest-ServersTestMultiNic-465204663-project-member] [instance: f418ff3d-8a5f-4d78-994a-e40b62cfcdd6] Powered on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2232.907662] env[63279]: INFO nova.compute.manager [None req-140984b1-6c87-4255-9ab0-f54ef3dd0bda tempest-ServersTestMultiNic-465204663 tempest-ServersTestMultiNic-465204663-project-member] [instance: f418ff3d-8a5f-4d78-994a-e40b62cfcdd6] Took 12.57 seconds to spawn the instance on the hypervisor. [ 2232.907986] env[63279]: DEBUG nova.compute.manager [None req-140984b1-6c87-4255-9ab0-f54ef3dd0bda tempest-ServersTestMultiNic-465204663 tempest-ServersTestMultiNic-465204663-project-member] [instance: f418ff3d-8a5f-4d78-994a-e40b62cfcdd6] Checking state {{(pid=63279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2232.909261] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37f40a37-a093-4e36-b851-58a769164bc3 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2233.127048] env[63279]: DEBUG nova.compute.manager [None req-2000fa86-5fc9-463b-916e-e234ef84002b tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: 48794f65-355c-4cad-a83f-6b8cc327dd4d] Start building block device mappings for instance. {{(pid=63279) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 2233.195687] env[63279]: DEBUG nova.network.neutron [None req-2000fa86-5fc9-463b-916e-e234ef84002b tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: 48794f65-355c-4cad-a83f-6b8cc327dd4d] Successfully created port: 29fc0fdf-41a6-471b-a08c-4e62bd3d372b {{(pid=63279) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2233.306233] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b04333d3-6e55-46a1-b066-6cbcaa49e26d tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Releasing lock "refresh_cache-a15141bc-424d-48ca-a6d5-c859a3639a0b" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2233.357275] env[63279]: DEBUG nova.virt.hardware [None req-b04333d3-6e55-46a1-b066-6cbcaa49e26d tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-13T17:30:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='df2854dd51a2359aba97de57ae67481d',container_format='bare',created_at=2025-01-13T17:58:08Z,direct_url=,disk_format='vmdk',id=f425cf74-2f77-4c7b-99ba-64ff14b01dc6,min_disk=1,min_ram=0,name='tempest-AttachVolumeShelveTestJSON-server-1839827364-shelved',owner='131db3d2daa24712b6e11592cf789b33',properties=ImageMetaProps,protected=,size=31671296,status='active',tags=,updated_at=2025-01-13T17:58:23Z,virtual_size=,visibility=), allow threads: False 
{{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2233.357778] env[63279]: DEBUG nova.virt.hardware [None req-b04333d3-6e55-46a1-b066-6cbcaa49e26d tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Flavor limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2233.357778] env[63279]: DEBUG nova.virt.hardware [None req-b04333d3-6e55-46a1-b066-6cbcaa49e26d tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Image limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2233.358073] env[63279]: DEBUG nova.virt.hardware [None req-b04333d3-6e55-46a1-b066-6cbcaa49e26d tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Flavor pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2233.358487] env[63279]: DEBUG nova.virt.hardware [None req-b04333d3-6e55-46a1-b066-6cbcaa49e26d tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Image pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2233.358730] env[63279]: DEBUG nova.virt.hardware [None req-b04333d3-6e55-46a1-b066-6cbcaa49e26d tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2233.359067] env[63279]: DEBUG nova.virt.hardware [None req-b04333d3-6e55-46a1-b066-6cbcaa49e26d tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 2233.359498] env[63279]: DEBUG nova.virt.hardware [None req-b04333d3-6e55-46a1-b066-6cbcaa49e26d tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 2233.359687] env[63279]: DEBUG nova.virt.hardware [None req-b04333d3-6e55-46a1-b066-6cbcaa49e26d tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Got 1 possible topologies {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2233.360188] env[63279]: DEBUG nova.virt.hardware [None req-b04333d3-6e55-46a1-b066-6cbcaa49e26d tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2233.360389] env[63279]: DEBUG nova.virt.hardware [None req-b04333d3-6e55-46a1-b066-6cbcaa49e26d tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2233.361549] 
env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f734db0e-e9f2-4ca9-9b60-0030034d8f0d {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2233.373384] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-532947f5-0fd9-413e-92b6-859ff009667f {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2233.391034] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-b04333d3-6e55-46a1-b066-6cbcaa49e26d tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] [instance: a15141bc-424d-48ca-a6d5-c859a3639a0b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:fe:5c:41', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'abe48956-848a-4e1f-b1f1-a27baa5390b9', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'cd0a8ed6-d1ee-4266-8bde-e866ac2873ce', 'vif_model': 'vmxnet3'}] {{(pid=63279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2233.398841] env[63279]: DEBUG oslo.service.loopingcall [None req-b04333d3-6e55-46a1-b066-6cbcaa49e26d tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2233.400141] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a15141bc-424d-48ca-a6d5-c859a3639a0b] Creating VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2233.400826] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-020e0309-e6e7-4ed8-a879-8cdaf9d294f9 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2233.403553] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8e75addb-d8e7-4fdc-b5af-bb140e860750 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2233.423430] env[63279]: DEBUG oslo_concurrency.lockutils [None req-d0ca50dd-6411-451b-b137-4618d6023b66 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Acquiring lock "9a6abf10-ed12-49c6-aa16-f428da9f9f9d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2233.423430] env[63279]: DEBUG oslo_concurrency.lockutils [None req-d0ca50dd-6411-451b-b137-4618d6023b66 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Lock "9a6abf10-ed12-49c6-aa16-f428da9f9f9d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2233.423430] env[63279]: DEBUG oslo_concurrency.lockutils [None req-d0ca50dd-6411-451b-b137-4618d6023b66 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Acquiring lock "9a6abf10-ed12-49c6-aa16-f428da9f9f9d-events" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2233.423430] env[63279]: DEBUG oslo_concurrency.lockutils [None req-d0ca50dd-6411-451b-b137-4618d6023b66 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Lock "9a6abf10-ed12-49c6-aa16-f428da9f9f9d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2233.423430] env[63279]: DEBUG oslo_concurrency.lockutils [None req-d0ca50dd-6411-451b-b137-4618d6023b66 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Lock "9a6abf10-ed12-49c6-aa16-f428da9f9f9d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2233.428079] env[63279]: INFO nova.compute.manager [None req-d0ca50dd-6411-451b-b137-4618d6023b66 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: 9a6abf10-ed12-49c6-aa16-f428da9f9f9d] Terminating instance [ 2233.436608] env[63279]: INFO nova.compute.manager [None req-140984b1-6c87-4255-9ab0-f54ef3dd0bda tempest-ServersTestMultiNic-465204663 tempest-ServersTestMultiNic-465204663-project-member] [instance: f418ff3d-8a5f-4d78-994a-e40b62cfcdd6] Took 20.91 seconds to build instance. [ 2233.436608] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53829073-c84f-4049-ad75-8acd9cce8d47 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2233.440694] env[63279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2233.440694] env[63279]: value = "task-2087824" [ 2233.440694] env[63279]: _type = "Task" [ 2233.440694] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2233.472738] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4b5afa6-e5cb-4ebf-a83d-2a365bbf1842 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2233.479226] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087824, 'name': CreateVM_Task} progress is 15%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2233.484806] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d540cdc-0dde-4b22-a2d6-268e8ccb05fc {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2233.501729] env[63279]: DEBUG nova.compute.provider_tree [None req-17cc2163-e46e-46a7-8e0b-8af430363191 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Inventory has not changed in ProviderTree for provider: 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2233.737407] env[63279]: DEBUG nova.compute.manager [req-78c0b4a9-3830-44c6-b3b2-d2508204a1f1 req-838b5cae-2b39-4d22-81be-9f4758a05884 service nova] [instance: a15141bc-424d-48ca-a6d5-c859a3639a0b] Received event network-changed-cd0a8ed6-d1ee-4266-8bde-e866ac2873ce {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2233.737665] env[63279]: DEBUG nova.compute.manager [req-78c0b4a9-3830-44c6-b3b2-d2508204a1f1 req-838b5cae-2b39-4d22-81be-9f4758a05884 service nova] [instance: a15141bc-424d-48ca-a6d5-c859a3639a0b] Refreshing instance network info cache due to event network-changed-cd0a8ed6-d1ee-4266-8bde-e866ac2873ce. {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11655}} [ 2233.737947] env[63279]: DEBUG oslo_concurrency.lockutils [req-78c0b4a9-3830-44c6-b3b2-d2508204a1f1 req-838b5cae-2b39-4d22-81be-9f4758a05884 service nova] Acquiring lock "refresh_cache-a15141bc-424d-48ca-a6d5-c859a3639a0b" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2233.738269] env[63279]: DEBUG oslo_concurrency.lockutils [req-78c0b4a9-3830-44c6-b3b2-d2508204a1f1 req-838b5cae-2b39-4d22-81be-9f4758a05884 service nova] Acquired lock "refresh_cache-a15141bc-424d-48ca-a6d5-c859a3639a0b" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2233.738500] env[63279]: DEBUG nova.network.neutron [req-78c0b4a9-3830-44c6-b3b2-d2508204a1f1 req-838b5cae-2b39-4d22-81be-9f4758a05884 service nova] [instance: a15141bc-424d-48ca-a6d5-c859a3639a0b] Refreshing network info cache for port cd0a8ed6-d1ee-4266-8bde-e866ac2873ce {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2233.940009] env[63279]: DEBUG oslo_concurrency.lockutils [None req-140984b1-6c87-4255-9ab0-f54ef3dd0bda tempest-ServersTestMultiNic-465204663 tempest-ServersTestMultiNic-465204663-project-member] Lock "f418ff3d-8a5f-4d78-994a-e40b62cfcdd6" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 22.433s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2233.940681] env[63279]: DEBUG nova.compute.manager [None req-d0ca50dd-6411-451b-b137-4618d6023b66 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: 9a6abf10-ed12-49c6-aa16-f428da9f9f9d] Start destroying the instance on the hypervisor. 
{{(pid=63279) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2233.940895] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-d0ca50dd-6411-451b-b137-4618d6023b66 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: 9a6abf10-ed12-49c6-aa16-f428da9f9f9d] Destroying instance {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2233.941839] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22e417f9-45e4-4ed8-94ab-bd59e0bd6d0f {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2233.952730] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-d0ca50dd-6411-451b-b137-4618d6023b66 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: 9a6abf10-ed12-49c6-aa16-f428da9f9f9d] Powering off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2233.955960] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b80828c9-b1b2-4ced-9a78-4b3b23dfa0db {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2233.957519] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087824, 'name': CreateVM_Task} progress is 99%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2233.962353] env[63279]: DEBUG oslo_vmware.api [None req-d0ca50dd-6411-451b-b137-4618d6023b66 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Waiting for the task: (returnval){ [ 2233.962353] env[63279]: value = "task-2087825" [ 2233.962353] env[63279]: _type = "Task" [ 2233.962353] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2233.969745] env[63279]: DEBUG oslo_vmware.api [None req-d0ca50dd-6411-451b-b137-4618d6023b66 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Task: {'id': task-2087825, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2234.004697] env[63279]: DEBUG nova.scheduler.client.report [None req-17cc2163-e46e-46a7-8e0b-8af430363191 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Inventory has not changed for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2234.138816] env[63279]: DEBUG nova.compute.manager [None req-2000fa86-5fc9-463b-916e-e234ef84002b tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: 48794f65-355c-4cad-a83f-6b8cc327dd4d] Start spawning the instance on the hypervisor. 
{{(pid=63279) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 2234.161200] env[63279]: DEBUG nova.virt.hardware [None req-2000fa86-5fc9-463b-916e-e234ef84002b tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-13T17:30:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-13T17:30:01Z,direct_url=,disk_format='vmdk',id=30887889-e45b-4f67-8b3c-16216e594a90,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a935e86eee0a4c38adfe0367d2097a61',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-13T17:30:02Z,virtual_size=,visibility=), allow threads: False {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2234.161578] env[63279]: DEBUG nova.virt.hardware [None req-2000fa86-5fc9-463b-916e-e234ef84002b tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Flavor limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2234.161759] env[63279]: DEBUG nova.virt.hardware [None req-2000fa86-5fc9-463b-916e-e234ef84002b tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Image limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2234.161920] env[63279]: DEBUG nova.virt.hardware [None req-2000fa86-5fc9-463b-916e-e234ef84002b tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Flavor pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2234.162085] env[63279]: DEBUG nova.virt.hardware [None req-2000fa86-5fc9-463b-916e-e234ef84002b tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Image pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2234.162319] env[63279]: DEBUG nova.virt.hardware [None req-2000fa86-5fc9-463b-916e-e234ef84002b tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2234.162558] env[63279]: DEBUG nova.virt.hardware [None req-2000fa86-5fc9-463b-916e-e234ef84002b tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 2234.162723] env[63279]: DEBUG nova.virt.hardware [None req-2000fa86-5fc9-463b-916e-e234ef84002b tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 2234.162893] env[63279]: DEBUG nova.virt.hardware [None 
req-2000fa86-5fc9-463b-916e-e234ef84002b tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Got 1 possible topologies {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2234.163081] env[63279]: DEBUG nova.virt.hardware [None req-2000fa86-5fc9-463b-916e-e234ef84002b tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2234.163265] env[63279]: DEBUG nova.virt.hardware [None req-2000fa86-5fc9-463b-916e-e234ef84002b tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2234.164145] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26f5c75e-b8bb-4966-b95f-a22590c19f9f {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2234.172315] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b233c9fb-f048-42a8-a29e-d1922ad9d8be {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2234.456510] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087824, 'name': CreateVM_Task, 'duration_secs': 0.531696} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2234.456693] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a15141bc-424d-48ca-a6d5-c859a3639a0b] Created VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2234.457468] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b04333d3-6e55-46a1-b066-6cbcaa49e26d tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f425cf74-2f77-4c7b-99ba-64ff14b01dc6" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2234.457713] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b04333d3-6e55-46a1-b066-6cbcaa49e26d tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f425cf74-2f77-4c7b-99ba-64ff14b01dc6" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2234.458157] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b04333d3-6e55-46a1-b066-6cbcaa49e26d tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f425cf74-2f77-4c7b-99ba-64ff14b01dc6" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2234.458461] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8ad924b7-e395-4851-b416-01c158dd5480 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2234.463679] env[63279]: DEBUG 
oslo_vmware.api [None req-b04333d3-6e55-46a1-b066-6cbcaa49e26d tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Waiting for the task: (returnval){ [ 2234.463679] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52778572-6376-20f0-99a1-82ec9c609fdb" [ 2234.463679] env[63279]: _type = "Task" [ 2234.463679] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2234.480075] env[63279]: DEBUG oslo_vmware.api [None req-b04333d3-6e55-46a1-b066-6cbcaa49e26d tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52778572-6376-20f0-99a1-82ec9c609fdb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2234.480523] env[63279]: DEBUG oslo_vmware.api [None req-d0ca50dd-6411-451b-b137-4618d6023b66 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Task: {'id': task-2087825, 'name': PowerOffVM_Task, 'duration_secs': 0.468009} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2234.480976] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-d0ca50dd-6411-451b-b137-4618d6023b66 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: 9a6abf10-ed12-49c6-aa16-f428da9f9f9d] Powered off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2234.481368] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-d0ca50dd-6411-451b-b137-4618d6023b66 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: 9a6abf10-ed12-49c6-aa16-f428da9f9f9d] Unregistering the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2234.483026] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6337a424-a6de-4627-9905-93f45cca35e6 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2234.510364] env[63279]: DEBUG oslo_concurrency.lockutils [None req-17cc2163-e46e-46a7-8e0b-8af430363191 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.399s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2234.511559] env[63279]: DEBUG nova.compute.manager [None req-17cc2163-e46e-46a7-8e0b-8af430363191 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] [instance: 6b4a94f9-1cd9-466f-844f-8d692b732abc] Start building networks asynchronously for instance. 
{{(pid=63279) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 2234.520627] env[63279]: DEBUG oslo_concurrency.lockutils [None req-d395c1d6-e874-4b9b-bf47-a88c46bb86f6 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 7.918s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2234.520627] env[63279]: DEBUG nova.objects.instance [None req-d395c1d6-e874-4b9b-bf47-a88c46bb86f6 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Lazy-loading 'resources' on Instance uuid 14c698c8-7459-4843-bb19-f915742e3e53 {{(pid=63279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2234.560734] env[63279]: DEBUG oslo_concurrency.lockutils [None req-4f1b70e3-4a9e-4d3a-9198-bdea7c3cc57e tempest-ServersTestMultiNic-465204663 tempest-ServersTestMultiNic-465204663-project-member] Acquiring lock "f418ff3d-8a5f-4d78-994a-e40b62cfcdd6" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2234.560734] env[63279]: DEBUG oslo_concurrency.lockutils [None req-4f1b70e3-4a9e-4d3a-9198-bdea7c3cc57e tempest-ServersTestMultiNic-465204663 tempest-ServersTestMultiNic-465204663-project-member] Lock "f418ff3d-8a5f-4d78-994a-e40b62cfcdd6" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2234.560734] env[63279]: DEBUG oslo_concurrency.lockutils [None req-4f1b70e3-4a9e-4d3a-9198-bdea7c3cc57e tempest-ServersTestMultiNic-465204663 tempest-ServersTestMultiNic-465204663-project-member] Acquiring lock "f418ff3d-8a5f-4d78-994a-e40b62cfcdd6-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2234.560734] env[63279]: DEBUG oslo_concurrency.lockutils [None req-4f1b70e3-4a9e-4d3a-9198-bdea7c3cc57e tempest-ServersTestMultiNic-465204663 tempest-ServersTestMultiNic-465204663-project-member] Lock "f418ff3d-8a5f-4d78-994a-e40b62cfcdd6-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2234.560734] env[63279]: DEBUG oslo_concurrency.lockutils [None req-4f1b70e3-4a9e-4d3a-9198-bdea7c3cc57e tempest-ServersTestMultiNic-465204663 tempest-ServersTestMultiNic-465204663-project-member] Lock "f418ff3d-8a5f-4d78-994a-e40b62cfcdd6-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2234.561729] env[63279]: INFO nova.compute.manager [None req-4f1b70e3-4a9e-4d3a-9198-bdea7c3cc57e tempest-ServersTestMultiNic-465204663 tempest-ServersTestMultiNic-465204663-project-member] [instance: f418ff3d-8a5f-4d78-994a-e40b62cfcdd6] Terminating instance [ 2234.745121] env[63279]: DEBUG nova.compute.manager [req-2b7974a2-5b86-46c7-b458-f0e3c913c543 req-15a8a0e3-8578-400b-8885-b7109208ed02 service nova] [instance: 
48794f65-355c-4cad-a83f-6b8cc327dd4d] Received event network-vif-plugged-29fc0fdf-41a6-471b-a08c-4e62bd3d372b {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2234.745121] env[63279]: DEBUG oslo_concurrency.lockutils [req-2b7974a2-5b86-46c7-b458-f0e3c913c543 req-15a8a0e3-8578-400b-8885-b7109208ed02 service nova] Acquiring lock "48794f65-355c-4cad-a83f-6b8cc327dd4d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2234.745121] env[63279]: DEBUG oslo_concurrency.lockutils [req-2b7974a2-5b86-46c7-b458-f0e3c913c543 req-15a8a0e3-8578-400b-8885-b7109208ed02 service nova] Lock "48794f65-355c-4cad-a83f-6b8cc327dd4d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2234.745121] env[63279]: DEBUG oslo_concurrency.lockutils [req-2b7974a2-5b86-46c7-b458-f0e3c913c543 req-15a8a0e3-8578-400b-8885-b7109208ed02 service nova] Lock "48794f65-355c-4cad-a83f-6b8cc327dd4d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2234.745121] env[63279]: DEBUG nova.compute.manager [req-2b7974a2-5b86-46c7-b458-f0e3c913c543 req-15a8a0e3-8578-400b-8885-b7109208ed02 service nova] [instance: 48794f65-355c-4cad-a83f-6b8cc327dd4d] No waiting events found dispatching network-vif-plugged-29fc0fdf-41a6-471b-a08c-4e62bd3d372b {{(pid=63279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 2234.745121] env[63279]: WARNING nova.compute.manager [req-2b7974a2-5b86-46c7-b458-f0e3c913c543 req-15a8a0e3-8578-400b-8885-b7109208ed02 service nova] [instance: 48794f65-355c-4cad-a83f-6b8cc327dd4d] Received unexpected event network-vif-plugged-29fc0fdf-41a6-471b-a08c-4e62bd3d372b for instance with vm_state building and task_state spawning. [ 2234.746103] env[63279]: DEBUG nova.network.neutron [req-78c0b4a9-3830-44c6-b3b2-d2508204a1f1 req-838b5cae-2b39-4d22-81be-9f4758a05884 service nova] [instance: a15141bc-424d-48ca-a6d5-c859a3639a0b] Updated VIF entry in instance network info cache for port cd0a8ed6-d1ee-4266-8bde-e866ac2873ce. 
{{(pid=63279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2234.746597] env[63279]: DEBUG nova.network.neutron [req-78c0b4a9-3830-44c6-b3b2-d2508204a1f1 req-838b5cae-2b39-4d22-81be-9f4758a05884 service nova] [instance: a15141bc-424d-48ca-a6d5-c859a3639a0b] Updating instance_info_cache with network_info: [{"id": "cd0a8ed6-d1ee-4266-8bde-e866ac2873ce", "address": "fa:16:3e:fe:5c:41", "network": {"id": "4f906777-9da9-42b1-9146-359f04c7c47f", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-923457018-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.193", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "131db3d2daa24712b6e11592cf789b33", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "abe48956-848a-4e1f-b1f1-a27baa5390b9", "external-id": "nsx-vlan-transportzone-238", "segmentation_id": 238, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcd0a8ed6-d1", "ovs_interfaceid": "cd0a8ed6-d1ee-4266-8bde-e866ac2873ce", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2234.840404] env[63279]: DEBUG nova.network.neutron [None req-2000fa86-5fc9-463b-916e-e234ef84002b tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: 48794f65-355c-4cad-a83f-6b8cc327dd4d] Successfully updated port: 29fc0fdf-41a6-471b-a08c-4e62bd3d372b {{(pid=63279) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2234.934703] env[63279]: DEBUG oslo_concurrency.lockutils [None req-36db34e3-d814-4eaf-b698-fad1b0a7fb81 tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] Acquiring lock "21aca39e-8513-49bd-93e9-0d101cee591f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2234.934939] env[63279]: DEBUG oslo_concurrency.lockutils [None req-36db34e3-d814-4eaf-b698-fad1b0a7fb81 tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] Lock "21aca39e-8513-49bd-93e9-0d101cee591f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2234.979627] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b04333d3-6e55-46a1-b066-6cbcaa49e26d tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f425cf74-2f77-4c7b-99ba-64ff14b01dc6" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2234.979986] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-b04333d3-6e55-46a1-b066-6cbcaa49e26d 
tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] [instance: a15141bc-424d-48ca-a6d5-c859a3639a0b] Processing image f425cf74-2f77-4c7b-99ba-64ff14b01dc6 {{(pid=63279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2234.980110] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b04333d3-6e55-46a1-b066-6cbcaa49e26d tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f425cf74-2f77-4c7b-99ba-64ff14b01dc6/f425cf74-2f77-4c7b-99ba-64ff14b01dc6.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2234.980256] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b04333d3-6e55-46a1-b066-6cbcaa49e26d tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f425cf74-2f77-4c7b-99ba-64ff14b01dc6/f425cf74-2f77-4c7b-99ba-64ff14b01dc6.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2234.980442] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-b04333d3-6e55-46a1-b066-6cbcaa49e26d tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2234.980786] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6f1dc11a-dae9-464d-884d-80b0a6e7d4e1 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2234.997803] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-b04333d3-6e55-46a1-b066-6cbcaa49e26d tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2234.998017] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-b04333d3-6e55-46a1-b066-6cbcaa49e26d tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2234.998736] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9086894b-7612-4d4f-9673-11cad4a1d466 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2235.005819] env[63279]: DEBUG oslo_vmware.api [None req-b04333d3-6e55-46a1-b066-6cbcaa49e26d tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Waiting for the task: (returnval){ [ 2235.005819] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]521e36f2-a5e0-ac42-6189-d92c40521af0" [ 2235.005819] env[63279]: _type = "Task" [ 2235.005819] env[63279]: } to complete. 
{{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2235.014625] env[63279]: DEBUG oslo_vmware.api [None req-b04333d3-6e55-46a1-b066-6cbcaa49e26d tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]521e36f2-a5e0-ac42-6189-d92c40521af0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2235.021969] env[63279]: DEBUG nova.compute.utils [None req-17cc2163-e46e-46a7-8e0b-8af430363191 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Using /dev/sd instead of None {{(pid=63279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2235.026011] env[63279]: DEBUG nova.compute.manager [None req-17cc2163-e46e-46a7-8e0b-8af430363191 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] [instance: 6b4a94f9-1cd9-466f-844f-8d692b732abc] Allocating IP information in the background. {{(pid=63279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 2235.026186] env[63279]: DEBUG nova.network.neutron [None req-17cc2163-e46e-46a7-8e0b-8af430363191 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] [instance: 6b4a94f9-1cd9-466f-844f-8d692b732abc] allocate_for_instance() {{(pid=63279) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2235.066534] env[63279]: DEBUG nova.policy [None req-17cc2163-e46e-46a7-8e0b-8af430363191 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '55199c771de04067a936194078ef99f6', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4fcef39e334249afb9636455802059c5', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63279) authorize /opt/stack/nova/nova/policy.py:192}} [ 2235.068302] env[63279]: DEBUG nova.compute.manager [None req-4f1b70e3-4a9e-4d3a-9198-bdea7c3cc57e tempest-ServersTestMultiNic-465204663 tempest-ServersTestMultiNic-465204663-project-member] [instance: f418ff3d-8a5f-4d78-994a-e40b62cfcdd6] Start destroying the instance on the hypervisor. 
{{(pid=63279) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2235.068584] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-4f1b70e3-4a9e-4d3a-9198-bdea7c3cc57e tempest-ServersTestMultiNic-465204663 tempest-ServersTestMultiNic-465204663-project-member] [instance: f418ff3d-8a5f-4d78-994a-e40b62cfcdd6] Destroying instance {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2235.069668] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87413250-bfe1-4fd1-977f-5ab8e9cad258 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2235.080690] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-4f1b70e3-4a9e-4d3a-9198-bdea7c3cc57e tempest-ServersTestMultiNic-465204663 tempest-ServersTestMultiNic-465204663-project-member] [instance: f418ff3d-8a5f-4d78-994a-e40b62cfcdd6] Powering off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2235.080999] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-cbfa5ea7-6b5c-472b-92ff-f3013e3a0e93 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2235.088365] env[63279]: DEBUG oslo_vmware.api [None req-4f1b70e3-4a9e-4d3a-9198-bdea7c3cc57e tempest-ServersTestMultiNic-465204663 tempest-ServersTestMultiNic-465204663-project-member] Waiting for the task: (returnval){ [ 2235.088365] env[63279]: value = "task-2087827" [ 2235.088365] env[63279]: _type = "Task" [ 2235.088365] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2235.096600] env[63279]: DEBUG oslo_vmware.api [None req-4f1b70e3-4a9e-4d3a-9198-bdea7c3cc57e tempest-ServersTestMultiNic-465204663 tempest-ServersTestMultiNic-465204663-project-member] Task: {'id': task-2087827, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2235.252691] env[63279]: DEBUG oslo_concurrency.lockutils [req-78c0b4a9-3830-44c6-b3b2-d2508204a1f1 req-838b5cae-2b39-4d22-81be-9f4758a05884 service nova] Releasing lock "refresh_cache-a15141bc-424d-48ca-a6d5-c859a3639a0b" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2235.282904] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b97f8ef-03ed-4547-a6ed-b682fcef3cd5 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2235.291055] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f4aaed0-fe57-41c8-897b-37d7a373c2ea {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2235.321738] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f70889ab-f59c-4f8c-996c-c7757c4938f2 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2235.329410] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-747d718a-4548-42a5-95d4-c75bb9d1b709 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2235.342922] env[63279]: DEBUG oslo_concurrency.lockutils [None req-2000fa86-5fc9-463b-916e-e234ef84002b tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Acquiring lock "refresh_cache-48794f65-355c-4cad-a83f-6b8cc327dd4d" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2235.343096] env[63279]: DEBUG oslo_concurrency.lockutils [None req-2000fa86-5fc9-463b-916e-e234ef84002b tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Acquired lock "refresh_cache-48794f65-355c-4cad-a83f-6b8cc327dd4d" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2235.344363] env[63279]: DEBUG nova.network.neutron [None req-2000fa86-5fc9-463b-916e-e234ef84002b tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: 48794f65-355c-4cad-a83f-6b8cc327dd4d] Building network info cache for instance {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2235.348947] env[63279]: DEBUG nova.compute.provider_tree [None req-d395c1d6-e874-4b9b-bf47-a88c46bb86f6 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Inventory has not changed in ProviderTree for provider: 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2235.400018] env[63279]: DEBUG nova.network.neutron [None req-17cc2163-e46e-46a7-8e0b-8af430363191 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] [instance: 6b4a94f9-1cd9-466f-844f-8d692b732abc] Successfully created port: 3fe8ec9b-e8c9-486c-9d96-785b9a7cb897 {{(pid=63279) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2235.438416] env[63279]: DEBUG nova.compute.manager [None req-36db34e3-d814-4eaf-b698-fad1b0a7fb81 tempest-ServerRescueTestJSON-987826405 
tempest-ServerRescueTestJSON-987826405-project-member] [instance: 21aca39e-8513-49bd-93e9-0d101cee591f] Starting instance... {{(pid=63279) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 2235.517530] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-b04333d3-6e55-46a1-b066-6cbcaa49e26d tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] [instance: a15141bc-424d-48ca-a6d5-c859a3639a0b] Preparing fetch location {{(pid=63279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2235.517849] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-b04333d3-6e55-46a1-b066-6cbcaa49e26d tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] [instance: a15141bc-424d-48ca-a6d5-c859a3639a0b] Fetch image to [datastore1] OSTACK_IMG_8bf63f11-c67b-4d9b-8998-2812f08ce1fb/OSTACK_IMG_8bf63f11-c67b-4d9b-8998-2812f08ce1fb.vmdk {{(pid=63279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2235.518099] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-b04333d3-6e55-46a1-b066-6cbcaa49e26d tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] [instance: a15141bc-424d-48ca-a6d5-c859a3639a0b] Downloading stream optimized image f425cf74-2f77-4c7b-99ba-64ff14b01dc6 to [datastore1] OSTACK_IMG_8bf63f11-c67b-4d9b-8998-2812f08ce1fb/OSTACK_IMG_8bf63f11-c67b-4d9b-8998-2812f08ce1fb.vmdk on the data store datastore1 as vApp {{(pid=63279) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 2235.518308] env[63279]: DEBUG nova.virt.vmwareapi.images [None req-b04333d3-6e55-46a1-b066-6cbcaa49e26d tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] [instance: a15141bc-424d-48ca-a6d5-c859a3639a0b] Downloading image file data f425cf74-2f77-4c7b-99ba-64ff14b01dc6 to the ESX as VM named 'OSTACK_IMG_8bf63f11-c67b-4d9b-8998-2812f08ce1fb' {{(pid=63279) fetch_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 2235.529956] env[63279]: DEBUG nova.compute.manager [None req-17cc2163-e46e-46a7-8e0b-8af430363191 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] [instance: 6b4a94f9-1cd9-466f-844f-8d692b732abc] Start building block device mappings for instance. {{(pid=63279) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 2235.601022] env[63279]: DEBUG oslo_vmware.api [None req-4f1b70e3-4a9e-4d3a-9198-bdea7c3cc57e tempest-ServersTestMultiNic-465204663 tempest-ServersTestMultiNic-465204663-project-member] Task: {'id': task-2087827, 'name': PowerOffVM_Task, 'duration_secs': 0.202567} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2235.602740] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-4f1b70e3-4a9e-4d3a-9198-bdea7c3cc57e tempest-ServersTestMultiNic-465204663 tempest-ServersTestMultiNic-465204663-project-member] [instance: f418ff3d-8a5f-4d78-994a-e40b62cfcdd6] Powered off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2235.602920] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-4f1b70e3-4a9e-4d3a-9198-bdea7c3cc57e tempest-ServersTestMultiNic-465204663 tempest-ServersTestMultiNic-465204663-project-member] [instance: f418ff3d-8a5f-4d78-994a-e40b62cfcdd6] Unregistering the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2235.603252] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-5adbc209-5849-46f0-8098-5e353ac92877 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2235.614341] env[63279]: DEBUG oslo_vmware.rw_handles [None req-b04333d3-6e55-46a1-b066-6cbcaa49e26d tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 2235.614341] env[63279]: value = "resgroup-9" [ 2235.614341] env[63279]: _type = "ResourcePool" [ 2235.614341] env[63279]: }. {{(pid=63279) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 2235.614577] env[63279]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-eeddcc2e-240f-4375-a028-35b48e705af9 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2235.634871] env[63279]: DEBUG oslo_vmware.rw_handles [None req-b04333d3-6e55-46a1-b066-6cbcaa49e26d tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Lease: (returnval){ [ 2235.634871] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]5226fb74-7440-c59d-d472-9f0c7e89bd73" [ 2235.634871] env[63279]: _type = "HttpNfcLease" [ 2235.634871] env[63279]: } obtained for vApp import into resource pool (val){ [ 2235.634871] env[63279]: value = "resgroup-9" [ 2235.634871] env[63279]: _type = "ResourcePool" [ 2235.634871] env[63279]: }. {{(pid=63279) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 2235.635223] env[63279]: DEBUG oslo_vmware.api [None req-b04333d3-6e55-46a1-b066-6cbcaa49e26d tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Waiting for the lease: (returnval){ [ 2235.635223] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]5226fb74-7440-c59d-d472-9f0c7e89bd73" [ 2235.635223] env[63279]: _type = "HttpNfcLease" [ 2235.635223] env[63279]: } to be ready. {{(pid=63279) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 2235.640870] env[63279]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 2235.640870] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]5226fb74-7440-c59d-d472-9f0c7e89bd73" [ 2235.640870] env[63279]: _type = "HttpNfcLease" [ 2235.640870] env[63279]: } is initializing. 
{{(pid=63279) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 2235.855534] env[63279]: DEBUG nova.scheduler.client.report [None req-d395c1d6-e874-4b9b-bf47-a88c46bb86f6 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Inventory has not changed for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2235.881409] env[63279]: DEBUG nova.network.neutron [None req-2000fa86-5fc9-463b-916e-e234ef84002b tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: 48794f65-355c-4cad-a83f-6b8cc327dd4d] Instance cache missing network info. {{(pid=63279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2235.959810] env[63279]: DEBUG oslo_concurrency.lockutils [None req-36db34e3-d814-4eaf-b698-fad1b0a7fb81 tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2236.016415] env[63279]: DEBUG nova.network.neutron [None req-2000fa86-5fc9-463b-916e-e234ef84002b tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: 48794f65-355c-4cad-a83f-6b8cc327dd4d] Updating instance_info_cache with network_info: [{"id": "29fc0fdf-41a6-471b-a08c-4e62bd3d372b", "address": "fa:16:3e:24:19:4c", "network": {"id": "e2728625-1c28-407c-946b-97923d57b409", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1735124510-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a5a719a21fe248c49d0d0151d218866b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a9abd00f-2cea-40f8-9804-a56b6431192d", "external-id": "nsx-vlan-transportzone-639", "segmentation_id": 639, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap29fc0fdf-41", "ovs_interfaceid": "29fc0fdf-41a6-471b-a08c-4e62bd3d372b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2236.106918] env[63279]: INFO nova.compute.manager [None req-b03110fc-4491-4829-90b4-ccf11554ef8a tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] [instance: 0e12ab9b-a701-4e0f-9d96-939090f50494] Get console output [ 2236.107197] env[63279]: WARNING nova.virt.vmwareapi.driver [None 
req-b03110fc-4491-4829-90b4-ccf11554ef8a tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] [instance: 0e12ab9b-a701-4e0f-9d96-939090f50494] The console log is missing. Check your VSPC configuration [ 2236.143087] env[63279]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 2236.143087] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]5226fb74-7440-c59d-d472-9f0c7e89bd73" [ 2236.143087] env[63279]: _type = "HttpNfcLease" [ 2236.143087] env[63279]: } is initializing. {{(pid=63279) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 2236.361108] env[63279]: DEBUG oslo_concurrency.lockutils [None req-d395c1d6-e874-4b9b-bf47-a88c46bb86f6 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.846s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2236.363746] env[63279]: DEBUG oslo_concurrency.lockutils [None req-36db34e3-d814-4eaf-b698-fad1b0a7fb81 tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.404s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2236.365633] env[63279]: INFO nova.compute.claims [None req-36db34e3-d814-4eaf-b698-fad1b0a7fb81 tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] [instance: 21aca39e-8513-49bd-93e9-0d101cee591f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2236.380046] env[63279]: INFO nova.scheduler.client.report [None req-d395c1d6-e874-4b9b-bf47-a88c46bb86f6 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Deleted allocations for instance 14c698c8-7459-4843-bb19-f915742e3e53 [ 2236.518759] env[63279]: DEBUG oslo_concurrency.lockutils [None req-2000fa86-5fc9-463b-916e-e234ef84002b tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Releasing lock "refresh_cache-48794f65-355c-4cad-a83f-6b8cc327dd4d" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2236.519106] env[63279]: DEBUG nova.compute.manager [None req-2000fa86-5fc9-463b-916e-e234ef84002b tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: 48794f65-355c-4cad-a83f-6b8cc327dd4d] Instance network_info: |[{"id": "29fc0fdf-41a6-471b-a08c-4e62bd3d372b", "address": "fa:16:3e:24:19:4c", "network": {"id": "e2728625-1c28-407c-946b-97923d57b409", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1735124510-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a5a719a21fe248c49d0d0151d218866b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a9abd00f-2cea-40f8-9804-a56b6431192d", 
"external-id": "nsx-vlan-transportzone-639", "segmentation_id": 639, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap29fc0fdf-41", "ovs_interfaceid": "29fc0fdf-41a6-471b-a08c-4e62bd3d372b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 2236.519558] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-2000fa86-5fc9-463b-916e-e234ef84002b tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: 48794f65-355c-4cad-a83f-6b8cc327dd4d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:24:19:4c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a9abd00f-2cea-40f8-9804-a56b6431192d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '29fc0fdf-41a6-471b-a08c-4e62bd3d372b', 'vif_model': 'vmxnet3'}] {{(pid=63279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2236.526946] env[63279]: DEBUG oslo.service.loopingcall [None req-2000fa86-5fc9-463b-916e-e234ef84002b tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2236.527170] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 48794f65-355c-4cad-a83f-6b8cc327dd4d] Creating VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2236.527395] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e576255f-7ed3-4feb-9787-19e62a9854a1 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2236.543019] env[63279]: DEBUG nova.compute.manager [None req-17cc2163-e46e-46a7-8e0b-8af430363191 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] [instance: 6b4a94f9-1cd9-466f-844f-8d692b732abc] Start spawning the instance on the hypervisor. {{(pid=63279) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 2236.550864] env[63279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2236.550864] env[63279]: value = "task-2087830" [ 2236.550864] env[63279]: _type = "Task" [ 2236.550864] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2236.558531] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087830, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2236.566813] env[63279]: DEBUG nova.virt.hardware [None req-17cc2163-e46e-46a7-8e0b-8af430363191 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-13T17:30:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-13T17:30:01Z,direct_url=,disk_format='vmdk',id=30887889-e45b-4f67-8b3c-16216e594a90,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a935e86eee0a4c38adfe0367d2097a61',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-13T17:30:02Z,virtual_size=,visibility=), allow threads: False {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2236.567059] env[63279]: DEBUG nova.virt.hardware [None req-17cc2163-e46e-46a7-8e0b-8af430363191 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Flavor limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2236.567222] env[63279]: DEBUG nova.virt.hardware [None req-17cc2163-e46e-46a7-8e0b-8af430363191 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Image limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2236.567407] env[63279]: DEBUG nova.virt.hardware [None req-17cc2163-e46e-46a7-8e0b-8af430363191 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Flavor pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2236.567557] env[63279]: DEBUG nova.virt.hardware [None req-17cc2163-e46e-46a7-8e0b-8af430363191 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Image pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2236.567763] env[63279]: DEBUG nova.virt.hardware [None req-17cc2163-e46e-46a7-8e0b-8af430363191 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2236.567913] env[63279]: DEBUG nova.virt.hardware [None req-17cc2163-e46e-46a7-8e0b-8af430363191 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 2236.568088] env[63279]: DEBUG nova.virt.hardware [None req-17cc2163-e46e-46a7-8e0b-8af430363191 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 2236.568263] env[63279]: DEBUG nova.virt.hardware [None req-17cc2163-e46e-46a7-8e0b-8af430363191 tempest-ServersTestJSON-1420611653 
tempest-ServersTestJSON-1420611653-project-member] Got 1 possible topologies {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2236.568429] env[63279]: DEBUG nova.virt.hardware [None req-17cc2163-e46e-46a7-8e0b-8af430363191 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2236.568604] env[63279]: DEBUG nova.virt.hardware [None req-17cc2163-e46e-46a7-8e0b-8af430363191 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2236.569373] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cee1c81d-76f8-453f-9e9b-76513cc06674 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2236.580038] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b6bdbc6-7016-4f8b-be70-91475ee278b6 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2236.644629] env[63279]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 2236.644629] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]5226fb74-7440-c59d-d472-9f0c7e89bd73" [ 2236.644629] env[63279]: _type = "HttpNfcLease" [ 2236.644629] env[63279]: } is ready. {{(pid=63279) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 2236.644938] env[63279]: DEBUG oslo_vmware.rw_handles [None req-b04333d3-6e55-46a1-b066-6cbcaa49e26d tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 2236.644938] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]5226fb74-7440-c59d-d472-9f0c7e89bd73" [ 2236.644938] env[63279]: _type = "HttpNfcLease" [ 2236.644938] env[63279]: }. {{(pid=63279) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 2236.645680] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f48677b-f12e-49ba-a619-6cd5ed42d3c9 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2236.654348] env[63279]: DEBUG oslo_vmware.rw_handles [None req-b04333d3-6e55-46a1-b066-6cbcaa49e26d tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52654029-5b92-4ab9-aca0-13992d0cc199/disk-0.vmdk from lease info. {{(pid=63279) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 2236.655093] env[63279]: DEBUG oslo_vmware.rw_handles [None req-b04333d3-6e55-46a1-b066-6cbcaa49e26d tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Creating HTTP connection to write to file with size = 31671296 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52654029-5b92-4ab9-aca0-13992d0cc199/disk-0.vmdk. 
{{(pid=63279) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2236.731032] env[63279]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-a7be3f99-3eb5-403c-853c-d2d96a87f98a {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2236.776684] env[63279]: DEBUG nova.compute.manager [req-76f3a11d-df44-468e-a7ed-87e2629706ef req-9d14daa4-be7b-49b0-b121-10a18bf21bb5 service nova] [instance: 48794f65-355c-4cad-a83f-6b8cc327dd4d] Received event network-changed-29fc0fdf-41a6-471b-a08c-4e62bd3d372b {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2236.776928] env[63279]: DEBUG nova.compute.manager [req-76f3a11d-df44-468e-a7ed-87e2629706ef req-9d14daa4-be7b-49b0-b121-10a18bf21bb5 service nova] [instance: 48794f65-355c-4cad-a83f-6b8cc327dd4d] Refreshing instance network info cache due to event network-changed-29fc0fdf-41a6-471b-a08c-4e62bd3d372b. {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11655}} [ 2236.777226] env[63279]: DEBUG oslo_concurrency.lockutils [req-76f3a11d-df44-468e-a7ed-87e2629706ef req-9d14daa4-be7b-49b0-b121-10a18bf21bb5 service nova] Acquiring lock "refresh_cache-48794f65-355c-4cad-a83f-6b8cc327dd4d" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2236.777427] env[63279]: DEBUG oslo_concurrency.lockutils [req-76f3a11d-df44-468e-a7ed-87e2629706ef req-9d14daa4-be7b-49b0-b121-10a18bf21bb5 service nova] Acquired lock "refresh_cache-48794f65-355c-4cad-a83f-6b8cc327dd4d" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2236.777633] env[63279]: DEBUG nova.network.neutron [req-76f3a11d-df44-468e-a7ed-87e2629706ef req-9d14daa4-be7b-49b0-b121-10a18bf21bb5 service nova] [instance: 48794f65-355c-4cad-a83f-6b8cc327dd4d] Refreshing network info cache for port 29fc0fdf-41a6-471b-a08c-4e62bd3d372b {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2236.887913] env[63279]: DEBUG oslo_concurrency.lockutils [None req-d395c1d6-e874-4b9b-bf47-a88c46bb86f6 tempest-ImagesTestJSON-604336972 tempest-ImagesTestJSON-604336972-project-member] Lock "14c698c8-7459-4843-bb19-f915742e3e53" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 14.282s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2237.062577] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087830, 'name': CreateVM_Task} progress is 25%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2237.238518] env[63279]: DEBUG oslo_concurrency.lockutils [None req-7f34f191-f074-43df-82d6-b7c057695f88 tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] Acquiring lock "0e12ab9b-a701-4e0f-9d96-939090f50494" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2237.238518] env[63279]: DEBUG oslo_concurrency.lockutils [None req-7f34f191-f074-43df-82d6-b7c057695f88 tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] Lock "0e12ab9b-a701-4e0f-9d96-939090f50494" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2237.238518] env[63279]: DEBUG oslo_concurrency.lockutils [None req-7f34f191-f074-43df-82d6-b7c057695f88 tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] Acquiring lock "0e12ab9b-a701-4e0f-9d96-939090f50494-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2237.238518] env[63279]: DEBUG oslo_concurrency.lockutils [None req-7f34f191-f074-43df-82d6-b7c057695f88 tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] Lock "0e12ab9b-a701-4e0f-9d96-939090f50494-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2237.238829] env[63279]: DEBUG oslo_concurrency.lockutils [None req-7f34f191-f074-43df-82d6-b7c057695f88 tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] Lock "0e12ab9b-a701-4e0f-9d96-939090f50494-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2237.240961] env[63279]: INFO nova.compute.manager [None req-7f34f191-f074-43df-82d6-b7c057695f88 tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] [instance: 0e12ab9b-a701-4e0f-9d96-939090f50494] Terminating instance [ 2237.562462] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087830, 'name': CreateVM_Task} progress is 25%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2237.673949] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2b2cfc2-e549-4a3f-9fd5-2e2c16b9402d {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2237.685271] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5fd8289e-dc04-4153-be9a-1c4237563977 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2237.732045] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc462a33-5308-4633-84ce-7ab6dff3fa24 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2237.736591] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5862c5ee-4509-4196-a45a-cb81bf2d1c80 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2237.744470] env[63279]: DEBUG nova.network.neutron [req-76f3a11d-df44-468e-a7ed-87e2629706ef req-9d14daa4-be7b-49b0-b121-10a18bf21bb5 service nova] [instance: 48794f65-355c-4cad-a83f-6b8cc327dd4d] Updated VIF entry in instance network info cache for port 29fc0fdf-41a6-471b-a08c-4e62bd3d372b. {{(pid=63279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2237.744470] env[63279]: DEBUG nova.network.neutron [req-76f3a11d-df44-468e-a7ed-87e2629706ef req-9d14daa4-be7b-49b0-b121-10a18bf21bb5 service nova] [instance: 48794f65-355c-4cad-a83f-6b8cc327dd4d] Updating instance_info_cache with network_info: [{"id": "29fc0fdf-41a6-471b-a08c-4e62bd3d372b", "address": "fa:16:3e:24:19:4c", "network": {"id": "e2728625-1c28-407c-946b-97923d57b409", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1735124510-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a5a719a21fe248c49d0d0151d218866b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a9abd00f-2cea-40f8-9804-a56b6431192d", "external-id": "nsx-vlan-transportzone-639", "segmentation_id": 639, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap29fc0fdf-41", "ovs_interfaceid": "29fc0fdf-41a6-471b-a08c-4e62bd3d372b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2237.745449] env[63279]: DEBUG nova.compute.manager [None req-7f34f191-f074-43df-82d6-b7c057695f88 tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] [instance: 0e12ab9b-a701-4e0f-9d96-939090f50494] Start destroying the instance on the hypervisor. 
{{(pid=63279) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2237.745713] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-7f34f191-f074-43df-82d6-b7c057695f88 tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] [instance: 0e12ab9b-a701-4e0f-9d96-939090f50494] Destroying instance {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2237.747074] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6d03b89-c43e-4fc6-bf43-ec0c43b69065 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2237.760011] env[63279]: DEBUG nova.compute.provider_tree [None req-36db34e3-d814-4eaf-b698-fad1b0a7fb81 tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] Inventory has not changed in ProviderTree for provider: 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2237.770533] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-7f34f191-f074-43df-82d6-b7c057695f88 tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] [instance: 0e12ab9b-a701-4e0f-9d96-939090f50494] Powering off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2237.770853] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2c6c5507-ed92-4890-b05f-53650a7b4bd5 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2237.779912] env[63279]: DEBUG oslo_vmware.api [None req-7f34f191-f074-43df-82d6-b7c057695f88 tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] Waiting for the task: (returnval){ [ 2237.779912] env[63279]: value = "task-2087831" [ 2237.779912] env[63279]: _type = "Task" [ 2237.779912] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2237.790892] env[63279]: DEBUG oslo_vmware.api [None req-7f34f191-f074-43df-82d6-b7c057695f88 tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] Task: {'id': task-2087831, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2237.833480] env[63279]: DEBUG oslo_vmware.rw_handles [None req-b04333d3-6e55-46a1-b066-6cbcaa49e26d tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Completed reading data from the image iterator. {{(pid=63279) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2237.833728] env[63279]: DEBUG oslo_vmware.rw_handles [None req-b04333d3-6e55-46a1-b066-6cbcaa49e26d tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52654029-5b92-4ab9-aca0-13992d0cc199/disk-0.vmdk. 
{{(pid=63279) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 2237.834601] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b91dc2a8-4533-4cfc-99a0-817c727682cc {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2237.841126] env[63279]: DEBUG oslo_vmware.rw_handles [None req-b04333d3-6e55-46a1-b066-6cbcaa49e26d tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52654029-5b92-4ab9-aca0-13992d0cc199/disk-0.vmdk is in state: ready. {{(pid=63279) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 2237.841296] env[63279]: DEBUG oslo_vmware.rw_handles [None req-b04333d3-6e55-46a1-b066-6cbcaa49e26d tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Releasing lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52654029-5b92-4ab9-aca0-13992d0cc199/disk-0.vmdk. {{(pid=63279) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 2237.841520] env[63279]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-b2f24209-2fa0-453e-a039-dd9d56516ab6 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2238.059786] env[63279]: DEBUG oslo_vmware.rw_handles [None req-b04333d3-6e55-46a1-b066-6cbcaa49e26d tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Closed VMDK write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52654029-5b92-4ab9-aca0-13992d0cc199/disk-0.vmdk. {{(pid=63279) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 2238.060304] env[63279]: INFO nova.virt.vmwareapi.images [None req-b04333d3-6e55-46a1-b066-6cbcaa49e26d tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] [instance: a15141bc-424d-48ca-a6d5-c859a3639a0b] Downloaded image file data f425cf74-2f77-4c7b-99ba-64ff14b01dc6 [ 2238.061722] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-620d333f-146a-4e23-a0a4-6683edbfa71b {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2238.067721] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087830, 'name': CreateVM_Task} progress is 25%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2238.081894] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-91008e49-8bd2-4a30-b9fb-002ca6b87861 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2238.104953] env[63279]: INFO nova.virt.vmwareapi.images [None req-b04333d3-6e55-46a1-b066-6cbcaa49e26d tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] [instance: a15141bc-424d-48ca-a6d5-c859a3639a0b] The imported VM was unregistered [ 2238.108461] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-b04333d3-6e55-46a1-b066-6cbcaa49e26d tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] [instance: a15141bc-424d-48ca-a6d5-c859a3639a0b] Caching image {{(pid=63279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2238.111024] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-b04333d3-6e55-46a1-b066-6cbcaa49e26d tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Creating directory with path [datastore1] devstack-image-cache_base/f425cf74-2f77-4c7b-99ba-64ff14b01dc6 {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2238.111024] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6671e9ac-9ddc-4dc9-a0f4-260c3203fc73 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2238.125020] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-b04333d3-6e55-46a1-b066-6cbcaa49e26d tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Created directory with path [datastore1] devstack-image-cache_base/f425cf74-2f77-4c7b-99ba-64ff14b01dc6 {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2238.125020] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-b04333d3-6e55-46a1-b066-6cbcaa49e26d tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Moving virtual disk from [datastore1] OSTACK_IMG_8bf63f11-c67b-4d9b-8998-2812f08ce1fb/OSTACK_IMG_8bf63f11-c67b-4d9b-8998-2812f08ce1fb.vmdk to [datastore1] devstack-image-cache_base/f425cf74-2f77-4c7b-99ba-64ff14b01dc6/f425cf74-2f77-4c7b-99ba-64ff14b01dc6.vmdk. {{(pid=63279) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 2238.125020] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-1c31e1d5-1e6a-410c-8966-8a8c21b58804 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2238.130617] env[63279]: DEBUG oslo_vmware.api [None req-b04333d3-6e55-46a1-b066-6cbcaa49e26d tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Waiting for the task: (returnval){ [ 2238.130617] env[63279]: value = "task-2087833" [ 2238.130617] env[63279]: _type = "Task" [ 2238.130617] env[63279]: } to complete. 
{{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2238.138550] env[63279]: DEBUG oslo_vmware.api [None req-b04333d3-6e55-46a1-b066-6cbcaa49e26d tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Task: {'id': task-2087833, 'name': MoveVirtualDisk_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2238.247871] env[63279]: DEBUG oslo_concurrency.lockutils [req-76f3a11d-df44-468e-a7ed-87e2629706ef req-9d14daa4-be7b-49b0-b121-10a18bf21bb5 service nova] Releasing lock "refresh_cache-48794f65-355c-4cad-a83f-6b8cc327dd4d" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2238.270018] env[63279]: DEBUG nova.scheduler.client.report [None req-36db34e3-d814-4eaf-b698-fad1b0a7fb81 tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] Inventory has not changed for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2238.293655] env[63279]: DEBUG oslo_vmware.api [None req-7f34f191-f074-43df-82d6-b7c057695f88 tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] Task: {'id': task-2087831, 'name': PowerOffVM_Task, 'duration_secs': 0.298834} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2238.294207] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-7f34f191-f074-43df-82d6-b7c057695f88 tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] [instance: 0e12ab9b-a701-4e0f-9d96-939090f50494] Powered off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2238.295893] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-7f34f191-f074-43df-82d6-b7c057695f88 tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] [instance: 0e12ab9b-a701-4e0f-9d96-939090f50494] Unregistering the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2238.295893] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-664fae75-242c-4c6c-9756-fa736fc759c4 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2238.564234] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087830, 'name': CreateVM_Task} progress is 25%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2238.641711] env[63279]: DEBUG oslo_vmware.api [None req-b04333d3-6e55-46a1-b066-6cbcaa49e26d tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Task: {'id': task-2087833, 'name': MoveVirtualDisk_Task} progress is 21%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2238.773802] env[63279]: DEBUG oslo_concurrency.lockutils [None req-36db34e3-d814-4eaf-b698-fad1b0a7fb81 tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.410s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2238.774373] env[63279]: DEBUG nova.compute.manager [None req-36db34e3-d814-4eaf-b698-fad1b0a7fb81 tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] [instance: 21aca39e-8513-49bd-93e9-0d101cee591f] Start building networks asynchronously for instance. {{(pid=63279) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 2239.064311] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087830, 'name': CreateVM_Task} progress is 25%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2239.143011] env[63279]: DEBUG oslo_vmware.api [None req-b04333d3-6e55-46a1-b066-6cbcaa49e26d tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Task: {'id': task-2087833, 'name': MoveVirtualDisk_Task} progress is 40%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2239.280448] env[63279]: DEBUG nova.compute.utils [None req-36db34e3-d814-4eaf-b698-fad1b0a7fb81 tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] Using /dev/sd instead of None {{(pid=63279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2239.282334] env[63279]: DEBUG nova.compute.manager [None req-36db34e3-d814-4eaf-b698-fad1b0a7fb81 tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] [instance: 21aca39e-8513-49bd-93e9-0d101cee591f] Allocating IP information in the background. {{(pid=63279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 2239.282728] env[63279]: DEBUG nova.network.neutron [None req-36db34e3-d814-4eaf-b698-fad1b0a7fb81 tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] [instance: 21aca39e-8513-49bd-93e9-0d101cee591f] allocate_for_instance() {{(pid=63279) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2239.325271] env[63279]: DEBUG nova.policy [None req-36db34e3-d814-4eaf-b698-fad1b0a7fb81 tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6615b44501ff4c23a373c60aaf443f5b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'afe3ab970f3249719809afa9f9b11c1b', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63279) authorize /opt/stack/nova/nova/policy.py:192}} [ 2239.567146] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087830, 'name': CreateVM_Task} progress is 25%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2239.646738] env[63279]: DEBUG oslo_vmware.api [None req-b04333d3-6e55-46a1-b066-6cbcaa49e26d tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Task: {'id': task-2087833, 'name': MoveVirtualDisk_Task} progress is 63%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2239.679457] env[63279]: DEBUG nova.network.neutron [None req-36db34e3-d814-4eaf-b698-fad1b0a7fb81 tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] [instance: 21aca39e-8513-49bd-93e9-0d101cee591f] Successfully created port: d6d02bd7-d44f-434c-8e3f-5bdd56aed938 {{(pid=63279) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2239.788024] env[63279]: DEBUG nova.compute.manager [None req-36db34e3-d814-4eaf-b698-fad1b0a7fb81 tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] [instance: 21aca39e-8513-49bd-93e9-0d101cee591f] Start building block device mappings for instance. {{(pid=63279) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 2240.073250] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087830, 'name': CreateVM_Task} progress is 25%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2240.149520] env[63279]: DEBUG oslo_vmware.api [None req-b04333d3-6e55-46a1-b066-6cbcaa49e26d tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Task: {'id': task-2087833, 'name': MoveVirtualDisk_Task} progress is 83%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2240.567997] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087830, 'name': CreateVM_Task} progress is 25%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2240.644529] env[63279]: DEBUG oslo_vmware.api [None req-b04333d3-6e55-46a1-b066-6cbcaa49e26d tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Task: {'id': task-2087833, 'name': MoveVirtualDisk_Task, 'duration_secs': 2.429817} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2240.644794] env[63279]: INFO nova.virt.vmwareapi.ds_util [None req-b04333d3-6e55-46a1-b066-6cbcaa49e26d tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Moved virtual disk from [datastore1] OSTACK_IMG_8bf63f11-c67b-4d9b-8998-2812f08ce1fb/OSTACK_IMG_8bf63f11-c67b-4d9b-8998-2812f08ce1fb.vmdk to [datastore1] devstack-image-cache_base/f425cf74-2f77-4c7b-99ba-64ff14b01dc6/f425cf74-2f77-4c7b-99ba-64ff14b01dc6.vmdk. 
[ 2240.644975] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-b04333d3-6e55-46a1-b066-6cbcaa49e26d tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] [instance: a15141bc-424d-48ca-a6d5-c859a3639a0b] Cleaning up location [datastore1] OSTACK_IMG_8bf63f11-c67b-4d9b-8998-2812f08ce1fb {{(pid=63279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 2240.646815] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-b04333d3-6e55-46a1-b066-6cbcaa49e26d tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Deleting the datastore file [datastore1] OSTACK_IMG_8bf63f11-c67b-4d9b-8998-2812f08ce1fb {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2240.647101] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-829d4a6c-106d-4817-89af-47620b64df14 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2240.653432] env[63279]: DEBUG oslo_vmware.api [None req-b04333d3-6e55-46a1-b066-6cbcaa49e26d tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Waiting for the task: (returnval){ [ 2240.653432] env[63279]: value = "task-2087835" [ 2240.653432] env[63279]: _type = "Task" [ 2240.653432] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2240.662706] env[63279]: DEBUG oslo_vmware.api [None req-b04333d3-6e55-46a1-b066-6cbcaa49e26d tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Task: {'id': task-2087835, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2240.749552] env[63279]: DEBUG nova.compute.manager [None req-cfa8c991-7512-49ef-9c5d-e5ffe702877c tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 686a7ce2-2d07-411e-91d6-0471c55c3728] Checking state {{(pid=63279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2240.750542] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88b877f3-d8c1-4a96-a1c9-a986c671929e {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2240.799182] env[63279]: DEBUG nova.compute.manager [None req-36db34e3-d814-4eaf-b698-fad1b0a7fb81 tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] [instance: 21aca39e-8513-49bd-93e9-0d101cee591f] Start spawning the instance on the hypervisor. 
{{(pid=63279) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 2240.825031] env[63279]: DEBUG nova.virt.hardware [None req-36db34e3-d814-4eaf-b698-fad1b0a7fb81 tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-13T17:30:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-13T17:30:01Z,direct_url=,disk_format='vmdk',id=30887889-e45b-4f67-8b3c-16216e594a90,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a935e86eee0a4c38adfe0367d2097a61',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-13T17:30:02Z,virtual_size=,visibility=), allow threads: False {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2240.825285] env[63279]: DEBUG nova.virt.hardware [None req-36db34e3-d814-4eaf-b698-fad1b0a7fb81 tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] Flavor limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2240.825446] env[63279]: DEBUG nova.virt.hardware [None req-36db34e3-d814-4eaf-b698-fad1b0a7fb81 tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] Image limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2240.825630] env[63279]: DEBUG nova.virt.hardware [None req-36db34e3-d814-4eaf-b698-fad1b0a7fb81 tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] Flavor pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2240.825781] env[63279]: DEBUG nova.virt.hardware [None req-36db34e3-d814-4eaf-b698-fad1b0a7fb81 tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] Image pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2240.825966] env[63279]: DEBUG nova.virt.hardware [None req-36db34e3-d814-4eaf-b698-fad1b0a7fb81 tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2240.826180] env[63279]: DEBUG nova.virt.hardware [None req-36db34e3-d814-4eaf-b698-fad1b0a7fb81 tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 2240.826353] env[63279]: DEBUG nova.virt.hardware [None req-36db34e3-d814-4eaf-b698-fad1b0a7fb81 tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 2240.826525] env[63279]: DEBUG nova.virt.hardware [None 
req-36db34e3-d814-4eaf-b698-fad1b0a7fb81 tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] Got 1 possible topologies {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2240.826692] env[63279]: DEBUG nova.virt.hardware [None req-36db34e3-d814-4eaf-b698-fad1b0a7fb81 tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2240.826872] env[63279]: DEBUG nova.virt.hardware [None req-36db34e3-d814-4eaf-b698-fad1b0a7fb81 tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2240.828213] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03500190-814e-4454-947a-6e054ece213b {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2240.836565] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db187fbf-0049-4f74-b84d-97aeb3e43d2a {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2241.067937] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087830, 'name': CreateVM_Task} progress is 25%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2241.165417] env[63279]: DEBUG oslo_vmware.api [None req-b04333d3-6e55-46a1-b066-6cbcaa49e26d tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Task: {'id': task-2087835, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.034512} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2241.166413] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-b04333d3-6e55-46a1-b066-6cbcaa49e26d tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Deleted the datastore file {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2241.166413] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b04333d3-6e55-46a1-b066-6cbcaa49e26d tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f425cf74-2f77-4c7b-99ba-64ff14b01dc6/f425cf74-2f77-4c7b-99ba-64ff14b01dc6.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2241.166413] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-b04333d3-6e55-46a1-b066-6cbcaa49e26d tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f425cf74-2f77-4c7b-99ba-64ff14b01dc6/f425cf74-2f77-4c7b-99ba-64ff14b01dc6.vmdk to [datastore1] a15141bc-424d-48ca-a6d5-c859a3639a0b/a15141bc-424d-48ca-a6d5-c859a3639a0b.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2241.166413] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5c102500-5a52-4470-aa15-3d35527a3915 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2241.173068] env[63279]: DEBUG oslo_vmware.api [None req-b04333d3-6e55-46a1-b066-6cbcaa49e26d tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Waiting for the task: (returnval){ [ 2241.173068] env[63279]: value = "task-2087836" [ 2241.173068] env[63279]: _type = "Task" [ 2241.173068] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2241.181355] env[63279]: DEBUG oslo_vmware.api [None req-b04333d3-6e55-46a1-b066-6cbcaa49e26d tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Task: {'id': task-2087836, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2241.260868] env[63279]: INFO nova.compute.manager [None req-cfa8c991-7512-49ef-9c5d-e5ffe702877c tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 686a7ce2-2d07-411e-91d6-0471c55c3728] instance snapshotting [ 2241.261548] env[63279]: DEBUG nova.objects.instance [None req-cfa8c991-7512-49ef-9c5d-e5ffe702877c tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Lazy-loading 'flavor' on Instance uuid 686a7ce2-2d07-411e-91d6-0471c55c3728 {{(pid=63279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2241.568933] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087830, 'name': CreateVM_Task} progress is 25%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2241.684099] env[63279]: DEBUG oslo_vmware.api [None req-b04333d3-6e55-46a1-b066-6cbcaa49e26d tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Task: {'id': task-2087836, 'name': CopyVirtualDisk_Task} progress is 21%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2241.769634] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3df02022-78db-4773-92c6-28083b0c2920 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2241.792596] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8caae917-bcd3-4fa6-bf83-4af2983b6782 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2242.068834] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087830, 'name': CreateVM_Task} progress is 25%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2242.184426] env[63279]: DEBUG oslo_vmware.api [None req-b04333d3-6e55-46a1-b066-6cbcaa49e26d tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Task: {'id': task-2087836, 'name': CopyVirtualDisk_Task} progress is 40%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2242.302411] env[63279]: DEBUG oslo_concurrency.lockutils [None req-d473d10f-3835-4a22-8b4b-615c9316d66e tempest-ImagesNegativeTestJSON-354022530 tempest-ImagesNegativeTestJSON-354022530-project-member] Acquiring lock "ecec02e8-8ddf-4997-9547-ccee0db1938b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2242.302667] env[63279]: DEBUG oslo_concurrency.lockutils [None req-d473d10f-3835-4a22-8b4b-615c9316d66e tempest-ImagesNegativeTestJSON-354022530 tempest-ImagesNegativeTestJSON-354022530-project-member] Lock "ecec02e8-8ddf-4997-9547-ccee0db1938b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2242.304916] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-cfa8c991-7512-49ef-9c5d-e5ffe702877c tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 686a7ce2-2d07-411e-91d6-0471c55c3728] Creating Snapshot of the VM instance {{(pid=63279) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 2242.305293] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-cc87222d-9803-4ad2-aac6-b05bd35c603d {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2242.314461] env[63279]: DEBUG oslo_vmware.api [None req-cfa8c991-7512-49ef-9c5d-e5ffe702877c tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Waiting for the task: (returnval){ [ 2242.314461] env[63279]: value = "task-2087837" [ 2242.314461] env[63279]: _type = "Task" [ 2242.314461] 
env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2242.326151] env[63279]: DEBUG oslo_vmware.api [None req-cfa8c991-7512-49ef-9c5d-e5ffe702877c tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Task: {'id': task-2087837, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2242.571745] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087830, 'name': CreateVM_Task} progress is 25%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2242.685889] env[63279]: DEBUG oslo_vmware.api [None req-b04333d3-6e55-46a1-b066-6cbcaa49e26d tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Task: {'id': task-2087836, 'name': CopyVirtualDisk_Task} progress is 63%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2242.807186] env[63279]: DEBUG nova.compute.manager [None req-d473d10f-3835-4a22-8b4b-615c9316d66e tempest-ImagesNegativeTestJSON-354022530 tempest-ImagesNegativeTestJSON-354022530-project-member] [instance: ecec02e8-8ddf-4997-9547-ccee0db1938b] Starting instance... {{(pid=63279) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 2242.827077] env[63279]: DEBUG oslo_vmware.api [None req-cfa8c991-7512-49ef-9c5d-e5ffe702877c tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Task: {'id': task-2087837, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2243.072052] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087830, 'name': CreateVM_Task} progress is 25%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2243.186324] env[63279]: DEBUG oslo_vmware.api [None req-b04333d3-6e55-46a1-b066-6cbcaa49e26d tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Task: {'id': task-2087836, 'name': CopyVirtualDisk_Task} progress is 85%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2243.326583] env[63279]: DEBUG oslo_vmware.api [None req-cfa8c991-7512-49ef-9c5d-e5ffe702877c tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Task: {'id': task-2087837, 'name': CreateSnapshot_Task, 'duration_secs': 0.596196} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2243.326916] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-cfa8c991-7512-49ef-9c5d-e5ffe702877c tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 686a7ce2-2d07-411e-91d6-0471c55c3728] Created Snapshot of the VM instance {{(pid=63279) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 2243.327609] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d7a459d-1ec5-4cdb-b100-656d6940e7ad {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2243.331345] env[63279]: DEBUG oslo_concurrency.lockutils [None req-d473d10f-3835-4a22-8b4b-615c9316d66e tempest-ImagesNegativeTestJSON-354022530 tempest-ImagesNegativeTestJSON-354022530-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2243.331345] env[63279]: DEBUG oslo_concurrency.lockutils [None req-d473d10f-3835-4a22-8b4b-615c9316d66e tempest-ImagesNegativeTestJSON-354022530 tempest-ImagesNegativeTestJSON-354022530-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2243.332883] env[63279]: INFO nova.compute.claims [None req-d473d10f-3835-4a22-8b4b-615c9316d66e tempest-ImagesNegativeTestJSON-354022530 tempest-ImagesNegativeTestJSON-354022530-project-member] [instance: ecec02e8-8ddf-4997-9547-ccee0db1938b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2243.571958] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087830, 'name': CreateVM_Task} progress is 25%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2243.686685] env[63279]: DEBUG oslo_vmware.api [None req-b04333d3-6e55-46a1-b066-6cbcaa49e26d tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Task: {'id': task-2087836, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.30655} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2243.686967] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-b04333d3-6e55-46a1-b066-6cbcaa49e26d tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f425cf74-2f77-4c7b-99ba-64ff14b01dc6/f425cf74-2f77-4c7b-99ba-64ff14b01dc6.vmdk to [datastore1] a15141bc-424d-48ca-a6d5-c859a3639a0b/a15141bc-424d-48ca-a6d5-c859a3639a0b.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2243.687765] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4be9733e-866b-4377-838b-f8b11d41b6ed {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2243.711342] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-b04333d3-6e55-46a1-b066-6cbcaa49e26d tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] [instance: a15141bc-424d-48ca-a6d5-c859a3639a0b] Reconfiguring VM instance instance-00000040 to attach disk [datastore1] a15141bc-424d-48ca-a6d5-c859a3639a0b/a15141bc-424d-48ca-a6d5-c859a3639a0b.vmdk or device None with type streamOptimized {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2243.711686] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1568ea28-c988-4e03-9211-c90bc68a7ecc {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2243.728992] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-d0ca50dd-6411-451b-b137-4618d6023b66 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: 9a6abf10-ed12-49c6-aa16-f428da9f9f9d] Unregistered the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2243.729523] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-d0ca50dd-6411-451b-b137-4618d6023b66 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: 9a6abf10-ed12-49c6-aa16-f428da9f9f9d] Deleting contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2243.729793] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-d0ca50dd-6411-451b-b137-4618d6023b66 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Deleting the datastore file [datastore1] 9a6abf10-ed12-49c6-aa16-f428da9f9f9d {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2243.730121] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-7f34f191-f074-43df-82d6-b7c057695f88 tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] [instance: 0e12ab9b-a701-4e0f-9d96-939090f50494] Unregistered the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2243.730297] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-7f34f191-f074-43df-82d6-b7c057695f88 tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] [instance: 0e12ab9b-a701-4e0f-9d96-939090f50494] Deleting contents of the VM from datastore datastore1 
{{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2243.730547] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-7f34f191-f074-43df-82d6-b7c057695f88 tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] Deleting the datastore file [datastore1] 0e12ab9b-a701-4e0f-9d96-939090f50494 {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2243.731148] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d5ac85ac-6c9f-4688-b74c-1fdde71f6a41 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2243.732863] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a0653182-459b-498d-a4ca-1f6abf0955d5 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2243.737360] env[63279]: DEBUG oslo_vmware.api [None req-b04333d3-6e55-46a1-b066-6cbcaa49e26d tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Waiting for the task: (returnval){ [ 2243.737360] env[63279]: value = "task-2087838" [ 2243.737360] env[63279]: _type = "Task" [ 2243.737360] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2243.742867] env[63279]: DEBUG oslo_vmware.api [None req-7f34f191-f074-43df-82d6-b7c057695f88 tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] Waiting for the task: (returnval){ [ 2243.742867] env[63279]: value = "task-2087840" [ 2243.742867] env[63279]: _type = "Task" [ 2243.742867] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2243.746487] env[63279]: DEBUG oslo_vmware.api [None req-d0ca50dd-6411-451b-b137-4618d6023b66 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Waiting for the task: (returnval){ [ 2243.746487] env[63279]: value = "task-2087839" [ 2243.746487] env[63279]: _type = "Task" [ 2243.746487] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2243.746764] env[63279]: DEBUG oslo_vmware.api [None req-b04333d3-6e55-46a1-b066-6cbcaa49e26d tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Task: {'id': task-2087838, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2243.757869] env[63279]: DEBUG oslo_vmware.api [None req-7f34f191-f074-43df-82d6-b7c057695f88 tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] Task: {'id': task-2087840, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2243.760266] env[63279]: DEBUG oslo_vmware.api [None req-d0ca50dd-6411-451b-b137-4618d6023b66 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Task: {'id': task-2087839, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2243.815314] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-4f1b70e3-4a9e-4d3a-9198-bdea7c3cc57e tempest-ServersTestMultiNic-465204663 tempest-ServersTestMultiNic-465204663-project-member] [instance: f418ff3d-8a5f-4d78-994a-e40b62cfcdd6] Unregistered the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2243.815682] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-4f1b70e3-4a9e-4d3a-9198-bdea7c3cc57e tempest-ServersTestMultiNic-465204663 tempest-ServersTestMultiNic-465204663-project-member] [instance: f418ff3d-8a5f-4d78-994a-e40b62cfcdd6] Deleting contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2243.815927] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-4f1b70e3-4a9e-4d3a-9198-bdea7c3cc57e tempest-ServersTestMultiNic-465204663 tempest-ServersTestMultiNic-465204663-project-member] Deleting the datastore file [datastore1] f418ff3d-8a5f-4d78-994a-e40b62cfcdd6 {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2243.816312] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7ad763d5-b5bf-45f3-bb36-5be19dd9266c {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2243.823332] env[63279]: DEBUG oslo_vmware.api [None req-4f1b70e3-4a9e-4d3a-9198-bdea7c3cc57e tempest-ServersTestMultiNic-465204663 tempest-ServersTestMultiNic-465204663-project-member] Waiting for the task: (returnval){ [ 2243.823332] env[63279]: value = "task-2087841" [ 2243.823332] env[63279]: _type = "Task" [ 2243.823332] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2243.832195] env[63279]: DEBUG oslo_vmware.api [None req-4f1b70e3-4a9e-4d3a-9198-bdea7c3cc57e tempest-ServersTestMultiNic-465204663 tempest-ServersTestMultiNic-465204663-project-member] Task: {'id': task-2087841, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2243.851761] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-cfa8c991-7512-49ef-9c5d-e5ffe702877c tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 686a7ce2-2d07-411e-91d6-0471c55c3728] Creating linked-clone VM from snapshot {{(pid=63279) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 2243.852789] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-7fd94d2a-955a-4864-974f-dce9107cdd63 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2243.860982] env[63279]: DEBUG oslo_vmware.api [None req-cfa8c991-7512-49ef-9c5d-e5ffe702877c tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Waiting for the task: (returnval){ [ 2243.860982] env[63279]: value = "task-2087842" [ 2243.860982] env[63279]: _type = "Task" [ 2243.860982] env[63279]: } to complete. 
{{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2243.870518] env[63279]: DEBUG oslo_vmware.api [None req-cfa8c991-7512-49ef-9c5d-e5ffe702877c tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Task: {'id': task-2087842, 'name': CloneVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2244.072885] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087830, 'name': CreateVM_Task, 'duration_secs': 7.426369} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2244.073095] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 48794f65-355c-4cad-a83f-6b8cc327dd4d] Created VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2244.073791] env[63279]: DEBUG oslo_concurrency.lockutils [None req-2000fa86-5fc9-463b-916e-e234ef84002b tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2244.073960] env[63279]: DEBUG oslo_concurrency.lockutils [None req-2000fa86-5fc9-463b-916e-e234ef84002b tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2244.074348] env[63279]: DEBUG oslo_concurrency.lockutils [None req-2000fa86-5fc9-463b-916e-e234ef84002b tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2244.074588] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9bd3a8cc-fcf8-4a44-9835-132d1af24d58 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2244.079191] env[63279]: DEBUG oslo_vmware.api [None req-2000fa86-5fc9-463b-916e-e234ef84002b tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Waiting for the task: (returnval){ [ 2244.079191] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52509c39-df6b-9d41-e09d-db5d8b7425f7" [ 2244.079191] env[63279]: _type = "Task" [ 2244.079191] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2244.087294] env[63279]: DEBUG oslo_vmware.api [None req-2000fa86-5fc9-463b-916e-e234ef84002b tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52509c39-df6b-9d41-e09d-db5d8b7425f7, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2244.248321] env[63279]: DEBUG oslo_vmware.api [None req-b04333d3-6e55-46a1-b066-6cbcaa49e26d tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Task: {'id': task-2087838, 'name': ReconfigVM_Task} progress is 99%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2244.259728] env[63279]: DEBUG oslo_vmware.api [None req-7f34f191-f074-43df-82d6-b7c057695f88 tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] Task: {'id': task-2087840, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.506335} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2244.262894] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-7f34f191-f074-43df-82d6-b7c057695f88 tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] Deleted the datastore file {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2244.263110] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-7f34f191-f074-43df-82d6-b7c057695f88 tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] [instance: 0e12ab9b-a701-4e0f-9d96-939090f50494] Deleted contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2244.263298] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-7f34f191-f074-43df-82d6-b7c057695f88 tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] [instance: 0e12ab9b-a701-4e0f-9d96-939090f50494] Instance destroyed {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2244.263476] env[63279]: INFO nova.compute.manager [None req-7f34f191-f074-43df-82d6-b7c057695f88 tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] [instance: 0e12ab9b-a701-4e0f-9d96-939090f50494] Took 6.52 seconds to destroy the instance on the hypervisor. [ 2244.264037] env[63279]: DEBUG oslo.service.loopingcall [None req-7f34f191-f074-43df-82d6-b7c057695f88 tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2244.264037] env[63279]: DEBUG oslo_vmware.api [None req-d0ca50dd-6411-451b-b137-4618d6023b66 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Task: {'id': task-2087839, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.318781} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2244.264189] env[63279]: DEBUG nova.compute.manager [-] [instance: 0e12ab9b-a701-4e0f-9d96-939090f50494] Deallocating network for instance {{(pid=63279) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2244.264235] env[63279]: DEBUG nova.network.neutron [-] [instance: 0e12ab9b-a701-4e0f-9d96-939090f50494] deallocate_for_instance() {{(pid=63279) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2244.265912] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-d0ca50dd-6411-451b-b137-4618d6023b66 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Deleted the datastore file {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2244.266119] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-d0ca50dd-6411-451b-b137-4618d6023b66 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: 9a6abf10-ed12-49c6-aa16-f428da9f9f9d] Deleted contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2244.266295] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-d0ca50dd-6411-451b-b137-4618d6023b66 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: 9a6abf10-ed12-49c6-aa16-f428da9f9f9d] Instance destroyed {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2244.266469] env[63279]: INFO nova.compute.manager [None req-d0ca50dd-6411-451b-b137-4618d6023b66 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] [instance: 9a6abf10-ed12-49c6-aa16-f428da9f9f9d] Took 10.33 seconds to destroy the instance on the hypervisor. [ 2244.266688] env[63279]: DEBUG oslo.service.loopingcall [None req-d0ca50dd-6411-451b-b137-4618d6023b66 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2244.266879] env[63279]: DEBUG nova.compute.manager [-] [instance: 9a6abf10-ed12-49c6-aa16-f428da9f9f9d] Deallocating network for instance {{(pid=63279) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2244.266973] env[63279]: DEBUG nova.network.neutron [-] [instance: 9a6abf10-ed12-49c6-aa16-f428da9f9f9d] deallocate_for_instance() {{(pid=63279) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2244.337266] env[63279]: DEBUG oslo_vmware.api [None req-4f1b70e3-4a9e-4d3a-9198-bdea7c3cc57e tempest-ServersTestMultiNic-465204663 tempest-ServersTestMultiNic-465204663-project-member] Task: {'id': task-2087841, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2244.373881] env[63279]: DEBUG oslo_vmware.api [None req-cfa8c991-7512-49ef-9c5d-e5ffe702877c tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Task: {'id': task-2087842, 'name': CloneVM_Task} progress is 94%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2244.570862] env[63279]: DEBUG nova.compute.manager [req-67bd2940-3f27-4991-bc1d-7b03d312aabb req-80085d99-6cf5-41b3-a840-719ff2002b40 service nova] [instance: 9a6abf10-ed12-49c6-aa16-f428da9f9f9d] Received event network-vif-deleted-7707bb8d-19ea-4514-a533-b6a5829b6231 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2244.570862] env[63279]: INFO nova.compute.manager [req-67bd2940-3f27-4991-bc1d-7b03d312aabb req-80085d99-6cf5-41b3-a840-719ff2002b40 service nova] [instance: 9a6abf10-ed12-49c6-aa16-f428da9f9f9d] Neutron deleted interface 7707bb8d-19ea-4514-a533-b6a5829b6231; detaching it from the instance and deleting it from the info cache [ 2244.570862] env[63279]: DEBUG nova.network.neutron [req-67bd2940-3f27-4991-bc1d-7b03d312aabb req-80085d99-6cf5-41b3-a840-719ff2002b40 service nova] [instance: 9a6abf10-ed12-49c6-aa16-f428da9f9f9d] Updating instance_info_cache with network_info: [] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2244.590820] env[63279]: DEBUG oslo_vmware.api [None req-2000fa86-5fc9-463b-916e-e234ef84002b tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52509c39-df6b-9d41-e09d-db5d8b7425f7, 'name': SearchDatastore_Task, 'duration_secs': 0.034985} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2244.593952] env[63279]: DEBUG oslo_concurrency.lockutils [None req-2000fa86-5fc9-463b-916e-e234ef84002b tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2244.594143] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-2000fa86-5fc9-463b-916e-e234ef84002b tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: 48794f65-355c-4cad-a83f-6b8cc327dd4d] Processing image 30887889-e45b-4f67-8b3c-16216e594a90 {{(pid=63279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2244.594405] env[63279]: DEBUG oslo_concurrency.lockutils [None req-2000fa86-5fc9-463b-916e-e234ef84002b tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2244.594560] env[63279]: DEBUG oslo_concurrency.lockutils [None req-2000fa86-5fc9-463b-916e-e234ef84002b tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2244.594745] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-2000fa86-5fc9-463b-916e-e234ef84002b tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Creating directory with path [datastore1] 
devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2244.596012] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f9df50de-8629-495b-9c84-ea15a4988fd2 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2244.605603] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-2000fa86-5fc9-463b-916e-e234ef84002b tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2244.605888] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-2000fa86-5fc9-463b-916e-e234ef84002b tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2244.606630] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-92ab0133-37cd-47e0-8337-d323dd2bc396 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2244.615425] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0cecb3e2-4a8d-4e43-a2f3-2a39167a5e6c {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2244.625541] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f005f7f-385c-49a1-b95b-69738126271d {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2244.629878] env[63279]: DEBUG oslo_vmware.api [None req-2000fa86-5fc9-463b-916e-e234ef84002b tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Waiting for the task: (returnval){ [ 2244.629878] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52ef4eaf-6140-0ec9-0deb-4b1a63448e12" [ 2244.629878] env[63279]: _type = "Task" [ 2244.629878] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2244.661180] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9274070-d74e-4fd9-9111-7cf865cb2f1d {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2244.667766] env[63279]: DEBUG oslo_vmware.api [None req-2000fa86-5fc9-463b-916e-e234ef84002b tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52ef4eaf-6140-0ec9-0deb-4b1a63448e12, 'name': SearchDatastore_Task, 'duration_secs': 0.010697} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2244.668850] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-29c04982-a669-476c-8d6b-59448e091daf {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2244.675094] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-342317b5-cec6-4f1d-a4a2-7169da3fd0e8 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2244.681166] env[63279]: DEBUG oslo_vmware.api [None req-2000fa86-5fc9-463b-916e-e234ef84002b tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Waiting for the task: (returnval){ [ 2244.681166] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52ad9680-9d37-84b6-ecfa-0de84fe282a8" [ 2244.681166] env[63279]: _type = "Task" [ 2244.681166] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2244.692550] env[63279]: DEBUG nova.compute.provider_tree [None req-d473d10f-3835-4a22-8b4b-615c9316d66e tempest-ImagesNegativeTestJSON-354022530 tempest-ImagesNegativeTestJSON-354022530-project-member] Inventory has not changed in ProviderTree for provider: 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2244.698942] env[63279]: DEBUG oslo_vmware.api [None req-2000fa86-5fc9-463b-916e-e234ef84002b tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52ad9680-9d37-84b6-ecfa-0de84fe282a8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2244.749024] env[63279]: DEBUG oslo_vmware.api [None req-b04333d3-6e55-46a1-b066-6cbcaa49e26d tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Task: {'id': task-2087838, 'name': ReconfigVM_Task, 'duration_secs': 0.607193} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2244.750064] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-b04333d3-6e55-46a1-b066-6cbcaa49e26d tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] [instance: a15141bc-424d-48ca-a6d5-c859a3639a0b] Reconfigured VM instance instance-00000040 to attach disk [datastore1] a15141bc-424d-48ca-a6d5-c859a3639a0b/a15141bc-424d-48ca-a6d5-c859a3639a0b.vmdk or device None with type streamOptimized {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2244.751488] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-b04333d3-6e55-46a1-b066-6cbcaa49e26d tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] [instance: a15141bc-424d-48ca-a6d5-c859a3639a0b] Block device information present: {'root_device_name': '/dev/sda', 'image': [{'encryption_secret_uuid': None, 'disk_bus': None, 'size': 0, 'encryption_format': None, 'guest_format': None, 'encryption_options': None, 'encrypted': False, 'device_name': '/dev/sda', 'boot_index': 0, 'device_type': 'disk', 'image_id': '30887889-e45b-4f67-8b3c-16216e594a90'}], 'ephemerals': [], 'block_device_mapping': [{'disk_bus': None, 'guest_format': None, 'mount_device': '/dev/sdb', 'delete_on_termination': False, 'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-427738', 'volume_id': 'd14dfff0-b332-4aa4-94bb-62b564929c56', 'name': 'volume-d14dfff0-b332-4aa4-94bb-62b564929c56', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'attached', 'instance': 'a15141bc-424d-48ca-a6d5-c859a3639a0b', 'attached_at': '', 'detached_at': '', 'volume_id': 'd14dfff0-b332-4aa4-94bb-62b564929c56', 'serial': 'd14dfff0-b332-4aa4-94bb-62b564929c56'}, 'boot_index': None, 'device_type': None, 'attachment_id': '599569f0-2ce4-4f28-8a2d-6c3522d3dd3a', 'volume_type': None}], 'swap': None} {{(pid=63279) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 2244.751488] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-b04333d3-6e55-46a1-b066-6cbcaa49e26d tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] [instance: a15141bc-424d-48ca-a6d5-c859a3639a0b] Volume attach. 
Driver type: vmdk {{(pid=63279) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 2244.751488] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-b04333d3-6e55-46a1-b066-6cbcaa49e26d tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] [instance: a15141bc-424d-48ca-a6d5-c859a3639a0b] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-427738', 'volume_id': 'd14dfff0-b332-4aa4-94bb-62b564929c56', 'name': 'volume-d14dfff0-b332-4aa4-94bb-62b564929c56', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'attached', 'instance': 'a15141bc-424d-48ca-a6d5-c859a3639a0b', 'attached_at': '', 'detached_at': '', 'volume_id': 'd14dfff0-b332-4aa4-94bb-62b564929c56', 'serial': 'd14dfff0-b332-4aa4-94bb-62b564929c56'} {{(pid=63279) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 2244.752095] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ba4abbe-a2f3-4678-b524-3b01a0b603dc {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2244.771644] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ab4ccff-5a39-4ffa-96b6-7d608ba010c9 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2244.796576] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-b04333d3-6e55-46a1-b066-6cbcaa49e26d tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] [instance: a15141bc-424d-48ca-a6d5-c859a3639a0b] Reconfiguring VM instance instance-00000040 to attach disk [datastore1] volume-d14dfff0-b332-4aa4-94bb-62b564929c56/volume-d14dfff0-b332-4aa4-94bb-62b564929c56.vmdk or device None with type thin {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2244.797143] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b6471b1d-6120-4379-b379-e0d2cb075e76 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2244.816256] env[63279]: DEBUG oslo_vmware.api [None req-b04333d3-6e55-46a1-b066-6cbcaa49e26d tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Waiting for the task: (returnval){ [ 2244.816256] env[63279]: value = "task-2087843" [ 2244.816256] env[63279]: _type = "Task" [ 2244.816256] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2244.825587] env[63279]: DEBUG oslo_vmware.api [None req-b04333d3-6e55-46a1-b066-6cbcaa49e26d tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Task: {'id': task-2087843, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2244.833384] env[63279]: DEBUG oslo_vmware.api [None req-4f1b70e3-4a9e-4d3a-9198-bdea7c3cc57e tempest-ServersTestMultiNic-465204663 tempest-ServersTestMultiNic-465204663-project-member] Task: {'id': task-2087841, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.528819} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2244.833758] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-4f1b70e3-4a9e-4d3a-9198-bdea7c3cc57e tempest-ServersTestMultiNic-465204663 tempest-ServersTestMultiNic-465204663-project-member] Deleted the datastore file {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2244.834037] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-4f1b70e3-4a9e-4d3a-9198-bdea7c3cc57e tempest-ServersTestMultiNic-465204663 tempest-ServersTestMultiNic-465204663-project-member] [instance: f418ff3d-8a5f-4d78-994a-e40b62cfcdd6] Deleted contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2244.834418] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-4f1b70e3-4a9e-4d3a-9198-bdea7c3cc57e tempest-ServersTestMultiNic-465204663 tempest-ServersTestMultiNic-465204663-project-member] [instance: f418ff3d-8a5f-4d78-994a-e40b62cfcdd6] Instance destroyed {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2244.834552] env[63279]: INFO nova.compute.manager [None req-4f1b70e3-4a9e-4d3a-9198-bdea7c3cc57e tempest-ServersTestMultiNic-465204663 tempest-ServersTestMultiNic-465204663-project-member] [instance: f418ff3d-8a5f-4d78-994a-e40b62cfcdd6] Took 9.77 seconds to destroy the instance on the hypervisor. [ 2244.834884] env[63279]: DEBUG oslo.service.loopingcall [None req-4f1b70e3-4a9e-4d3a-9198-bdea7c3cc57e tempest-ServersTestMultiNic-465204663 tempest-ServersTestMultiNic-465204663-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2244.835144] env[63279]: DEBUG nova.compute.manager [-] [instance: f418ff3d-8a5f-4d78-994a-e40b62cfcdd6] Deallocating network for instance {{(pid=63279) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2244.835243] env[63279]: DEBUG nova.network.neutron [-] [instance: f418ff3d-8a5f-4d78-994a-e40b62cfcdd6] deallocate_for_instance() {{(pid=63279) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2244.877493] env[63279]: DEBUG oslo_vmware.api [None req-cfa8c991-7512-49ef-9c5d-e5ffe702877c tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Task: {'id': task-2087842, 'name': CloneVM_Task} progress is 94%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2245.040247] env[63279]: DEBUG nova.compute.manager [req-7d759894-5f82-4da1-9ae4-61f41d115668 req-51c556b4-437c-4e22-9485-83d661546f69 service nova] [instance: 6b4a94f9-1cd9-466f-844f-8d692b732abc] Received event network-vif-plugged-3fe8ec9b-e8c9-486c-9d96-785b9a7cb897 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2245.040503] env[63279]: DEBUG oslo_concurrency.lockutils [req-7d759894-5f82-4da1-9ae4-61f41d115668 req-51c556b4-437c-4e22-9485-83d661546f69 service nova] Acquiring lock "6b4a94f9-1cd9-466f-844f-8d692b732abc-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2245.040732] env[63279]: DEBUG oslo_concurrency.lockutils [req-7d759894-5f82-4da1-9ae4-61f41d115668 req-51c556b4-437c-4e22-9485-83d661546f69 service nova] Lock "6b4a94f9-1cd9-466f-844f-8d692b732abc-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2245.041190] env[63279]: DEBUG oslo_concurrency.lockutils [req-7d759894-5f82-4da1-9ae4-61f41d115668 req-51c556b4-437c-4e22-9485-83d661546f69 service nova] Lock "6b4a94f9-1cd9-466f-844f-8d692b732abc-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2245.041423] env[63279]: DEBUG nova.compute.manager [req-7d759894-5f82-4da1-9ae4-61f41d115668 req-51c556b4-437c-4e22-9485-83d661546f69 service nova] [instance: 6b4a94f9-1cd9-466f-844f-8d692b732abc] No waiting events found dispatching network-vif-plugged-3fe8ec9b-e8c9-486c-9d96-785b9a7cb897 {{(pid=63279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 2245.041603] env[63279]: WARNING nova.compute.manager [req-7d759894-5f82-4da1-9ae4-61f41d115668 req-51c556b4-437c-4e22-9485-83d661546f69 service nova] [instance: 6b4a94f9-1cd9-466f-844f-8d692b732abc] Received unexpected event network-vif-plugged-3fe8ec9b-e8c9-486c-9d96-785b9a7cb897 for instance with vm_state building and task_state spawning. [ 2245.042461] env[63279]: DEBUG nova.network.neutron [-] [instance: 9a6abf10-ed12-49c6-aa16-f428da9f9f9d] Updating instance_info_cache with network_info: [] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2245.074055] env[63279]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-5ba194d5-5f63-4b73-a396-4ee02092d446 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2245.081510] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61dbd683-b8e8-4924-8744-cac015cf4512 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2245.118998] env[63279]: DEBUG nova.compute.manager [req-67bd2940-3f27-4991-bc1d-7b03d312aabb req-80085d99-6cf5-41b3-a840-719ff2002b40 service nova] [instance: 9a6abf10-ed12-49c6-aa16-f428da9f9f9d] Detach interface failed, port_id=7707bb8d-19ea-4514-a533-b6a5829b6231, reason: Instance 9a6abf10-ed12-49c6-aa16-f428da9f9f9d could not be found. 
{{(pid=63279) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11484}} [ 2245.197732] env[63279]: DEBUG nova.scheduler.client.report [None req-d473d10f-3835-4a22-8b4b-615c9316d66e tempest-ImagesNegativeTestJSON-354022530 tempest-ImagesNegativeTestJSON-354022530-project-member] Inventory has not changed for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2245.204963] env[63279]: DEBUG oslo_vmware.api [None req-2000fa86-5fc9-463b-916e-e234ef84002b tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52ad9680-9d37-84b6-ecfa-0de84fe282a8, 'name': SearchDatastore_Task, 'duration_secs': 0.028861} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2245.204963] env[63279]: DEBUG nova.network.neutron [None req-36db34e3-d814-4eaf-b698-fad1b0a7fb81 tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] [instance: 21aca39e-8513-49bd-93e9-0d101cee591f] Successfully updated port: d6d02bd7-d44f-434c-8e3f-5bdd56aed938 {{(pid=63279) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2245.205406] env[63279]: DEBUG oslo_concurrency.lockutils [None req-2000fa86-5fc9-463b-916e-e234ef84002b tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2245.205851] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-2000fa86-5fc9-463b-916e-e234ef84002b tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] 48794f65-355c-4cad-a83f-6b8cc327dd4d/48794f65-355c-4cad-a83f-6b8cc327dd4d.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2245.209110] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-acfb9cda-b9e1-474a-bf1b-1066ea5ed51d {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2245.216049] env[63279]: DEBUG oslo_vmware.api [None req-2000fa86-5fc9-463b-916e-e234ef84002b tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Waiting for the task: (returnval){ [ 2245.216049] env[63279]: value = "task-2087844" [ 2245.216049] env[63279]: _type = "Task" [ 2245.216049] env[63279]: } to complete. 
{{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2245.232428] env[63279]: DEBUG oslo_vmware.api [None req-2000fa86-5fc9-463b-916e-e234ef84002b tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Task: {'id': task-2087844, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2245.326143] env[63279]: DEBUG oslo_vmware.api [None req-b04333d3-6e55-46a1-b066-6cbcaa49e26d tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Task: {'id': task-2087843, 'name': ReconfigVM_Task, 'duration_secs': 0.489591} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2245.326508] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-b04333d3-6e55-46a1-b066-6cbcaa49e26d tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] [instance: a15141bc-424d-48ca-a6d5-c859a3639a0b] Reconfigured VM instance instance-00000040 to attach disk [datastore1] volume-d14dfff0-b332-4aa4-94bb-62b564929c56/volume-d14dfff0-b332-4aa4-94bb-62b564929c56.vmdk or device None with type thin {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2245.331810] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-563e9828-eeec-4d49-a916-7980cda59092 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2245.350914] env[63279]: DEBUG oslo_vmware.api [None req-b04333d3-6e55-46a1-b066-6cbcaa49e26d tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Waiting for the task: (returnval){ [ 2245.350914] env[63279]: value = "task-2087845" [ 2245.350914] env[63279]: _type = "Task" [ 2245.350914] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2245.367883] env[63279]: DEBUG oslo_vmware.api [None req-b04333d3-6e55-46a1-b066-6cbcaa49e26d tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Task: {'id': task-2087845, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2245.379165] env[63279]: DEBUG oslo_vmware.api [None req-cfa8c991-7512-49ef-9c5d-e5ffe702877c tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Task: {'id': task-2087842, 'name': CloneVM_Task} progress is 95%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2245.427960] env[63279]: DEBUG nova.network.neutron [None req-17cc2163-e46e-46a7-8e0b-8af430363191 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] [instance: 6b4a94f9-1cd9-466f-844f-8d692b732abc] Successfully updated port: 3fe8ec9b-e8c9-486c-9d96-785b9a7cb897 {{(pid=63279) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2245.547859] env[63279]: INFO nova.compute.manager [-] [instance: 9a6abf10-ed12-49c6-aa16-f428da9f9f9d] Took 1.28 seconds to deallocate network for instance. 
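The recurring wait_for_task / _poll_task entries above (CopyVirtualDisk_Task, CreateSnapshot_Task, ReconfigVM_Task, CloneVM_Task and so on) all come from the same oslo.vmware polling loop: the driver invokes a vSphere method ending in "_Task", receives a task reference immediately, and then blocks on it while the session logs the task's progress. The following is a minimal sketch of that pattern, not Nova's actual code; the host, credentials and datastore paths are placeholders and the CopyVirtualDisk_Task arguments are illustrative, not values taken from this log.

    # Minimal sketch of the oslo.vmware task-polling pattern behind the
    # "Waiting for the task" / "progress is N%" lines above.
    # Host, credentials and paths are placeholders.
    from oslo_vmware import api as vmware_api

    session = vmware_api.VMwareAPISession(
        'vcenter.example.test', 'user', 'secret',
        api_retry_count=10, task_poll_interval=0.5)

    # "_Task" methods return a task reference right away; waiting on it is
    # what emits the recurring progress entries seen in this log.
    disk_mgr = session.vim.service_content.virtualDiskManager
    task_ref = session.invoke_api(
        session.vim, 'CopyVirtualDisk_Task', disk_mgr,
        sourceName='[datastore1] devstack-image-cache_base/example.vmdk',
        destName='[datastore1] example-instance/example-instance.vmdk')
    task_info = session.wait_for_task(task_ref)  # blocks until success or raises on error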
[ 2245.708706] env[63279]: DEBUG oslo_concurrency.lockutils [None req-d473d10f-3835-4a22-8b4b-615c9316d66e tempest-ImagesNegativeTestJSON-354022530 tempest-ImagesNegativeTestJSON-354022530-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.375s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2245.708706] env[63279]: DEBUG nova.compute.manager [None req-d473d10f-3835-4a22-8b4b-615c9316d66e tempest-ImagesNegativeTestJSON-354022530 tempest-ImagesNegativeTestJSON-354022530-project-member] [instance: ecec02e8-8ddf-4997-9547-ccee0db1938b] Start building networks asynchronously for instance. {{(pid=63279) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 2245.711637] env[63279]: DEBUG oslo_concurrency.lockutils [None req-36db34e3-d814-4eaf-b698-fad1b0a7fb81 tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] Acquiring lock "refresh_cache-21aca39e-8513-49bd-93e9-0d101cee591f" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2245.711767] env[63279]: DEBUG oslo_concurrency.lockutils [None req-36db34e3-d814-4eaf-b698-fad1b0a7fb81 tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] Acquired lock "refresh_cache-21aca39e-8513-49bd-93e9-0d101cee591f" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2245.711912] env[63279]: DEBUG nova.network.neutron [None req-36db34e3-d814-4eaf-b698-fad1b0a7fb81 tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] [instance: 21aca39e-8513-49bd-93e9-0d101cee591f] Building network info cache for instance {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2245.727290] env[63279]: DEBUG oslo_vmware.api [None req-2000fa86-5fc9-463b-916e-e234ef84002b tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Task: {'id': task-2087844, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2245.761442] env[63279]: DEBUG nova.network.neutron [-] [instance: 0e12ab9b-a701-4e0f-9d96-939090f50494] Updating instance_info_cache with network_info: [] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2245.864114] env[63279]: DEBUG oslo_vmware.api [None req-b04333d3-6e55-46a1-b066-6cbcaa49e26d tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Task: {'id': task-2087845, 'name': ReconfigVM_Task, 'duration_secs': 0.184537} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2245.864461] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-b04333d3-6e55-46a1-b066-6cbcaa49e26d tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] [instance: a15141bc-424d-48ca-a6d5-c859a3639a0b] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-427738', 'volume_id': 'd14dfff0-b332-4aa4-94bb-62b564929c56', 'name': 'volume-d14dfff0-b332-4aa4-94bb-62b564929c56', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'attached', 'instance': 'a15141bc-424d-48ca-a6d5-c859a3639a0b', 'attached_at': '', 'detached_at': '', 'volume_id': 'd14dfff0-b332-4aa4-94bb-62b564929c56', 'serial': 'd14dfff0-b332-4aa4-94bb-62b564929c56'} {{(pid=63279) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 2245.865089] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-82e9a25a-4af4-46a2-9a20-0afe17091640 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2245.878916] env[63279]: DEBUG oslo_vmware.api [None req-cfa8c991-7512-49ef-9c5d-e5ffe702877c tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Task: {'id': task-2087842, 'name': CloneVM_Task, 'duration_secs': 1.635872} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2245.880400] env[63279]: INFO nova.virt.vmwareapi.vmops [None req-cfa8c991-7512-49ef-9c5d-e5ffe702877c tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 686a7ce2-2d07-411e-91d6-0471c55c3728] Created linked-clone VM from snapshot [ 2245.883511] env[63279]: DEBUG oslo_vmware.api [None req-b04333d3-6e55-46a1-b066-6cbcaa49e26d tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Waiting for the task: (returnval){ [ 2245.883511] env[63279]: value = "task-2087846" [ 2245.883511] env[63279]: _type = "Task" [ 2245.883511] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2245.884238] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc203213-2d13-446d-90d4-c3faf3d5c8d1 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2245.894545] env[63279]: DEBUG nova.virt.vmwareapi.images [None req-cfa8c991-7512-49ef-9c5d-e5ffe702877c tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 686a7ce2-2d07-411e-91d6-0471c55c3728] Uploading image fbac5115-107e-4a20-ba81-7d5fa6c21bc8 {{(pid=63279) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 2245.901860] env[63279]: DEBUG oslo_vmware.api [None req-b04333d3-6e55-46a1-b066-6cbcaa49e26d tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Task: {'id': task-2087846, 'name': Rename_Task} progress is 14%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2245.928044] env[63279]: DEBUG oslo_vmware.rw_handles [None req-cfa8c991-7512-49ef-9c5d-e5ffe702877c tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 2245.928044] env[63279]: value = "vm-427748" [ 2245.928044] env[63279]: _type = "VirtualMachine" [ 2245.928044] env[63279]: }. {{(pid=63279) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 2245.928044] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-b1b7e863-8c13-4aa2-8a0e-e18f7e8e88ee {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2245.935939] env[63279]: DEBUG oslo_concurrency.lockutils [None req-17cc2163-e46e-46a7-8e0b-8af430363191 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Acquiring lock "refresh_cache-6b4a94f9-1cd9-466f-844f-8d692b732abc" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2245.935939] env[63279]: DEBUG oslo_concurrency.lockutils [None req-17cc2163-e46e-46a7-8e0b-8af430363191 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Acquired lock "refresh_cache-6b4a94f9-1cd9-466f-844f-8d692b732abc" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2245.935939] env[63279]: DEBUG nova.network.neutron [None req-17cc2163-e46e-46a7-8e0b-8af430363191 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] [instance: 6b4a94f9-1cd9-466f-844f-8d692b732abc] Building network info cache for instance {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2245.936881] env[63279]: DEBUG oslo_vmware.rw_handles [None req-cfa8c991-7512-49ef-9c5d-e5ffe702877c tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Lease: (returnval){ [ 2245.936881] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52c1360c-f3e8-e79e-0982-bf39e8ed3c88" [ 2245.936881] env[63279]: _type = "HttpNfcLease" [ 2245.936881] env[63279]: } obtained for exporting VM: (result){ [ 2245.936881] env[63279]: value = "vm-427748" [ 2245.936881] env[63279]: _type = "VirtualMachine" [ 2245.936881] env[63279]: }. {{(pid=63279) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 2245.937165] env[63279]: DEBUG oslo_vmware.api [None req-cfa8c991-7512-49ef-9c5d-e5ffe702877c tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Waiting for the lease: (returnval){ [ 2245.937165] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52c1360c-f3e8-e79e-0982-bf39e8ed3c88" [ 2245.937165] env[63279]: _type = "HttpNfcLease" [ 2245.937165] env[63279]: } to be ready. {{(pid=63279) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 2245.944488] env[63279]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 2245.944488] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52c1360c-f3e8-e79e-0982-bf39e8ed3c88" [ 2245.944488] env[63279]: _type = "HttpNfcLease" [ 2245.944488] env[63279]: } is initializing. 
{{(pid=63279) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 2246.054768] env[63279]: DEBUG oslo_concurrency.lockutils [None req-d0ca50dd-6411-451b-b137-4618d6023b66 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2246.055545] env[63279]: DEBUG oslo_concurrency.lockutils [None req-d0ca50dd-6411-451b-b137-4618d6023b66 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2246.055625] env[63279]: DEBUG nova.objects.instance [None req-d0ca50dd-6411-451b-b137-4618d6023b66 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Lazy-loading 'resources' on Instance uuid 9a6abf10-ed12-49c6-aa16-f428da9f9f9d {{(pid=63279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2246.216503] env[63279]: DEBUG nova.compute.utils [None req-d473d10f-3835-4a22-8b4b-615c9316d66e tempest-ImagesNegativeTestJSON-354022530 tempest-ImagesNegativeTestJSON-354022530-project-member] Using /dev/sd instead of None {{(pid=63279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2246.217758] env[63279]: DEBUG nova.network.neutron [-] [instance: f418ff3d-8a5f-4d78-994a-e40b62cfcdd6] Updating instance_info_cache with network_info: [] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2246.220983] env[63279]: DEBUG nova.compute.manager [None req-d473d10f-3835-4a22-8b4b-615c9316d66e tempest-ImagesNegativeTestJSON-354022530 tempest-ImagesNegativeTestJSON-354022530-project-member] [instance: ecec02e8-8ddf-4997-9547-ccee0db1938b] Allocating IP information in the background. {{(pid=63279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 2246.221182] env[63279]: DEBUG nova.network.neutron [None req-d473d10f-3835-4a22-8b4b-615c9316d66e tempest-ImagesNegativeTestJSON-354022530 tempest-ImagesNegativeTestJSON-354022530-project-member] [instance: ecec02e8-8ddf-4997-9547-ccee0db1938b] allocate_for_instance() {{(pid=63279) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2246.235152] env[63279]: DEBUG oslo_vmware.api [None req-2000fa86-5fc9-463b-916e-e234ef84002b tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Task: {'id': task-2087844, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2246.254572] env[63279]: DEBUG nova.network.neutron [None req-36db34e3-d814-4eaf-b698-fad1b0a7fb81 tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] [instance: 21aca39e-8513-49bd-93e9-0d101cee591f] Instance cache missing network info. {{(pid=63279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2246.264347] env[63279]: INFO nova.compute.manager [-] [instance: 0e12ab9b-a701-4e0f-9d96-939090f50494] Took 2.00 seconds to deallocate network for instance. 
[ 2246.265765] env[63279]: DEBUG nova.policy [None req-d473d10f-3835-4a22-8b4b-615c9316d66e tempest-ImagesNegativeTestJSON-354022530 tempest-ImagesNegativeTestJSON-354022530-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '149973de87ab4b1e8a012b0390b5486c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7cac88e483444e25aae821157581dd31', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63279) authorize /opt/stack/nova/nova/policy.py:192}} [ 2246.397394] env[63279]: DEBUG oslo_vmware.api [None req-b04333d3-6e55-46a1-b066-6cbcaa49e26d tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Task: {'id': task-2087846, 'name': Rename_Task, 'duration_secs': 0.156282} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2246.397394] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-b04333d3-6e55-46a1-b066-6cbcaa49e26d tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] [instance: a15141bc-424d-48ca-a6d5-c859a3639a0b] Powering on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2246.397900] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1e0ac2d4-d23c-4a65-ba7d-44446de4f70e {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2246.400403] env[63279]: DEBUG nova.network.neutron [None req-36db34e3-d814-4eaf-b698-fad1b0a7fb81 tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] [instance: 21aca39e-8513-49bd-93e9-0d101cee591f] Updating instance_info_cache with network_info: [{"id": "d6d02bd7-d44f-434c-8e3f-5bdd56aed938", "address": "fa:16:3e:91:cc:87", "network": {"id": "1ebc2797-412f-4da1-ba28-8b54789f9203", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-665948294-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "afe3ab970f3249719809afa9f9b11c1b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "496faa4d-d874-449b-905e-328ddd60b31b", "external-id": "nsx-vlan-transportzone-391", "segmentation_id": 391, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd6d02bd7-d4", "ovs_interfaceid": "d6d02bd7-d44f-434c-8e3f-5bdd56aed938", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2246.410256] env[63279]: DEBUG oslo_vmware.api [None req-b04333d3-6e55-46a1-b066-6cbcaa49e26d tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Waiting for the task: (returnval){ [ 
2246.410256] env[63279]: value = "task-2087848" [ 2246.410256] env[63279]: _type = "Task" [ 2246.410256] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2246.416734] env[63279]: DEBUG oslo_vmware.api [None req-b04333d3-6e55-46a1-b066-6cbcaa49e26d tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Task: {'id': task-2087848, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2246.447926] env[63279]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 2246.447926] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52c1360c-f3e8-e79e-0982-bf39e8ed3c88" [ 2246.447926] env[63279]: _type = "HttpNfcLease" [ 2246.447926] env[63279]: } is ready. {{(pid=63279) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 2246.448290] env[63279]: DEBUG oslo_vmware.rw_handles [None req-cfa8c991-7512-49ef-9c5d-e5ffe702877c tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 2246.448290] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52c1360c-f3e8-e79e-0982-bf39e8ed3c88" [ 2246.448290] env[63279]: _type = "HttpNfcLease" [ 2246.448290] env[63279]: }. {{(pid=63279) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 2246.449048] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d9145fe-2584-47c5-abe8-89e600336a71 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2246.457466] env[63279]: DEBUG oslo_vmware.rw_handles [None req-cfa8c991-7512-49ef-9c5d-e5ffe702877c tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52a4e5f5-e292-af4d-d0d7-f8da47205202/disk-0.vmdk from lease info. {{(pid=63279) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 2246.457665] env[63279]: DEBUG oslo_vmware.rw_handles [None req-cfa8c991-7512-49ef-9c5d-e5ffe702877c tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52a4e5f5-e292-af4d-d0d7-f8da47205202/disk-0.vmdk for reading. {{(pid=63279) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 2246.525223] env[63279]: DEBUG nova.network.neutron [None req-17cc2163-e46e-46a7-8e0b-8af430363191 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] [instance: 6b4a94f9-1cd9-466f-844f-8d692b732abc] Instance cache missing network info. 
{{(pid=63279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2246.563251] env[63279]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-8ca70aae-8f2d-4d5c-b3ed-762a587d9a96 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2246.619597] env[63279]: DEBUG nova.compute.manager [req-4ea5dbc6-03c9-43d5-a871-e6941925d062 req-e68ace08-9d6f-408d-b685-cd798b1c607d service nova] [instance: 21aca39e-8513-49bd-93e9-0d101cee591f] Received event network-vif-plugged-d6d02bd7-d44f-434c-8e3f-5bdd56aed938 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2246.619810] env[63279]: DEBUG oslo_concurrency.lockutils [req-4ea5dbc6-03c9-43d5-a871-e6941925d062 req-e68ace08-9d6f-408d-b685-cd798b1c607d service nova] Acquiring lock "21aca39e-8513-49bd-93e9-0d101cee591f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2246.620084] env[63279]: DEBUG oslo_concurrency.lockutils [req-4ea5dbc6-03c9-43d5-a871-e6941925d062 req-e68ace08-9d6f-408d-b685-cd798b1c607d service nova] Lock "21aca39e-8513-49bd-93e9-0d101cee591f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2246.620258] env[63279]: DEBUG oslo_concurrency.lockutils [req-4ea5dbc6-03c9-43d5-a871-e6941925d062 req-e68ace08-9d6f-408d-b685-cd798b1c607d service nova] Lock "21aca39e-8513-49bd-93e9-0d101cee591f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2246.620425] env[63279]: DEBUG nova.compute.manager [req-4ea5dbc6-03c9-43d5-a871-e6941925d062 req-e68ace08-9d6f-408d-b685-cd798b1c607d service nova] [instance: 21aca39e-8513-49bd-93e9-0d101cee591f] No waiting events found dispatching network-vif-plugged-d6d02bd7-d44f-434c-8e3f-5bdd56aed938 {{(pid=63279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 2246.620585] env[63279]: WARNING nova.compute.manager [req-4ea5dbc6-03c9-43d5-a871-e6941925d062 req-e68ace08-9d6f-408d-b685-cd798b1c607d service nova] [instance: 21aca39e-8513-49bd-93e9-0d101cee591f] Received unexpected event network-vif-plugged-d6d02bd7-d44f-434c-8e3f-5bdd56aed938 for instance with vm_state building and task_state spawning. [ 2246.620741] env[63279]: DEBUG nova.compute.manager [req-4ea5dbc6-03c9-43d5-a871-e6941925d062 req-e68ace08-9d6f-408d-b685-cd798b1c607d service nova] [instance: 21aca39e-8513-49bd-93e9-0d101cee591f] Received event network-changed-d6d02bd7-d44f-434c-8e3f-5bdd56aed938 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2246.620913] env[63279]: DEBUG nova.compute.manager [req-4ea5dbc6-03c9-43d5-a871-e6941925d062 req-e68ace08-9d6f-408d-b685-cd798b1c607d service nova] [instance: 21aca39e-8513-49bd-93e9-0d101cee591f] Refreshing instance network info cache due to event network-changed-d6d02bd7-d44f-434c-8e3f-5bdd56aed938. 
{{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11655}} [ 2246.621127] env[63279]: DEBUG oslo_concurrency.lockutils [req-4ea5dbc6-03c9-43d5-a871-e6941925d062 req-e68ace08-9d6f-408d-b685-cd798b1c607d service nova] Acquiring lock "refresh_cache-21aca39e-8513-49bd-93e9-0d101cee591f" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2246.639751] env[63279]: DEBUG nova.network.neutron [None req-d473d10f-3835-4a22-8b4b-615c9316d66e tempest-ImagesNegativeTestJSON-354022530 tempest-ImagesNegativeTestJSON-354022530-project-member] [instance: ecec02e8-8ddf-4997-9547-ccee0db1938b] Successfully created port: 488726d1-1ee0-451b-be4a-b5de44f13dda {{(pid=63279) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2246.722116] env[63279]: DEBUG nova.compute.manager [None req-d473d10f-3835-4a22-8b4b-615c9316d66e tempest-ImagesNegativeTestJSON-354022530 tempest-ImagesNegativeTestJSON-354022530-project-member] [instance: ecec02e8-8ddf-4997-9547-ccee0db1938b] Start building block device mappings for instance. {{(pid=63279) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 2246.724986] env[63279]: INFO nova.compute.manager [-] [instance: f418ff3d-8a5f-4d78-994a-e40b62cfcdd6] Took 1.89 seconds to deallocate network for instance. [ 2246.743485] env[63279]: DEBUG oslo_vmware.api [None req-2000fa86-5fc9-463b-916e-e234ef84002b tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Task: {'id': task-2087844, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.518447} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2246.746508] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-2000fa86-5fc9-463b-916e-e234ef84002b tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] 48794f65-355c-4cad-a83f-6b8cc327dd4d/48794f65-355c-4cad-a83f-6b8cc327dd4d.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2246.746817] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-2000fa86-5fc9-463b-916e-e234ef84002b tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: 48794f65-355c-4cad-a83f-6b8cc327dd4d] Extending root virtual disk to 1048576 {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2246.748112] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-bcf0556f-72db-4d66-9a4c-7500784be619 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2246.757183] env[63279]: DEBUG oslo_vmware.api [None req-2000fa86-5fc9-463b-916e-e234ef84002b tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Waiting for the task: (returnval){ [ 2246.757183] env[63279]: value = "task-2087849" [ 2246.757183] env[63279]: _type = "Task" [ 2246.757183] env[63279]: } to complete. 
{{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2246.771168] env[63279]: DEBUG oslo_vmware.api [None req-2000fa86-5fc9-463b-916e-e234ef84002b tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Task: {'id': task-2087849, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2246.780953] env[63279]: DEBUG oslo_concurrency.lockutils [None req-7f34f191-f074-43df-82d6-b7c057695f88 tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2246.884629] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19725325-6384-4209-8a06-7bd76c8a67a0 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2246.892477] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4dc4bcf8-5ed9-4750-8e73-13fbb92ec2de {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2246.934962] env[63279]: DEBUG oslo_concurrency.lockutils [None req-c0088455-9b66-4574-b3c8-cfeeda174077 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Acquiring lock "1fca4e5c-fe2c-4b61-bed4-52c7770def7c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2246.935421] env[63279]: DEBUG oslo_concurrency.lockutils [None req-c0088455-9b66-4574-b3c8-cfeeda174077 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Lock "1fca4e5c-fe2c-4b61-bed4-52c7770def7c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2246.937140] env[63279]: DEBUG oslo_concurrency.lockutils [None req-36db34e3-d814-4eaf-b698-fad1b0a7fb81 tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] Releasing lock "refresh_cache-21aca39e-8513-49bd-93e9-0d101cee591f" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2246.937455] env[63279]: DEBUG nova.compute.manager [None req-36db34e3-d814-4eaf-b698-fad1b0a7fb81 tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] [instance: 21aca39e-8513-49bd-93e9-0d101cee591f] Instance network_info: |[{"id": "d6d02bd7-d44f-434c-8e3f-5bdd56aed938", "address": "fa:16:3e:91:cc:87", "network": {"id": "1ebc2797-412f-4da1-ba28-8b54789f9203", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-665948294-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], 
"meta": {"injected": false, "tenant_id": "afe3ab970f3249719809afa9f9b11c1b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "496faa4d-d874-449b-905e-328ddd60b31b", "external-id": "nsx-vlan-transportzone-391", "segmentation_id": 391, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd6d02bd7-d4", "ovs_interfaceid": "d6d02bd7-d44f-434c-8e3f-5bdd56aed938", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 2246.938951] env[63279]: DEBUG oslo_concurrency.lockutils [req-4ea5dbc6-03c9-43d5-a871-e6941925d062 req-e68ace08-9d6f-408d-b685-cd798b1c607d service nova] Acquired lock "refresh_cache-21aca39e-8513-49bd-93e9-0d101cee591f" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2246.939930] env[63279]: DEBUG nova.network.neutron [req-4ea5dbc6-03c9-43d5-a871-e6941925d062 req-e68ace08-9d6f-408d-b685-cd798b1c607d service nova] [instance: 21aca39e-8513-49bd-93e9-0d101cee591f] Refreshing network info cache for port d6d02bd7-d44f-434c-8e3f-5bdd56aed938 {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2246.941154] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-36db34e3-d814-4eaf-b698-fad1b0a7fb81 tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] [instance: 21aca39e-8513-49bd-93e9-0d101cee591f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:91:cc:87', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '496faa4d-d874-449b-905e-328ddd60b31b', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd6d02bd7-d44f-434c-8e3f-5bdd56aed938', 'vif_model': 'vmxnet3'}] {{(pid=63279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2246.952807] env[63279]: DEBUG oslo.service.loopingcall [None req-36db34e3-d814-4eaf-b698-fad1b0a7fb81 tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2246.956659] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71fe20da-13fe-415e-81c1-283e635f24b6 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2246.960798] env[63279]: DEBUG nova.network.neutron [None req-17cc2163-e46e-46a7-8e0b-8af430363191 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] [instance: 6b4a94f9-1cd9-466f-844f-8d692b732abc] Updating instance_info_cache with network_info: [{"id": "3fe8ec9b-e8c9-486c-9d96-785b9a7cb897", "address": "fa:16:3e:16:d7:c4", "network": {"id": "2caeac4f-4d6f-49f6-ad75-055171bad9b0", "bridge": "br-int", "label": "tempest-ServersTestJSON-1264030443-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4fcef39e334249afb9636455802059c5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9fa933df-d66f-485e-8cf9-eda7f1a7f283", "external-id": "nsx-vlan-transportzone-87", "segmentation_id": 87, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3fe8ec9b-e8", "ovs_interfaceid": "3fe8ec9b-e8c9-486c-9d96-785b9a7cb897", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2246.965335] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 21aca39e-8513-49bd-93e9-0d101cee591f] Creating VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2246.965844] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-82acf33a-c847-4f99-950c-a9b0f9bb0003 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2246.987793] env[63279]: DEBUG oslo_vmware.api [None req-b04333d3-6e55-46a1-b066-6cbcaa49e26d tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Task: {'id': task-2087848, 'name': PowerOnVM_Task, 'duration_secs': 0.536425} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2246.991339] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-b04333d3-6e55-46a1-b066-6cbcaa49e26d tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] [instance: a15141bc-424d-48ca-a6d5-c859a3639a0b] Powered on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2246.994062] env[63279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2246.994062] env[63279]: value = "task-2087850" [ 2246.994062] env[63279]: _type = "Task" [ 2246.994062] env[63279]: } to complete. 
{{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2246.995646] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce6c362b-8e66-4967-860a-467c0dd15faf {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2247.018030] env[63279]: DEBUG nova.compute.provider_tree [None req-d0ca50dd-6411-451b-b137-4618d6023b66 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Inventory has not changed in ProviderTree for provider: 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2247.021821] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087850, 'name': CreateVM_Task} progress is 6%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2247.118551] env[63279]: DEBUG nova.compute.manager [None req-b04333d3-6e55-46a1-b066-6cbcaa49e26d tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] [instance: a15141bc-424d-48ca-a6d5-c859a3639a0b] Checking state {{(pid=63279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2247.119486] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0bd1093f-5aa3-48c6-8957-ec6f34369a7c {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2247.238899] env[63279]: DEBUG oslo_concurrency.lockutils [None req-4f1b70e3-4a9e-4d3a-9198-bdea7c3cc57e tempest-ServersTestMultiNic-465204663 tempest-ServersTestMultiNic-465204663-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2247.269579] env[63279]: DEBUG oslo_vmware.api [None req-2000fa86-5fc9-463b-916e-e234ef84002b tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Task: {'id': task-2087849, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.074053} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2247.269998] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-2000fa86-5fc9-463b-916e-e234ef84002b tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: 48794f65-355c-4cad-a83f-6b8cc327dd4d] Extended root virtual disk {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2247.271229] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5229005e-c101-4813-aedc-68d61de1fb2e {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2247.281536] env[63279]: DEBUG nova.compute.manager [req-6aa35a46-693d-4a9b-9ffe-11fe2d815fd6 req-7e7ea7f3-96a3-444c-8eb3-00d698c085f6 service nova] [instance: 0e12ab9b-a701-4e0f-9d96-939090f50494] Received event network-vif-deleted-d31aaccc-4f75-4bc4-898d-0f2680b13372 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2247.282038] env[63279]: DEBUG nova.compute.manager [req-6aa35a46-693d-4a9b-9ffe-11fe2d815fd6 req-7e7ea7f3-96a3-444c-8eb3-00d698c085f6 service nova] [instance: 6b4a94f9-1cd9-466f-844f-8d692b732abc] Received event network-changed-3fe8ec9b-e8c9-486c-9d96-785b9a7cb897 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2247.282617] env[63279]: DEBUG nova.compute.manager [req-6aa35a46-693d-4a9b-9ffe-11fe2d815fd6 req-7e7ea7f3-96a3-444c-8eb3-00d698c085f6 service nova] [instance: 6b4a94f9-1cd9-466f-844f-8d692b732abc] Refreshing instance network info cache due to event network-changed-3fe8ec9b-e8c9-486c-9d96-785b9a7cb897. {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11655}} [ 2247.282617] env[63279]: DEBUG oslo_concurrency.lockutils [req-6aa35a46-693d-4a9b-9ffe-11fe2d815fd6 req-7e7ea7f3-96a3-444c-8eb3-00d698c085f6 service nova] Acquiring lock "refresh_cache-6b4a94f9-1cd9-466f-844f-8d692b732abc" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2247.306094] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-2000fa86-5fc9-463b-916e-e234ef84002b tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: 48794f65-355c-4cad-a83f-6b8cc327dd4d] Reconfiguring VM instance instance-0000005d to attach disk [datastore1] 48794f65-355c-4cad-a83f-6b8cc327dd4d/48794f65-355c-4cad-a83f-6b8cc327dd4d.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2247.307285] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0b4af911-7ac6-4341-af14-d95056472a43 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2247.328046] env[63279]: DEBUG oslo_vmware.api [None req-2000fa86-5fc9-463b-916e-e234ef84002b tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Waiting for the task: (returnval){ [ 2247.328046] env[63279]: value = "task-2087851" [ 2247.328046] env[63279]: _type = "Task" [ 2247.328046] env[63279]: } to complete. 
{{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2247.338204] env[63279]: DEBUG oslo_vmware.api [None req-2000fa86-5fc9-463b-916e-e234ef84002b tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Task: {'id': task-2087851, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2247.440198] env[63279]: DEBUG nova.compute.manager [None req-c0088455-9b66-4574-b3c8-cfeeda174077 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: 1fca4e5c-fe2c-4b61-bed4-52c7770def7c] Starting instance... {{(pid=63279) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 2247.468807] env[63279]: DEBUG oslo_concurrency.lockutils [None req-17cc2163-e46e-46a7-8e0b-8af430363191 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Releasing lock "refresh_cache-6b4a94f9-1cd9-466f-844f-8d692b732abc" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2247.469480] env[63279]: DEBUG nova.compute.manager [None req-17cc2163-e46e-46a7-8e0b-8af430363191 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] [instance: 6b4a94f9-1cd9-466f-844f-8d692b732abc] Instance network_info: |[{"id": "3fe8ec9b-e8c9-486c-9d96-785b9a7cb897", "address": "fa:16:3e:16:d7:c4", "network": {"id": "2caeac4f-4d6f-49f6-ad75-055171bad9b0", "bridge": "br-int", "label": "tempest-ServersTestJSON-1264030443-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4fcef39e334249afb9636455802059c5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9fa933df-d66f-485e-8cf9-eda7f1a7f283", "external-id": "nsx-vlan-transportzone-87", "segmentation_id": 87, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3fe8ec9b-e8", "ovs_interfaceid": "3fe8ec9b-e8c9-486c-9d96-785b9a7cb897", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 2247.469863] env[63279]: DEBUG oslo_concurrency.lockutils [req-6aa35a46-693d-4a9b-9ffe-11fe2d815fd6 req-7e7ea7f3-96a3-444c-8eb3-00d698c085f6 service nova] Acquired lock "refresh_cache-6b4a94f9-1cd9-466f-844f-8d692b732abc" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2247.470107] env[63279]: DEBUG nova.network.neutron [req-6aa35a46-693d-4a9b-9ffe-11fe2d815fd6 req-7e7ea7f3-96a3-444c-8eb3-00d698c085f6 service nova] [instance: 6b4a94f9-1cd9-466f-844f-8d692b732abc] Refreshing network info cache for port 3fe8ec9b-e8c9-486c-9d96-785b9a7cb897 {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2247.471691] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-17cc2163-e46e-46a7-8e0b-8af430363191 tempest-ServersTestJSON-1420611653 
tempest-ServersTestJSON-1420611653-project-member] [instance: 6b4a94f9-1cd9-466f-844f-8d692b732abc] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:16:d7:c4', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '9fa933df-d66f-485e-8cf9-eda7f1a7f283', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '3fe8ec9b-e8c9-486c-9d96-785b9a7cb897', 'vif_model': 'vmxnet3'}] {{(pid=63279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2247.483499] env[63279]: DEBUG oslo.service.loopingcall [None req-17cc2163-e46e-46a7-8e0b-8af430363191 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2247.486040] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6b4a94f9-1cd9-466f-844f-8d692b732abc] Creating VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2247.486717] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-374f4067-7525-483e-bcf3-cb7b7bdd41d3 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2247.519461] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087850, 'name': CreateVM_Task, 'duration_secs': 0.480954} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2247.522716] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 21aca39e-8513-49bd-93e9-0d101cee591f] Created VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2247.523009] env[63279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2247.523009] env[63279]: value = "task-2087852" [ 2247.523009] env[63279]: _type = "Task" [ 2247.523009] env[63279]: } to complete. 
{{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2247.523957] env[63279]: DEBUG nova.scheduler.client.report [None req-d0ca50dd-6411-451b-b137-4618d6023b66 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Inventory has not changed for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2247.527673] env[63279]: DEBUG oslo_concurrency.lockutils [None req-36db34e3-d814-4eaf-b698-fad1b0a7fb81 tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2247.527848] env[63279]: DEBUG oslo_concurrency.lockutils [None req-36db34e3-d814-4eaf-b698-fad1b0a7fb81 tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2247.528489] env[63279]: DEBUG oslo_concurrency.lockutils [None req-36db34e3-d814-4eaf-b698-fad1b0a7fb81 tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2247.529685] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-04456b8d-20c3-4306-93ce-1ef32e26f065 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2247.540278] env[63279]: DEBUG oslo_vmware.api [None req-36db34e3-d814-4eaf-b698-fad1b0a7fb81 tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] Waiting for the task: (returnval){ [ 2247.540278] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]524ac36e-eb5e-a7e0-0546-5a2de37244bc" [ 2247.540278] env[63279]: _type = "Task" [ 2247.540278] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2247.544071] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087852, 'name': CreateVM_Task} progress is 6%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2247.556956] env[63279]: DEBUG oslo_vmware.api [None req-36db34e3-d814-4eaf-b698-fad1b0a7fb81 tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]524ac36e-eb5e-a7e0-0546-5a2de37244bc, 'name': SearchDatastore_Task, 'duration_secs': 0.012174} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2247.556956] env[63279]: DEBUG oslo_concurrency.lockutils [None req-36db34e3-d814-4eaf-b698-fad1b0a7fb81 tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2247.557141] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-36db34e3-d814-4eaf-b698-fad1b0a7fb81 tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] [instance: 21aca39e-8513-49bd-93e9-0d101cee591f] Processing image 30887889-e45b-4f67-8b3c-16216e594a90 {{(pid=63279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2247.557313] env[63279]: DEBUG oslo_concurrency.lockutils [None req-36db34e3-d814-4eaf-b698-fad1b0a7fb81 tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2247.557631] env[63279]: DEBUG oslo_concurrency.lockutils [None req-36db34e3-d814-4eaf-b698-fad1b0a7fb81 tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2247.557822] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-36db34e3-d814-4eaf-b698-fad1b0a7fb81 tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2247.558235] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5a1dc410-5be3-4938-a13d-f62e7e9ea11e {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2247.568274] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-36db34e3-d814-4eaf-b698-fad1b0a7fb81 tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2247.568885] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-36db34e3-d814-4eaf-b698-fad1b0a7fb81 tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2247.569951] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ffb509c7-443f-42c2-9138-1b1ad22e448d {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2247.576451] env[63279]: DEBUG oslo_vmware.api [None req-36db34e3-d814-4eaf-b698-fad1b0a7fb81 tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] Waiting for the task: (returnval){ [ 2247.576451] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]5279587c-83cc-4e43-d48d-040a42fdc4fe" [ 2247.576451] env[63279]: _type = "Task" [ 2247.576451] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2247.585332] env[63279]: DEBUG oslo_vmware.api [None req-36db34e3-d814-4eaf-b698-fad1b0a7fb81 tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]5279587c-83cc-4e43-d48d-040a42fdc4fe, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2247.639993] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b04333d3-6e55-46a1-b066-6cbcaa49e26d tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Lock "a15141bc-424d-48ca-a6d5-c859a3639a0b" "released" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: held 33.618s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2247.733948] env[63279]: DEBUG nova.compute.manager [None req-d473d10f-3835-4a22-8b4b-615c9316d66e tempest-ImagesNegativeTestJSON-354022530 tempest-ImagesNegativeTestJSON-354022530-project-member] [instance: ecec02e8-8ddf-4997-9547-ccee0db1938b] Start spawning the instance on the hypervisor. 
{{(pid=63279) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 2247.763585] env[63279]: DEBUG nova.virt.hardware [None req-d473d10f-3835-4a22-8b4b-615c9316d66e tempest-ImagesNegativeTestJSON-354022530 tempest-ImagesNegativeTestJSON-354022530-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-13T17:30:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-13T17:30:01Z,direct_url=,disk_format='vmdk',id=30887889-e45b-4f67-8b3c-16216e594a90,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a935e86eee0a4c38adfe0367d2097a61',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-13T17:30:02Z,virtual_size=,visibility=), allow threads: False {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2247.763942] env[63279]: DEBUG nova.virt.hardware [None req-d473d10f-3835-4a22-8b4b-615c9316d66e tempest-ImagesNegativeTestJSON-354022530 tempest-ImagesNegativeTestJSON-354022530-project-member] Flavor limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2247.764135] env[63279]: DEBUG nova.virt.hardware [None req-d473d10f-3835-4a22-8b4b-615c9316d66e tempest-ImagesNegativeTestJSON-354022530 tempest-ImagesNegativeTestJSON-354022530-project-member] Image limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2247.764464] env[63279]: DEBUG nova.virt.hardware [None req-d473d10f-3835-4a22-8b4b-615c9316d66e tempest-ImagesNegativeTestJSON-354022530 tempest-ImagesNegativeTestJSON-354022530-project-member] Flavor pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2247.764525] env[63279]: DEBUG nova.virt.hardware [None req-d473d10f-3835-4a22-8b4b-615c9316d66e tempest-ImagesNegativeTestJSON-354022530 tempest-ImagesNegativeTestJSON-354022530-project-member] Image pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2247.764680] env[63279]: DEBUG nova.virt.hardware [None req-d473d10f-3835-4a22-8b4b-615c9316d66e tempest-ImagesNegativeTestJSON-354022530 tempest-ImagesNegativeTestJSON-354022530-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2247.764892] env[63279]: DEBUG nova.virt.hardware [None req-d473d10f-3835-4a22-8b4b-615c9316d66e tempest-ImagesNegativeTestJSON-354022530 tempest-ImagesNegativeTestJSON-354022530-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 2247.765076] env[63279]: DEBUG nova.virt.hardware [None req-d473d10f-3835-4a22-8b4b-615c9316d66e tempest-ImagesNegativeTestJSON-354022530 tempest-ImagesNegativeTestJSON-354022530-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 2247.765277] env[63279]: DEBUG nova.virt.hardware [None 
req-d473d10f-3835-4a22-8b4b-615c9316d66e tempest-ImagesNegativeTestJSON-354022530 tempest-ImagesNegativeTestJSON-354022530-project-member] Got 1 possible topologies {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2247.765471] env[63279]: DEBUG nova.virt.hardware [None req-d473d10f-3835-4a22-8b4b-615c9316d66e tempest-ImagesNegativeTestJSON-354022530 tempest-ImagesNegativeTestJSON-354022530-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2247.765660] env[63279]: DEBUG nova.virt.hardware [None req-d473d10f-3835-4a22-8b4b-615c9316d66e tempest-ImagesNegativeTestJSON-354022530 tempest-ImagesNegativeTestJSON-354022530-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2247.767071] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1178ef2a-eea5-4263-b794-bbe6f0b458f6 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2247.775733] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd964d82-bdb2-46ec-8fca-a0c38f36077f {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2247.792890] env[63279]: DEBUG nova.network.neutron [req-4ea5dbc6-03c9-43d5-a871-e6941925d062 req-e68ace08-9d6f-408d-b685-cd798b1c607d service nova] [instance: 21aca39e-8513-49bd-93e9-0d101cee591f] Updated VIF entry in instance network info cache for port d6d02bd7-d44f-434c-8e3f-5bdd56aed938. 
{{(pid=63279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2247.793293] env[63279]: DEBUG nova.network.neutron [req-4ea5dbc6-03c9-43d5-a871-e6941925d062 req-e68ace08-9d6f-408d-b685-cd798b1c607d service nova] [instance: 21aca39e-8513-49bd-93e9-0d101cee591f] Updating instance_info_cache with network_info: [{"id": "d6d02bd7-d44f-434c-8e3f-5bdd56aed938", "address": "fa:16:3e:91:cc:87", "network": {"id": "1ebc2797-412f-4da1-ba28-8b54789f9203", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-665948294-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "afe3ab970f3249719809afa9f9b11c1b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "496faa4d-d874-449b-905e-328ddd60b31b", "external-id": "nsx-vlan-transportzone-391", "segmentation_id": 391, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd6d02bd7-d4", "ovs_interfaceid": "d6d02bd7-d44f-434c-8e3f-5bdd56aed938", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2247.838716] env[63279]: DEBUG oslo_vmware.api [None req-2000fa86-5fc9-463b-916e-e234ef84002b tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Task: {'id': task-2087851, 'name': ReconfigVM_Task, 'duration_secs': 0.409922} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2247.839015] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-2000fa86-5fc9-463b-916e-e234ef84002b tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: 48794f65-355c-4cad-a83f-6b8cc327dd4d] Reconfigured VM instance instance-0000005d to attach disk [datastore1] 48794f65-355c-4cad-a83f-6b8cc327dd4d/48794f65-355c-4cad-a83f-6b8cc327dd4d.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2247.839757] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f9d757e5-4523-4e71-b64e-9073a466b9d2 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2247.846423] env[63279]: DEBUG oslo_vmware.api [None req-2000fa86-5fc9-463b-916e-e234ef84002b tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Waiting for the task: (returnval){ [ 2247.846423] env[63279]: value = "task-2087853" [ 2247.846423] env[63279]: _type = "Task" [ 2247.846423] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2247.855473] env[63279]: DEBUG oslo_vmware.api [None req-2000fa86-5fc9-463b-916e-e234ef84002b tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Task: {'id': task-2087853, 'name': Rename_Task} progress is 5%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2247.964893] env[63279]: DEBUG oslo_concurrency.lockutils [None req-c0088455-9b66-4574-b3c8-cfeeda174077 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2248.034894] env[63279]: DEBUG oslo_concurrency.lockutils [None req-d0ca50dd-6411-451b-b137-4618d6023b66 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.978s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2248.038981] env[63279]: DEBUG oslo_concurrency.lockutils [None req-7f34f191-f074-43df-82d6-b7c057695f88 tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.258s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2248.039100] env[63279]: DEBUG nova.objects.instance [None req-7f34f191-f074-43df-82d6-b7c057695f88 tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] Lazy-loading 'resources' on Instance uuid 0e12ab9b-a701-4e0f-9d96-939090f50494 {{(pid=63279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2248.047144] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087852, 'name': CreateVM_Task} progress is 25%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2248.063089] env[63279]: INFO nova.scheduler.client.report [None req-d0ca50dd-6411-451b-b137-4618d6023b66 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Deleted allocations for instance 9a6abf10-ed12-49c6-aa16-f428da9f9f9d [ 2248.092334] env[63279]: DEBUG oslo_vmware.api [None req-36db34e3-d814-4eaf-b698-fad1b0a7fb81 tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]5279587c-83cc-4e43-d48d-040a42fdc4fe, 'name': SearchDatastore_Task, 'duration_secs': 0.012382} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2248.092334] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4c525c46-4eb6-4948-a68e-19c95fef7785 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2248.098548] env[63279]: DEBUG oslo_vmware.api [None req-36db34e3-d814-4eaf-b698-fad1b0a7fb81 tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] Waiting for the task: (returnval){ [ 2248.098548] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52a2d5be-b0d6-bd89-5649-df0c1a652237" [ 2248.098548] env[63279]: _type = "Task" [ 2248.098548] env[63279]: } to complete. 
{{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2248.107753] env[63279]: DEBUG oslo_vmware.api [None req-36db34e3-d814-4eaf-b698-fad1b0a7fb81 tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52a2d5be-b0d6-bd89-5649-df0c1a652237, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2248.244375] env[63279]: DEBUG nova.network.neutron [req-6aa35a46-693d-4a9b-9ffe-11fe2d815fd6 req-7e7ea7f3-96a3-444c-8eb3-00d698c085f6 service nova] [instance: 6b4a94f9-1cd9-466f-844f-8d692b732abc] Updated VIF entry in instance network info cache for port 3fe8ec9b-e8c9-486c-9d96-785b9a7cb897. {{(pid=63279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2248.244375] env[63279]: DEBUG nova.network.neutron [req-6aa35a46-693d-4a9b-9ffe-11fe2d815fd6 req-7e7ea7f3-96a3-444c-8eb3-00d698c085f6 service nova] [instance: 6b4a94f9-1cd9-466f-844f-8d692b732abc] Updating instance_info_cache with network_info: [{"id": "3fe8ec9b-e8c9-486c-9d96-785b9a7cb897", "address": "fa:16:3e:16:d7:c4", "network": {"id": "2caeac4f-4d6f-49f6-ad75-055171bad9b0", "bridge": "br-int", "label": "tempest-ServersTestJSON-1264030443-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4fcef39e334249afb9636455802059c5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9fa933df-d66f-485e-8cf9-eda7f1a7f283", "external-id": "nsx-vlan-transportzone-87", "segmentation_id": 87, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3fe8ec9b-e8", "ovs_interfaceid": "3fe8ec9b-e8c9-486c-9d96-785b9a7cb897", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2248.295768] env[63279]: DEBUG oslo_concurrency.lockutils [req-4ea5dbc6-03c9-43d5-a871-e6941925d062 req-e68ace08-9d6f-408d-b685-cd798b1c607d service nova] Releasing lock "refresh_cache-21aca39e-8513-49bd-93e9-0d101cee591f" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2248.296258] env[63279]: DEBUG nova.compute.manager [req-4ea5dbc6-03c9-43d5-a871-e6941925d062 req-e68ace08-9d6f-408d-b685-cd798b1c607d service nova] [instance: f418ff3d-8a5f-4d78-994a-e40b62cfcdd6] Received event network-vif-deleted-7d3d5ab9-d32b-45e9-a056-b6aa86e777ed {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2248.296591] env[63279]: DEBUG nova.compute.manager [req-4ea5dbc6-03c9-43d5-a871-e6941925d062 req-e68ace08-9d6f-408d-b685-cd798b1c607d service nova] [instance: f418ff3d-8a5f-4d78-994a-e40b62cfcdd6] Received event network-vif-deleted-536b76b2-9f06-4910-aba3-8d499c43d065 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2248.360138] env[63279]: DEBUG oslo_vmware.api [None 
req-2000fa86-5fc9-463b-916e-e234ef84002b tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Task: {'id': task-2087853, 'name': Rename_Task, 'duration_secs': 0.191522} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2248.360138] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-2000fa86-5fc9-463b-916e-e234ef84002b tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: 48794f65-355c-4cad-a83f-6b8cc327dd4d] Powering on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2248.360138] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ffaab723-262f-44b9-826e-e54e81ddc27d {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2248.366414] env[63279]: DEBUG oslo_vmware.api [None req-2000fa86-5fc9-463b-916e-e234ef84002b tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Waiting for the task: (returnval){ [ 2248.366414] env[63279]: value = "task-2087854" [ 2248.366414] env[63279]: _type = "Task" [ 2248.366414] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2248.374468] env[63279]: DEBUG oslo_vmware.api [None req-2000fa86-5fc9-463b-916e-e234ef84002b tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Task: {'id': task-2087854, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2248.410416] env[63279]: DEBUG nova.network.neutron [None req-d473d10f-3835-4a22-8b4b-615c9316d66e tempest-ImagesNegativeTestJSON-354022530 tempest-ImagesNegativeTestJSON-354022530-project-member] [instance: ecec02e8-8ddf-4997-9547-ccee0db1938b] Successfully updated port: 488726d1-1ee0-451b-be4a-b5de44f13dda {{(pid=63279) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2248.540552] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087852, 'name': CreateVM_Task, 'duration_secs': 0.620076} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2248.540918] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6b4a94f9-1cd9-466f-844f-8d692b732abc] Created VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2248.541523] env[63279]: DEBUG oslo_concurrency.lockutils [None req-17cc2163-e46e-46a7-8e0b-8af430363191 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2248.541697] env[63279]: DEBUG oslo_concurrency.lockutils [None req-17cc2163-e46e-46a7-8e0b-8af430363191 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2248.542085] env[63279]: DEBUG oslo_concurrency.lockutils [None req-17cc2163-e46e-46a7-8e0b-8af430363191 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2248.542387] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-19775f3f-fe18-4f50-a8a2-ad1b4257b784 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2248.550658] env[63279]: DEBUG oslo_vmware.api [None req-17cc2163-e46e-46a7-8e0b-8af430363191 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Waiting for the task: (returnval){ [ 2248.550658] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52e2edfe-b61c-b8ef-41db-d6433b2f384c" [ 2248.550658] env[63279]: _type = "Task" [ 2248.550658] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2248.559474] env[63279]: DEBUG oslo_vmware.api [None req-17cc2163-e46e-46a7-8e0b-8af430363191 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52e2edfe-b61c-b8ef-41db-d6433b2f384c, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2248.571078] env[63279]: DEBUG oslo_concurrency.lockutils [None req-d0ca50dd-6411-451b-b137-4618d6023b66 tempest-ServerDiskConfigTestJSON-726033015 tempest-ServerDiskConfigTestJSON-726033015-project-member] Lock "9a6abf10-ed12-49c6-aa16-f428da9f9f9d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 15.149s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2248.609939] env[63279]: DEBUG oslo_vmware.api [None req-36db34e3-d814-4eaf-b698-fad1b0a7fb81 tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52a2d5be-b0d6-bd89-5649-df0c1a652237, 'name': SearchDatastore_Task, 'duration_secs': 0.019845} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2248.613092] env[63279]: DEBUG oslo_concurrency.lockutils [None req-36db34e3-d814-4eaf-b698-fad1b0a7fb81 tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2248.613434] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-36db34e3-d814-4eaf-b698-fad1b0a7fb81 tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] 21aca39e-8513-49bd-93e9-0d101cee591f/21aca39e-8513-49bd-93e9-0d101cee591f.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2248.614979] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6dc0183e-06ec-4fa1-b534-8281b8d7034e {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2248.621779] env[63279]: DEBUG oslo_vmware.api [None req-36db34e3-d814-4eaf-b698-fad1b0a7fb81 tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] Waiting for the task: (returnval){ [ 2248.621779] env[63279]: value = "task-2087855" [ 2248.621779] env[63279]: _type = "Task" [ 2248.621779] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2248.633598] env[63279]: DEBUG oslo_vmware.api [None req-36db34e3-d814-4eaf-b698-fad1b0a7fb81 tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] Task: {'id': task-2087855, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2248.746097] env[63279]: DEBUG oslo_concurrency.lockutils [req-6aa35a46-693d-4a9b-9ffe-11fe2d815fd6 req-7e7ea7f3-96a3-444c-8eb3-00d698c085f6 service nova] Releasing lock "refresh_cache-6b4a94f9-1cd9-466f-844f-8d692b732abc" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2248.805844] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21830293-5be8-4129-a2b2-dc495a48a5dd {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2248.813937] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40270cf8-04fd-41ff-b6a1-1476fccf8ff5 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2248.846381] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8f37adc-946b-4138-b16f-b15a65a66377 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2248.855048] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06261b2c-9e98-492d-9743-aa78c6c7c2fb {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2248.870983] env[63279]: DEBUG nova.compute.provider_tree [None req-7f34f191-f074-43df-82d6-b7c057695f88 tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] Inventory has not changed in ProviderTree for provider: 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2248.881677] env[63279]: DEBUG oslo_vmware.api [None req-2000fa86-5fc9-463b-916e-e234ef84002b tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Task: {'id': task-2087854, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2248.917784] env[63279]: DEBUG oslo_concurrency.lockutils [None req-d473d10f-3835-4a22-8b4b-615c9316d66e tempest-ImagesNegativeTestJSON-354022530 tempest-ImagesNegativeTestJSON-354022530-project-member] Acquiring lock "refresh_cache-ecec02e8-8ddf-4997-9547-ccee0db1938b" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2248.917784] env[63279]: DEBUG oslo_concurrency.lockutils [None req-d473d10f-3835-4a22-8b4b-615c9316d66e tempest-ImagesNegativeTestJSON-354022530 tempest-ImagesNegativeTestJSON-354022530-project-member] Acquired lock "refresh_cache-ecec02e8-8ddf-4997-9547-ccee0db1938b" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2248.917784] env[63279]: DEBUG nova.network.neutron [None req-d473d10f-3835-4a22-8b4b-615c9316d66e tempest-ImagesNegativeTestJSON-354022530 tempest-ImagesNegativeTestJSON-354022530-project-member] [instance: ecec02e8-8ddf-4997-9547-ccee0db1938b] Building network info cache for instance {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2249.061500] env[63279]: DEBUG oslo_vmware.api [None req-17cc2163-e46e-46a7-8e0b-8af430363191 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52e2edfe-b61c-b8ef-41db-d6433b2f384c, 'name': SearchDatastore_Task, 'duration_secs': 0.031196} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2249.061777] env[63279]: DEBUG oslo_concurrency.lockutils [None req-17cc2163-e46e-46a7-8e0b-8af430363191 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2249.062035] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-17cc2163-e46e-46a7-8e0b-8af430363191 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] [instance: 6b4a94f9-1cd9-466f-844f-8d692b732abc] Processing image 30887889-e45b-4f67-8b3c-16216e594a90 {{(pid=63279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2249.062374] env[63279]: DEBUG oslo_concurrency.lockutils [None req-17cc2163-e46e-46a7-8e0b-8af430363191 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2249.062478] env[63279]: DEBUG oslo_concurrency.lockutils [None req-17cc2163-e46e-46a7-8e0b-8af430363191 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2249.062665] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-17cc2163-e46e-46a7-8e0b-8af430363191 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Creating directory with path 
[datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2249.063342] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b26ec64e-53a2-481a-aabd-2cbf70e4160e {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2249.079397] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-17cc2163-e46e-46a7-8e0b-8af430363191 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2249.079548] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-17cc2163-e46e-46a7-8e0b-8af430363191 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2249.080455] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4690b0ba-995b-4fce-9788-33fb4759c2ab {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2249.087416] env[63279]: DEBUG oslo_vmware.api [None req-17cc2163-e46e-46a7-8e0b-8af430363191 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Waiting for the task: (returnval){ [ 2249.087416] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52f589f1-ef9f-2fec-6fa6-b045eb233b7b" [ 2249.087416] env[63279]: _type = "Task" [ 2249.087416] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2249.096499] env[63279]: DEBUG oslo_vmware.api [None req-17cc2163-e46e-46a7-8e0b-8af430363191 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52f589f1-ef9f-2fec-6fa6-b045eb233b7b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2249.132521] env[63279]: DEBUG oslo_vmware.api [None req-36db34e3-d814-4eaf-b698-fad1b0a7fb81 tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] Task: {'id': task-2087855, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.493272} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2249.132812] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-36db34e3-d814-4eaf-b698-fad1b0a7fb81 tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] 21aca39e-8513-49bd-93e9-0d101cee591f/21aca39e-8513-49bd-93e9-0d101cee591f.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2249.133044] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-36db34e3-d814-4eaf-b698-fad1b0a7fb81 tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] [instance: 21aca39e-8513-49bd-93e9-0d101cee591f] Extending root virtual disk to 1048576 {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2249.133315] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-85b8671b-3951-405e-9555-76f837eb5a19 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2249.141513] env[63279]: DEBUG oslo_vmware.api [None req-36db34e3-d814-4eaf-b698-fad1b0a7fb81 tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] Waiting for the task: (returnval){ [ 2249.141513] env[63279]: value = "task-2087856" [ 2249.141513] env[63279]: _type = "Task" [ 2249.141513] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2249.150143] env[63279]: DEBUG oslo_vmware.api [None req-36db34e3-d814-4eaf-b698-fad1b0a7fb81 tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] Task: {'id': task-2087856, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2249.301857] env[63279]: DEBUG nova.compute.manager [req-8ed72ce4-8311-453b-a7cf-658d05d5d536 req-56092e29-83f6-4ab9-9c57-b243449e6cd6 service nova] [instance: ecec02e8-8ddf-4997-9547-ccee0db1938b] Received event network-vif-plugged-488726d1-1ee0-451b-be4a-b5de44f13dda {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2249.302107] env[63279]: DEBUG oslo_concurrency.lockutils [req-8ed72ce4-8311-453b-a7cf-658d05d5d536 req-56092e29-83f6-4ab9-9c57-b243449e6cd6 service nova] Acquiring lock "ecec02e8-8ddf-4997-9547-ccee0db1938b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2249.302337] env[63279]: DEBUG oslo_concurrency.lockutils [req-8ed72ce4-8311-453b-a7cf-658d05d5d536 req-56092e29-83f6-4ab9-9c57-b243449e6cd6 service nova] Lock "ecec02e8-8ddf-4997-9547-ccee0db1938b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2249.302527] env[63279]: DEBUG oslo_concurrency.lockutils [req-8ed72ce4-8311-453b-a7cf-658d05d5d536 req-56092e29-83f6-4ab9-9c57-b243449e6cd6 service nova] Lock "ecec02e8-8ddf-4997-9547-ccee0db1938b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2249.302670] env[63279]: DEBUG nova.compute.manager [req-8ed72ce4-8311-453b-a7cf-658d05d5d536 req-56092e29-83f6-4ab9-9c57-b243449e6cd6 service nova] [instance: ecec02e8-8ddf-4997-9547-ccee0db1938b] No waiting events found dispatching network-vif-plugged-488726d1-1ee0-451b-be4a-b5de44f13dda {{(pid=63279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 2249.302843] env[63279]: WARNING nova.compute.manager [req-8ed72ce4-8311-453b-a7cf-658d05d5d536 req-56092e29-83f6-4ab9-9c57-b243449e6cd6 service nova] [instance: ecec02e8-8ddf-4997-9547-ccee0db1938b] Received unexpected event network-vif-plugged-488726d1-1ee0-451b-be4a-b5de44f13dda for instance with vm_state building and task_state spawning. [ 2249.303235] env[63279]: DEBUG nova.compute.manager [req-8ed72ce4-8311-453b-a7cf-658d05d5d536 req-56092e29-83f6-4ab9-9c57-b243449e6cd6 service nova] [instance: ecec02e8-8ddf-4997-9547-ccee0db1938b] Received event network-changed-488726d1-1ee0-451b-be4a-b5de44f13dda {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2249.303235] env[63279]: DEBUG nova.compute.manager [req-8ed72ce4-8311-453b-a7cf-658d05d5d536 req-56092e29-83f6-4ab9-9c57-b243449e6cd6 service nova] [instance: ecec02e8-8ddf-4997-9547-ccee0db1938b] Refreshing instance network info cache due to event network-changed-488726d1-1ee0-451b-be4a-b5de44f13dda. 
{{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11655}} [ 2249.303312] env[63279]: DEBUG oslo_concurrency.lockutils [req-8ed72ce4-8311-453b-a7cf-658d05d5d536 req-56092e29-83f6-4ab9-9c57-b243449e6cd6 service nova] Acquiring lock "refresh_cache-ecec02e8-8ddf-4997-9547-ccee0db1938b" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2249.384208] env[63279]: DEBUG nova.scheduler.client.report [None req-7f34f191-f074-43df-82d6-b7c057695f88 tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] Inventory has not changed for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2249.389850] env[63279]: DEBUG oslo_vmware.api [None req-2000fa86-5fc9-463b-916e-e234ef84002b tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Task: {'id': task-2087854, 'name': PowerOnVM_Task, 'duration_secs': 0.705002} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2249.389850] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-2000fa86-5fc9-463b-916e-e234ef84002b tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: 48794f65-355c-4cad-a83f-6b8cc327dd4d] Powered on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2249.389850] env[63279]: INFO nova.compute.manager [None req-2000fa86-5fc9-463b-916e-e234ef84002b tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: 48794f65-355c-4cad-a83f-6b8cc327dd4d] Took 15.25 seconds to spawn the instance on the hypervisor. [ 2249.389850] env[63279]: DEBUG nova.compute.manager [None req-2000fa86-5fc9-463b-916e-e234ef84002b tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: 48794f65-355c-4cad-a83f-6b8cc327dd4d] Checking state {{(pid=63279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2249.389850] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0751bf53-4b83-47ad-a9bf-52c3b3143b60 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2249.463723] env[63279]: DEBUG nova.network.neutron [None req-d473d10f-3835-4a22-8b4b-615c9316d66e tempest-ImagesNegativeTestJSON-354022530 tempest-ImagesNegativeTestJSON-354022530-project-member] [instance: ecec02e8-8ddf-4997-9547-ccee0db1938b] Instance cache missing network info. 
{{(pid=63279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2249.597799] env[63279]: DEBUG oslo_vmware.api [None req-17cc2163-e46e-46a7-8e0b-8af430363191 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52f589f1-ef9f-2fec-6fa6-b045eb233b7b, 'name': SearchDatastore_Task, 'duration_secs': 0.018251} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2249.598592] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-29dc0f67-9b8f-43fe-8def-18fe98b4599d {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2249.603414] env[63279]: DEBUG oslo_vmware.api [None req-17cc2163-e46e-46a7-8e0b-8af430363191 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Waiting for the task: (returnval){ [ 2249.603414] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52eca647-8911-3bf5-03a6-02138e387f2d" [ 2249.603414] env[63279]: _type = "Task" [ 2249.603414] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2249.611112] env[63279]: DEBUG oslo_vmware.api [None req-17cc2163-e46e-46a7-8e0b-8af430363191 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52eca647-8911-3bf5-03a6-02138e387f2d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2249.651050] env[63279]: DEBUG oslo_vmware.api [None req-36db34e3-d814-4eaf-b698-fad1b0a7fb81 tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] Task: {'id': task-2087856, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.06627} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2249.653407] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-36db34e3-d814-4eaf-b698-fad1b0a7fb81 tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] [instance: 21aca39e-8513-49bd-93e9-0d101cee591f] Extended root virtual disk {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2249.654200] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7051b32-f7f3-43a2-b804-3b7f1b923457 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2249.677103] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-36db34e3-d814-4eaf-b698-fad1b0a7fb81 tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] [instance: 21aca39e-8513-49bd-93e9-0d101cee591f] Reconfiguring VM instance instance-0000005f to attach disk [datastore1] 21aca39e-8513-49bd-93e9-0d101cee591f/21aca39e-8513-49bd-93e9-0d101cee591f.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2249.677411] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1630616b-1d99-40a5-a6b0-421206df9491 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2249.697354] env[63279]: DEBUG oslo_vmware.api [None req-36db34e3-d814-4eaf-b698-fad1b0a7fb81 tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] Waiting for the task: (returnval){ [ 2249.697354] env[63279]: value = "task-2087857" [ 2249.697354] env[63279]: _type = "Task" [ 2249.697354] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2249.706088] env[63279]: DEBUG oslo_vmware.api [None req-36db34e3-d814-4eaf-b698-fad1b0a7fb81 tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] Task: {'id': task-2087857, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2249.733816] env[63279]: DEBUG nova.network.neutron [None req-d473d10f-3835-4a22-8b4b-615c9316d66e tempest-ImagesNegativeTestJSON-354022530 tempest-ImagesNegativeTestJSON-354022530-project-member] [instance: ecec02e8-8ddf-4997-9547-ccee0db1938b] Updating instance_info_cache with network_info: [{"id": "488726d1-1ee0-451b-be4a-b5de44f13dda", "address": "fa:16:3e:7b:06:25", "network": {"id": "2088b6b8-1dc0-4c45-bfda-49388b0feec2", "bridge": "br-int", "label": "tempest-ImagesNegativeTestJSON-1489900725-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7cac88e483444e25aae821157581dd31", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3261e15f-7e45-4516-acfd-341bab16e3cf", "external-id": "nsx-vlan-transportzone-783", "segmentation_id": 783, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap488726d1-1e", "ovs_interfaceid": "488726d1-1ee0-451b-be4a-b5de44f13dda", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2249.893995] env[63279]: DEBUG oslo_concurrency.lockutils [None req-7f34f191-f074-43df-82d6-b7c057695f88 tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.855s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2249.897771] env[63279]: DEBUG oslo_concurrency.lockutils [None req-4f1b70e3-4a9e-4d3a-9198-bdea7c3cc57e tempest-ServersTestMultiNic-465204663 tempest-ServersTestMultiNic-465204663-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 2.658s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2249.897771] env[63279]: DEBUG nova.objects.instance [None req-4f1b70e3-4a9e-4d3a-9198-bdea7c3cc57e tempest-ServersTestMultiNic-465204663 tempest-ServersTestMultiNic-465204663-project-member] Lazy-loading 'resources' on Instance uuid f418ff3d-8a5f-4d78-994a-e40b62cfcdd6 {{(pid=63279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2249.910946] env[63279]: INFO nova.compute.manager [None req-2000fa86-5fc9-463b-916e-e234ef84002b tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: 48794f65-355c-4cad-a83f-6b8cc327dd4d] Took 24.56 seconds to build instance. 
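Every vCenter operation in the spawn sequence traced above (SearchDatastore_Task, CopyVirtualDisk_Task, ExtendVirtualDisk_Task, ReconfigVM_Task, Rename_Task, PowerOnVM_Task) follows the same shape in the log: a "Waiting for the task" entry from wait_for_task (oslo_vmware/api.py:397), repeated "progress is N%" entries from _poll_task (api.py:434), and a final "completed successfully" entry carrying duration_secs (api.py:444). The snippet below is a minimal, self-contained sketch of that poll-until-done pattern only; the TaskInfo stub, the 0.5 s interval, and the fake poller are illustrative assumptions, not oslo.vmware's actual implementation.

```python
# Illustrative sketch of the wait_for_task/_poll_task pattern visible in the log:
# poll a task's progress, then report "completed successfully" with its duration.
# TaskInfo and the polling interval are assumptions, not oslo.vmware internals.
import time
from dataclasses import dataclass


@dataclass
class TaskInfo:              # stand-in for a vCenter TaskInfo result
    state: str               # "running", "success", or "error"
    progress: int = 0
    error: str | None = None


def wait_for_task(poll_fn, task_id, interval=0.5):
    """Poll poll_fn(task_id) until the task succeeds or fails."""
    start = time.monotonic()
    while True:
        info = poll_fn(task_id)
        if info.state == "success":
            duration = time.monotonic() - start
            print(f"Task {task_id} completed successfully in {duration:.3f}s")
            return info
        if info.state == "error":
            raise RuntimeError(f"Task {task_id} failed: {info.error}")
        print(f"Task {task_id} progress is {info.progress}%")
        time.sleep(interval)


if __name__ == "__main__":
    # Fake poller that reports progress twice, then finishes.
    calls = {"n": 0}

    def fake_poll(task_id):
        calls["n"] += 1
        if calls["n"] < 3:
            return TaskInfo(state="running", progress=calls["n"] * 45)
        return TaskInfo(state="success", progress=100)

    wait_for_task(fake_poll, "task-2087854")
```

In the entries above this loop runs inside oslo_vmware.api, which is why each VirtualDiskManager and VirtualMachine task in this section is bracketed by a "Waiting for the task" line and a "completed successfully" line with its duration_secs.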
[ 2249.928128] env[63279]: INFO nova.scheduler.client.report [None req-7f34f191-f074-43df-82d6-b7c057695f88 tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] Deleted allocations for instance 0e12ab9b-a701-4e0f-9d96-939090f50494 [ 2250.114926] env[63279]: DEBUG oslo_vmware.api [None req-17cc2163-e46e-46a7-8e0b-8af430363191 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52eca647-8911-3bf5-03a6-02138e387f2d, 'name': SearchDatastore_Task, 'duration_secs': 0.010478} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2250.115237] env[63279]: DEBUG oslo_concurrency.lockutils [None req-17cc2163-e46e-46a7-8e0b-8af430363191 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2250.115499] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-17cc2163-e46e-46a7-8e0b-8af430363191 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] 6b4a94f9-1cd9-466f-844f-8d692b732abc/6b4a94f9-1cd9-466f-844f-8d692b732abc.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2250.115753] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3c58e36c-05c2-45fa-9bef-2f2cb7af6ea9 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2250.124018] env[63279]: DEBUG oslo_vmware.api [None req-17cc2163-e46e-46a7-8e0b-8af430363191 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Waiting for the task: (returnval){ [ 2250.124018] env[63279]: value = "task-2087858" [ 2250.124018] env[63279]: _type = "Task" [ 2250.124018] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2250.133980] env[63279]: DEBUG oslo_vmware.api [None req-17cc2163-e46e-46a7-8e0b-8af430363191 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Task: {'id': task-2087858, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2250.208719] env[63279]: DEBUG oslo_vmware.api [None req-36db34e3-d814-4eaf-b698-fad1b0a7fb81 tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] Task: {'id': task-2087857, 'name': ReconfigVM_Task, 'duration_secs': 0.319801} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2250.213019] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-36db34e3-d814-4eaf-b698-fad1b0a7fb81 tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] [instance: 21aca39e-8513-49bd-93e9-0d101cee591f] Reconfigured VM instance instance-0000005f to attach disk [datastore1] 21aca39e-8513-49bd-93e9-0d101cee591f/21aca39e-8513-49bd-93e9-0d101cee591f.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2250.213019] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ff2570cf-3660-497b-82fc-cd67d52f4279 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2250.217739] env[63279]: DEBUG oslo_vmware.api [None req-36db34e3-d814-4eaf-b698-fad1b0a7fb81 tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] Waiting for the task: (returnval){ [ 2250.217739] env[63279]: value = "task-2087859" [ 2250.217739] env[63279]: _type = "Task" [ 2250.217739] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2250.231813] env[63279]: DEBUG oslo_vmware.api [None req-36db34e3-d814-4eaf-b698-fad1b0a7fb81 tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] Task: {'id': task-2087859, 'name': Rename_Task} progress is 6%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2250.239021] env[63279]: DEBUG oslo_concurrency.lockutils [None req-d473d10f-3835-4a22-8b4b-615c9316d66e tempest-ImagesNegativeTestJSON-354022530 tempest-ImagesNegativeTestJSON-354022530-project-member] Releasing lock "refresh_cache-ecec02e8-8ddf-4997-9547-ccee0db1938b" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2250.239021] env[63279]: DEBUG nova.compute.manager [None req-d473d10f-3835-4a22-8b4b-615c9316d66e tempest-ImagesNegativeTestJSON-354022530 tempest-ImagesNegativeTestJSON-354022530-project-member] [instance: ecec02e8-8ddf-4997-9547-ccee0db1938b] Instance network_info: |[{"id": "488726d1-1ee0-451b-be4a-b5de44f13dda", "address": "fa:16:3e:7b:06:25", "network": {"id": "2088b6b8-1dc0-4c45-bfda-49388b0feec2", "bridge": "br-int", "label": "tempest-ImagesNegativeTestJSON-1489900725-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7cac88e483444e25aae821157581dd31", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3261e15f-7e45-4516-acfd-341bab16e3cf", "external-id": "nsx-vlan-transportzone-783", "segmentation_id": 783, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap488726d1-1e", "ovs_interfaceid": "488726d1-1ee0-451b-be4a-b5de44f13dda", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63279) 
_allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 2250.239021] env[63279]: DEBUG oslo_concurrency.lockutils [req-8ed72ce4-8311-453b-a7cf-658d05d5d536 req-56092e29-83f6-4ab9-9c57-b243449e6cd6 service nova] Acquired lock "refresh_cache-ecec02e8-8ddf-4997-9547-ccee0db1938b" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2250.239021] env[63279]: DEBUG nova.network.neutron [req-8ed72ce4-8311-453b-a7cf-658d05d5d536 req-56092e29-83f6-4ab9-9c57-b243449e6cd6 service nova] [instance: ecec02e8-8ddf-4997-9547-ccee0db1938b] Refreshing network info cache for port 488726d1-1ee0-451b-be4a-b5de44f13dda {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2250.239021] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-d473d10f-3835-4a22-8b4b-615c9316d66e tempest-ImagesNegativeTestJSON-354022530 tempest-ImagesNegativeTestJSON-354022530-project-member] [instance: ecec02e8-8ddf-4997-9547-ccee0db1938b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:7b:06:25', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3261e15f-7e45-4516-acfd-341bab16e3cf', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '488726d1-1ee0-451b-be4a-b5de44f13dda', 'vif_model': 'vmxnet3'}] {{(pid=63279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2250.251037] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-d473d10f-3835-4a22-8b4b-615c9316d66e tempest-ImagesNegativeTestJSON-354022530 tempest-ImagesNegativeTestJSON-354022530-project-member] Creating folder: Project (7cac88e483444e25aae821157581dd31). Parent ref: group-v427491. {{(pid=63279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 2250.254031] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-170e3df6-28da-44d7-bc6b-6c72630e6a94 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2250.266619] env[63279]: INFO nova.virt.vmwareapi.vm_util [None req-d473d10f-3835-4a22-8b4b-615c9316d66e tempest-ImagesNegativeTestJSON-354022530 tempest-ImagesNegativeTestJSON-354022530-project-member] Created folder: Project (7cac88e483444e25aae821157581dd31) in parent group-v427491. [ 2250.266838] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-d473d10f-3835-4a22-8b4b-615c9316d66e tempest-ImagesNegativeTestJSON-354022530 tempest-ImagesNegativeTestJSON-354022530-project-member] Creating folder: Instances. Parent ref: group-v427751. {{(pid=63279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 2250.267132] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7f29b668-e92a-47de-be0c-4080913865d4 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2250.277248] env[63279]: INFO nova.virt.vmwareapi.vm_util [None req-d473d10f-3835-4a22-8b4b-615c9316d66e tempest-ImagesNegativeTestJSON-354022530 tempest-ImagesNegativeTestJSON-354022530-project-member] Created folder: Instances in parent group-v427751. [ 2250.277519] env[63279]: DEBUG oslo.service.loopingcall [None req-d473d10f-3835-4a22-8b4b-615c9316d66e tempest-ImagesNegativeTestJSON-354022530 tempest-ImagesNegativeTestJSON-354022530-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2250.277713] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ecec02e8-8ddf-4997-9547-ccee0db1938b] Creating VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2250.278023] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5714b6bf-37ec-4dd1-8517-347e8c7bfc66 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2250.301021] env[63279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2250.301021] env[63279]: value = "task-2087862" [ 2250.301021] env[63279]: _type = "Task" [ 2250.301021] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2250.309939] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087862, 'name': CreateVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2250.414377] env[63279]: DEBUG oslo_concurrency.lockutils [None req-2000fa86-5fc9-463b-916e-e234ef84002b tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Lock "48794f65-355c-4cad-a83f-6b8cc327dd4d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 26.086s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2250.438522] env[63279]: DEBUG oslo_concurrency.lockutils [None req-7f34f191-f074-43df-82d6-b7c057695f88 tempest-ServersWithSpecificFlavorTestJSON-134149151 tempest-ServersWithSpecificFlavorTestJSON-134149151-project-member] Lock "0e12ab9b-a701-4e0f-9d96-939090f50494" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 13.200s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2250.603557] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2f151c6-82f8-4262-a38f-bbeaca17453b {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2250.613583] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-58362e83-89b9-4eed-a8dd-7e2e874e7d0e tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: 48794f65-355c-4cad-a83f-6b8cc327dd4d] Suspending the VM {{(pid=63279) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1162}} [ 2250.614846] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.SuspendVM_Task with opID=oslo.vmware-fa151e9f-ec49-4b7a-a636-8683b56b3358 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2250.624202] env[63279]: DEBUG oslo_vmware.api [None req-58362e83-89b9-4eed-a8dd-7e2e874e7d0e tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Waiting for the task: (returnval){ [ 2250.624202] env[63279]: value = "task-2087863" [ 2250.624202] env[63279]: _type = "Task" [ 2250.624202] env[63279]: } to complete. 
{{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2250.638972] env[63279]: DEBUG oslo_vmware.api [None req-17cc2163-e46e-46a7-8e0b-8af430363191 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Task: {'id': task-2087858, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2250.646625] env[63279]: DEBUG oslo_vmware.api [None req-58362e83-89b9-4eed-a8dd-7e2e874e7d0e tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Task: {'id': task-2087863, 'name': SuspendVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2250.730840] env[63279]: DEBUG oslo_vmware.api [None req-36db34e3-d814-4eaf-b698-fad1b0a7fb81 tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] Task: {'id': task-2087859, 'name': Rename_Task, 'duration_secs': 0.181387} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2250.731317] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-36db34e3-d814-4eaf-b698-fad1b0a7fb81 tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] [instance: 21aca39e-8513-49bd-93e9-0d101cee591f] Powering on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2250.731497] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0e57bcf7-0df6-4cb4-bbd9-cd45880ad5b8 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2250.742229] env[63279]: DEBUG oslo_vmware.api [None req-36db34e3-d814-4eaf-b698-fad1b0a7fb81 tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] Waiting for the task: (returnval){ [ 2250.742229] env[63279]: value = "task-2087864" [ 2250.742229] env[63279]: _type = "Task" [ 2250.742229] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2250.746449] env[63279]: DEBUG nova.network.neutron [req-8ed72ce4-8311-453b-a7cf-658d05d5d536 req-56092e29-83f6-4ab9-9c57-b243449e6cd6 service nova] [instance: ecec02e8-8ddf-4997-9547-ccee0db1938b] Updated VIF entry in instance network info cache for port 488726d1-1ee0-451b-be4a-b5de44f13dda. 
{{(pid=63279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2250.746854] env[63279]: DEBUG nova.network.neutron [req-8ed72ce4-8311-453b-a7cf-658d05d5d536 req-56092e29-83f6-4ab9-9c57-b243449e6cd6 service nova] [instance: ecec02e8-8ddf-4997-9547-ccee0db1938b] Updating instance_info_cache with network_info: [{"id": "488726d1-1ee0-451b-be4a-b5de44f13dda", "address": "fa:16:3e:7b:06:25", "network": {"id": "2088b6b8-1dc0-4c45-bfda-49388b0feec2", "bridge": "br-int", "label": "tempest-ImagesNegativeTestJSON-1489900725-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7cac88e483444e25aae821157581dd31", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3261e15f-7e45-4516-acfd-341bab16e3cf", "external-id": "nsx-vlan-transportzone-783", "segmentation_id": 783, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap488726d1-1e", "ovs_interfaceid": "488726d1-1ee0-451b-be4a-b5de44f13dda", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2250.756493] env[63279]: DEBUG oslo_vmware.api [None req-36db34e3-d814-4eaf-b698-fad1b0a7fb81 tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] Task: {'id': task-2087864, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2250.758660] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e491952-fcb8-4e7a-aa49-db41ad4c488f {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2250.767400] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f97731a-70f6-480b-98ee-628a47bf7c38 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2250.805564] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-862c3713-eb38-4056-b9f0-64db8acad8e7 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2250.817470] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33dc14cb-6ff4-4486-8115-e71efb028f5a {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2250.821471] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087862, 'name': CreateVM_Task} progress is 99%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2250.832756] env[63279]: DEBUG nova.compute.provider_tree [None req-4f1b70e3-4a9e-4d3a-9198-bdea7c3cc57e tempest-ServersTestMultiNic-465204663 tempest-ServersTestMultiNic-465204663-project-member] Inventory has not changed in ProviderTree for provider: 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2251.138065] env[63279]: DEBUG oslo_vmware.api [None req-17cc2163-e46e-46a7-8e0b-8af430363191 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Task: {'id': task-2087858, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.608373} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2251.142300] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-17cc2163-e46e-46a7-8e0b-8af430363191 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] 6b4a94f9-1cd9-466f-844f-8d692b732abc/6b4a94f9-1cd9-466f-844f-8d692b732abc.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2251.142539] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-17cc2163-e46e-46a7-8e0b-8af430363191 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] [instance: 6b4a94f9-1cd9-466f-844f-8d692b732abc] Extending root virtual disk to 1048576 {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2251.142825] env[63279]: DEBUG oslo_vmware.api [None req-58362e83-89b9-4eed-a8dd-7e2e874e7d0e tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Task: {'id': task-2087863, 'name': SuspendVM_Task} progress is 50%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2251.143080] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-75b74548-6825-4b22-9188-92b6fa59d26f {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2251.150625] env[63279]: DEBUG oslo_vmware.api [None req-17cc2163-e46e-46a7-8e0b-8af430363191 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Waiting for the task: (returnval){ [ 2251.150625] env[63279]: value = "task-2087865" [ 2251.150625] env[63279]: _type = "Task" [ 2251.150625] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2251.160166] env[63279]: DEBUG oslo_vmware.api [None req-17cc2163-e46e-46a7-8e0b-8af430363191 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Task: {'id': task-2087865, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2251.253628] env[63279]: DEBUG oslo_concurrency.lockutils [req-8ed72ce4-8311-453b-a7cf-658d05d5d536 req-56092e29-83f6-4ab9-9c57-b243449e6cd6 service nova] Releasing lock "refresh_cache-ecec02e8-8ddf-4997-9547-ccee0db1938b" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2251.254166] env[63279]: DEBUG oslo_vmware.api [None req-36db34e3-d814-4eaf-b698-fad1b0a7fb81 tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] Task: {'id': task-2087864, 'name': PowerOnVM_Task} progress is 89%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2251.318402] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087862, 'name': CreateVM_Task, 'duration_secs': 0.615584} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2251.318597] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ecec02e8-8ddf-4997-9547-ccee0db1938b] Created VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2251.320734] env[63279]: DEBUG oslo_concurrency.lockutils [None req-d473d10f-3835-4a22-8b4b-615c9316d66e tempest-ImagesNegativeTestJSON-354022530 tempest-ImagesNegativeTestJSON-354022530-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2251.320980] env[63279]: DEBUG oslo_concurrency.lockutils [None req-d473d10f-3835-4a22-8b4b-615c9316d66e tempest-ImagesNegativeTestJSON-354022530 tempest-ImagesNegativeTestJSON-354022530-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2251.321451] env[63279]: DEBUG oslo_concurrency.lockutils [None req-d473d10f-3835-4a22-8b4b-615c9316d66e tempest-ImagesNegativeTestJSON-354022530 tempest-ImagesNegativeTestJSON-354022530-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2251.321791] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-93548f94-c512-4205-a2c1-3d0349687309 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2251.329506] env[63279]: DEBUG oslo_vmware.api [None req-d473d10f-3835-4a22-8b4b-615c9316d66e tempest-ImagesNegativeTestJSON-354022530 tempest-ImagesNegativeTestJSON-354022530-project-member] Waiting for the task: (returnval){ [ 2251.329506] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]527aec7d-2ea5-b4e1-434f-2743a9c9398a" [ 2251.329506] env[63279]: _type = "Task" [ 2251.329506] env[63279]: } to complete. 
{{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2251.337417] env[63279]: DEBUG nova.scheduler.client.report [None req-4f1b70e3-4a9e-4d3a-9198-bdea7c3cc57e tempest-ServersTestMultiNic-465204663 tempest-ServersTestMultiNic-465204663-project-member] Inventory has not changed for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2251.350781] env[63279]: DEBUG oslo_vmware.api [None req-d473d10f-3835-4a22-8b4b-615c9316d66e tempest-ImagesNegativeTestJSON-354022530 tempest-ImagesNegativeTestJSON-354022530-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]527aec7d-2ea5-b4e1-434f-2743a9c9398a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2251.639307] env[63279]: DEBUG oslo_vmware.api [None req-58362e83-89b9-4eed-a8dd-7e2e874e7d0e tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Task: {'id': task-2087863, 'name': SuspendVM_Task} progress is 50%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2251.661118] env[63279]: DEBUG oslo_vmware.api [None req-17cc2163-e46e-46a7-8e0b-8af430363191 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Task: {'id': task-2087865, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.098722} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2251.661429] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-17cc2163-e46e-46a7-8e0b-8af430363191 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] [instance: 6b4a94f9-1cd9-466f-844f-8d692b732abc] Extended root virtual disk {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2251.662238] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd638998-c17c-4f42-af3e-eebff7076ea9 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2251.694205] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-17cc2163-e46e-46a7-8e0b-8af430363191 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] [instance: 6b4a94f9-1cd9-466f-844f-8d692b732abc] Reconfiguring VM instance instance-0000005e to attach disk [datastore1] 6b4a94f9-1cd9-466f-844f-8d692b732abc/6b4a94f9-1cd9-466f-844f-8d692b732abc.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2251.694205] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f4178261-cf67-4a08-9ac3-1a67325d88b5 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2251.715447] env[63279]: DEBUG oslo_vmware.api [None req-17cc2163-e46e-46a7-8e0b-8af430363191 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Waiting for the task: (returnval){ [ 2251.715447] env[63279]: value = "task-2087866" [ 2251.715447] env[63279]: _type = "Task" [ 2251.715447] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2251.724316] env[63279]: DEBUG oslo_vmware.api [None req-17cc2163-e46e-46a7-8e0b-8af430363191 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Task: {'id': task-2087866, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2251.753157] env[63279]: DEBUG oslo_vmware.api [None req-36db34e3-d814-4eaf-b698-fad1b0a7fb81 tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] Task: {'id': task-2087864, 'name': PowerOnVM_Task, 'duration_secs': 0.632706} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2251.753157] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-36db34e3-d814-4eaf-b698-fad1b0a7fb81 tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] [instance: 21aca39e-8513-49bd-93e9-0d101cee591f] Powered on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2251.753322] env[63279]: INFO nova.compute.manager [None req-36db34e3-d814-4eaf-b698-fad1b0a7fb81 tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] [instance: 21aca39e-8513-49bd-93e9-0d101cee591f] Took 10.95 seconds to spawn the instance on the hypervisor. 
[ 2251.753518] env[63279]: DEBUG nova.compute.manager [None req-36db34e3-d814-4eaf-b698-fad1b0a7fb81 tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] [instance: 21aca39e-8513-49bd-93e9-0d101cee591f] Checking state {{(pid=63279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2251.754380] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f67a987-519d-4d9a-aebd-133181113f50 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2251.842384] env[63279]: DEBUG oslo_vmware.api [None req-d473d10f-3835-4a22-8b4b-615c9316d66e tempest-ImagesNegativeTestJSON-354022530 tempest-ImagesNegativeTestJSON-354022530-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]527aec7d-2ea5-b4e1-434f-2743a9c9398a, 'name': SearchDatastore_Task, 'duration_secs': 0.019397} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2251.842707] env[63279]: DEBUG oslo_concurrency.lockutils [None req-d473d10f-3835-4a22-8b4b-615c9316d66e tempest-ImagesNegativeTestJSON-354022530 tempest-ImagesNegativeTestJSON-354022530-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2251.842967] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-d473d10f-3835-4a22-8b4b-615c9316d66e tempest-ImagesNegativeTestJSON-354022530 tempest-ImagesNegativeTestJSON-354022530-project-member] [instance: ecec02e8-8ddf-4997-9547-ccee0db1938b] Processing image 30887889-e45b-4f67-8b3c-16216e594a90 {{(pid=63279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2251.843208] env[63279]: DEBUG oslo_concurrency.lockutils [None req-d473d10f-3835-4a22-8b4b-615c9316d66e tempest-ImagesNegativeTestJSON-354022530 tempest-ImagesNegativeTestJSON-354022530-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2251.843362] env[63279]: DEBUG oslo_concurrency.lockutils [None req-d473d10f-3835-4a22-8b4b-615c9316d66e tempest-ImagesNegativeTestJSON-354022530 tempest-ImagesNegativeTestJSON-354022530-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2251.843543] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-d473d10f-3835-4a22-8b4b-615c9316d66e tempest-ImagesNegativeTestJSON-354022530 tempest-ImagesNegativeTestJSON-354022530-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2251.844634] env[63279]: DEBUG oslo_concurrency.lockutils [None req-4f1b70e3-4a9e-4d3a-9198-bdea7c3cc57e tempest-ServersTestMultiNic-465204663 tempest-ServersTestMultiNic-465204663-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.948s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2251.846611] 
env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1c40862b-22b6-4b99-a883-146f8469bce8 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2251.849071] env[63279]: DEBUG oslo_concurrency.lockutils [None req-c0088455-9b66-4574-b3c8-cfeeda174077 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 3.884s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2251.850991] env[63279]: INFO nova.compute.claims [None req-c0088455-9b66-4574-b3c8-cfeeda174077 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: 1fca4e5c-fe2c-4b61-bed4-52c7770def7c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2251.863086] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-d473d10f-3835-4a22-8b4b-615c9316d66e tempest-ImagesNegativeTestJSON-354022530 tempest-ImagesNegativeTestJSON-354022530-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2251.863320] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-d473d10f-3835-4a22-8b4b-615c9316d66e tempest-ImagesNegativeTestJSON-354022530 tempest-ImagesNegativeTestJSON-354022530-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2251.864470] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-54b6ca9c-9ce3-46fc-990f-b9605d5e5856 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2251.873174] env[63279]: DEBUG oslo_vmware.api [None req-d473d10f-3835-4a22-8b4b-615c9316d66e tempest-ImagesNegativeTestJSON-354022530 tempest-ImagesNegativeTestJSON-354022530-project-member] Waiting for the task: (returnval){ [ 2251.873174] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52a8a850-4ad0-8f45-5968-1f152620e78f" [ 2251.873174] env[63279]: _type = "Task" [ 2251.873174] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2251.878262] env[63279]: INFO nova.scheduler.client.report [None req-4f1b70e3-4a9e-4d3a-9198-bdea7c3cc57e tempest-ServersTestMultiNic-465204663 tempest-ServersTestMultiNic-465204663-project-member] Deleted allocations for instance f418ff3d-8a5f-4d78-994a-e40b62cfcdd6 [ 2251.887491] env[63279]: DEBUG oslo_vmware.api [None req-d473d10f-3835-4a22-8b4b-615c9316d66e tempest-ImagesNegativeTestJSON-354022530 tempest-ImagesNegativeTestJSON-354022530-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52a8a850-4ad0-8f45-5968-1f152620e78f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2252.140370] env[63279]: DEBUG oslo_vmware.api [None req-58362e83-89b9-4eed-a8dd-7e2e874e7d0e tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Task: {'id': task-2087863, 'name': SuspendVM_Task, 'duration_secs': 1.105209} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2252.140653] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-58362e83-89b9-4eed-a8dd-7e2e874e7d0e tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: 48794f65-355c-4cad-a83f-6b8cc327dd4d] Suspended the VM {{(pid=63279) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1166}} [ 2252.140835] env[63279]: DEBUG nova.compute.manager [None req-58362e83-89b9-4eed-a8dd-7e2e874e7d0e tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: 48794f65-355c-4cad-a83f-6b8cc327dd4d] Checking state {{(pid=63279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2252.142020] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26789e65-22fd-46b6-b3e7-ac01631611bb {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2252.226080] env[63279]: DEBUG oslo_vmware.api [None req-17cc2163-e46e-46a7-8e0b-8af430363191 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Task: {'id': task-2087866, 'name': ReconfigVM_Task, 'duration_secs': 0.312432} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2252.226395] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-17cc2163-e46e-46a7-8e0b-8af430363191 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] [instance: 6b4a94f9-1cd9-466f-844f-8d692b732abc] Reconfigured VM instance instance-0000005e to attach disk [datastore1] 6b4a94f9-1cd9-466f-844f-8d692b732abc/6b4a94f9-1cd9-466f-844f-8d692b732abc.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2252.227015] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f58db231-e886-4da3-9e07-5c884c4b1de7 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2252.236130] env[63279]: DEBUG oslo_vmware.api [None req-17cc2163-e46e-46a7-8e0b-8af430363191 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Waiting for the task: (returnval){ [ 2252.236130] env[63279]: value = "task-2087867" [ 2252.236130] env[63279]: _type = "Task" [ 2252.236130] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2252.242744] env[63279]: DEBUG oslo_vmware.api [None req-17cc2163-e46e-46a7-8e0b-8af430363191 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Task: {'id': task-2087867, 'name': Rename_Task} progress is 5%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2252.272047] env[63279]: INFO nova.compute.manager [None req-36db34e3-d814-4eaf-b698-fad1b0a7fb81 tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] [instance: 21aca39e-8513-49bd-93e9-0d101cee591f] Took 16.33 seconds to build instance. 
[ 2252.383384] env[63279]: DEBUG oslo_vmware.api [None req-d473d10f-3835-4a22-8b4b-615c9316d66e tempest-ImagesNegativeTestJSON-354022530 tempest-ImagesNegativeTestJSON-354022530-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52a8a850-4ad0-8f45-5968-1f152620e78f, 'name': SearchDatastore_Task, 'duration_secs': 0.020441} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2252.384156] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7a632d6c-bafb-4c0c-8b55-8bb77b49d8f2 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2252.391994] env[63279]: DEBUG oslo_vmware.api [None req-d473d10f-3835-4a22-8b4b-615c9316d66e tempest-ImagesNegativeTestJSON-354022530 tempest-ImagesNegativeTestJSON-354022530-project-member] Waiting for the task: (returnval){ [ 2252.391994] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52d270aa-46dc-76d9-014b-77004bcaa678" [ 2252.391994] env[63279]: _type = "Task" [ 2252.391994] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2252.392984] env[63279]: DEBUG oslo_concurrency.lockutils [None req-4f1b70e3-4a9e-4d3a-9198-bdea7c3cc57e tempest-ServersTestMultiNic-465204663 tempest-ServersTestMultiNic-465204663-project-member] Lock "f418ff3d-8a5f-4d78-994a-e40b62cfcdd6" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 17.835s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2252.402225] env[63279]: DEBUG oslo_vmware.api [None req-d473d10f-3835-4a22-8b4b-615c9316d66e tempest-ImagesNegativeTestJSON-354022530 tempest-ImagesNegativeTestJSON-354022530-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52d270aa-46dc-76d9-014b-77004bcaa678, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2252.744051] env[63279]: DEBUG oslo_vmware.api [None req-17cc2163-e46e-46a7-8e0b-8af430363191 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Task: {'id': task-2087867, 'name': Rename_Task, 'duration_secs': 0.14529} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2252.745127] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-17cc2163-e46e-46a7-8e0b-8af430363191 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] [instance: 6b4a94f9-1cd9-466f-844f-8d692b732abc] Powering on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2252.745940] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9d020cdf-4e7d-4133-bff7-55ec1638f4df {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2252.752060] env[63279]: DEBUG oslo_vmware.api [None req-17cc2163-e46e-46a7-8e0b-8af430363191 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Waiting for the task: (returnval){ [ 2252.752060] env[63279]: value = "task-2087868" [ 2252.752060] env[63279]: _type = "Task" [ 2252.752060] env[63279]: } to complete. 
{{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2252.761588] env[63279]: DEBUG oslo_vmware.api [None req-17cc2163-e46e-46a7-8e0b-8af430363191 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Task: {'id': task-2087868, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2252.774349] env[63279]: DEBUG oslo_concurrency.lockutils [None req-36db34e3-d814-4eaf-b698-fad1b0a7fb81 tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] Lock "21aca39e-8513-49bd-93e9-0d101cee591f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 17.839s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2252.909499] env[63279]: DEBUG oslo_vmware.api [None req-d473d10f-3835-4a22-8b4b-615c9316d66e tempest-ImagesNegativeTestJSON-354022530 tempest-ImagesNegativeTestJSON-354022530-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52d270aa-46dc-76d9-014b-77004bcaa678, 'name': SearchDatastore_Task, 'duration_secs': 0.025852} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2252.913315] env[63279]: DEBUG oslo_concurrency.lockutils [None req-d473d10f-3835-4a22-8b4b-615c9316d66e tempest-ImagesNegativeTestJSON-354022530 tempest-ImagesNegativeTestJSON-354022530-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2252.913595] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-d473d10f-3835-4a22-8b4b-615c9316d66e tempest-ImagesNegativeTestJSON-354022530 tempest-ImagesNegativeTestJSON-354022530-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] ecec02e8-8ddf-4997-9547-ccee0db1938b/ecec02e8-8ddf-4997-9547-ccee0db1938b.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2252.914102] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-400d2079-4450-4ea0-8fda-b547c4285225 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2252.925255] env[63279]: DEBUG oslo_vmware.api [None req-d473d10f-3835-4a22-8b4b-615c9316d66e tempest-ImagesNegativeTestJSON-354022530 tempest-ImagesNegativeTestJSON-354022530-project-member] Waiting for the task: (returnval){ [ 2252.925255] env[63279]: value = "task-2087869" [ 2252.925255] env[63279]: _type = "Task" [ 2252.925255] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2252.939580] env[63279]: DEBUG oslo_vmware.api [None req-d473d10f-3835-4a22-8b4b-615c9316d66e tempest-ImagesNegativeTestJSON-354022530 tempest-ImagesNegativeTestJSON-354022530-project-member] Task: {'id': task-2087869, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2253.127857] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0fca51a3-ae96-4417-b28e-917e96fa6bce {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2253.136549] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23311345-8704-4bcd-8963-9fba30528c12 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2253.181108] env[63279]: DEBUG oslo_concurrency.lockutils [None req-dde7fd1f-86b5-499d-9f4d-4086dce5e4c7 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Acquiring lock "48794f65-355c-4cad-a83f-6b8cc327dd4d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2253.181422] env[63279]: DEBUG oslo_concurrency.lockutils [None req-dde7fd1f-86b5-499d-9f4d-4086dce5e4c7 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Lock "48794f65-355c-4cad-a83f-6b8cc327dd4d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2253.181636] env[63279]: DEBUG oslo_concurrency.lockutils [None req-dde7fd1f-86b5-499d-9f4d-4086dce5e4c7 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Acquiring lock "48794f65-355c-4cad-a83f-6b8cc327dd4d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2253.181890] env[63279]: DEBUG oslo_concurrency.lockutils [None req-dde7fd1f-86b5-499d-9f4d-4086dce5e4c7 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Lock "48794f65-355c-4cad-a83f-6b8cc327dd4d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2253.182156] env[63279]: DEBUG oslo_concurrency.lockutils [None req-dde7fd1f-86b5-499d-9f4d-4086dce5e4c7 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Lock "48794f65-355c-4cad-a83f-6b8cc327dd4d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2253.186051] env[63279]: INFO nova.compute.manager [None req-dde7fd1f-86b5-499d-9f4d-4086dce5e4c7 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: 48794f65-355c-4cad-a83f-6b8cc327dd4d] Terminating instance [ 2253.193119] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec2c4418-1b32-4bbf-88f5-1c768ad831b7 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2253.199314] env[63279]: DEBUG nova.compute.manager [None 
req-dde7fd1f-86b5-499d-9f4d-4086dce5e4c7 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: 48794f65-355c-4cad-a83f-6b8cc327dd4d] Start destroying the instance on the hypervisor. {{(pid=63279) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2253.199544] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-dde7fd1f-86b5-499d-9f4d-4086dce5e4c7 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: 48794f65-355c-4cad-a83f-6b8cc327dd4d] Destroying instance {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2253.201361] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b47984e-8374-455a-a61d-18e64efddcee {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2253.213070] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ea69d11-c71a-4e5f-933f-eb42300d5535 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2253.217434] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-dde7fd1f-86b5-499d-9f4d-4086dce5e4c7 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: 48794f65-355c-4cad-a83f-6b8cc327dd4d] Unregistering the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2253.218170] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-5f797d4c-15cf-48e9-bda5-c78cf4300013 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2253.230279] env[63279]: DEBUG nova.compute.provider_tree [None req-c0088455-9b66-4574-b3c8-cfeeda174077 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Inventory has not changed in ProviderTree for provider: 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2253.263469] env[63279]: DEBUG oslo_vmware.api [None req-17cc2163-e46e-46a7-8e0b-8af430363191 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Task: {'id': task-2087868, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2253.424282] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-dde7fd1f-86b5-499d-9f4d-4086dce5e4c7 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: 48794f65-355c-4cad-a83f-6b8cc327dd4d] Unregistered the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2253.424670] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-dde7fd1f-86b5-499d-9f4d-4086dce5e4c7 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: 48794f65-355c-4cad-a83f-6b8cc327dd4d] Deleting contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2253.424745] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-dde7fd1f-86b5-499d-9f4d-4086dce5e4c7 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Deleting the datastore file [datastore1] 48794f65-355c-4cad-a83f-6b8cc327dd4d {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2253.425022] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-88e5aa1d-e7a6-4a7a-adbf-73e3c4cd56cd {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2253.435656] env[63279]: DEBUG oslo_vmware.api [None req-dde7fd1f-86b5-499d-9f4d-4086dce5e4c7 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Waiting for the task: (returnval){ [ 2253.435656] env[63279]: value = "task-2087871" [ 2253.435656] env[63279]: _type = "Task" [ 2253.435656] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2253.441515] env[63279]: DEBUG oslo_vmware.api [None req-d473d10f-3835-4a22-8b4b-615c9316d66e tempest-ImagesNegativeTestJSON-354022530 tempest-ImagesNegativeTestJSON-354022530-project-member] Task: {'id': task-2087869, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2253.446882] env[63279]: DEBUG oslo_vmware.api [None req-dde7fd1f-86b5-499d-9f4d-4086dce5e4c7 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Task: {'id': task-2087871, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2253.701123] env[63279]: INFO nova.compute.manager [None req-6fa2bb5f-68d4-402b-bc5d-e41c6196d5ed tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] [instance: 21aca39e-8513-49bd-93e9-0d101cee591f] Rescuing [ 2253.701123] env[63279]: DEBUG oslo_concurrency.lockutils [None req-6fa2bb5f-68d4-402b-bc5d-e41c6196d5ed tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] Acquiring lock "refresh_cache-21aca39e-8513-49bd-93e9-0d101cee591f" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2253.701123] env[63279]: DEBUG oslo_concurrency.lockutils [None req-6fa2bb5f-68d4-402b-bc5d-e41c6196d5ed tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] Acquired lock "refresh_cache-21aca39e-8513-49bd-93e9-0d101cee591f" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2253.701123] env[63279]: DEBUG nova.network.neutron [None req-6fa2bb5f-68d4-402b-bc5d-e41c6196d5ed tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] [instance: 21aca39e-8513-49bd-93e9-0d101cee591f] Building network info cache for instance {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2253.733932] env[63279]: DEBUG nova.scheduler.client.report [None req-c0088455-9b66-4574-b3c8-cfeeda174077 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Inventory has not changed for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2253.768740] env[63279]: DEBUG oslo_vmware.api [None req-17cc2163-e46e-46a7-8e0b-8af430363191 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Task: {'id': task-2087868, 'name': PowerOnVM_Task, 'duration_secs': 0.71402} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2253.769070] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-17cc2163-e46e-46a7-8e0b-8af430363191 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] [instance: 6b4a94f9-1cd9-466f-844f-8d692b732abc] Powered on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2253.769315] env[63279]: INFO nova.compute.manager [None req-17cc2163-e46e-46a7-8e0b-8af430363191 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] [instance: 6b4a94f9-1cd9-466f-844f-8d692b732abc] Took 17.23 seconds to spawn the instance on the hypervisor. 
[ 2253.769531] env[63279]: DEBUG nova.compute.manager [None req-17cc2163-e46e-46a7-8e0b-8af430363191 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] [instance: 6b4a94f9-1cd9-466f-844f-8d692b732abc] Checking state {{(pid=63279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2253.770426] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5baf49f-afad-4870-8184-0cf8698a71cd {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2253.942680] env[63279]: DEBUG oslo_vmware.api [None req-d473d10f-3835-4a22-8b4b-615c9316d66e tempest-ImagesNegativeTestJSON-354022530 tempest-ImagesNegativeTestJSON-354022530-project-member] Task: {'id': task-2087869, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.705614} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2253.943545] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-d473d10f-3835-4a22-8b4b-615c9316d66e tempest-ImagesNegativeTestJSON-354022530 tempest-ImagesNegativeTestJSON-354022530-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] ecec02e8-8ddf-4997-9547-ccee0db1938b/ecec02e8-8ddf-4997-9547-ccee0db1938b.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2253.943913] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-d473d10f-3835-4a22-8b4b-615c9316d66e tempest-ImagesNegativeTestJSON-354022530 tempest-ImagesNegativeTestJSON-354022530-project-member] [instance: ecec02e8-8ddf-4997-9547-ccee0db1938b] Extending root virtual disk to 1048576 {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2253.944291] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-70e98339-8448-4faf-ba76-90ddd53117cd {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2253.949998] env[63279]: DEBUG oslo_vmware.api [None req-dde7fd1f-86b5-499d-9f4d-4086dce5e4c7 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Task: {'id': task-2087871, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.48079} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2253.950705] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-dde7fd1f-86b5-499d-9f4d-4086dce5e4c7 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Deleted the datastore file {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2253.950917] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-dde7fd1f-86b5-499d-9f4d-4086dce5e4c7 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: 48794f65-355c-4cad-a83f-6b8cc327dd4d] Deleted contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2253.951116] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-dde7fd1f-86b5-499d-9f4d-4086dce5e4c7 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: 48794f65-355c-4cad-a83f-6b8cc327dd4d] Instance destroyed {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2253.952357] env[63279]: INFO nova.compute.manager [None req-dde7fd1f-86b5-499d-9f4d-4086dce5e4c7 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: 48794f65-355c-4cad-a83f-6b8cc327dd4d] Took 0.75 seconds to destroy the instance on the hypervisor. [ 2253.952357] env[63279]: DEBUG oslo.service.loopingcall [None req-dde7fd1f-86b5-499d-9f4d-4086dce5e4c7 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2253.952357] env[63279]: DEBUG nova.compute.manager [-] [instance: 48794f65-355c-4cad-a83f-6b8cc327dd4d] Deallocating network for instance {{(pid=63279) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2253.952357] env[63279]: DEBUG nova.network.neutron [-] [instance: 48794f65-355c-4cad-a83f-6b8cc327dd4d] deallocate_for_instance() {{(pid=63279) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2253.956301] env[63279]: DEBUG oslo_vmware.api [None req-d473d10f-3835-4a22-8b4b-615c9316d66e tempest-ImagesNegativeTestJSON-354022530 tempest-ImagesNegativeTestJSON-354022530-project-member] Waiting for the task: (returnval){ [ 2253.956301] env[63279]: value = "task-2087872" [ 2253.956301] env[63279]: _type = "Task" [ 2253.956301] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2253.966929] env[63279]: DEBUG oslo_vmware.api [None req-d473d10f-3835-4a22-8b4b-615c9316d66e tempest-ImagesNegativeTestJSON-354022530 tempest-ImagesNegativeTestJSON-354022530-project-member] Task: {'id': task-2087872, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2254.247994] env[63279]: DEBUG oslo_concurrency.lockutils [None req-c0088455-9b66-4574-b3c8-cfeeda174077 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.397s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2254.248631] env[63279]: DEBUG nova.compute.manager [None req-c0088455-9b66-4574-b3c8-cfeeda174077 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: 1fca4e5c-fe2c-4b61-bed4-52c7770def7c] Start building networks asynchronously for instance. {{(pid=63279) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 2254.307649] env[63279]: INFO nova.compute.manager [None req-17cc2163-e46e-46a7-8e0b-8af430363191 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] [instance: 6b4a94f9-1cd9-466f-844f-8d692b732abc] Took 28.16 seconds to build instance. [ 2254.330164] env[63279]: DEBUG oslo_concurrency.lockutils [None req-742a8660-b63a-44e4-a0ed-bea4b44cb26a tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Acquiring lock "interface-9fd3ea14-3e25-47f4-ae84-1b1fb8b1bbf6-None" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2254.330164] env[63279]: DEBUG oslo_concurrency.lockutils [None req-742a8660-b63a-44e4-a0ed-bea4b44cb26a tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Lock "interface-9fd3ea14-3e25-47f4-ae84-1b1fb8b1bbf6-None" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.001s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2254.330303] env[63279]: DEBUG nova.objects.instance [None req-742a8660-b63a-44e4-a0ed-bea4b44cb26a tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Lazy-loading 'flavor' on Instance uuid 9fd3ea14-3e25-47f4-ae84-1b1fb8b1bbf6 {{(pid=63279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2254.468299] env[63279]: DEBUG oslo_vmware.api [None req-d473d10f-3835-4a22-8b4b-615c9316d66e tempest-ImagesNegativeTestJSON-354022530 tempest-ImagesNegativeTestJSON-354022530-project-member] Task: {'id': task-2087872, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.096073} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2254.472026] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-d473d10f-3835-4a22-8b4b-615c9316d66e tempest-ImagesNegativeTestJSON-354022530 tempest-ImagesNegativeTestJSON-354022530-project-member] [instance: ecec02e8-8ddf-4997-9547-ccee0db1938b] Extended root virtual disk {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2254.472026] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f77624fd-6c6d-45be-b124-8becb39cb4ec {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2254.493479] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-d473d10f-3835-4a22-8b4b-615c9316d66e tempest-ImagesNegativeTestJSON-354022530 tempest-ImagesNegativeTestJSON-354022530-project-member] [instance: ecec02e8-8ddf-4997-9547-ccee0db1938b] Reconfiguring VM instance instance-00000060 to attach disk [datastore1] ecec02e8-8ddf-4997-9547-ccee0db1938b/ecec02e8-8ddf-4997-9547-ccee0db1938b.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2254.493894] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b0b66c6a-9f0a-4fcb-993c-8558eb5be7e9 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2254.523066] env[63279]: DEBUG oslo_vmware.api [None req-d473d10f-3835-4a22-8b4b-615c9316d66e tempest-ImagesNegativeTestJSON-354022530 tempest-ImagesNegativeTestJSON-354022530-project-member] Waiting for the task: (returnval){ [ 2254.523066] env[63279]: value = "task-2087873" [ 2254.523066] env[63279]: _type = "Task" [ 2254.523066] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2254.539471] env[63279]: DEBUG oslo_vmware.api [None req-d473d10f-3835-4a22-8b4b-615c9316d66e tempest-ImagesNegativeTestJSON-354022530 tempest-ImagesNegativeTestJSON-354022530-project-member] Task: {'id': task-2087873, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2254.755206] env[63279]: DEBUG nova.compute.utils [None req-c0088455-9b66-4574-b3c8-cfeeda174077 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Using /dev/sd instead of None {{(pid=63279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2254.756539] env[63279]: DEBUG nova.compute.manager [None req-c0088455-9b66-4574-b3c8-cfeeda174077 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: 1fca4e5c-fe2c-4b61-bed4-52c7770def7c] Allocating IP information in the background. 
{{(pid=63279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 2254.756652] env[63279]: DEBUG nova.network.neutron [None req-c0088455-9b66-4574-b3c8-cfeeda174077 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: 1fca4e5c-fe2c-4b61-bed4-52c7770def7c] allocate_for_instance() {{(pid=63279) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2254.811970] env[63279]: DEBUG oslo_concurrency.lockutils [None req-17cc2163-e46e-46a7-8e0b-8af430363191 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Lock "6b4a94f9-1cd9-466f-844f-8d692b732abc" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 29.690s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2254.828704] env[63279]: DEBUG nova.policy [None req-c0088455-9b66-4574-b3c8-cfeeda174077 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ae6ae670025f406e8bd0362124749c43', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f39174e9ff5649e0ade4391da383dfb2', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63279) authorize /opt/stack/nova/nova/policy.py:192}} [ 2254.836648] env[63279]: DEBUG nova.objects.instance [None req-742a8660-b63a-44e4-a0ed-bea4b44cb26a tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Lazy-loading 'pci_requests' on Instance uuid 9fd3ea14-3e25-47f4-ae84-1b1fb8b1bbf6 {{(pid=63279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2254.860983] env[63279]: DEBUG nova.compute.manager [req-2205ba36-d0e7-42d2-bd63-2ba8e63c9dcf req-283fac71-34fc-4200-a9e7-761f7f2b2092 service nova] [instance: 48794f65-355c-4cad-a83f-6b8cc327dd4d] Received event network-vif-deleted-29fc0fdf-41a6-471b-a08c-4e62bd3d372b {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2254.861198] env[63279]: INFO nova.compute.manager [req-2205ba36-d0e7-42d2-bd63-2ba8e63c9dcf req-283fac71-34fc-4200-a9e7-761f7f2b2092 service nova] [instance: 48794f65-355c-4cad-a83f-6b8cc327dd4d] Neutron deleted interface 29fc0fdf-41a6-471b-a08c-4e62bd3d372b; detaching it from the instance and deleting it from the info cache [ 2254.861408] env[63279]: DEBUG nova.network.neutron [req-2205ba36-d0e7-42d2-bd63-2ba8e63c9dcf req-283fac71-34fc-4200-a9e7-761f7f2b2092 service nova] [instance: 48794f65-355c-4cad-a83f-6b8cc327dd4d] Updating instance_info_cache with network_info: [] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2254.867668] env[63279]: DEBUG nova.network.neutron [None req-6fa2bb5f-68d4-402b-bc5d-e41c6196d5ed tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] [instance: 21aca39e-8513-49bd-93e9-0d101cee591f] Updating instance_info_cache with network_info: [{"id": "d6d02bd7-d44f-434c-8e3f-5bdd56aed938", "address": "fa:16:3e:91:cc:87", "network": {"id": "1ebc2797-412f-4da1-ba28-8b54789f9203", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-665948294-network", 
"subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "afe3ab970f3249719809afa9f9b11c1b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "496faa4d-d874-449b-905e-328ddd60b31b", "external-id": "nsx-vlan-transportzone-391", "segmentation_id": 391, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd6d02bd7-d4", "ovs_interfaceid": "d6d02bd7-d44f-434c-8e3f-5bdd56aed938", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2254.938818] env[63279]: DEBUG nova.network.neutron [-] [instance: 48794f65-355c-4cad-a83f-6b8cc327dd4d] Updating instance_info_cache with network_info: [] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2255.033430] env[63279]: DEBUG oslo_vmware.api [None req-d473d10f-3835-4a22-8b4b-615c9316d66e tempest-ImagesNegativeTestJSON-354022530 tempest-ImagesNegativeTestJSON-354022530-project-member] Task: {'id': task-2087873, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2255.162506] env[63279]: DEBUG nova.network.neutron [None req-c0088455-9b66-4574-b3c8-cfeeda174077 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: 1fca4e5c-fe2c-4b61-bed4-52c7770def7c] Successfully created port: 1e0ac67c-3039-4c36-831a-d32977fcab32 {{(pid=63279) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2255.262777] env[63279]: DEBUG nova.compute.manager [None req-c0088455-9b66-4574-b3c8-cfeeda174077 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: 1fca4e5c-fe2c-4b61-bed4-52c7770def7c] Start building block device mappings for instance. 
{{(pid=63279) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 2255.339542] env[63279]: DEBUG nova.objects.base [None req-742a8660-b63a-44e4-a0ed-bea4b44cb26a tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Object Instance<9fd3ea14-3e25-47f4-ae84-1b1fb8b1bbf6> lazy-loaded attributes: flavor,pci_requests {{(pid=63279) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 2255.339799] env[63279]: DEBUG nova.network.neutron [None req-742a8660-b63a-44e4-a0ed-bea4b44cb26a tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] [instance: 9fd3ea14-3e25-47f4-ae84-1b1fb8b1bbf6] allocate_for_instance() {{(pid=63279) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2255.364947] env[63279]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-34595340-ccad-42e0-9217-be4396280a8c {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2255.372096] env[63279]: DEBUG oslo_concurrency.lockutils [None req-6fa2bb5f-68d4-402b-bc5d-e41c6196d5ed tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] Releasing lock "refresh_cache-21aca39e-8513-49bd-93e9-0d101cee591f" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2255.381110] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6cbc9cf-eb12-444c-ab32-22324c0c2ef5 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2255.424854] env[63279]: DEBUG nova.compute.manager [req-2205ba36-d0e7-42d2-bd63-2ba8e63c9dcf req-283fac71-34fc-4200-a9e7-761f7f2b2092 service nova] [instance: 48794f65-355c-4cad-a83f-6b8cc327dd4d] Detach interface failed, port_id=29fc0fdf-41a6-471b-a08c-4e62bd3d372b, reason: Instance 48794f65-355c-4cad-a83f-6b8cc327dd4d could not be found. {{(pid=63279) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11484}} [ 2255.443210] env[63279]: INFO nova.compute.manager [-] [instance: 48794f65-355c-4cad-a83f-6b8cc327dd4d] Took 1.49 seconds to deallocate network for instance. [ 2255.537051] env[63279]: DEBUG oslo_vmware.api [None req-d473d10f-3835-4a22-8b4b-615c9316d66e tempest-ImagesNegativeTestJSON-354022530 tempest-ImagesNegativeTestJSON-354022530-project-member] Task: {'id': task-2087873, 'name': ReconfigVM_Task, 'duration_secs': 0.69657} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2255.537479] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-d473d10f-3835-4a22-8b4b-615c9316d66e tempest-ImagesNegativeTestJSON-354022530 tempest-ImagesNegativeTestJSON-354022530-project-member] [instance: ecec02e8-8ddf-4997-9547-ccee0db1938b] Reconfigured VM instance instance-00000060 to attach disk [datastore1] ecec02e8-8ddf-4997-9547-ccee0db1938b/ecec02e8-8ddf-4997-9547-ccee0db1938b.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2255.538583] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-51457034-4eab-4b39-af3c-0d62847f205d {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2255.547928] env[63279]: DEBUG oslo_vmware.api [None req-d473d10f-3835-4a22-8b4b-615c9316d66e tempest-ImagesNegativeTestJSON-354022530 tempest-ImagesNegativeTestJSON-354022530-project-member] Waiting for the task: (returnval){ [ 2255.547928] env[63279]: value = "task-2087874" [ 2255.547928] env[63279]: _type = "Task" [ 2255.547928] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2255.563849] env[63279]: DEBUG oslo_vmware.api [None req-d473d10f-3835-4a22-8b4b-615c9316d66e tempest-ImagesNegativeTestJSON-354022530 tempest-ImagesNegativeTestJSON-354022530-project-member] Task: {'id': task-2087874, 'name': Rename_Task} progress is 5%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2255.599144] env[63279]: DEBUG oslo_concurrency.lockutils [None req-742a8660-b63a-44e4-a0ed-bea4b44cb26a tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Lock "interface-9fd3ea14-3e25-47f4-ae84-1b1fb8b1bbf6-None" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 1.269s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2255.952262] env[63279]: DEBUG oslo_concurrency.lockutils [None req-dde7fd1f-86b5-499d-9f4d-4086dce5e4c7 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2255.952813] env[63279]: DEBUG oslo_concurrency.lockutils [None req-dde7fd1f-86b5-499d-9f4d-4086dce5e4c7 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2255.952983] env[63279]: DEBUG nova.objects.instance [None req-dde7fd1f-86b5-499d-9f4d-4086dce5e4c7 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Lazy-loading 'resources' on Instance uuid 48794f65-355c-4cad-a83f-6b8cc327dd4d {{(pid=63279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2256.067601] env[63279]: DEBUG oslo_vmware.api [None req-d473d10f-3835-4a22-8b4b-615c9316d66e tempest-ImagesNegativeTestJSON-354022530 
tempest-ImagesNegativeTestJSON-354022530-project-member] Task: {'id': task-2087874, 'name': Rename_Task, 'duration_secs': 0.214453} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2256.067971] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-d473d10f-3835-4a22-8b4b-615c9316d66e tempest-ImagesNegativeTestJSON-354022530 tempest-ImagesNegativeTestJSON-354022530-project-member] [instance: ecec02e8-8ddf-4997-9547-ccee0db1938b] Powering on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2256.068308] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-902bdeb4-c4d1-4f24-a2ef-a3b936293918 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2256.078104] env[63279]: DEBUG oslo_vmware.api [None req-d473d10f-3835-4a22-8b4b-615c9316d66e tempest-ImagesNegativeTestJSON-354022530 tempest-ImagesNegativeTestJSON-354022530-project-member] Waiting for the task: (returnval){ [ 2256.078104] env[63279]: value = "task-2087875" [ 2256.078104] env[63279]: _type = "Task" [ 2256.078104] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2256.090045] env[63279]: DEBUG oslo_vmware.api [None req-d473d10f-3835-4a22-8b4b-615c9316d66e tempest-ImagesNegativeTestJSON-354022530 tempest-ImagesNegativeTestJSON-354022530-project-member] Task: {'id': task-2087875, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2256.277590] env[63279]: DEBUG nova.compute.manager [None req-c0088455-9b66-4574-b3c8-cfeeda174077 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: 1fca4e5c-fe2c-4b61-bed4-52c7770def7c] Start spawning the instance on the hypervisor. 
{{(pid=63279) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 2256.312130] env[63279]: DEBUG nova.virt.hardware [None req-c0088455-9b66-4574-b3c8-cfeeda174077 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-13T17:30:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-13T17:30:01Z,direct_url=,disk_format='vmdk',id=30887889-e45b-4f67-8b3c-16216e594a90,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a935e86eee0a4c38adfe0367d2097a61',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-13T17:30:02Z,virtual_size=,visibility=), allow threads: False {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2256.312419] env[63279]: DEBUG nova.virt.hardware [None req-c0088455-9b66-4574-b3c8-cfeeda174077 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Flavor limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2256.312584] env[63279]: DEBUG nova.virt.hardware [None req-c0088455-9b66-4574-b3c8-cfeeda174077 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Image limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2256.312784] env[63279]: DEBUG nova.virt.hardware [None req-c0088455-9b66-4574-b3c8-cfeeda174077 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Flavor pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2256.312936] env[63279]: DEBUG nova.virt.hardware [None req-c0088455-9b66-4574-b3c8-cfeeda174077 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Image pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2256.313681] env[63279]: DEBUG nova.virt.hardware [None req-c0088455-9b66-4574-b3c8-cfeeda174077 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2256.313959] env[63279]: DEBUG nova.virt.hardware [None req-c0088455-9b66-4574-b3c8-cfeeda174077 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 2256.314272] env[63279]: DEBUG nova.virt.hardware [None req-c0088455-9b66-4574-b3c8-cfeeda174077 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 2256.314512] env[63279]: DEBUG nova.virt.hardware [None 
req-c0088455-9b66-4574-b3c8-cfeeda174077 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Got 1 possible topologies {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2256.314697] env[63279]: DEBUG nova.virt.hardware [None req-c0088455-9b66-4574-b3c8-cfeeda174077 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2256.314902] env[63279]: DEBUG nova.virt.hardware [None req-c0088455-9b66-4574-b3c8-cfeeda174077 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2256.315876] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7f685cb-af28-4ba1-8a2f-4a3158ef620c {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2256.326056] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d3badcd-67a1-414a-bb12-4429673077a3 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2256.590452] env[63279]: DEBUG oslo_vmware.api [None req-d473d10f-3835-4a22-8b4b-615c9316d66e tempest-ImagesNegativeTestJSON-354022530 tempest-ImagesNegativeTestJSON-354022530-project-member] Task: {'id': task-2087875, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2256.693053] env[63279]: DEBUG oslo_vmware.rw_handles [None req-cfa8c991-7512-49ef-9c5d-e5ffe702877c tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52a4e5f5-e292-af4d-d0d7-f8da47205202/disk-0.vmdk. {{(pid=63279) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 2256.695303] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b74fba8-2dd9-44ae-a213-e7bb32c3ba90 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2256.710969] env[63279]: DEBUG oslo_vmware.rw_handles [None req-cfa8c991-7512-49ef-9c5d-e5ffe702877c tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52a4e5f5-e292-af4d-d0d7-f8da47205202/disk-0.vmdk is in state: ready. {{(pid=63279) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 2256.711323] env[63279]: ERROR oslo_vmware.rw_handles [None req-cfa8c991-7512-49ef-9c5d-e5ffe702877c tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52a4e5f5-e292-af4d-d0d7-f8da47205202/disk-0.vmdk due to incomplete transfer. 
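
    # Aside: the repeated "Waiting for the task ... to complete" / "_poll_task ... progress is N%."
    # entries above reflect a simple poll-until-terminal-state loop. This is a minimal sketch of
    # that pattern only; the function name, interval, and the attributes on the returned object
    # (.state, .progress, .error) are illustrative assumptions, not the oslo.vmware implementation.
    import time

    def wait_for_task(get_task_info, poll_interval=0.5, timeout=300):
        """Poll a vCenter-style task until it succeeds, fails, or times out."""
        deadline = time.monotonic() + timeout
        while time.monotonic() < deadline:
            info = get_task_info()
            if info.state == 'success':
                return info                     # e.g. PowerOnVM_Task completed successfully
            if info.state == 'error':
                raise RuntimeError(info.error)  # task reported a fault
            # still 'queued' or 'running': report progress, like the "progress is N%." lines
            print('progress is %s%%' % (info.progress or 0))
            time.sleep(poll_interval)
        raise TimeoutError('task did not complete within %ss' % timeout)
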
[ 2256.715432] env[63279]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-427377f8-2e29-46db-b4b2-0a0fa3214489 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2256.728203] env[63279]: DEBUG oslo_vmware.rw_handles [None req-cfa8c991-7512-49ef-9c5d-e5ffe702877c tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52a4e5f5-e292-af4d-d0d7-f8da47205202/disk-0.vmdk. {{(pid=63279) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 2256.728297] env[63279]: DEBUG nova.virt.vmwareapi.images [None req-cfa8c991-7512-49ef-9c5d-e5ffe702877c tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 686a7ce2-2d07-411e-91d6-0471c55c3728] Uploaded image fbac5115-107e-4a20-ba81-7d5fa6c21bc8 to the Glance image server {{(pid=63279) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 2256.730899] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-cfa8c991-7512-49ef-9c5d-e5ffe702877c tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 686a7ce2-2d07-411e-91d6-0471c55c3728] Destroying the VM {{(pid=63279) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 2256.731265] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-d65fba4a-e572-4086-9a24-804ef8bdfd7f {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2256.741258] env[63279]: DEBUG oslo_vmware.api [None req-cfa8c991-7512-49ef-9c5d-e5ffe702877c tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Waiting for the task: (returnval){ [ 2256.741258] env[63279]: value = "task-2087876" [ 2256.741258] env[63279]: _type = "Task" [ 2256.741258] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2256.751038] env[63279]: DEBUG oslo_vmware.api [None req-cfa8c991-7512-49ef-9c5d-e5ffe702877c tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Task: {'id': task-2087876, 'name': Destroy_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2256.791067] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e46b7f0-4ab5-485a-b271-5f0ae85b3eb8 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2256.804657] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7303d567-db07-40b0-b944-853f9978fb9e {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2256.837699] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c600404-6d60-4dc5-882d-7984aa10b237 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2256.846800] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-190e3a44-52b1-488d-922b-2ce44d424cb6 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2256.863757] env[63279]: DEBUG nova.compute.provider_tree [None req-dde7fd1f-86b5-499d-9f4d-4086dce5e4c7 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Inventory has not changed in ProviderTree for provider: 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2256.880030] env[63279]: DEBUG nova.network.neutron [None req-c0088455-9b66-4574-b3c8-cfeeda174077 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: 1fca4e5c-fe2c-4b61-bed4-52c7770def7c] Successfully updated port: 1e0ac67c-3039-4c36-831a-d32977fcab32 {{(pid=63279) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2256.939388] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-6fa2bb5f-68d4-402b-bc5d-e41c6196d5ed tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] [instance: 21aca39e-8513-49bd-93e9-0d101cee591f] Powering off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2256.941424] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0d674d72-10d8-4aa7-a17d-cd0aac454763 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2256.948155] env[63279]: DEBUG oslo_vmware.api [None req-6fa2bb5f-68d4-402b-bc5d-e41c6196d5ed tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] Waiting for the task: (returnval){ [ 2256.948155] env[63279]: value = "task-2087877" [ 2256.948155] env[63279]: _type = "Task" [ 2256.948155] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2256.957757] env[63279]: DEBUG oslo_vmware.api [None req-6fa2bb5f-68d4-402b-bc5d-e41c6196d5ed tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] Task: {'id': task-2087877, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2257.001142] env[63279]: DEBUG oslo_concurrency.lockutils [None req-24dd457b-86eb-4407-ae8c-0e21ece95320 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Acquiring lock "6b4a94f9-1cd9-466f-844f-8d692b732abc" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2257.003166] env[63279]: DEBUG oslo_concurrency.lockutils [None req-24dd457b-86eb-4407-ae8c-0e21ece95320 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Lock "6b4a94f9-1cd9-466f-844f-8d692b732abc" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2257.003166] env[63279]: DEBUG oslo_concurrency.lockutils [None req-24dd457b-86eb-4407-ae8c-0e21ece95320 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Acquiring lock "6b4a94f9-1cd9-466f-844f-8d692b732abc-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2257.003166] env[63279]: DEBUG oslo_concurrency.lockutils [None req-24dd457b-86eb-4407-ae8c-0e21ece95320 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Lock "6b4a94f9-1cd9-466f-844f-8d692b732abc-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2257.003166] env[63279]: DEBUG oslo_concurrency.lockutils [None req-24dd457b-86eb-4407-ae8c-0e21ece95320 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Lock "6b4a94f9-1cd9-466f-844f-8d692b732abc-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2257.004458] env[63279]: INFO nova.compute.manager [None req-24dd457b-86eb-4407-ae8c-0e21ece95320 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] [instance: 6b4a94f9-1cd9-466f-844f-8d692b732abc] Terminating instance [ 2257.097295] env[63279]: DEBUG oslo_vmware.api [None req-d473d10f-3835-4a22-8b4b-615c9316d66e tempest-ImagesNegativeTestJSON-354022530 tempest-ImagesNegativeTestJSON-354022530-project-member] Task: {'id': task-2087875, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2257.194909] env[63279]: DEBUG nova.compute.manager [req-04370a55-1df4-4d5d-b41e-38cf38dc6e2f req-f0691a4a-1edb-4757-a599-e6fe8e014120 service nova] [instance: 1fca4e5c-fe2c-4b61-bed4-52c7770def7c] Received event network-vif-plugged-1e0ac67c-3039-4c36-831a-d32977fcab32 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2257.195068] env[63279]: DEBUG oslo_concurrency.lockutils [req-04370a55-1df4-4d5d-b41e-38cf38dc6e2f req-f0691a4a-1edb-4757-a599-e6fe8e014120 service nova] Acquiring lock "1fca4e5c-fe2c-4b61-bed4-52c7770def7c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2257.195638] env[63279]: DEBUG oslo_concurrency.lockutils [req-04370a55-1df4-4d5d-b41e-38cf38dc6e2f req-f0691a4a-1edb-4757-a599-e6fe8e014120 service nova] Lock "1fca4e5c-fe2c-4b61-bed4-52c7770def7c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2257.195638] env[63279]: DEBUG oslo_concurrency.lockutils [req-04370a55-1df4-4d5d-b41e-38cf38dc6e2f req-f0691a4a-1edb-4757-a599-e6fe8e014120 service nova] Lock "1fca4e5c-fe2c-4b61-bed4-52c7770def7c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2257.195824] env[63279]: DEBUG nova.compute.manager [req-04370a55-1df4-4d5d-b41e-38cf38dc6e2f req-f0691a4a-1edb-4757-a599-e6fe8e014120 service nova] [instance: 1fca4e5c-fe2c-4b61-bed4-52c7770def7c] No waiting events found dispatching network-vif-plugged-1e0ac67c-3039-4c36-831a-d32977fcab32 {{(pid=63279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 2257.195868] env[63279]: WARNING nova.compute.manager [req-04370a55-1df4-4d5d-b41e-38cf38dc6e2f req-f0691a4a-1edb-4757-a599-e6fe8e014120 service nova] [instance: 1fca4e5c-fe2c-4b61-bed4-52c7770def7c] Received unexpected event network-vif-plugged-1e0ac67c-3039-4c36-831a-d32977fcab32 for instance with vm_state building and task_state spawning. [ 2257.251465] env[63279]: DEBUG oslo_vmware.api [None req-cfa8c991-7512-49ef-9c5d-e5ffe702877c tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Task: {'id': task-2087876, 'name': Destroy_Task} progress is 33%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2257.370021] env[63279]: DEBUG nova.scheduler.client.report [None req-dde7fd1f-86b5-499d-9f4d-4086dce5e4c7 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Inventory has not changed for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2257.388463] env[63279]: DEBUG oslo_concurrency.lockutils [None req-c0088455-9b66-4574-b3c8-cfeeda174077 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Acquiring lock "refresh_cache-1fca4e5c-fe2c-4b61-bed4-52c7770def7c" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2257.388463] env[63279]: DEBUG oslo_concurrency.lockutils [None req-c0088455-9b66-4574-b3c8-cfeeda174077 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Acquired lock "refresh_cache-1fca4e5c-fe2c-4b61-bed4-52c7770def7c" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2257.388463] env[63279]: DEBUG nova.network.neutron [None req-c0088455-9b66-4574-b3c8-cfeeda174077 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: 1fca4e5c-fe2c-4b61-bed4-52c7770def7c] Building network info cache for instance {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2257.458611] env[63279]: DEBUG oslo_vmware.api [None req-6fa2bb5f-68d4-402b-bc5d-e41c6196d5ed tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] Task: {'id': task-2087877, 'name': PowerOffVM_Task, 'duration_secs': 0.404096} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2257.458976] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-6fa2bb5f-68d4-402b-bc5d-e41c6196d5ed tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] [instance: 21aca39e-8513-49bd-93e9-0d101cee591f] Powered off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2257.459901] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9a78e27-85d8-4305-87de-f1f72ea6c715 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2257.481497] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf1ff4c5-862a-4e3e-920a-a9e1c5f1d559 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2257.510429] env[63279]: DEBUG nova.compute.manager [None req-24dd457b-86eb-4407-ae8c-0e21ece95320 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] [instance: 6b4a94f9-1cd9-466f-844f-8d692b732abc] Start destroying the instance on the hypervisor. 
{{(pid=63279) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2257.510727] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-24dd457b-86eb-4407-ae8c-0e21ece95320 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] [instance: 6b4a94f9-1cd9-466f-844f-8d692b732abc] Destroying instance {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2257.512079] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a5dc4cf-d263-4906-a0ce-5f9103f6b376 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2257.517717] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-6fa2bb5f-68d4-402b-bc5d-e41c6196d5ed tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] [instance: 21aca39e-8513-49bd-93e9-0d101cee591f] Powering off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2257.518000] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a38a1914-274e-438b-9f64-3e104dc8328d {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2257.524129] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-24dd457b-86eb-4407-ae8c-0e21ece95320 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] [instance: 6b4a94f9-1cd9-466f-844f-8d692b732abc] Powering off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2257.525317] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0a67be80-78f0-4ccb-8da7-bc2bf1a5b63d {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2257.526835] env[63279]: DEBUG oslo_vmware.api [None req-6fa2bb5f-68d4-402b-bc5d-e41c6196d5ed tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] Waiting for the task: (returnval){ [ 2257.526835] env[63279]: value = "task-2087878" [ 2257.526835] env[63279]: _type = "Task" [ 2257.526835] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2257.531047] env[63279]: DEBUG oslo_vmware.api [None req-24dd457b-86eb-4407-ae8c-0e21ece95320 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Waiting for the task: (returnval){ [ 2257.531047] env[63279]: value = "task-2087879" [ 2257.531047] env[63279]: _type = "Task" [ 2257.531047] env[63279]: } to complete. 
{{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2257.538719] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-6fa2bb5f-68d4-402b-bc5d-e41c6196d5ed tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] [instance: 21aca39e-8513-49bd-93e9-0d101cee591f] VM already powered off {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 2257.538924] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-6fa2bb5f-68d4-402b-bc5d-e41c6196d5ed tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] [instance: 21aca39e-8513-49bd-93e9-0d101cee591f] Processing image 30887889-e45b-4f67-8b3c-16216e594a90 {{(pid=63279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2257.539183] env[63279]: DEBUG oslo_concurrency.lockutils [None req-6fa2bb5f-68d4-402b-bc5d-e41c6196d5ed tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2257.539335] env[63279]: DEBUG oslo_concurrency.lockutils [None req-6fa2bb5f-68d4-402b-bc5d-e41c6196d5ed tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2257.539514] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-6fa2bb5f-68d4-402b-bc5d-e41c6196d5ed tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2257.540084] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9dc300c8-476a-45dd-99d3-8e2c1a640ea0 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2257.544645] env[63279]: DEBUG oslo_vmware.api [None req-24dd457b-86eb-4407-ae8c-0e21ece95320 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Task: {'id': task-2087879, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2257.551421] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-6fa2bb5f-68d4-402b-bc5d-e41c6196d5ed tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2257.551624] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-6fa2bb5f-68d4-402b-bc5d-e41c6196d5ed tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2257.552522] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-65bb15d6-bebe-42a7-aa5f-6ad94016f843 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2257.559633] env[63279]: DEBUG oslo_vmware.api [None req-6fa2bb5f-68d4-402b-bc5d-e41c6196d5ed tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] Waiting for the task: (returnval){ [ 2257.559633] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52766f33-ab6a-14ec-5255-40af3808aebe" [ 2257.559633] env[63279]: _type = "Task" [ 2257.559633] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2257.567253] env[63279]: DEBUG oslo_vmware.api [None req-6fa2bb5f-68d4-402b-bc5d-e41c6196d5ed tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52766f33-ab6a-14ec-5255-40af3808aebe, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2257.590606] env[63279]: DEBUG oslo_vmware.api [None req-d473d10f-3835-4a22-8b4b-615c9316d66e tempest-ImagesNegativeTestJSON-354022530 tempest-ImagesNegativeTestJSON-354022530-project-member] Task: {'id': task-2087875, 'name': PowerOnVM_Task, 'duration_secs': 1.224119} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2257.591028] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-d473d10f-3835-4a22-8b4b-615c9316d66e tempest-ImagesNegativeTestJSON-354022530 tempest-ImagesNegativeTestJSON-354022530-project-member] [instance: ecec02e8-8ddf-4997-9547-ccee0db1938b] Powered on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2257.591278] env[63279]: INFO nova.compute.manager [None req-d473d10f-3835-4a22-8b4b-615c9316d66e tempest-ImagesNegativeTestJSON-354022530 tempest-ImagesNegativeTestJSON-354022530-project-member] [instance: ecec02e8-8ddf-4997-9547-ccee0db1938b] Took 9.86 seconds to spawn the instance on the hypervisor. 
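
    # Aside: the sequence above (acquire a lock named after the cached VMDK under
    # devstack-image-cache_base, create the cache directory, run SearchDatastore_Task, then
    # CopyVirtualDisk_Task to <instance>/<image>-rescue.vmdk) is a check-then-copy image-cache
    # pattern. This is a loose filesystem analogy of that pattern only; every path, helper name,
    # and the in-process locking here are assumptions for illustration, while the real driver
    # performs these steps against vCenter datastore APIs.
    import os
    import shutil
    import threading

    _cache_locks = {}
    _registry_lock = threading.Lock()

    def _lock_for(path):
        # one lock per cached item, mirroring the per-path lock names in the log
        with _registry_lock:
            return _cache_locks.setdefault(path, threading.Lock())

    def fetch_image_if_missing(cache_dir, image_id, fetch_fn):
        cached = os.path.join(cache_dir, image_id, image_id + '.vmdk')
        with _lock_for(cached):
            os.makedirs(os.path.dirname(cached), exist_ok=True)   # "Creating directory ..."
            if not os.path.exists(cached):                        # SearchDatastore_Task analogue
                fetch_fn(cached)                                  # download the image once
        return cached

    def copy_for_instance(cached, instance_dir, suffix='-rescue.vmdk'):
        dst = os.path.join(instance_dir,
                           os.path.basename(cached).replace('.vmdk', suffix))
        shutil.copyfile(cached, dst)                              # CopyVirtualDisk_Task analogue
        return dst
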
[ 2257.591548] env[63279]: DEBUG nova.compute.manager [None req-d473d10f-3835-4a22-8b4b-615c9316d66e tempest-ImagesNegativeTestJSON-354022530 tempest-ImagesNegativeTestJSON-354022530-project-member] [instance: ecec02e8-8ddf-4997-9547-ccee0db1938b] Checking state {{(pid=63279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2257.592375] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9179f1be-1bb4-466d-b9c6-17783bbf93ed {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2257.627142] env[63279]: DEBUG oslo_concurrency.lockutils [None req-1efaec50-9d70-4953-a0fc-80de35ec2073 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Acquiring lock "interface-9fd3ea14-3e25-47f4-ae84-1b1fb8b1bbf6-None" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2257.627447] env[63279]: DEBUG oslo_concurrency.lockutils [None req-1efaec50-9d70-4953-a0fc-80de35ec2073 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Lock "interface-9fd3ea14-3e25-47f4-ae84-1b1fb8b1bbf6-None" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.001s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2257.627808] env[63279]: DEBUG nova.objects.instance [None req-1efaec50-9d70-4953-a0fc-80de35ec2073 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Lazy-loading 'flavor' on Instance uuid 9fd3ea14-3e25-47f4-ae84-1b1fb8b1bbf6 {{(pid=63279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2257.714870] env[63279]: DEBUG oslo_concurrency.lockutils [None req-284fb117-637f-4fa4-b022-b8d4dd30d5e2 tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Acquiring lock "19e10ee4-99d1-44b9-9354-4c162d541a1f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2257.715163] env[63279]: DEBUG oslo_concurrency.lockutils [None req-284fb117-637f-4fa4-b022-b8d4dd30d5e2 tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Lock "19e10ee4-99d1-44b9-9354-4c162d541a1f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2257.754611] env[63279]: DEBUG oslo_vmware.api [None req-cfa8c991-7512-49ef-9c5d-e5ffe702877c tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Task: {'id': task-2087876, 'name': Destroy_Task, 'duration_secs': 0.771558} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2257.755012] env[63279]: INFO nova.virt.vmwareapi.vm_util [None req-cfa8c991-7512-49ef-9c5d-e5ffe702877c tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 686a7ce2-2d07-411e-91d6-0471c55c3728] Destroyed the VM [ 2257.755630] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-cfa8c991-7512-49ef-9c5d-e5ffe702877c tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 686a7ce2-2d07-411e-91d6-0471c55c3728] Deleting Snapshot of the VM instance {{(pid=63279) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 2257.755974] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-cb040b3f-0449-4a5e-b486-137fa526a258 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2257.763910] env[63279]: DEBUG oslo_vmware.api [None req-cfa8c991-7512-49ef-9c5d-e5ffe702877c tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Waiting for the task: (returnval){ [ 2257.763910] env[63279]: value = "task-2087880" [ 2257.763910] env[63279]: _type = "Task" [ 2257.763910] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2257.774959] env[63279]: DEBUG oslo_vmware.api [None req-cfa8c991-7512-49ef-9c5d-e5ffe702877c tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Task: {'id': task-2087880, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2257.872723] env[63279]: DEBUG oslo_concurrency.lockutils [None req-dde7fd1f-86b5-499d-9f4d-4086dce5e4c7 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.920s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2257.897096] env[63279]: INFO nova.scheduler.client.report [None req-dde7fd1f-86b5-499d-9f4d-4086dce5e4c7 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Deleted allocations for instance 48794f65-355c-4cad-a83f-6b8cc327dd4d [ 2257.924786] env[63279]: DEBUG nova.network.neutron [None req-c0088455-9b66-4574-b3c8-cfeeda174077 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: 1fca4e5c-fe2c-4b61-bed4-52c7770def7c] Instance cache missing network info. 
{{(pid=63279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2258.031710] env[63279]: DEBUG oslo_concurrency.lockutils [None req-f46fd062-ccb3-4cde-81f0-368a25124d21 tempest-ServersAdmin275Test-1209233635 tempest-ServersAdmin275Test-1209233635-project-member] Acquiring lock "69c88844-84c7-4272-a2c4-051f1499df84" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2258.032135] env[63279]: DEBUG oslo_concurrency.lockutils [None req-f46fd062-ccb3-4cde-81f0-368a25124d21 tempest-ServersAdmin275Test-1209233635 tempest-ServersAdmin275Test-1209233635-project-member] Lock "69c88844-84c7-4272-a2c4-051f1499df84" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2258.048177] env[63279]: DEBUG oslo_vmware.api [None req-24dd457b-86eb-4407-ae8c-0e21ece95320 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Task: {'id': task-2087879, 'name': PowerOffVM_Task, 'duration_secs': 0.273889} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2258.049602] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-24dd457b-86eb-4407-ae8c-0e21ece95320 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] [instance: 6b4a94f9-1cd9-466f-844f-8d692b732abc] Powered off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2258.049973] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-24dd457b-86eb-4407-ae8c-0e21ece95320 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] [instance: 6b4a94f9-1cd9-466f-844f-8d692b732abc] Unregistering the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2258.050598] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-455d82c4-4f7e-4cea-83f9-98c668a26793 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2258.072803] env[63279]: DEBUG oslo_vmware.api [None req-6fa2bb5f-68d4-402b-bc5d-e41c6196d5ed tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52766f33-ab6a-14ec-5255-40af3808aebe, 'name': SearchDatastore_Task, 'duration_secs': 0.008421} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2258.074200] env[63279]: DEBUG nova.network.neutron [None req-c0088455-9b66-4574-b3c8-cfeeda174077 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: 1fca4e5c-fe2c-4b61-bed4-52c7770def7c] Updating instance_info_cache with network_info: [{"id": "1e0ac67c-3039-4c36-831a-d32977fcab32", "address": "fa:16:3e:85:8f:65", "network": {"id": "82ca068b-acea-4f60-b0b3-68ea1e08aebe", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-571699353-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f39174e9ff5649e0ade4391da383dfb2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "57d4be17-536f-4a81-bea9-6547bd50f4a3", "external-id": "nsx-vlan-transportzone-163", "segmentation_id": 163, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1e0ac67c-30", "ovs_interfaceid": "1e0ac67c-3039-4c36-831a-d32977fcab32", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2258.077342] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dae944d7-e7b7-41e6-a9b6-c44c9d1869e0 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2258.085220] env[63279]: DEBUG oslo_vmware.api [None req-6fa2bb5f-68d4-402b-bc5d-e41c6196d5ed tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] Waiting for the task: (returnval){ [ 2258.085220] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]528f4d63-1746-9fc5-6672-d30a98da39d7" [ 2258.085220] env[63279]: _type = "Task" [ 2258.085220] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2258.099790] env[63279]: DEBUG oslo_vmware.api [None req-6fa2bb5f-68d4-402b-bc5d-e41c6196d5ed tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]528f4d63-1746-9fc5-6672-d30a98da39d7, 'name': SearchDatastore_Task, 'duration_secs': 0.008924} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2258.100214] env[63279]: DEBUG oslo_concurrency.lockutils [None req-6fa2bb5f-68d4-402b-bc5d-e41c6196d5ed tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2258.100626] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-6fa2bb5f-68d4-402b-bc5d-e41c6196d5ed tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] Copying virtual disk from [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] 21aca39e-8513-49bd-93e9-0d101cee591f/30887889-e45b-4f67-8b3c-16216e594a90-rescue.vmdk. {{(pid=63279) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 2258.101025] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ab9baaf5-ae8b-4d43-a54b-acb6a6c08731 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2258.113855] env[63279]: INFO nova.compute.manager [None req-d473d10f-3835-4a22-8b4b-615c9316d66e tempest-ImagesNegativeTestJSON-354022530 tempest-ImagesNegativeTestJSON-354022530-project-member] [instance: ecec02e8-8ddf-4997-9547-ccee0db1938b] Took 14.80 seconds to build instance. [ 2258.117396] env[63279]: DEBUG oslo_vmware.api [None req-6fa2bb5f-68d4-402b-bc5d-e41c6196d5ed tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] Waiting for the task: (returnval){ [ 2258.117396] env[63279]: value = "task-2087882" [ 2258.117396] env[63279]: _type = "Task" [ 2258.117396] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2258.129657] env[63279]: DEBUG oslo_vmware.api [None req-6fa2bb5f-68d4-402b-bc5d-e41c6196d5ed tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] Task: {'id': task-2087882, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2258.207443] env[63279]: DEBUG nova.objects.instance [None req-1efaec50-9d70-4953-a0fc-80de35ec2073 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Lazy-loading 'pci_requests' on Instance uuid 9fd3ea14-3e25-47f4-ae84-1b1fb8b1bbf6 {{(pid=63279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2258.217940] env[63279]: DEBUG nova.compute.manager [None req-284fb117-637f-4fa4-b022-b8d4dd30d5e2 tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] [instance: 19e10ee4-99d1-44b9-9354-4c162d541a1f] Starting instance... 
{{(pid=63279) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 2258.233651] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-24dd457b-86eb-4407-ae8c-0e21ece95320 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] [instance: 6b4a94f9-1cd9-466f-844f-8d692b732abc] Unregistered the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2258.233933] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-24dd457b-86eb-4407-ae8c-0e21ece95320 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] [instance: 6b4a94f9-1cd9-466f-844f-8d692b732abc] Deleting contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2258.234256] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-24dd457b-86eb-4407-ae8c-0e21ece95320 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Deleting the datastore file [datastore1] 6b4a94f9-1cd9-466f-844f-8d692b732abc {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2258.234914] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-60ed006d-1e4e-4cdc-85f3-906b655fed20 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2258.242046] env[63279]: DEBUG oslo_vmware.api [None req-24dd457b-86eb-4407-ae8c-0e21ece95320 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Waiting for the task: (returnval){ [ 2258.242046] env[63279]: value = "task-2087883" [ 2258.242046] env[63279]: _type = "Task" [ 2258.242046] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2258.250624] env[63279]: DEBUG oslo_vmware.api [None req-24dd457b-86eb-4407-ae8c-0e21ece95320 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Task: {'id': task-2087883, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2258.275299] env[63279]: DEBUG oslo_vmware.api [None req-cfa8c991-7512-49ef-9c5d-e5ffe702877c tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Task: {'id': task-2087880, 'name': RemoveSnapshot_Task} progress is 100%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2258.404173] env[63279]: DEBUG oslo_concurrency.lockutils [None req-dde7fd1f-86b5-499d-9f4d-4086dce5e4c7 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Lock "48794f65-355c-4cad-a83f-6b8cc327dd4d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.223s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2258.535738] env[63279]: DEBUG nova.compute.manager [None req-f46fd062-ccb3-4cde-81f0-368a25124d21 tempest-ServersAdmin275Test-1209233635 tempest-ServersAdmin275Test-1209233635-project-member] [instance: 69c88844-84c7-4272-a2c4-051f1499df84] Starting instance... 
{{(pid=63279) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 2258.577940] env[63279]: DEBUG oslo_concurrency.lockutils [None req-c0088455-9b66-4574-b3c8-cfeeda174077 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Releasing lock "refresh_cache-1fca4e5c-fe2c-4b61-bed4-52c7770def7c" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2258.578329] env[63279]: DEBUG nova.compute.manager [None req-c0088455-9b66-4574-b3c8-cfeeda174077 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: 1fca4e5c-fe2c-4b61-bed4-52c7770def7c] Instance network_info: |[{"id": "1e0ac67c-3039-4c36-831a-d32977fcab32", "address": "fa:16:3e:85:8f:65", "network": {"id": "82ca068b-acea-4f60-b0b3-68ea1e08aebe", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-571699353-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f39174e9ff5649e0ade4391da383dfb2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "57d4be17-536f-4a81-bea9-6547bd50f4a3", "external-id": "nsx-vlan-transportzone-163", "segmentation_id": 163, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1e0ac67c-30", "ovs_interfaceid": "1e0ac67c-3039-4c36-831a-d32977fcab32", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 2258.579099] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-c0088455-9b66-4574-b3c8-cfeeda174077 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: 1fca4e5c-fe2c-4b61-bed4-52c7770def7c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:85:8f:65', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '57d4be17-536f-4a81-bea9-6547bd50f4a3', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '1e0ac67c-3039-4c36-831a-d32977fcab32', 'vif_model': 'vmxnet3'}] {{(pid=63279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2258.586849] env[63279]: DEBUG oslo.service.loopingcall [None req-c0088455-9b66-4574-b3c8-cfeeda174077 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2258.587372] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1fca4e5c-fe2c-4b61-bed4-52c7770def7c] Creating VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2258.587620] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9af97d77-8675-41cc-a391-ef1e938a722e {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2258.610679] env[63279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2258.610679] env[63279]: value = "task-2087884" [ 2258.610679] env[63279]: _type = "Task" [ 2258.610679] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2258.619180] env[63279]: DEBUG oslo_concurrency.lockutils [None req-d473d10f-3835-4a22-8b4b-615c9316d66e tempest-ImagesNegativeTestJSON-354022530 tempest-ImagesNegativeTestJSON-354022530-project-member] Lock "ecec02e8-8ddf-4997-9547-ccee0db1938b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 16.316s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2258.619480] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087884, 'name': CreateVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2258.627125] env[63279]: DEBUG oslo_vmware.api [None req-6fa2bb5f-68d4-402b-bc5d-e41c6196d5ed tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] Task: {'id': task-2087882, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.460964} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2258.627399] env[63279]: INFO nova.virt.vmwareapi.ds_util [None req-6fa2bb5f-68d4-402b-bc5d-e41c6196d5ed tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] Copied virtual disk from [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] 21aca39e-8513-49bd-93e9-0d101cee591f/30887889-e45b-4f67-8b3c-16216e594a90-rescue.vmdk. 
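
    # Aside: the "Instance VIF info [...]" entry above flattens one neutron network_info record
    # into the small dict the vmwareapi driver logs before creating the VM's NIC. This sketch of
    # that mapping is derived only from the dict shapes printed in this log; it is not the
    # driver's actual helper, and the default vif_model is an assumption.
    def vif_info_from_network_info(vif, vif_model='vmxnet3'):
        details = vif.get('details', {})
        return {
            'network_name': vif['network']['bridge'],            # e.g. 'br-int'
            'mac_address': vif['address'],                       # e.g. 'fa:16:3e:85:8f:65'
            'network_ref': {
                'type': 'OpaqueNetwork',
                'network-id': details.get('nsx-logical-switch-id'),
                'network-type': 'nsx.LogicalSwitch',
                'use-external-id': True,
            },
            'iface_id': vif['id'],                               # neutron port UUID
            'vif_model': vif_model,
        }
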
[ 2258.628194] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0ece006-a534-4add-99e8-e473c8010017 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2258.654923] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-6fa2bb5f-68d4-402b-bc5d-e41c6196d5ed tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] [instance: 21aca39e-8513-49bd-93e9-0d101cee591f] Reconfiguring VM instance instance-0000005f to attach disk [datastore1] 21aca39e-8513-49bd-93e9-0d101cee591f/30887889-e45b-4f67-8b3c-16216e594a90-rescue.vmdk or device None with type thin {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2258.656032] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-19b2127a-6b61-490c-b57f-42ac86e34d9b {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2258.674922] env[63279]: DEBUG oslo_vmware.api [None req-6fa2bb5f-68d4-402b-bc5d-e41c6196d5ed tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] Waiting for the task: (returnval){ [ 2258.674922] env[63279]: value = "task-2087885" [ 2258.674922] env[63279]: _type = "Task" [ 2258.674922] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2258.683336] env[63279]: DEBUG oslo_vmware.api [None req-6fa2bb5f-68d4-402b-bc5d-e41c6196d5ed tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] Task: {'id': task-2087885, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2258.710480] env[63279]: DEBUG nova.objects.base [None req-1efaec50-9d70-4953-a0fc-80de35ec2073 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Object Instance<9fd3ea14-3e25-47f4-ae84-1b1fb8b1bbf6> lazy-loaded attributes: flavor,pci_requests {{(pid=63279) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 2258.710623] env[63279]: DEBUG nova.network.neutron [None req-1efaec50-9d70-4953-a0fc-80de35ec2073 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] [instance: 9fd3ea14-3e25-47f4-ae84-1b1fb8b1bbf6] allocate_for_instance() {{(pid=63279) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2258.740159] env[63279]: DEBUG oslo_concurrency.lockutils [None req-284fb117-637f-4fa4-b022-b8d4dd30d5e2 tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2258.740159] env[63279]: DEBUG oslo_concurrency.lockutils [None req-284fb117-637f-4fa4-b022-b8d4dd30d5e2 tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2258.741694] env[63279]: INFO nova.compute.claims [None req-284fb117-637f-4fa4-b022-b8d4dd30d5e2 tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] [instance: 19e10ee4-99d1-44b9-9354-4c162d541a1f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2258.754313] env[63279]: DEBUG oslo_vmware.api [None req-24dd457b-86eb-4407-ae8c-0e21ece95320 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Task: {'id': task-2087883, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.400205} completed successfully. 
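The `Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" ... acquired ... waited 0.001s` lines come from oslo.concurrency's named-lock helper, which serializes every claim against the host's tracked inventory before the "Claim successful on node ..." message can be logged. A minimal sketch of that pattern, with an illustrative claim function rather than Nova's real resource tracker:

```python
from oslo_concurrency import lockutils

# All writers of the tracked inventory take the same named semaphore, so only
# one claim or usage update runs at a time; the "Acquiring/acquired/released"
# DEBUG lines are emitted by the decorator's inner() wrapper.
@lockutils.synchronized('compute_resources')
def instance_claim(tracked_inventory, instance_uuid, vcpus, memory_mb):
    # Illustrative body: reserve resources if they fit, otherwise fail.
    if tracked_inventory['free_vcpus'] < vcpus:
        raise RuntimeError('claim failed for %s' % instance_uuid)
    tracked_inventory['free_vcpus'] -= vcpus
    tracked_inventory['free_ram_mb'] -= memory_mb
    return True
```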
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2258.756131] env[63279]: DEBUG nova.policy [None req-1efaec50-9d70-4953-a0fc-80de35ec2073 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6343b207f7294f5fa2a8111940083fb0', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b5b21bc5072e4945a19a782dd9561709', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63279) authorize /opt/stack/nova/nova/policy.py:192}} [ 2258.757682] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-24dd457b-86eb-4407-ae8c-0e21ece95320 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Deleted the datastore file {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2258.757905] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-24dd457b-86eb-4407-ae8c-0e21ece95320 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] [instance: 6b4a94f9-1cd9-466f-844f-8d692b732abc] Deleted contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2258.758140] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-24dd457b-86eb-4407-ae8c-0e21ece95320 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] [instance: 6b4a94f9-1cd9-466f-844f-8d692b732abc] Instance destroyed {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2258.758333] env[63279]: INFO nova.compute.manager [None req-24dd457b-86eb-4407-ae8c-0e21ece95320 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] [instance: 6b4a94f9-1cd9-466f-844f-8d692b732abc] Took 1.25 seconds to destroy the instance on the hypervisor. [ 2258.759523] env[63279]: DEBUG oslo.service.loopingcall [None req-24dd457b-86eb-4407-ae8c-0e21ece95320 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2258.759523] env[63279]: DEBUG nova.compute.manager [-] [instance: 6b4a94f9-1cd9-466f-844f-8d692b732abc] Deallocating network for instance {{(pid=63279) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2258.759523] env[63279]: DEBUG nova.network.neutron [-] [instance: 6b4a94f9-1cd9-466f-844f-8d692b732abc] deallocate_for_instance() {{(pid=63279) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2258.773721] env[63279]: DEBUG oslo_vmware.api [None req-cfa8c991-7512-49ef-9c5d-e5ffe702877c tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Task: {'id': task-2087880, 'name': RemoveSnapshot_Task, 'duration_secs': 0.573733} completed successfully. 
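The `Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return` line comes from oslo.service's looping-call helpers, which re-run a function on an interval until it signals completion. A small sketch of that mechanism with a stand-in function (the real one calls Neutron to tear down the instance's ports, and oslo.service also offers back-off variants; which variant is in use is not visible from the log line alone):

```python
from oslo_service import loopingcall

def _deallocate_network_with_retries():
    # Stand-in body: one teardown attempt; raising LoopingCallDone stops the
    # loop and hands its retvalue back to the waiter.
    raise loopingcall.LoopingCallDone(retvalue='ports deleted')

timer = loopingcall.FixedIntervalLoopingCall(_deallocate_network_with_retries)
result = timer.start(interval=2.0).wait()   # blocks until LoopingCallDone
print(result)                               # 'ports deleted'
```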
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2258.773979] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-cfa8c991-7512-49ef-9c5d-e5ffe702877c tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 686a7ce2-2d07-411e-91d6-0471c55c3728] Deleted Snapshot of the VM instance {{(pid=63279) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 2258.774249] env[63279]: INFO nova.compute.manager [None req-cfa8c991-7512-49ef-9c5d-e5ffe702877c tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 686a7ce2-2d07-411e-91d6-0471c55c3728] Took 17.01 seconds to snapshot the instance on the hypervisor. [ 2259.018615] env[63279]: DEBUG nova.network.neutron [None req-1efaec50-9d70-4953-a0fc-80de35ec2073 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] [instance: 9fd3ea14-3e25-47f4-ae84-1b1fb8b1bbf6] Successfully created port: f3d164be-d41c-4e5a-97f5-a50cb539a06a {{(pid=63279) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2259.031202] env[63279]: DEBUG oslo_concurrency.lockutils [None req-a1749bde-6a19-48fd-a4cc-beb180cafb1c tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] Acquiring lock "9b98a316-71da-45fb-b895-553f179fe7d9" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2259.031452] env[63279]: DEBUG oslo_concurrency.lockutils [None req-a1749bde-6a19-48fd-a4cc-beb180cafb1c tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] Lock "9b98a316-71da-45fb-b895-553f179fe7d9" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2259.061761] env[63279]: DEBUG oslo_concurrency.lockutils [None req-f46fd062-ccb3-4cde-81f0-368a25124d21 tempest-ServersAdmin275Test-1209233635 tempest-ServersAdmin275Test-1209233635-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2259.121475] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087884, 'name': CreateVM_Task} progress is 25%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2259.185346] env[63279]: DEBUG oslo_vmware.api [None req-6fa2bb5f-68d4-402b-bc5d-e41c6196d5ed tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] Task: {'id': task-2087885, 'name': ReconfigVM_Task} progress is 14%. 
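`Successfully created port: f3d164be-...` is the visible tail of `allocate_for_instance()`: Nova asks Neutron for a port on the tenant network and binds it to the instance. Nova goes through its own Neutron client internally; an equivalent request made with openstacksdk looks roughly like the sketch below, where the cloud name and UUIDs are placeholders rather than values from this log.

```python
import openstack

conn = openstack.connect(cloud='devstack')      # clouds.yaml entry (placeholder)
port = conn.network.create_port(
    network_id='<tenant-network-uuid>',         # placeholder
    device_id='<instance-uuid>',                # placeholder
    device_owner='compute:nova')
print(port.id, port.mac_address, port.fixed_ips)
```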
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2259.205185] env[63279]: DEBUG oslo_concurrency.lockutils [None req-4c6686f2-b4a3-48f1-a3b8-2f0ad9d067b3 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Acquiring lock "d0b8c9dd-c683-4f3a-b819-d9d57d96636b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2259.205185] env[63279]: DEBUG oslo_concurrency.lockutils [None req-4c6686f2-b4a3-48f1-a3b8-2f0ad9d067b3 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Lock "d0b8c9dd-c683-4f3a-b819-d9d57d96636b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2259.329646] env[63279]: DEBUG nova.compute.manager [None req-cfa8c991-7512-49ef-9c5d-e5ffe702877c tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 686a7ce2-2d07-411e-91d6-0471c55c3728] Found 1 images (rotation: 2) {{(pid=63279) _rotate_backups /opt/stack/nova/nova/compute/manager.py:5017}} [ 2259.534908] env[63279]: DEBUG nova.compute.utils [None req-a1749bde-6a19-48fd-a4cc-beb180cafb1c tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] Using /dev/sd instead of None {{(pid=63279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2259.565740] env[63279]: DEBUG nova.network.neutron [-] [instance: 6b4a94f9-1cd9-466f-844f-8d692b732abc] Updating instance_info_cache with network_info: [] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2259.621473] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087884, 'name': CreateVM_Task, 'duration_secs': 0.63457} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2259.621651] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1fca4e5c-fe2c-4b61-bed4-52c7770def7c] Created VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2259.622375] env[63279]: DEBUG oslo_concurrency.lockutils [None req-c0088455-9b66-4574-b3c8-cfeeda174077 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2259.622575] env[63279]: DEBUG oslo_concurrency.lockutils [None req-c0088455-9b66-4574-b3c8-cfeeda174077 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2259.622956] env[63279]: DEBUG oslo_concurrency.lockutils [None req-c0088455-9b66-4574-b3c8-cfeeda174077 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2259.623261] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4012d9d5-a6b6-4ecd-ba52-e93832ce5b48 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2259.628323] env[63279]: DEBUG oslo_vmware.api [None req-c0088455-9b66-4574-b3c8-cfeeda174077 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Waiting for the task: (returnval){ [ 2259.628323] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]5282c5e8-c7f3-d0f5-8f5e-711cc698aaac" [ 2259.628323] env[63279]: _type = "Task" [ 2259.628323] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2259.635891] env[63279]: DEBUG oslo_vmware.api [None req-c0088455-9b66-4574-b3c8-cfeeda174077 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]5282c5e8-c7f3-d0f5-8f5e-711cc698aaac, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2259.685444] env[63279]: DEBUG oslo_vmware.api [None req-6fa2bb5f-68d4-402b-bc5d-e41c6196d5ed tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] Task: {'id': task-2087885, 'name': ReconfigVM_Task, 'duration_secs': 0.715747} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2259.685808] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-6fa2bb5f-68d4-402b-bc5d-e41c6196d5ed tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] [instance: 21aca39e-8513-49bd-93e9-0d101cee591f] Reconfigured VM instance instance-0000005f to attach disk [datastore1] 21aca39e-8513-49bd-93e9-0d101cee591f/30887889-e45b-4f67-8b3c-16216e594a90-rescue.vmdk or device None with type thin {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2259.686712] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b445a36b-312a-4008-92cd-7663e1b3f4b7 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2259.706559] env[63279]: DEBUG nova.compute.manager [None req-4c6686f2-b4a3-48f1-a3b8-2f0ad9d067b3 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: d0b8c9dd-c683-4f3a-b819-d9d57d96636b] Starting instance... {{(pid=63279) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 2259.714580] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5e6eda6c-f859-4027-9ad7-6354c77070a8 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2259.730374] env[63279]: DEBUG oslo_vmware.api [None req-6fa2bb5f-68d4-402b-bc5d-e41c6196d5ed tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] Waiting for the task: (returnval){ [ 2259.730374] env[63279]: value = "task-2087886" [ 2259.730374] env[63279]: _type = "Task" [ 2259.730374] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2259.738654] env[63279]: DEBUG oslo_vmware.api [None req-6fa2bb5f-68d4-402b-bc5d-e41c6196d5ed tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] Task: {'id': task-2087886, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2259.995788] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-abcb461a-2c2c-4bf7-bc21-c560298dd709 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2260.003812] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98c83f18-3e78-4f87-9d99-73cd97d88832 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2260.035721] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb27010f-19ca-4e50-8f45-71662b04bdb3 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2260.038721] env[63279]: DEBUG oslo_concurrency.lockutils [None req-a1749bde-6a19-48fd-a4cc-beb180cafb1c tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] Lock "9b98a316-71da-45fb-b895-553f179fe7d9" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.007s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2260.044411] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72e613df-4a2f-4323-a2c4-aea7b7d4758c {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2260.058278] env[63279]: DEBUG nova.compute.provider_tree [None req-284fb117-637f-4fa4-b022-b8d4dd30d5e2 tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Inventory has not changed in ProviderTree for provider: 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2260.067697] env[63279]: INFO nova.compute.manager [-] [instance: 6b4a94f9-1cd9-466f-844f-8d692b732abc] Took 1.31 seconds to deallocate network for instance. [ 2260.138475] env[63279]: DEBUG oslo_vmware.api [None req-c0088455-9b66-4574-b3c8-cfeeda174077 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]5282c5e8-c7f3-d0f5-8f5e-711cc698aaac, 'name': SearchDatastore_Task, 'duration_secs': 0.040528} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2260.138803] env[63279]: DEBUG oslo_concurrency.lockutils [None req-c0088455-9b66-4574-b3c8-cfeeda174077 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2260.139067] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-c0088455-9b66-4574-b3c8-cfeeda174077 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: 1fca4e5c-fe2c-4b61-bed4-52c7770def7c] Processing image 30887889-e45b-4f67-8b3c-16216e594a90 {{(pid=63279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2260.139329] env[63279]: DEBUG oslo_concurrency.lockutils [None req-c0088455-9b66-4574-b3c8-cfeeda174077 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2260.139491] env[63279]: DEBUG oslo_concurrency.lockutils [None req-c0088455-9b66-4574-b3c8-cfeeda174077 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2260.139676] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-c0088455-9b66-4574-b3c8-cfeeda174077 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2260.139978] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a1c0658a-8c16-4f25-8a1c-7c9f517c1ffe {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2260.148073] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-c0088455-9b66-4574-b3c8-cfeeda174077 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2260.148314] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-c0088455-9b66-4574-b3c8-cfeeda174077 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Folder [datastore1] devstack-image-cache_base created. 
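`Creating directory with path [datastore1] devstack-image-cache_base` followed by `Invoking FileManager.MakeDirectory` is the image-cache folder being created on the datastore before the cached VMDK is copied out of it. A sketch of that single call, reusing `session` and `dc_ref` from the first sketch above:

```python
# MakeDirectory is not a Task, so there is nothing to poll afterwards.
session.invoke_api(
    session.vim, 'MakeDirectory',
    session.vim.service_content.fileManager,
    name='[datastore1] devstack-image-cache_base',
    datacenter=dc_ref,
    createParentDirectories=True)
```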
{{(pid=63279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2260.149108] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3f5cf23e-c720-45d5-9873-ecd12bf32426 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2260.154804] env[63279]: DEBUG oslo_vmware.api [None req-c0088455-9b66-4574-b3c8-cfeeda174077 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Waiting for the task: (returnval){ [ 2260.154804] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]5223a3f2-2836-94da-b4a6-cb884b5249f1" [ 2260.154804] env[63279]: _type = "Task" [ 2260.154804] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2260.162957] env[63279]: DEBUG oslo_vmware.api [None req-c0088455-9b66-4574-b3c8-cfeeda174077 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]5223a3f2-2836-94da-b4a6-cb884b5249f1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2260.224636] env[63279]: DEBUG oslo_concurrency.lockutils [None req-4c6686f2-b4a3-48f1-a3b8-2f0ad9d067b3 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2260.240661] env[63279]: DEBUG oslo_vmware.api [None req-6fa2bb5f-68d4-402b-bc5d-e41c6196d5ed tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] Task: {'id': task-2087886, 'name': ReconfigVM_Task, 'duration_secs': 0.184591} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2260.240952] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-6fa2bb5f-68d4-402b-bc5d-e41c6196d5ed tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] [instance: 21aca39e-8513-49bd-93e9-0d101cee591f] Powering on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2260.241244] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b658077b-e54a-43e3-915d-087845edaf0f {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2260.247887] env[63279]: DEBUG oslo_vmware.api [None req-6fa2bb5f-68d4-402b-bc5d-e41c6196d5ed tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] Waiting for the task: (returnval){ [ 2260.247887] env[63279]: value = "task-2087887" [ 2260.247887] env[63279]: _type = "Task" [ 2260.247887] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2260.265199] env[63279]: DEBUG oslo_vmware.api [None req-6fa2bb5f-68d4-402b-bc5d-e41c6196d5ed tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] Task: {'id': task-2087887, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2260.561297] env[63279]: DEBUG nova.scheduler.client.report [None req-284fb117-637f-4fa4-b022-b8d4dd30d5e2 tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Inventory has not changed for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2260.573524] env[63279]: DEBUG oslo_concurrency.lockutils [None req-24dd457b-86eb-4407-ae8c-0e21ece95320 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2260.665045] env[63279]: DEBUG oslo_vmware.api [None req-c0088455-9b66-4574-b3c8-cfeeda174077 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]5223a3f2-2836-94da-b4a6-cb884b5249f1, 'name': SearchDatastore_Task, 'duration_secs': 0.009149} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2260.665823] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a11b38d7-7a4c-4374-b73b-941ff3af56da {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2260.671090] env[63279]: DEBUG oslo_vmware.api [None req-c0088455-9b66-4574-b3c8-cfeeda174077 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Waiting for the task: (returnval){ [ 2260.671090] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]528dcfea-a5f2-53e9-a6a9-4f4593632a12" [ 2260.671090] env[63279]: _type = "Task" [ 2260.671090] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2260.679200] env[63279]: DEBUG oslo_vmware.api [None req-c0088455-9b66-4574-b3c8-cfeeda174077 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]528dcfea-a5f2-53e9-a6a9-4f4593632a12, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2260.757514] env[63279]: DEBUG oslo_vmware.api [None req-6fa2bb5f-68d4-402b-bc5d-e41c6196d5ed tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] Task: {'id': task-2087887, 'name': PowerOnVM_Task, 'duration_secs': 0.39021} completed successfully. 
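The `Inventory has not changed for provider 0ba7c625-... based on inventory data: {...}` line shows the payload the scheduler report client keeps in sync with the Placement service. When the inventory does change, it is written back with a generation-guarded PUT; a hedged sketch of that request follows, with a placeholder endpoint, token and generation (the inventory values are the ones printed in the log):

```python
import requests

payload = {
    'resource_provider_generation': 42,   # placeholder; Placement rejects stale generations
    'inventories': {
        'VCPU':      {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16,
                      'step_size': 1, 'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530,
                      'step_size': 1, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169,
                      'step_size': 1, 'allocation_ratio': 1.0},
    },
}
resp = requests.put(
    'http://placement.example.test/resource_providers/'
    '0ba7c625-a0fc-4d3c-b804-196d00f00137/inventories',
    json=payload,
    headers={'X-Auth-Token': '<token>'})
resp.raise_for_status()
```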
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2260.757780] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-6fa2bb5f-68d4-402b-bc5d-e41c6196d5ed tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] [instance: 21aca39e-8513-49bd-93e9-0d101cee591f] Powered on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2260.760464] env[63279]: DEBUG nova.compute.manager [None req-6fa2bb5f-68d4-402b-bc5d-e41c6196d5ed tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] [instance: 21aca39e-8513-49bd-93e9-0d101cee591f] Checking state {{(pid=63279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2260.761305] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9542960d-1636-4c9f-bfc1-b967f2629baa {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2260.829384] env[63279]: DEBUG nova.compute.manager [req-329579c4-a2bc-4498-9cb3-41c5e125a15e req-b9c796cd-33b3-4fbf-b48e-66a66871917e service nova] [instance: 6b4a94f9-1cd9-466f-844f-8d692b732abc] Received event network-vif-deleted-3fe8ec9b-e8c9-486c-9d96-785b9a7cb897 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2260.893315] env[63279]: DEBUG nova.compute.manager [req-5733de92-bb6f-44a3-819d-26368379f54f req-4bb85da6-315b-4aad-873e-6936ce99dea3 service nova] [instance: 1fca4e5c-fe2c-4b61-bed4-52c7770def7c] Received event network-changed-1e0ac67c-3039-4c36-831a-d32977fcab32 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2260.893823] env[63279]: DEBUG nova.compute.manager [req-5733de92-bb6f-44a3-819d-26368379f54f req-4bb85da6-315b-4aad-873e-6936ce99dea3 service nova] [instance: 1fca4e5c-fe2c-4b61-bed4-52c7770def7c] Refreshing instance network info cache due to event network-changed-1e0ac67c-3039-4c36-831a-d32977fcab32. 
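The `Received event network-changed-1e0ac67c-...` and `network-vif-deleted-...` lines are external instance events that Neutron posts to Nova's `os-server-external-events` API whenever a port changes; the compute manager reacts by refreshing the instance's network info cache, as the following lines show. A sketch of the notifying request, with a placeholder endpoint and token (the UUIDs are the instance and port from the log):

```python
import requests

body = {'events': [{
    'name': 'network-changed',
    'server_uuid': '1fca4e5c-fe2c-4b61-bed4-52c7770def7c',   # instance
    'tag': '1e0ac67c-3039-4c36-831a-d32977fcab32',           # port id
}]}
resp = requests.post(
    'http://nova-api.example.test/v2.1/os-server-external-events',
    json=body, headers={'X-Auth-Token': '<token>'})
resp.raise_for_status()
```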
{{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11655}} [ 2260.893823] env[63279]: DEBUG oslo_concurrency.lockutils [req-5733de92-bb6f-44a3-819d-26368379f54f req-4bb85da6-315b-4aad-873e-6936ce99dea3 service nova] Acquiring lock "refresh_cache-1fca4e5c-fe2c-4b61-bed4-52c7770def7c" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2260.893935] env[63279]: DEBUG oslo_concurrency.lockutils [req-5733de92-bb6f-44a3-819d-26368379f54f req-4bb85da6-315b-4aad-873e-6936ce99dea3 service nova] Acquired lock "refresh_cache-1fca4e5c-fe2c-4b61-bed4-52c7770def7c" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2260.894069] env[63279]: DEBUG nova.network.neutron [req-5733de92-bb6f-44a3-819d-26368379f54f req-4bb85da6-315b-4aad-873e-6936ce99dea3 service nova] [instance: 1fca4e5c-fe2c-4b61-bed4-52c7770def7c] Refreshing network info cache for port 1e0ac67c-3039-4c36-831a-d32977fcab32 {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2261.066941] env[63279]: DEBUG oslo_concurrency.lockutils [None req-284fb117-637f-4fa4-b022-b8d4dd30d5e2 tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.327s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2261.067597] env[63279]: DEBUG nova.compute.manager [None req-284fb117-637f-4fa4-b022-b8d4dd30d5e2 tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] [instance: 19e10ee4-99d1-44b9-9354-4c162d541a1f] Start building networks asynchronously for instance. 
{{(pid=63279) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 2261.070324] env[63279]: DEBUG oslo_concurrency.lockutils [None req-f46fd062-ccb3-4cde-81f0-368a25124d21 tempest-ServersAdmin275Test-1209233635 tempest-ServersAdmin275Test-1209233635-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 2.009s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2261.072101] env[63279]: INFO nova.compute.claims [None req-f46fd062-ccb3-4cde-81f0-368a25124d21 tempest-ServersAdmin275Test-1209233635 tempest-ServersAdmin275Test-1209233635-project-member] [instance: 69c88844-84c7-4272-a2c4-051f1499df84] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2261.108341] env[63279]: DEBUG oslo_concurrency.lockutils [None req-a1749bde-6a19-48fd-a4cc-beb180cafb1c tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] Acquiring lock "9b98a316-71da-45fb-b895-553f179fe7d9" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2261.108656] env[63279]: DEBUG oslo_concurrency.lockutils [None req-a1749bde-6a19-48fd-a4cc-beb180cafb1c tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] Lock "9b98a316-71da-45fb-b895-553f179fe7d9" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.001s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2261.108904] env[63279]: INFO nova.compute.manager [None req-a1749bde-6a19-48fd-a4cc-beb180cafb1c tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] [instance: 9b98a316-71da-45fb-b895-553f179fe7d9] Attaching volume abfd7860-7a69-49db-b638-22de4f3a1b94 to /dev/sdb [ 2261.139866] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8cc467ba-4031-46a5-b65d-c8c408f2d7f6 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2261.146962] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b095fe7-68e9-46df-b098-5f0f66afbb5a {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2261.159604] env[63279]: DEBUG nova.virt.block_device [None req-a1749bde-6a19-48fd-a4cc-beb180cafb1c tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] [instance: 9b98a316-71da-45fb-b895-553f179fe7d9] Updating existing volume attachment record: defdd106-ca3b-4b64-9762-f2a89dcea3e7 {{(pid=63279) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 2261.180144] env[63279]: DEBUG oslo_vmware.api [None req-c0088455-9b66-4574-b3c8-cfeeda174077 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]528dcfea-a5f2-53e9-a6a9-4f4593632a12, 'name': SearchDatastore_Task, 'duration_secs': 0.032342} completed successfully. 
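`Attaching volume abfd7860-... to /dev/sdb`, together with the earlier `Using /dev/sd instead of None` line, means the attach request arrived without a device name and Nova picked the next free one. The flow starts with a call to the server volume-attachments API; a sketch of that request with a placeholder endpoint and token (the server and volume UUIDs are the ones from the log), letting Nova choose the device as it did here:

```python
import requests

body = {'volumeAttachment': {
    'volumeId': 'abfd7860-7a69-49db-b638-22de4f3a1b94',
    # no 'device' key: Nova's get_next_device_name() picks /dev/sdb
}}
resp = requests.post(
    'http://nova-api.example.test/v2.1/servers/'
    '9b98a316-71da-45fb-b895-553f179fe7d9/os-volume_attachments',
    json=body, headers={'X-Auth-Token': '<token>'})
resp.raise_for_status()
print(resp.json()['volumeAttachment']['device'])   # '/dev/sdb'
```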
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2261.180484] env[63279]: DEBUG oslo_concurrency.lockutils [None req-c0088455-9b66-4574-b3c8-cfeeda174077 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2261.180823] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-c0088455-9b66-4574-b3c8-cfeeda174077 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] 1fca4e5c-fe2c-4b61-bed4-52c7770def7c/1fca4e5c-fe2c-4b61-bed4-52c7770def7c.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2261.181292] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e6caf54f-47d7-44e0-b5fe-b85df940dfd7 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2261.187610] env[63279]: DEBUG oslo_vmware.api [None req-c0088455-9b66-4574-b3c8-cfeeda174077 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Waiting for the task: (returnval){ [ 2261.187610] env[63279]: value = "task-2087888" [ 2261.187610] env[63279]: _type = "Task" [ 2261.187610] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2261.196963] env[63279]: DEBUG oslo_vmware.api [None req-c0088455-9b66-4574-b3c8-cfeeda174077 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Task: {'id': task-2087888, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2261.307874] env[63279]: DEBUG oslo_concurrency.lockutils [None req-d1fd8bb8-3b35-4608-ada3-9d7b5c154b26 tempest-ImagesNegativeTestJSON-354022530 tempest-ImagesNegativeTestJSON-354022530-project-member] Acquiring lock "ecec02e8-8ddf-4997-9547-ccee0db1938b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2261.308185] env[63279]: DEBUG oslo_concurrency.lockutils [None req-d1fd8bb8-3b35-4608-ada3-9d7b5c154b26 tempest-ImagesNegativeTestJSON-354022530 tempest-ImagesNegativeTestJSON-354022530-project-member] Lock "ecec02e8-8ddf-4997-9547-ccee0db1938b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2261.308428] env[63279]: DEBUG oslo_concurrency.lockutils [None req-d1fd8bb8-3b35-4608-ada3-9d7b5c154b26 tempest-ImagesNegativeTestJSON-354022530 tempest-ImagesNegativeTestJSON-354022530-project-member] Acquiring lock "ecec02e8-8ddf-4997-9547-ccee0db1938b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2261.308644] env[63279]: DEBUG oslo_concurrency.lockutils [None req-d1fd8bb8-3b35-4608-ada3-9d7b5c154b26 tempest-ImagesNegativeTestJSON-354022530 tempest-ImagesNegativeTestJSON-354022530-project-member] Lock "ecec02e8-8ddf-4997-9547-ccee0db1938b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2261.308820] env[63279]: DEBUG oslo_concurrency.lockutils [None req-d1fd8bb8-3b35-4608-ada3-9d7b5c154b26 tempest-ImagesNegativeTestJSON-354022530 tempest-ImagesNegativeTestJSON-354022530-project-member] Lock "ecec02e8-8ddf-4997-9547-ccee0db1938b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2261.311022] env[63279]: INFO nova.compute.manager [None req-d1fd8bb8-3b35-4608-ada3-9d7b5c154b26 tempest-ImagesNegativeTestJSON-354022530 tempest-ImagesNegativeTestJSON-354022530-project-member] [instance: ecec02e8-8ddf-4997-9547-ccee0db1938b] Terminating instance [ 2262.075533] env[63279]: DEBUG nova.compute.utils [None req-284fb117-637f-4fa4-b022-b8d4dd30d5e2 tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Using /dev/sd instead of None {{(pid=63279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2262.079095] env[63279]: DEBUG nova.compute.manager [None req-d1fd8bb8-3b35-4608-ada3-9d7b5c154b26 tempest-ImagesNegativeTestJSON-354022530 tempest-ImagesNegativeTestJSON-354022530-project-member] [instance: ecec02e8-8ddf-4997-9547-ccee0db1938b] Start destroying the instance on the hypervisor. 
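The terminate path above takes two named locks back to back: one on the instance UUID (`ecec02e8-...`) held for the whole `do_terminate_instance` call, and a short-lived one on `<uuid>-events` while pending external events are cleared. A minimal sketch of that nesting with oslo.concurrency's context-manager form; the body is illustrative only:

```python
from oslo_concurrency import lockutils

uuid = 'ecec02e8-8ddf-4997-9547-ccee0db1938b'

with lockutils.lock(uuid):                  # serialize operations on the instance
    with lockutils.lock(uuid + '-events'):  # briefly held while events are cleared
        pending_events = []                 # stand-in for clear_events_for_instance()
    # ...power off, unregister and delete the instance while still holding
    # the outer lock...
```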
{{(pid=63279) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2262.079319] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-d1fd8bb8-3b35-4608-ada3-9d7b5c154b26 tempest-ImagesNegativeTestJSON-354022530 tempest-ImagesNegativeTestJSON-354022530-project-member] [instance: ecec02e8-8ddf-4997-9547-ccee0db1938b] Destroying instance {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2262.082967] env[63279]: DEBUG nova.compute.manager [None req-284fb117-637f-4fa4-b022-b8d4dd30d5e2 tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] [instance: 19e10ee4-99d1-44b9-9354-4c162d541a1f] Allocating IP information in the background. {{(pid=63279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 2262.083147] env[63279]: DEBUG nova.network.neutron [None req-284fb117-637f-4fa4-b022-b8d4dd30d5e2 tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] [instance: 19e10ee4-99d1-44b9-9354-4c162d541a1f] allocate_for_instance() {{(pid=63279) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2262.088465] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-abe49ae8-c719-4613-9120-0b71114a737b {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2262.100185] env[63279]: DEBUG oslo_vmware.api [None req-c0088455-9b66-4574-b3c8-cfeeda174077 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Task: {'id': task-2087888, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.605528} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2262.100463] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-d1fd8bb8-3b35-4608-ada3-9d7b5c154b26 tempest-ImagesNegativeTestJSON-354022530 tempest-ImagesNegativeTestJSON-354022530-project-member] [instance: ecec02e8-8ddf-4997-9547-ccee0db1938b] Powering off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2262.101862] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-c0088455-9b66-4574-b3c8-cfeeda174077 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] 1fca4e5c-fe2c-4b61-bed4-52c7770def7c/1fca4e5c-fe2c-4b61-bed4-52c7770def7c.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2262.102086] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-c0088455-9b66-4574-b3c8-cfeeda174077 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: 1fca4e5c-fe2c-4b61-bed4-52c7770def7c] Extending root virtual disk to 1048576 {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2262.102317] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4ff68b3d-2312-4ebf-961e-7f6861c95faa {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2262.104131] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ea4cfd5f-9ab8-4d47-b0ee-a1c794abb24e {{(pid=63279) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2262.112380] env[63279]: DEBUG oslo_vmware.api [None req-d1fd8bb8-3b35-4608-ada3-9d7b5c154b26 tempest-ImagesNegativeTestJSON-354022530 tempest-ImagesNegativeTestJSON-354022530-project-member] Waiting for the task: (returnval){ [ 2262.112380] env[63279]: value = "task-2087892" [ 2262.112380] env[63279]: _type = "Task" [ 2262.112380] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2262.112629] env[63279]: DEBUG oslo_vmware.api [None req-c0088455-9b66-4574-b3c8-cfeeda174077 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Waiting for the task: (returnval){ [ 2262.112629] env[63279]: value = "task-2087893" [ 2262.112629] env[63279]: _type = "Task" [ 2262.112629] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2262.129684] env[63279]: DEBUG oslo_vmware.api [None req-d1fd8bb8-3b35-4608-ada3-9d7b5c154b26 tempest-ImagesNegativeTestJSON-354022530 tempest-ImagesNegativeTestJSON-354022530-project-member] Task: {'id': task-2087892, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2262.130153] env[63279]: DEBUG oslo_vmware.api [None req-c0088455-9b66-4574-b3c8-cfeeda174077 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Task: {'id': task-2087893, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2262.133240] env[63279]: DEBUG nova.policy [None req-284fb117-637f-4fa4-b022-b8d4dd30d5e2 tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '99f3a4d8a93c4bb98ea3c4bf110c066b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7674483744fe490b8cbe75532dfad77c', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63279) authorize /opt/stack/nova/nova/policy.py:192}} [ 2262.356987] env[63279]: DEBUG nova.network.neutron [req-5733de92-bb6f-44a3-819d-26368379f54f req-4bb85da6-315b-4aad-873e-6936ce99dea3 service nova] [instance: 1fca4e5c-fe2c-4b61-bed4-52c7770def7c] Updated VIF entry in instance network info cache for port 1e0ac67c-3039-4c36-831a-d32977fcab32. 
{{(pid=63279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2262.357422] env[63279]: DEBUG nova.network.neutron [req-5733de92-bb6f-44a3-819d-26368379f54f req-4bb85da6-315b-4aad-873e-6936ce99dea3 service nova] [instance: 1fca4e5c-fe2c-4b61-bed4-52c7770def7c] Updating instance_info_cache with network_info: [{"id": "1e0ac67c-3039-4c36-831a-d32977fcab32", "address": "fa:16:3e:85:8f:65", "network": {"id": "82ca068b-acea-4f60-b0b3-68ea1e08aebe", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-571699353-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f39174e9ff5649e0ade4391da383dfb2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "57d4be17-536f-4a81-bea9-6547bd50f4a3", "external-id": "nsx-vlan-transportzone-163", "segmentation_id": 163, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1e0ac67c-30", "ovs_interfaceid": "1e0ac67c-3039-4c36-831a-d32977fcab32", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2262.468790] env[63279]: DEBUG nova.network.neutron [None req-284fb117-637f-4fa4-b022-b8d4dd30d5e2 tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] [instance: 19e10ee4-99d1-44b9-9354-4c162d541a1f] Successfully created port: 15cdfe62-d983-4e01-beb9-1947d51443e0 {{(pid=63279) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2262.583208] env[63279]: DEBUG nova.compute.manager [None req-a019c069-31ac-44de-94e8-2f9e51596f90 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 686a7ce2-2d07-411e-91d6-0471c55c3728] Checking state {{(pid=63279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2262.583726] env[63279]: DEBUG nova.compute.manager [None req-284fb117-637f-4fa4-b022-b8d4dd30d5e2 tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] [instance: 19e10ee4-99d1-44b9-9354-4c162d541a1f] Start building block device mappings for instance. 
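The `Updating instance_info_cache with network_info: [{...}]` payload above is Nova's serialized VIF list, the same structure that was logged when the port was first allocated. A trimmed copy (values taken from the log) can be inspected like any JSON document; the sketch below only shows where the useful fields live:

```python
import json

network_info = json.loads("""
[{"id": "1e0ac67c-3039-4c36-831a-d32977fcab32",
  "address": "fa:16:3e:85:8f:65",
  "network": {"bridge": "br-int",
              "subnets": [{"cidr": "192.168.128.0/28",
                           "ips": [{"address": "192.168.128.13"}]}]},
  "details": {"segmentation_id": 163},
  "devname": "tap1e0ac67c-30",
  "vnic_type": "normal"}]
""")

vif = network_info[0]
print(vif["devname"],                                     # tap device on the host
      vif["address"],                                     # MAC address
      vif["network"]["subnets"][0]["ips"][0]["address"],  # fixed IP
      vif["details"]["segmentation_id"])                  # NSX segmentation id
```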
{{(pid=63279) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 2262.590170] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bceeb099-295d-4000-bb3f-46a2d57412d5 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2262.594079] env[63279]: INFO nova.compute.manager [None req-0cc24c34-f137-4057-a9de-d78df4438893 tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] [instance: 21aca39e-8513-49bd-93e9-0d101cee591f] Unrescuing [ 2262.594079] env[63279]: DEBUG oslo_concurrency.lockutils [None req-0cc24c34-f137-4057-a9de-d78df4438893 tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] Acquiring lock "refresh_cache-21aca39e-8513-49bd-93e9-0d101cee591f" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2262.594079] env[63279]: DEBUG oslo_concurrency.lockutils [None req-0cc24c34-f137-4057-a9de-d78df4438893 tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] Acquired lock "refresh_cache-21aca39e-8513-49bd-93e9-0d101cee591f" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2262.594079] env[63279]: DEBUG nova.network.neutron [None req-0cc24c34-f137-4057-a9de-d78df4438893 tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] [instance: 21aca39e-8513-49bd-93e9-0d101cee591f] Building network info cache for instance {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2262.626834] env[63279]: DEBUG oslo_vmware.api [None req-d1fd8bb8-3b35-4608-ada3-9d7b5c154b26 tempest-ImagesNegativeTestJSON-354022530 tempest-ImagesNegativeTestJSON-354022530-project-member] Task: {'id': task-2087892, 'name': PowerOffVM_Task, 'duration_secs': 0.230737} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2262.632970] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-d1fd8bb8-3b35-4608-ada3-9d7b5c154b26 tempest-ImagesNegativeTestJSON-354022530 tempest-ImagesNegativeTestJSON-354022530-project-member] [instance: ecec02e8-8ddf-4997-9547-ccee0db1938b] Powered off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2262.633230] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-d1fd8bb8-3b35-4608-ada3-9d7b5c154b26 tempest-ImagesNegativeTestJSON-354022530 tempest-ImagesNegativeTestJSON-354022530-project-member] [instance: ecec02e8-8ddf-4997-9547-ccee0db1938b] Unregistering the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2262.633779] env[63279]: DEBUG oslo_vmware.api [None req-c0088455-9b66-4574-b3c8-cfeeda174077 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Task: {'id': task-2087893, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.078763} completed successfully. 
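`Powered off the VM` followed by `Unregistering the VM` (and, earlier in the log, `Deleted the datastore file` / `Deleted contents of the VM from datastore`) is the vmwareapi destroy sequence: stop the VM, drop it from vCenter inventory, then delete its datastore directory. A sketch of those three calls, reusing `session` and `dc_ref` from the first sketch; `vm_ref` and the datastore path are placeholders for values looked up elsewhere:

```python
vm_ref = ...   # VirtualMachine managed-object reference (placeholder)

task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
session.wait_for_task(task)

# UnregisterVM removes the VM from inventory but leaves its files in place.
session.invoke_api(session.vim, 'UnregisterVM', vm_ref)

task = session.invoke_api(
    session.vim, 'DeleteDatastoreFile_Task',
    session.vim.service_content.fileManager,
    name='[datastore1] some-instance-folder',   # placeholder VM folder
    datacenter=dc_ref)
session.wait_for_task(task)
```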
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2262.634176] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7ae3afff-95fe-4115-b349-642e17a86e3d {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2262.635598] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-c0088455-9b66-4574-b3c8-cfeeda174077 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: 1fca4e5c-fe2c-4b61-bed4-52c7770def7c] Extended root virtual disk {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2262.637174] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0fdd7a23-876a-48c9-8842-5b863eaf0fd3 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2262.660059] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-c0088455-9b66-4574-b3c8-cfeeda174077 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: 1fca4e5c-fe2c-4b61-bed4-52c7770def7c] Reconfiguring VM instance instance-00000061 to attach disk [datastore1] 1fca4e5c-fe2c-4b61-bed4-52c7770def7c/1fca4e5c-fe2c-4b61-bed4-52c7770def7c.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2262.664589] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-aaafbb1c-d790-48b1-8a6b-e62dc9b0000f {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2262.684151] env[63279]: DEBUG oslo_vmware.api [None req-c0088455-9b66-4574-b3c8-cfeeda174077 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Waiting for the task: (returnval){ [ 2262.684151] env[63279]: value = "task-2087895" [ 2262.684151] env[63279]: _type = "Task" [ 2262.684151] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2262.694884] env[63279]: DEBUG oslo_vmware.api [None req-c0088455-9b66-4574-b3c8-cfeeda174077 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Task: {'id': task-2087895, 'name': ReconfigVM_Task} progress is 5%. 
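`Extending root virtual disk to 1048576` / `Extended root virtual disk` is the step where the copied image VMDK is grown to the instance's root-disk size; the number is in KB, so 1048576 corresponds to a 1 GiB root disk. A sketch of the underlying `ExtendVirtualDisk_Task` call, reusing `session` and `dc_ref` from the first sketch (the path matches the instance directory named in the log):

```python
task = session.invoke_api(
    session.vim, 'ExtendVirtualDisk_Task',
    session.vim.service_content.virtualDiskManager,
    name='[datastore1] 1fca4e5c-fe2c-4b61-bed4-52c7770def7c/'
         '1fca4e5c-fe2c-4b61-bed4-52c7770def7c.vmdk',
    datacenter=dc_ref,
    newCapacityKb=1048576,   # value from the log, i.e. 1 GiB
    eagerZero=False)
session.wait_for_task(task)
```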
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2262.858544] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6b78168-10a9-4291-8bc8-6db06de04300 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2262.863130] env[63279]: DEBUG oslo_concurrency.lockutils [req-5733de92-bb6f-44a3-819d-26368379f54f req-4bb85da6-315b-4aad-873e-6936ce99dea3 service nova] Releasing lock "refresh_cache-1fca4e5c-fe2c-4b61-bed4-52c7770def7c" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2262.866182] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dcb683c9-7f22-4aea-9b9c-50daddb95dc0 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2262.896185] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d27f7e6-d517-43b6-8a00-044ef6be4b1e {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2262.904021] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c42f9c02-a98d-41d7-ab67-906197aa9926 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2262.917757] env[63279]: DEBUG nova.compute.provider_tree [None req-f46fd062-ccb3-4cde-81f0-368a25124d21 tempest-ServersAdmin275Test-1209233635 tempest-ServersAdmin275Test-1209233635-project-member] Inventory has not changed in ProviderTree for provider: 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2263.101989] env[63279]: INFO nova.compute.manager [None req-a019c069-31ac-44de-94e8-2f9e51596f90 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 686a7ce2-2d07-411e-91d6-0471c55c3728] instance snapshotting [ 2263.102686] env[63279]: DEBUG nova.objects.instance [None req-a019c069-31ac-44de-94e8-2f9e51596f90 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Lazy-loading 'flavor' on Instance uuid 686a7ce2-2d07-411e-91d6-0471c55c3728 {{(pid=63279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2263.194129] env[63279]: DEBUG oslo_vmware.api [None req-c0088455-9b66-4574-b3c8-cfeeda174077 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Task: {'id': task-2087895, 'name': ReconfigVM_Task, 'duration_secs': 0.352077} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2263.194411] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-c0088455-9b66-4574-b3c8-cfeeda174077 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: 1fca4e5c-fe2c-4b61-bed4-52c7770def7c] Reconfigured VM instance instance-00000061 to attach disk [datastore1] 1fca4e5c-fe2c-4b61-bed4-52c7770def7c/1fca4e5c-fe2c-4b61-bed4-52c7770def7c.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2263.194997] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-633ef38b-15bc-43e9-8e65-b6d572160057 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2263.201719] env[63279]: DEBUG oslo_vmware.api [None req-c0088455-9b66-4574-b3c8-cfeeda174077 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Waiting for the task: (returnval){ [ 2263.201719] env[63279]: value = "task-2087896" [ 2263.201719] env[63279]: _type = "Task" [ 2263.201719] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2263.211105] env[63279]: DEBUG oslo_vmware.api [None req-c0088455-9b66-4574-b3c8-cfeeda174077 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Task: {'id': task-2087896, 'name': Rename_Task} progress is 5%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2263.356113] env[63279]: DEBUG nova.network.neutron [None req-0cc24c34-f137-4057-a9de-d78df4438893 tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] [instance: 21aca39e-8513-49bd-93e9-0d101cee591f] Updating instance_info_cache with network_info: [{"id": "d6d02bd7-d44f-434c-8e3f-5bdd56aed938", "address": "fa:16:3e:91:cc:87", "network": {"id": "1ebc2797-412f-4da1-ba28-8b54789f9203", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-665948294-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "afe3ab970f3249719809afa9f9b11c1b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "496faa4d-d874-449b-905e-328ddd60b31b", "external-id": "nsx-vlan-transportzone-391", "segmentation_id": 391, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd6d02bd7-d4", "ovs_interfaceid": "d6d02bd7-d44f-434c-8e3f-5bdd56aed938", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2263.421216] env[63279]: DEBUG nova.scheduler.client.report [None req-f46fd062-ccb3-4cde-81f0-368a25124d21 tempest-ServersAdmin275Test-1209233635 tempest-ServersAdmin275Test-1209233635-project-member] Inventory has not changed for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 based on inventory data: {'VCPU': 
{'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2263.596122] env[63279]: DEBUG nova.compute.manager [None req-284fb117-637f-4fa4-b022-b8d4dd30d5e2 tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] [instance: 19e10ee4-99d1-44b9-9354-4c162d541a1f] Start spawning the instance on the hypervisor. {{(pid=63279) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 2263.608693] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c7d0624-bb80-4702-a87e-2ee963be9fd1 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2263.632340] env[63279]: DEBUG nova.virt.hardware [None req-284fb117-637f-4fa4-b022-b8d4dd30d5e2 tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-13T17:30:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-13T17:30:01Z,direct_url=,disk_format='vmdk',id=30887889-e45b-4f67-8b3c-16216e594a90,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a935e86eee0a4c38adfe0367d2097a61',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-13T17:30:02Z,virtual_size=,visibility=), allow threads: False {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2263.632582] env[63279]: DEBUG nova.virt.hardware [None req-284fb117-637f-4fa4-b022-b8d4dd30d5e2 tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Flavor limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2263.632812] env[63279]: DEBUG nova.virt.hardware [None req-284fb117-637f-4fa4-b022-b8d4dd30d5e2 tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Image limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2263.632921] env[63279]: DEBUG nova.virt.hardware [None req-284fb117-637f-4fa4-b022-b8d4dd30d5e2 tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Flavor pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2263.633080] env[63279]: DEBUG nova.virt.hardware [None req-284fb117-637f-4fa4-b022-b8d4dd30d5e2 tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Image pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2263.633236] env[63279]: DEBUG nova.virt.hardware [None req-284fb117-637f-4fa4-b022-b8d4dd30d5e2 tempest-AttachVolumeTestJSON-1872027629 
tempest-AttachVolumeTestJSON-1872027629-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2263.633444] env[63279]: DEBUG nova.virt.hardware [None req-284fb117-637f-4fa4-b022-b8d4dd30d5e2 tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 2263.633608] env[63279]: DEBUG nova.virt.hardware [None req-284fb117-637f-4fa4-b022-b8d4dd30d5e2 tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 2263.633775] env[63279]: DEBUG nova.virt.hardware [None req-284fb117-637f-4fa4-b022-b8d4dd30d5e2 tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Got 1 possible topologies {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2263.633935] env[63279]: DEBUG nova.virt.hardware [None req-284fb117-637f-4fa4-b022-b8d4dd30d5e2 tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2263.634125] env[63279]: DEBUG nova.virt.hardware [None req-284fb117-637f-4fa4-b022-b8d4dd30d5e2 tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2263.634856] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c427074c-84d3-4a9f-a0a2-85d7eb5d7a50 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2263.637752] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1563f44-e80c-4050-a76f-0f85f6f056da {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2263.646222] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ae06ef7-3a95-43bd-a168-412317051a78 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2263.711988] env[63279]: DEBUG oslo_vmware.api [None req-c0088455-9b66-4574-b3c8-cfeeda174077 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Task: {'id': task-2087896, 'name': Rename_Task, 'duration_secs': 0.352755} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2263.712379] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-c0088455-9b66-4574-b3c8-cfeeda174077 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: 1fca4e5c-fe2c-4b61-bed4-52c7770def7c] Powering on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2263.712710] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2472228b-77ee-4ddd-80bd-147526ea066d {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2263.719256] env[63279]: DEBUG oslo_vmware.api [None req-c0088455-9b66-4574-b3c8-cfeeda174077 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Waiting for the task: (returnval){ [ 2263.719256] env[63279]: value = "task-2087898" [ 2263.719256] env[63279]: _type = "Task" [ 2263.719256] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2263.727018] env[63279]: DEBUG oslo_vmware.api [None req-c0088455-9b66-4574-b3c8-cfeeda174077 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Task: {'id': task-2087898, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2263.857785] env[63279]: DEBUG oslo_concurrency.lockutils [None req-0cc24c34-f137-4057-a9de-d78df4438893 tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] Releasing lock "refresh_cache-21aca39e-8513-49bd-93e9-0d101cee591f" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2263.858486] env[63279]: DEBUG nova.objects.instance [None req-0cc24c34-f137-4057-a9de-d78df4438893 tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] Lazy-loading 'flavor' on Instance uuid 21aca39e-8513-49bd-93e9-0d101cee591f {{(pid=63279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2263.926091] env[63279]: DEBUG oslo_concurrency.lockutils [None req-f46fd062-ccb3-4cde-81f0-368a25124d21 tempest-ServersAdmin275Test-1209233635 tempest-ServersAdmin275Test-1209233635-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.856s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2263.926664] env[63279]: DEBUG nova.compute.manager [None req-f46fd062-ccb3-4cde-81f0-368a25124d21 tempest-ServersAdmin275Test-1209233635 tempest-ServersAdmin275Test-1209233635-project-member] [instance: 69c88844-84c7-4272-a2c4-051f1499df84] Start building networks asynchronously for instance. 
{{(pid=63279) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 2263.929223] env[63279]: DEBUG oslo_concurrency.lockutils [None req-4c6686f2-b4a3-48f1-a3b8-2f0ad9d067b3 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 3.705s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2263.930964] env[63279]: INFO nova.compute.claims [None req-4c6686f2-b4a3-48f1-a3b8-2f0ad9d067b3 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: d0b8c9dd-c683-4f3a-b819-d9d57d96636b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2264.153767] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-a019c069-31ac-44de-94e8-2f9e51596f90 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 686a7ce2-2d07-411e-91d6-0471c55c3728] Creating Snapshot of the VM instance {{(pid=63279) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 2264.154119] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-a357f7a7-28de-45ba-9668-024d0b269a6a {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2264.161369] env[63279]: DEBUG oslo_vmware.api [None req-a019c069-31ac-44de-94e8-2f9e51596f90 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Waiting for the task: (returnval){ [ 2264.161369] env[63279]: value = "task-2087899" [ 2264.161369] env[63279]: _type = "Task" [ 2264.161369] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2264.169100] env[63279]: DEBUG oslo_vmware.api [None req-a019c069-31ac-44de-94e8-2f9e51596f90 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Task: {'id': task-2087899, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2264.229495] env[63279]: DEBUG oslo_vmware.api [None req-c0088455-9b66-4574-b3c8-cfeeda174077 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Task: {'id': task-2087898, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2264.364317] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-648bdde6-34d0-42f1-864e-997d5f56a48d {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2264.385868] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-0cc24c34-f137-4057-a9de-d78df4438893 tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] [instance: 21aca39e-8513-49bd-93e9-0d101cee591f] Powering off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2264.386196] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d11c3f63-df98-44b1-ae32-18c58d25e24b {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2264.393043] env[63279]: DEBUG oslo_vmware.api [None req-0cc24c34-f137-4057-a9de-d78df4438893 tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] Waiting for the task: (returnval){ [ 2264.393043] env[63279]: value = "task-2087900" [ 2264.393043] env[63279]: _type = "Task" [ 2264.393043] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2264.400944] env[63279]: DEBUG oslo_vmware.api [None req-0cc24c34-f137-4057-a9de-d78df4438893 tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] Task: {'id': task-2087900, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2264.435479] env[63279]: DEBUG nova.compute.utils [None req-f46fd062-ccb3-4cde-81f0-368a25124d21 tempest-ServersAdmin275Test-1209233635 tempest-ServersAdmin275Test-1209233635-project-member] Using /dev/sd instead of None {{(pid=63279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2264.438921] env[63279]: DEBUG nova.compute.manager [None req-f46fd062-ccb3-4cde-81f0-368a25124d21 tempest-ServersAdmin275Test-1209233635 tempest-ServersAdmin275Test-1209233635-project-member] [instance: 69c88844-84c7-4272-a2c4-051f1499df84] Not allocating networking since 'none' was specified. {{(pid=63279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 2264.672087] env[63279]: DEBUG oslo_vmware.api [None req-a019c069-31ac-44de-94e8-2f9e51596f90 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Task: {'id': task-2087899, 'name': CreateSnapshot_Task, 'duration_secs': 0.417326} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2264.672087] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-a019c069-31ac-44de-94e8-2f9e51596f90 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 686a7ce2-2d07-411e-91d6-0471c55c3728] Created Snapshot of the VM instance {{(pid=63279) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 2264.672706] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82fa2b04-02da-40c3-9a50-211b61815067 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2264.730689] env[63279]: DEBUG oslo_vmware.api [None req-c0088455-9b66-4574-b3c8-cfeeda174077 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Task: {'id': task-2087898, 'name': PowerOnVM_Task} progress is 100%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2264.903469] env[63279]: DEBUG oslo_vmware.api [None req-0cc24c34-f137-4057-a9de-d78df4438893 tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] Task: {'id': task-2087900, 'name': PowerOffVM_Task, 'duration_secs': 0.208242} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2264.903735] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-0cc24c34-f137-4057-a9de-d78df4438893 tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] [instance: 21aca39e-8513-49bd-93e9-0d101cee591f] Powered off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2264.909121] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-0cc24c34-f137-4057-a9de-d78df4438893 tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] [instance: 21aca39e-8513-49bd-93e9-0d101cee591f] Reconfiguring VM instance instance-0000005f to detach disk 2001 {{(pid=63279) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 2264.909380] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-cd997e61-2a68-4dd2-957a-168a9397c7a5 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2264.926194] env[63279]: DEBUG oslo_vmware.api [None req-0cc24c34-f137-4057-a9de-d78df4438893 tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] Waiting for the task: (returnval){ [ 2264.926194] env[63279]: value = "task-2087901" [ 2264.926194] env[63279]: _type = "Task" [ 2264.926194] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2264.933331] env[63279]: DEBUG oslo_vmware.api [None req-0cc24c34-f137-4057-a9de-d78df4438893 tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] Task: {'id': task-2087901, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2264.939879] env[63279]: DEBUG nova.compute.manager [None req-f46fd062-ccb3-4cde-81f0-368a25124d21 tempest-ServersAdmin275Test-1209233635 tempest-ServersAdmin275Test-1209233635-project-member] [instance: 69c88844-84c7-4272-a2c4-051f1499df84] Start building block device mappings for instance. {{(pid=63279) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 2265.157855] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b96a1bce-de5a-49ed-aabc-0545f1b4c460 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2265.165744] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f6e86c0-8e9f-47f7-a43b-8d9378ecef62 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2265.202062] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-a019c069-31ac-44de-94e8-2f9e51596f90 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 686a7ce2-2d07-411e-91d6-0471c55c3728] Creating linked-clone VM from snapshot {{(pid=63279) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 2265.202489] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-418d5f89-824b-4474-ad34-1a4985f12e12 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2265.205600] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c12f790-7017-4e32-98e3-30c78b7b92c1 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2265.213897] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06150b4f-ba47-43c9-b339-413288771243 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2265.217535] env[63279]: DEBUG oslo_vmware.api [None req-a019c069-31ac-44de-94e8-2f9e51596f90 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Waiting for the task: (returnval){ [ 2265.217535] env[63279]: value = "task-2087902" [ 2265.217535] env[63279]: _type = "Task" [ 2265.217535] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2265.228185] env[63279]: DEBUG nova.compute.provider_tree [None req-4c6686f2-b4a3-48f1-a3b8-2f0ad9d067b3 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Inventory has not changed in ProviderTree for provider: 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2265.235244] env[63279]: DEBUG oslo_vmware.api [None req-a019c069-31ac-44de-94e8-2f9e51596f90 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Task: {'id': task-2087902, 'name': CloneVM_Task} progress is 10%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2265.239822] env[63279]: DEBUG oslo_vmware.api [None req-c0088455-9b66-4574-b3c8-cfeeda174077 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Task: {'id': task-2087898, 'name': PowerOnVM_Task, 'duration_secs': 1.040889} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2265.239923] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-c0088455-9b66-4574-b3c8-cfeeda174077 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: 1fca4e5c-fe2c-4b61-bed4-52c7770def7c] Powered on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2265.240087] env[63279]: INFO nova.compute.manager [None req-c0088455-9b66-4574-b3c8-cfeeda174077 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: 1fca4e5c-fe2c-4b61-bed4-52c7770def7c] Took 8.96 seconds to spawn the instance on the hypervisor. [ 2265.240281] env[63279]: DEBUG nova.compute.manager [None req-c0088455-9b66-4574-b3c8-cfeeda174077 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: 1fca4e5c-fe2c-4b61-bed4-52c7770def7c] Checking state {{(pid=63279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2265.240963] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9185f7b3-d856-4342-91ee-5528a99a2489 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2265.436854] env[63279]: DEBUG oslo_vmware.api [None req-0cc24c34-f137-4057-a9de-d78df4438893 tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] Task: {'id': task-2087901, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2265.702963] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-a1749bde-6a19-48fd-a4cc-beb180cafb1c tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] [instance: 9b98a316-71da-45fb-b895-553f179fe7d9] Volume attach. 
Driver type: vmdk {{(pid=63279) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 2265.703233] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-a1749bde-6a19-48fd-a4cc-beb180cafb1c tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] [instance: 9b98a316-71da-45fb-b895-553f179fe7d9] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-427756', 'volume_id': 'abfd7860-7a69-49db-b638-22de4f3a1b94', 'name': 'volume-abfd7860-7a69-49db-b638-22de4f3a1b94', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '9b98a316-71da-45fb-b895-553f179fe7d9', 'attached_at': '', 'detached_at': '', 'volume_id': 'abfd7860-7a69-49db-b638-22de4f3a1b94', 'serial': 'abfd7860-7a69-49db-b638-22de4f3a1b94'} {{(pid=63279) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 2265.704146] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b67bb116-9fc8-47d8-8bad-5312aa1c7588 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2265.724540] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-470888d0-aaf6-4a70-9d54-54c955cbd0c0 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2265.732116] env[63279]: DEBUG oslo_vmware.api [None req-a019c069-31ac-44de-94e8-2f9e51596f90 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Task: {'id': task-2087902, 'name': CloneVM_Task} progress is 94%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2265.746311] env[63279]: DEBUG nova.scheduler.client.report [None req-4c6686f2-b4a3-48f1-a3b8-2f0ad9d067b3 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Inventory has not changed for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2265.756840] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-a1749bde-6a19-48fd-a4cc-beb180cafb1c tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] [instance: 9b98a316-71da-45fb-b895-553f179fe7d9] Reconfiguring VM instance instance-00000054 to attach disk [datastore1] volume-abfd7860-7a69-49db-b638-22de4f3a1b94/volume-abfd7860-7a69-49db-b638-22de4f3a1b94.vmdk or device None with type thin {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2265.761295] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-cb4c79bc-dd50-4f37-bf1c-3d8ce9e52b73 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2265.776689] env[63279]: INFO nova.compute.manager [None req-c0088455-9b66-4574-b3c8-cfeeda174077 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: 1fca4e5c-fe2c-4b61-bed4-52c7770def7c] Took 17.83 seconds to build instance. [ 2265.783192] env[63279]: DEBUG oslo_vmware.api [None req-a1749bde-6a19-48fd-a4cc-beb180cafb1c tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] Waiting for the task: (returnval){ [ 2265.783192] env[63279]: value = "task-2087903" [ 2265.783192] env[63279]: _type = "Task" [ 2265.783192] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2265.791515] env[63279]: DEBUG oslo_vmware.api [None req-a1749bde-6a19-48fd-a4cc-beb180cafb1c tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] Task: {'id': task-2087903, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2265.937142] env[63279]: DEBUG oslo_vmware.api [None req-0cc24c34-f137-4057-a9de-d78df4438893 tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] Task: {'id': task-2087901, 'name': ReconfigVM_Task, 'duration_secs': 0.565526} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2265.937371] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-0cc24c34-f137-4057-a9de-d78df4438893 tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] [instance: 21aca39e-8513-49bd-93e9-0d101cee591f] Reconfigured VM instance instance-0000005f to detach disk 2001 {{(pid=63279) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 2265.937561] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-0cc24c34-f137-4057-a9de-d78df4438893 tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] [instance: 21aca39e-8513-49bd-93e9-0d101cee591f] Powering on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2265.937812] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-bfb5a7fd-085c-4557-be29-06aa73b14896 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2265.943682] env[63279]: DEBUG oslo_vmware.api [None req-0cc24c34-f137-4057-a9de-d78df4438893 tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] Waiting for the task: (returnval){ [ 2265.943682] env[63279]: value = "task-2087904" [ 2265.943682] env[63279]: _type = "Task" [ 2265.943682] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2265.952419] env[63279]: DEBUG nova.compute.manager [None req-f46fd062-ccb3-4cde-81f0-368a25124d21 tempest-ServersAdmin275Test-1209233635 tempest-ServersAdmin275Test-1209233635-project-member] [instance: 69c88844-84c7-4272-a2c4-051f1499df84] Start spawning the instance on the hypervisor. {{(pid=63279) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 2265.954682] env[63279]: DEBUG oslo_vmware.api [None req-0cc24c34-f137-4057-a9de-d78df4438893 tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] Task: {'id': task-2087904, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2265.976701] env[63279]: DEBUG nova.virt.hardware [None req-f46fd062-ccb3-4cde-81f0-368a25124d21 tempest-ServersAdmin275Test-1209233635 tempest-ServersAdmin275Test-1209233635-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-13T17:30:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-13T17:30:01Z,direct_url=,disk_format='vmdk',id=30887889-e45b-4f67-8b3c-16216e594a90,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a935e86eee0a4c38adfe0367d2097a61',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-13T17:30:02Z,virtual_size=,visibility=), allow threads: False {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2265.976969] env[63279]: DEBUG nova.virt.hardware [None req-f46fd062-ccb3-4cde-81f0-368a25124d21 tempest-ServersAdmin275Test-1209233635 tempest-ServersAdmin275Test-1209233635-project-member] Flavor limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2265.977149] env[63279]: DEBUG nova.virt.hardware [None req-f46fd062-ccb3-4cde-81f0-368a25124d21 tempest-ServersAdmin275Test-1209233635 tempest-ServersAdmin275Test-1209233635-project-member] Image limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2265.977588] env[63279]: DEBUG nova.virt.hardware [None req-f46fd062-ccb3-4cde-81f0-368a25124d21 tempest-ServersAdmin275Test-1209233635 tempest-ServersAdmin275Test-1209233635-project-member] Flavor pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2265.977588] env[63279]: DEBUG nova.virt.hardware [None req-f46fd062-ccb3-4cde-81f0-368a25124d21 tempest-ServersAdmin275Test-1209233635 tempest-ServersAdmin275Test-1209233635-project-member] Image pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2265.978100] env[63279]: DEBUG nova.virt.hardware [None req-f46fd062-ccb3-4cde-81f0-368a25124d21 tempest-ServersAdmin275Test-1209233635 tempest-ServersAdmin275Test-1209233635-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2265.978271] env[63279]: DEBUG nova.virt.hardware [None req-f46fd062-ccb3-4cde-81f0-368a25124d21 tempest-ServersAdmin275Test-1209233635 tempest-ServersAdmin275Test-1209233635-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 2265.978553] env[63279]: DEBUG nova.virt.hardware [None req-f46fd062-ccb3-4cde-81f0-368a25124d21 tempest-ServersAdmin275Test-1209233635 tempest-ServersAdmin275Test-1209233635-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 2265.978855] env[63279]: DEBUG nova.virt.hardware [None 
req-f46fd062-ccb3-4cde-81f0-368a25124d21 tempest-ServersAdmin275Test-1209233635 tempest-ServersAdmin275Test-1209233635-project-member] Got 1 possible topologies {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2265.979293] env[63279]: DEBUG nova.virt.hardware [None req-f46fd062-ccb3-4cde-81f0-368a25124d21 tempest-ServersAdmin275Test-1209233635 tempest-ServersAdmin275Test-1209233635-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2265.979437] env[63279]: DEBUG nova.virt.hardware [None req-f46fd062-ccb3-4cde-81f0-368a25124d21 tempest-ServersAdmin275Test-1209233635 tempest-ServersAdmin275Test-1209233635-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2265.980467] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c537c503-f918-49b6-b728-2f22503fce62 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2265.988728] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3556a20b-0cd3-4873-97e4-63a179152402 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2266.002367] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-f46fd062-ccb3-4cde-81f0-368a25124d21 tempest-ServersAdmin275Test-1209233635 tempest-ServersAdmin275Test-1209233635-project-member] [instance: 69c88844-84c7-4272-a2c4-051f1499df84] Instance VIF info [] {{(pid=63279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2266.008098] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-f46fd062-ccb3-4cde-81f0-368a25124d21 tempest-ServersAdmin275Test-1209233635 tempest-ServersAdmin275Test-1209233635-project-member] Creating folder: Project (c6d93518b6b540c19e86e24bba795494). Parent ref: group-v427491. {{(pid=63279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 2266.008409] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4d8e5937-6b49-473f-b881-6c4606891241 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2266.018907] env[63279]: INFO nova.virt.vmwareapi.vm_util [None req-f46fd062-ccb3-4cde-81f0-368a25124d21 tempest-ServersAdmin275Test-1209233635 tempest-ServersAdmin275Test-1209233635-project-member] Created folder: Project (c6d93518b6b540c19e86e24bba795494) in parent group-v427491. [ 2266.019684] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-f46fd062-ccb3-4cde-81f0-368a25124d21 tempest-ServersAdmin275Test-1209233635 tempest-ServersAdmin275Test-1209233635-project-member] Creating folder: Instances. Parent ref: group-v427759. 
{{(pid=63279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 2266.019684] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-25c18463-ac28-4ec8-b784-390ebf3f72b4 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2266.028821] env[63279]: INFO nova.virt.vmwareapi.vm_util [None req-f46fd062-ccb3-4cde-81f0-368a25124d21 tempest-ServersAdmin275Test-1209233635 tempest-ServersAdmin275Test-1209233635-project-member] Created folder: Instances in parent group-v427759. [ 2266.029082] env[63279]: DEBUG oslo.service.loopingcall [None req-f46fd062-ccb3-4cde-81f0-368a25124d21 tempest-ServersAdmin275Test-1209233635 tempest-ServersAdmin275Test-1209233635-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2266.029735] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 69c88844-84c7-4272-a2c4-051f1499df84] Creating VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2266.029735] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-4b1d77f9-28e9-42d1-9b9c-f1540e07cc90 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2266.048495] env[63279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2266.048495] env[63279]: value = "task-2087907" [ 2266.048495] env[63279]: _type = "Task" [ 2266.048495] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2266.056383] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087907, 'name': CreateVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2266.231076] env[63279]: DEBUG oslo_vmware.api [None req-a019c069-31ac-44de-94e8-2f9e51596f90 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Task: {'id': task-2087902, 'name': CloneVM_Task} progress is 94%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2266.259176] env[63279]: DEBUG oslo_concurrency.lockutils [None req-4c6686f2-b4a3-48f1-a3b8-2f0ad9d067b3 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.330s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2266.259702] env[63279]: DEBUG nova.compute.manager [None req-4c6686f2-b4a3-48f1-a3b8-2f0ad9d067b3 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: d0b8c9dd-c683-4f3a-b819-d9d57d96636b] Start building networks asynchronously for instance. 
{{(pid=63279) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 2266.263279] env[63279]: DEBUG oslo_concurrency.lockutils [None req-24dd457b-86eb-4407-ae8c-0e21ece95320 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 5.689s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2266.263279] env[63279]: DEBUG nova.objects.instance [None req-24dd457b-86eb-4407-ae8c-0e21ece95320 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Lazy-loading 'resources' on Instance uuid 6b4a94f9-1cd9-466f-844f-8d692b732abc {{(pid=63279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2266.278328] env[63279]: DEBUG oslo_concurrency.lockutils [None req-c0088455-9b66-4574-b3c8-cfeeda174077 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Lock "1fca4e5c-fe2c-4b61-bed4-52c7770def7c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 19.343s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2266.295173] env[63279]: DEBUG oslo_vmware.api [None req-a1749bde-6a19-48fd-a4cc-beb180cafb1c tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] Task: {'id': task-2087903, 'name': ReconfigVM_Task} progress is 99%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2266.453728] env[63279]: DEBUG oslo_vmware.api [None req-0cc24c34-f137-4057-a9de-d78df4438893 tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] Task: {'id': task-2087904, 'name': PowerOnVM_Task, 'duration_secs': 0.447473} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2266.454017] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-0cc24c34-f137-4057-a9de-d78df4438893 tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] [instance: 21aca39e-8513-49bd-93e9-0d101cee591f] Powered on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2266.454255] env[63279]: DEBUG nova.compute.manager [None req-0cc24c34-f137-4057-a9de-d78df4438893 tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] [instance: 21aca39e-8513-49bd-93e9-0d101cee591f] Checking state {{(pid=63279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2266.455009] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05a1d3b0-da33-4070-b388-30a8e5220179 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2266.558760] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087907, 'name': CreateVM_Task, 'duration_secs': 0.383852} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2266.558940] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 69c88844-84c7-4272-a2c4-051f1499df84] Created VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2266.559421] env[63279]: DEBUG oslo_concurrency.lockutils [None req-f46fd062-ccb3-4cde-81f0-368a25124d21 tempest-ServersAdmin275Test-1209233635 tempest-ServersAdmin275Test-1209233635-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2266.559638] env[63279]: DEBUG oslo_concurrency.lockutils [None req-f46fd062-ccb3-4cde-81f0-368a25124d21 tempest-ServersAdmin275Test-1209233635 tempest-ServersAdmin275Test-1209233635-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2266.559925] env[63279]: DEBUG oslo_concurrency.lockutils [None req-f46fd062-ccb3-4cde-81f0-368a25124d21 tempest-ServersAdmin275Test-1209233635 tempest-ServersAdmin275Test-1209233635-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2266.560205] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e1b1144f-1c1a-40c2-9783-e6684f4642fc {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2266.564703] env[63279]: DEBUG oslo_vmware.api [None req-f46fd062-ccb3-4cde-81f0-368a25124d21 tempest-ServersAdmin275Test-1209233635 tempest-ServersAdmin275Test-1209233635-project-member] Waiting for the task: (returnval){ [ 2266.564703] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52200547-b04f-7ed0-00f8-08bad045c31f" [ 2266.564703] env[63279]: _type = "Task" [ 2266.564703] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2266.572602] env[63279]: DEBUG oslo_vmware.api [None req-f46fd062-ccb3-4cde-81f0-368a25124d21 tempest-ServersAdmin275Test-1209233635 tempest-ServersAdmin275Test-1209233635-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52200547-b04f-7ed0-00f8-08bad045c31f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2266.599195] env[63279]: DEBUG nova.compute.manager [req-f54214bb-3459-4367-900b-514a8999d5a1 req-bcac207b-f972-44be-8de3-592ffc8aa8c0 service nova] [instance: 1fca4e5c-fe2c-4b61-bed4-52c7770def7c] Received event network-changed-1e0ac67c-3039-4c36-831a-d32977fcab32 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2266.599408] env[63279]: DEBUG nova.compute.manager [req-f54214bb-3459-4367-900b-514a8999d5a1 req-bcac207b-f972-44be-8de3-592ffc8aa8c0 service nova] [instance: 1fca4e5c-fe2c-4b61-bed4-52c7770def7c] Refreshing instance network info cache due to event network-changed-1e0ac67c-3039-4c36-831a-d32977fcab32. 
{{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11655}} [ 2266.599631] env[63279]: DEBUG oslo_concurrency.lockutils [req-f54214bb-3459-4367-900b-514a8999d5a1 req-bcac207b-f972-44be-8de3-592ffc8aa8c0 service nova] Acquiring lock "refresh_cache-1fca4e5c-fe2c-4b61-bed4-52c7770def7c" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2266.599769] env[63279]: DEBUG oslo_concurrency.lockutils [req-f54214bb-3459-4367-900b-514a8999d5a1 req-bcac207b-f972-44be-8de3-592ffc8aa8c0 service nova] Acquired lock "refresh_cache-1fca4e5c-fe2c-4b61-bed4-52c7770def7c" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2266.599932] env[63279]: DEBUG nova.network.neutron [req-f54214bb-3459-4367-900b-514a8999d5a1 req-bcac207b-f972-44be-8de3-592ffc8aa8c0 service nova] [instance: 1fca4e5c-fe2c-4b61-bed4-52c7770def7c] Refreshing network info cache for port 1e0ac67c-3039-4c36-831a-d32977fcab32 {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2266.731801] env[63279]: DEBUG oslo_vmware.api [None req-a019c069-31ac-44de-94e8-2f9e51596f90 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Task: {'id': task-2087902, 'name': CloneVM_Task} progress is 94%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2266.765770] env[63279]: DEBUG nova.compute.utils [None req-4c6686f2-b4a3-48f1-a3b8-2f0ad9d067b3 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Using /dev/sd instead of None {{(pid=63279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2266.770020] env[63279]: DEBUG nova.compute.manager [None req-4c6686f2-b4a3-48f1-a3b8-2f0ad9d067b3 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: d0b8c9dd-c683-4f3a-b819-d9d57d96636b] Allocating IP information in the background. {{(pid=63279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 2266.770215] env[63279]: DEBUG nova.network.neutron [None req-4c6686f2-b4a3-48f1-a3b8-2f0ad9d067b3 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: d0b8c9dd-c683-4f3a-b819-d9d57d96636b] allocate_for_instance() {{(pid=63279) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2266.797411] env[63279]: DEBUG oslo_vmware.api [None req-a1749bde-6a19-48fd-a4cc-beb180cafb1c tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] Task: {'id': task-2087903, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2266.810101] env[63279]: DEBUG nova.policy [None req-4c6686f2-b4a3-48f1-a3b8-2f0ad9d067b3 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '655d692da88947b89104e1f14f7d71f5', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a5a719a21fe248c49d0d0151d218866b', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63279) authorize /opt/stack/nova/nova/policy.py:192}} [ 2266.841160] env[63279]: DEBUG oslo_concurrency.lockutils [None req-5c88ffbb-5156-4ee9-8ee2-94ea57b56588 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Acquiring lock "2fba60ba-7ef0-4e61-9f7d-147cad8ab7ed" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2266.841408] env[63279]: DEBUG oslo_concurrency.lockutils [None req-5c88ffbb-5156-4ee9-8ee2-94ea57b56588 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Lock "2fba60ba-7ef0-4e61-9f7d-147cad8ab7ed" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2267.007470] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a17994fb-dc29-47ce-8582-e40d4df8da9b {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2267.015485] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4afa606-946a-4269-9df9-93f08283ff19 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2267.047267] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4533d333-da14-42d0-986b-cfed9168d712 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2267.055040] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6abd223-c226-4f54-9524-8ade33db3435 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2267.068716] env[63279]: DEBUG nova.compute.provider_tree [None req-24dd457b-86eb-4407-ae8c-0e21ece95320 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Inventory has not changed in ProviderTree for provider: 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2267.080656] env[63279]: DEBUG oslo_vmware.api [None req-f46fd062-ccb3-4cde-81f0-368a25124d21 tempest-ServersAdmin275Test-1209233635 tempest-ServersAdmin275Test-1209233635-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52200547-b04f-7ed0-00f8-08bad045c31f, 'name': SearchDatastore_Task, 
'duration_secs': 0.019334} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2267.081550] env[63279]: DEBUG nova.network.neutron [None req-4c6686f2-b4a3-48f1-a3b8-2f0ad9d067b3 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: d0b8c9dd-c683-4f3a-b819-d9d57d96636b] Successfully created port: ee08f9b0-ce61-4fa5-bf67-3a97acafc55e {{(pid=63279) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2267.083729] env[63279]: DEBUG oslo_concurrency.lockutils [None req-f46fd062-ccb3-4cde-81f0-368a25124d21 tempest-ServersAdmin275Test-1209233635 tempest-ServersAdmin275Test-1209233635-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2267.083989] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-f46fd062-ccb3-4cde-81f0-368a25124d21 tempest-ServersAdmin275Test-1209233635 tempest-ServersAdmin275Test-1209233635-project-member] [instance: 69c88844-84c7-4272-a2c4-051f1499df84] Processing image 30887889-e45b-4f67-8b3c-16216e594a90 {{(pid=63279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2267.084303] env[63279]: DEBUG oslo_concurrency.lockutils [None req-f46fd062-ccb3-4cde-81f0-368a25124d21 tempest-ServersAdmin275Test-1209233635 tempest-ServersAdmin275Test-1209233635-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2267.084495] env[63279]: DEBUG oslo_concurrency.lockutils [None req-f46fd062-ccb3-4cde-81f0-368a25124d21 tempest-ServersAdmin275Test-1209233635 tempest-ServersAdmin275Test-1209233635-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2267.084693] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-f46fd062-ccb3-4cde-81f0-368a25124d21 tempest-ServersAdmin275Test-1209233635 tempest-ServersAdmin275Test-1209233635-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2267.085254] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b522d559-b9aa-477e-a7a7-d87a66271cfc {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2267.094669] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-f46fd062-ccb3-4cde-81f0-368a25124d21 tempest-ServersAdmin275Test-1209233635 tempest-ServersAdmin275Test-1209233635-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2267.094870] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-f46fd062-ccb3-4cde-81f0-368a25124d21 tempest-ServersAdmin275Test-1209233635 tempest-ServersAdmin275Test-1209233635-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2267.095637] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-63f6b8e2-76b7-4812-b515-77db700d1605 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2267.100931] env[63279]: DEBUG oslo_vmware.api [None req-f46fd062-ccb3-4cde-81f0-368a25124d21 tempest-ServersAdmin275Test-1209233635 tempest-ServersAdmin275Test-1209233635-project-member] Waiting for the task: (returnval){ [ 2267.100931] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52fb8341-ea50-be2b-3aad-41b01d5adced" [ 2267.100931] env[63279]: _type = "Task" [ 2267.100931] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2267.110991] env[63279]: DEBUG oslo_vmware.api [None req-f46fd062-ccb3-4cde-81f0-368a25124d21 tempest-ServersAdmin275Test-1209233635 tempest-ServersAdmin275Test-1209233635-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52fb8341-ea50-be2b-3aad-41b01d5adced, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2267.232643] env[63279]: DEBUG oslo_vmware.api [None req-a019c069-31ac-44de-94e8-2f9e51596f90 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Task: {'id': task-2087902, 'name': CloneVM_Task} progress is 94%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2267.271723] env[63279]: DEBUG nova.compute.manager [None req-4c6686f2-b4a3-48f1-a3b8-2f0ad9d067b3 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: d0b8c9dd-c683-4f3a-b819-d9d57d96636b] Start building block device mappings for instance. {{(pid=63279) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 2267.296111] env[63279]: DEBUG oslo_vmware.api [None req-a1749bde-6a19-48fd-a4cc-beb180cafb1c tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] Task: {'id': task-2087903, 'name': ReconfigVM_Task, 'duration_secs': 1.339799} completed successfully. 
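Each `Task: {'id': …} progress is N%` / `completed successfully` pair above is oslo.vmware starting a vCenter *_Task method and polling it. A rough sketch of that call pattern, assuming reachable vCenter credentials and a pre-built VM moref (`vm_ref`) and reconfig `spec`; parameter names are from memory of oslo.vmware and should be checked against the installed version:

from oslo_vmware import api as vmware_api


def reconfigure_vm(host, user, password, vm_ref, spec):
    # Sketch of the invoke_api + wait_for_task pattern from the log.
    # vm_ref and spec are assumed to be built elsewhere; this only shows
    # how a *_Task method is started and then polled until completion.
    session = vmware_api.VMwareAPISession(
        host, user, password,
        api_retry_count=10,        # retries on transient faults
        task_poll_interval=0.5)    # seconds between the progress polls
    try:
        # Starts ReconfigVM_Task on the VM; returns a task moref.
        task = session.invoke_api(session.vim, "ReconfigVM_Task",
                                  vm_ref, spec=spec)
        # Blocks, logging "progress is N%" until the task finishes, and
        # raises if the task ends in an error state.
        session.wait_for_task(task)
    finally:
        session.logout()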
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2267.296414] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-a1749bde-6a19-48fd-a4cc-beb180cafb1c tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] [instance: 9b98a316-71da-45fb-b895-553f179fe7d9] Reconfigured VM instance instance-00000054 to attach disk [datastore1] volume-abfd7860-7a69-49db-b638-22de4f3a1b94/volume-abfd7860-7a69-49db-b638-22de4f3a1b94.vmdk or device None with type thin {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2267.301281] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-01e8b901-7e94-4caf-9d81-73882fa1aa06 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2267.315970] env[63279]: DEBUG oslo_vmware.api [None req-a1749bde-6a19-48fd-a4cc-beb180cafb1c tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] Waiting for the task: (returnval){ [ 2267.315970] env[63279]: value = "task-2087908" [ 2267.315970] env[63279]: _type = "Task" [ 2267.315970] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2267.325713] env[63279]: DEBUG oslo_vmware.api [None req-a1749bde-6a19-48fd-a4cc-beb180cafb1c tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] Task: {'id': task-2087908, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2267.344622] env[63279]: DEBUG nova.compute.utils [None req-5c88ffbb-5156-4ee9-8ee2-94ea57b56588 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Using /dev/sd instead of None {{(pid=63279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2267.489351] env[63279]: DEBUG nova.network.neutron [req-f54214bb-3459-4367-900b-514a8999d5a1 req-bcac207b-f972-44be-8de3-592ffc8aa8c0 service nova] [instance: 1fca4e5c-fe2c-4b61-bed4-52c7770def7c] Updated VIF entry in instance network info cache for port 1e0ac67c-3039-4c36-831a-d32977fcab32. 
{{(pid=63279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2267.489724] env[63279]: DEBUG nova.network.neutron [req-f54214bb-3459-4367-900b-514a8999d5a1 req-bcac207b-f972-44be-8de3-592ffc8aa8c0 service nova] [instance: 1fca4e5c-fe2c-4b61-bed4-52c7770def7c] Updating instance_info_cache with network_info: [{"id": "1e0ac67c-3039-4c36-831a-d32977fcab32", "address": "fa:16:3e:85:8f:65", "network": {"id": "82ca068b-acea-4f60-b0b3-68ea1e08aebe", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-571699353-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.224", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f39174e9ff5649e0ade4391da383dfb2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "57d4be17-536f-4a81-bea9-6547bd50f4a3", "external-id": "nsx-vlan-transportzone-163", "segmentation_id": 163, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1e0ac67c-30", "ovs_interfaceid": "1e0ac67c-3039-4c36-831a-d32977fcab32", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2267.575984] env[63279]: DEBUG nova.scheduler.client.report [None req-24dd457b-86eb-4407-ae8c-0e21ece95320 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Inventory has not changed for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2267.611217] env[63279]: DEBUG oslo_vmware.api [None req-f46fd062-ccb3-4cde-81f0-368a25124d21 tempest-ServersAdmin275Test-1209233635 tempest-ServersAdmin275Test-1209233635-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52fb8341-ea50-be2b-3aad-41b01d5adced, 'name': SearchDatastore_Task, 'duration_secs': 0.010656} completed successfully. 
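The inventory report a few entries above is easy to sanity-check by hand: usable capacity per resource class, as placement computes it, is (total - reserved) * allocation_ratio. A short worked example using the exact numbers from the log (the helper itself is just illustrative):

# Inventory for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137,
# copied from the log entry above.
inventory = {
    "VCPU": {"total": 48, "reserved": 0, "allocation_ratio": 4.0},
    "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0},
    "DISK_GB": {"total": 400, "reserved": 0, "allocation_ratio": 1.0},
}


def capacity(record):
    # Usable capacity = (total - reserved) * allocation_ratio.
    return (record["total"] - record["reserved"]) * record["allocation_ratio"]


for rc, record in inventory.items():
    print(f"{rc}: {capacity(record):g}")
# VCPU: 192        (48 vCPUs oversubscribed 4x)
# MEMORY_MB: 196078
# DISK_GB: 400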
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2267.611956] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-068295f8-c408-4bb3-9220-aa717d1d596a {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2267.616929] env[63279]: DEBUG oslo_vmware.api [None req-f46fd062-ccb3-4cde-81f0-368a25124d21 tempest-ServersAdmin275Test-1209233635 tempest-ServersAdmin275Test-1209233635-project-member] Waiting for the task: (returnval){ [ 2267.616929] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52cf79e8-bc5b-616c-ac43-007736f5c279" [ 2267.616929] env[63279]: _type = "Task" [ 2267.616929] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2267.624058] env[63279]: DEBUG oslo_vmware.api [None req-f46fd062-ccb3-4cde-81f0-368a25124d21 tempest-ServersAdmin275Test-1209233635 tempest-ServersAdmin275Test-1209233635-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52cf79e8-bc5b-616c-ac43-007736f5c279, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2267.733609] env[63279]: DEBUG oslo_vmware.api [None req-a019c069-31ac-44de-94e8-2f9e51596f90 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Task: {'id': task-2087902, 'name': CloneVM_Task} progress is 94%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2267.825992] env[63279]: DEBUG oslo_vmware.api [None req-a1749bde-6a19-48fd-a4cc-beb180cafb1c tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] Task: {'id': task-2087908, 'name': ReconfigVM_Task, 'duration_secs': 0.146711} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2267.826321] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-a1749bde-6a19-48fd-a4cc-beb180cafb1c tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] [instance: 9b98a316-71da-45fb-b895-553f179fe7d9] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-427756', 'volume_id': 'abfd7860-7a69-49db-b638-22de4f3a1b94', 'name': 'volume-abfd7860-7a69-49db-b638-22de4f3a1b94', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '9b98a316-71da-45fb-b895-553f179fe7d9', 'attached_at': '', 'detached_at': '', 'volume_id': 'abfd7860-7a69-49db-b638-22de4f3a1b94', 'serial': 'abfd7860-7a69-49db-b638-22de4f3a1b94'} {{(pid=63279) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 2267.847042] env[63279]: DEBUG oslo_concurrency.lockutils [None req-5c88ffbb-5156-4ee9-8ee2-94ea57b56588 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Lock "2fba60ba-7ef0-4e61-9f7d-147cad8ab7ed" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.006s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2267.993364] env[63279]: DEBUG oslo_concurrency.lockutils [req-f54214bb-3459-4367-900b-514a8999d5a1 req-bcac207b-f972-44be-8de3-592ffc8aa8c0 service nova] Releasing lock "refresh_cache-1fca4e5c-fe2c-4b61-bed4-52c7770def7c" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2268.080765] env[63279]: DEBUG oslo_concurrency.lockutils [None req-24dd457b-86eb-4407-ae8c-0e21ece95320 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.818s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2268.102093] env[63279]: INFO nova.scheduler.client.report [None req-24dd457b-86eb-4407-ae8c-0e21ece95320 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Deleted allocations for instance 6b4a94f9-1cd9-466f-844f-8d692b732abc [ 2268.127996] env[63279]: DEBUG oslo_vmware.api [None req-f46fd062-ccb3-4cde-81f0-368a25124d21 tempest-ServersAdmin275Test-1209233635 tempest-ServersAdmin275Test-1209233635-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52cf79e8-bc5b-616c-ac43-007736f5c279, 'name': SearchDatastore_Task, 'duration_secs': 0.035612} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2268.128316] env[63279]: DEBUG oslo_concurrency.lockutils [None req-f46fd062-ccb3-4cde-81f0-368a25124d21 tempest-ServersAdmin275Test-1209233635 tempest-ServersAdmin275Test-1209233635-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2268.128913] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-f46fd062-ccb3-4cde-81f0-368a25124d21 tempest-ServersAdmin275Test-1209233635 tempest-ServersAdmin275Test-1209233635-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] 69c88844-84c7-4272-a2c4-051f1499df84/69c88844-84c7-4272-a2c4-051f1499df84.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2268.128913] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f0b0ae50-5837-4671-ae3e-fb7b5e84a540 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2268.137523] env[63279]: DEBUG oslo_vmware.api [None req-f46fd062-ccb3-4cde-81f0-368a25124d21 tempest-ServersAdmin275Test-1209233635 tempest-ServersAdmin275Test-1209233635-project-member] Waiting for the task: (returnval){ [ 2268.137523] env[63279]: value = "task-2087909" [ 2268.137523] env[63279]: _type = "Task" [ 2268.137523] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2268.146230] env[63279]: DEBUG oslo_vmware.api [None req-f46fd062-ccb3-4cde-81f0-368a25124d21 tempest-ServersAdmin275Test-1209233635 tempest-ServersAdmin275Test-1209233635-project-member] Task: {'id': task-2087909, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2268.234269] env[63279]: DEBUG oslo_vmware.api [None req-a019c069-31ac-44de-94e8-2f9e51596f90 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Task: {'id': task-2087902, 'name': CloneVM_Task} progress is 94%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2268.281720] env[63279]: DEBUG nova.compute.manager [None req-4c6686f2-b4a3-48f1-a3b8-2f0ad9d067b3 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: d0b8c9dd-c683-4f3a-b819-d9d57d96636b] Start spawning the instance on the hypervisor. 
{{(pid=63279) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 2268.327948] env[63279]: DEBUG nova.virt.hardware [None req-4c6686f2-b4a3-48f1-a3b8-2f0ad9d067b3 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-13T17:30:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-13T17:30:01Z,direct_url=,disk_format='vmdk',id=30887889-e45b-4f67-8b3c-16216e594a90,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a935e86eee0a4c38adfe0367d2097a61',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-13T17:30:02Z,virtual_size=,visibility=), allow threads: False {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2268.328378] env[63279]: DEBUG nova.virt.hardware [None req-4c6686f2-b4a3-48f1-a3b8-2f0ad9d067b3 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Flavor limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2268.328649] env[63279]: DEBUG nova.virt.hardware [None req-4c6686f2-b4a3-48f1-a3b8-2f0ad9d067b3 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Image limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2268.328936] env[63279]: DEBUG nova.virt.hardware [None req-4c6686f2-b4a3-48f1-a3b8-2f0ad9d067b3 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Flavor pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2268.329158] env[63279]: DEBUG nova.virt.hardware [None req-4c6686f2-b4a3-48f1-a3b8-2f0ad9d067b3 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Image pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2268.329425] env[63279]: DEBUG nova.virt.hardware [None req-4c6686f2-b4a3-48f1-a3b8-2f0ad9d067b3 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2268.329744] env[63279]: DEBUG nova.virt.hardware [None req-4c6686f2-b4a3-48f1-a3b8-2f0ad9d067b3 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 2268.330061] env[63279]: DEBUG nova.virt.hardware [None req-4c6686f2-b4a3-48f1-a3b8-2f0ad9d067b3 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 2268.330409] env[63279]: DEBUG nova.virt.hardware [None 
req-4c6686f2-b4a3-48f1-a3b8-2f0ad9d067b3 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Got 1 possible topologies {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2268.330724] env[63279]: DEBUG nova.virt.hardware [None req-4c6686f2-b4a3-48f1-a3b8-2f0ad9d067b3 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2268.331025] env[63279]: DEBUG nova.virt.hardware [None req-4c6686f2-b4a3-48f1-a3b8-2f0ad9d067b3 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2268.334660] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3bddee29-f264-4d41-b88f-a3b30254c05b {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2268.345372] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11a937f8-6f03-4ce6-8baf-b7ef22c98497 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2268.431681] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-d1fd8bb8-3b35-4608-ada3-9d7b5c154b26 tempest-ImagesNegativeTestJSON-354022530 tempest-ImagesNegativeTestJSON-354022530-project-member] [instance: ecec02e8-8ddf-4997-9547-ccee0db1938b] Unregistered the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2268.431940] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-d1fd8bb8-3b35-4608-ada3-9d7b5c154b26 tempest-ImagesNegativeTestJSON-354022530 tempest-ImagesNegativeTestJSON-354022530-project-member] [instance: ecec02e8-8ddf-4997-9547-ccee0db1938b] Deleting contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2268.432187] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-d1fd8bb8-3b35-4608-ada3-9d7b5c154b26 tempest-ImagesNegativeTestJSON-354022530 tempest-ImagesNegativeTestJSON-354022530-project-member] Deleting the datastore file [datastore1] ecec02e8-8ddf-4997-9547-ccee0db1938b {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2268.432664] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4472913e-3fb2-4ce4-86ae-94351fb7d7ef {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2268.441641] env[63279]: DEBUG oslo_vmware.api [None req-d1fd8bb8-3b35-4608-ada3-9d7b5c154b26 tempest-ImagesNegativeTestJSON-354022530 tempest-ImagesNegativeTestJSON-354022530-project-member] Waiting for the task: (returnval){ [ 2268.441641] env[63279]: value = "task-2087910" [ 2268.441641] env[63279]: _type = "Task" [ 2268.441641] env[63279]: } to complete. 
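The nova.virt.hardware entries just above walk from the flavor/image limits (65536:65536:65536) down to the single viable topology for one vCPU. The enumeration idea can be shown with a small stand-alone sketch; `possible_topologies` below is a hypothetical helper, not nova's `_get_possible_cpu_topologies`, and it only captures the basic "factor the vCPU count under the maxima" step:

from collections import namedtuple

VirtCPUTopology = namedtuple("VirtCPUTopology", "sockets cores threads")


def possible_topologies(vcpus, max_sockets, max_cores, max_threads):
    # Yield (sockets, cores, threads) combinations whose product equals
    # the requested vCPU count and which respect the maxima.
    for sockets in range(1, min(vcpus, max_sockets) + 1):
        for cores in range(1, min(vcpus, max_cores) + 1):
            for threads in range(1, min(vcpus, max_threads) + 1):
                if sockets * cores * threads == vcpus:
                    yield VirtCPUTopology(sockets, cores, threads)


# One vCPU under effectively unlimited maxima -> exactly one topology,
# matching "Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)]".
print(list(possible_topologies(1, 65536, 65536, 65536)))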
{{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2268.455490] env[63279]: DEBUG oslo_vmware.api [None req-d1fd8bb8-3b35-4608-ada3-9d7b5c154b26 tempest-ImagesNegativeTestJSON-354022530 tempest-ImagesNegativeTestJSON-354022530-project-member] Task: {'id': task-2087910, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2268.610414] env[63279]: DEBUG oslo_concurrency.lockutils [None req-24dd457b-86eb-4407-ae8c-0e21ece95320 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Lock "6b4a94f9-1cd9-466f-844f-8d692b732abc" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 11.609s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2268.656606] env[63279]: DEBUG nova.network.neutron [None req-1efaec50-9d70-4953-a0fc-80de35ec2073 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] [instance: 9fd3ea14-3e25-47f4-ae84-1b1fb8b1bbf6] Successfully updated port: f3d164be-d41c-4e5a-97f5-a50cb539a06a {{(pid=63279) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2268.667437] env[63279]: DEBUG oslo_vmware.api [None req-f46fd062-ccb3-4cde-81f0-368a25124d21 tempest-ServersAdmin275Test-1209233635 tempest-ServersAdmin275Test-1209233635-project-member] Task: {'id': task-2087909, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.504588} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2268.667832] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-f46fd062-ccb3-4cde-81f0-368a25124d21 tempest-ServersAdmin275Test-1209233635 tempest-ServersAdmin275Test-1209233635-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] 69c88844-84c7-4272-a2c4-051f1499df84/69c88844-84c7-4272-a2c4-051f1499df84.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2268.668164] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-f46fd062-ccb3-4cde-81f0-368a25124d21 tempest-ServersAdmin275Test-1209233635 tempest-ServersAdmin275Test-1209233635-project-member] [instance: 69c88844-84c7-4272-a2c4-051f1499df84] Extending root virtual disk to 1048576 {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2268.668506] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-6e89e635-8639-4b9b-bab2-a005e0a5741b {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2268.676291] env[63279]: DEBUG oslo_vmware.api [None req-f46fd062-ccb3-4cde-81f0-368a25124d21 tempest-ServersAdmin275Test-1209233635 tempest-ServersAdmin275Test-1209233635-project-member] Waiting for the task: (returnval){ [ 2268.676291] env[63279]: value = "task-2087911" [ 2268.676291] env[63279]: _type = "Task" [ 2268.676291] env[63279]: } to complete. 
{{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2268.686698] env[63279]: DEBUG oslo_vmware.api [None req-f46fd062-ccb3-4cde-81f0-368a25124d21 tempest-ServersAdmin275Test-1209233635 tempest-ServersAdmin275Test-1209233635-project-member] Task: {'id': task-2087911, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2268.734733] env[63279]: DEBUG oslo_vmware.api [None req-a019c069-31ac-44de-94e8-2f9e51596f90 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Task: {'id': task-2087902, 'name': CloneVM_Task} progress is 95%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2268.899409] env[63279]: DEBUG nova.objects.instance [None req-a1749bde-6a19-48fd-a4cc-beb180cafb1c tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] Lazy-loading 'flavor' on Instance uuid 9b98a316-71da-45fb-b895-553f179fe7d9 {{(pid=63279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2268.934028] env[63279]: DEBUG oslo_concurrency.lockutils [None req-5c88ffbb-5156-4ee9-8ee2-94ea57b56588 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Acquiring lock "2fba60ba-7ef0-4e61-9f7d-147cad8ab7ed" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2268.934028] env[63279]: DEBUG oslo_concurrency.lockutils [None req-5c88ffbb-5156-4ee9-8ee2-94ea57b56588 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Lock "2fba60ba-7ef0-4e61-9f7d-147cad8ab7ed" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2268.934204] env[63279]: INFO nova.compute.manager [None req-5c88ffbb-5156-4ee9-8ee2-94ea57b56588 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] [instance: 2fba60ba-7ef0-4e61-9f7d-147cad8ab7ed] Attaching volume 34b70c8e-1ea7-4e6f-a6c7-bdb6b24134a9 to /dev/sdb [ 2268.951619] env[63279]: DEBUG oslo_vmware.api [None req-d1fd8bb8-3b35-4608-ada3-9d7b5c154b26 tempest-ImagesNegativeTestJSON-354022530 tempest-ImagesNegativeTestJSON-354022530-project-member] Task: {'id': task-2087910, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.372478} completed successfully. 
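`Using /dev/sd instead of None` and `Attaching volume … to /dev/sdb` above reflect how the compute manager picks the next free device name when the request left it blank. A toy version of that selection, covering only the single-letter /dev/sdX case (nova's real get_next_device_name also handles prefixes, root devices and multi-letter suffixes):

import string


def next_device_name(used, prefix="/dev/sd"):
    # Toy model of the "reserve_block_device_name" step: `used` is the
    # set of device names already attached to the instance.
    for letter in string.ascii_lowercase:
        candidate = prefix + letter
        if candidate not in used:
            return candidate
    raise ValueError("no free device names under %s" % prefix)


# Root disk on sda, so the first data volume lands on /dev/sdb,
# matching "Attaching volume ... to /dev/sdb" in the log.
print(next_device_name({"/dev/sda"}))  # -> /dev/sdb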
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2268.951945] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-d1fd8bb8-3b35-4608-ada3-9d7b5c154b26 tempest-ImagesNegativeTestJSON-354022530 tempest-ImagesNegativeTestJSON-354022530-project-member] Deleted the datastore file {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2268.952185] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-d1fd8bb8-3b35-4608-ada3-9d7b5c154b26 tempest-ImagesNegativeTestJSON-354022530 tempest-ImagesNegativeTestJSON-354022530-project-member] [instance: ecec02e8-8ddf-4997-9547-ccee0db1938b] Deleted contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2268.952397] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-d1fd8bb8-3b35-4608-ada3-9d7b5c154b26 tempest-ImagesNegativeTestJSON-354022530 tempest-ImagesNegativeTestJSON-354022530-project-member] [instance: ecec02e8-8ddf-4997-9547-ccee0db1938b] Instance destroyed {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2268.952581] env[63279]: INFO nova.compute.manager [None req-d1fd8bb8-3b35-4608-ada3-9d7b5c154b26 tempest-ImagesNegativeTestJSON-354022530 tempest-ImagesNegativeTestJSON-354022530-project-member] [instance: ecec02e8-8ddf-4997-9547-ccee0db1938b] Took 6.87 seconds to destroy the instance on the hypervisor. [ 2268.952836] env[63279]: DEBUG oslo.service.loopingcall [None req-d1fd8bb8-3b35-4608-ada3-9d7b5c154b26 tempest-ImagesNegativeTestJSON-354022530 tempest-ImagesNegativeTestJSON-354022530-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2268.953233] env[63279]: DEBUG nova.compute.manager [-] [instance: ecec02e8-8ddf-4997-9547-ccee0db1938b] Deallocating network for instance {{(pid=63279) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2268.953233] env[63279]: DEBUG nova.network.neutron [-] [instance: ecec02e8-8ddf-4997-9547-ccee0db1938b] deallocate_for_instance() {{(pid=63279) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2268.968423] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5bd238c3-dd72-4b74-bbbb-52e284383c5b {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2268.976160] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-529d3cc3-7ec6-4bf9-a9fe-a0e7296265cb {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2268.990293] env[63279]: DEBUG nova.virt.block_device [None req-5c88ffbb-5156-4ee9-8ee2-94ea57b56588 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] [instance: 2fba60ba-7ef0-4e61-9f7d-147cad8ab7ed] Updating existing volume attachment record: e972ef7c-6469-48db-b672-d7336c9affe8 {{(pid=63279) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 2269.039294] env[63279]: DEBUG nova.compute.manager [req-7ada15ef-79e2-49ae-bdaa-11ccacae1927 req-b522044b-b79b-4e2a-91bb-1e787e95e585 service nova] [instance: 9fd3ea14-3e25-47f4-ae84-1b1fb8b1bbf6] Received event network-vif-plugged-f3d164be-d41c-4e5a-97f5-a50cb539a06a {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2269.039294] env[63279]: DEBUG oslo_concurrency.lockutils [req-7ada15ef-79e2-49ae-bdaa-11ccacae1927 req-b522044b-b79b-4e2a-91bb-1e787e95e585 service nova] Acquiring lock "9fd3ea14-3e25-47f4-ae84-1b1fb8b1bbf6-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2269.039294] env[63279]: DEBUG oslo_concurrency.lockutils [req-7ada15ef-79e2-49ae-bdaa-11ccacae1927 req-b522044b-b79b-4e2a-91bb-1e787e95e585 service nova] Lock "9fd3ea14-3e25-47f4-ae84-1b1fb8b1bbf6-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2269.039294] env[63279]: DEBUG oslo_concurrency.lockutils [req-7ada15ef-79e2-49ae-bdaa-11ccacae1927 req-b522044b-b79b-4e2a-91bb-1e787e95e585 service nova] Lock "9fd3ea14-3e25-47f4-ae84-1b1fb8b1bbf6-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2269.039294] env[63279]: DEBUG nova.compute.manager [req-7ada15ef-79e2-49ae-bdaa-11ccacae1927 req-b522044b-b79b-4e2a-91bb-1e787e95e585 service nova] [instance: 9fd3ea14-3e25-47f4-ae84-1b1fb8b1bbf6] No waiting events found dispatching network-vif-plugged-f3d164be-d41c-4e5a-97f5-a50cb539a06a {{(pid=63279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 2269.039294] env[63279]: WARNING nova.compute.manager 
[req-7ada15ef-79e2-49ae-bdaa-11ccacae1927 req-b522044b-b79b-4e2a-91bb-1e787e95e585 service nova] [instance: 9fd3ea14-3e25-47f4-ae84-1b1fb8b1bbf6] Received unexpected event network-vif-plugged-f3d164be-d41c-4e5a-97f5-a50cb539a06a for instance with vm_state active and task_state None. [ 2269.162874] env[63279]: DEBUG oslo_concurrency.lockutils [None req-1efaec50-9d70-4953-a0fc-80de35ec2073 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Acquiring lock "refresh_cache-9fd3ea14-3e25-47f4-ae84-1b1fb8b1bbf6" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2269.164889] env[63279]: DEBUG oslo_concurrency.lockutils [None req-1efaec50-9d70-4953-a0fc-80de35ec2073 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Acquired lock "refresh_cache-9fd3ea14-3e25-47f4-ae84-1b1fb8b1bbf6" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2269.164889] env[63279]: DEBUG nova.network.neutron [None req-1efaec50-9d70-4953-a0fc-80de35ec2073 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] [instance: 9fd3ea14-3e25-47f4-ae84-1b1fb8b1bbf6] Building network info cache for instance {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2269.189271] env[63279]: DEBUG oslo_vmware.api [None req-f46fd062-ccb3-4cde-81f0-368a25124d21 tempest-ServersAdmin275Test-1209233635 tempest-ServersAdmin275Test-1209233635-project-member] Task: {'id': task-2087911, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.069425} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2269.189271] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-f46fd062-ccb3-4cde-81f0-368a25124d21 tempest-ServersAdmin275Test-1209233635 tempest-ServersAdmin275Test-1209233635-project-member] [instance: 69c88844-84c7-4272-a2c4-051f1499df84] Extended root virtual disk {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2269.191242] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea4e1502-9870-4ef7-b2ef-fba576267e8c {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2269.219092] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-f46fd062-ccb3-4cde-81f0-368a25124d21 tempest-ServersAdmin275Test-1209233635 tempest-ServersAdmin275Test-1209233635-project-member] [instance: 69c88844-84c7-4272-a2c4-051f1499df84] Reconfiguring VM instance instance-00000063 to attach disk [datastore1] 69c88844-84c7-4272-a2c4-051f1499df84/69c88844-84c7-4272-a2c4-051f1499df84.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2269.223341] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5d63d71b-2e90-4d67-aac8-45c27e9848fa {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2269.261266] env[63279]: DEBUG oslo_vmware.api [None req-a019c069-31ac-44de-94e8-2f9e51596f90 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Task: {'id': task-2087902, 'name': CloneVM_Task, 'duration_secs': 3.81536} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2269.264810] env[63279]: INFO nova.virt.vmwareapi.vmops [None req-a019c069-31ac-44de-94e8-2f9e51596f90 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 686a7ce2-2d07-411e-91d6-0471c55c3728] Created linked-clone VM from snapshot [ 2269.264810] env[63279]: DEBUG oslo_vmware.api [None req-f46fd062-ccb3-4cde-81f0-368a25124d21 tempest-ServersAdmin275Test-1209233635 tempest-ServersAdmin275Test-1209233635-project-member] Waiting for the task: (returnval){ [ 2269.264810] env[63279]: value = "task-2087913" [ 2269.264810] env[63279]: _type = "Task" [ 2269.264810] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2269.266189] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be7f3eae-ef7d-44b2-a4b5-6664c7533ae0 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2269.282688] env[63279]: DEBUG nova.virt.vmwareapi.images [None req-a019c069-31ac-44de-94e8-2f9e51596f90 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 686a7ce2-2d07-411e-91d6-0471c55c3728] Uploading image a8552abc-0ec1-42e4-85be-a54106036a05 {{(pid=63279) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 2269.287147] env[63279]: DEBUG nova.network.neutron [None req-4c6686f2-b4a3-48f1-a3b8-2f0ad9d067b3 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: d0b8c9dd-c683-4f3a-b819-d9d57d96636b] Successfully updated port: ee08f9b0-ce61-4fa5-bf67-3a97acafc55e {{(pid=63279) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2269.295311] env[63279]: DEBUG oslo_vmware.api [None req-f46fd062-ccb3-4cde-81f0-368a25124d21 tempest-ServersAdmin275Test-1209233635 tempest-ServersAdmin275Test-1209233635-project-member] Task: {'id': task-2087913, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2269.318249] env[63279]: DEBUG oslo_vmware.rw_handles [None req-a019c069-31ac-44de-94e8-2f9e51596f90 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 2269.318249] env[63279]: value = "vm-427758" [ 2269.318249] env[63279]: _type = "VirtualMachine" [ 2269.318249] env[63279]: }. 
{{(pid=63279) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 2269.321270] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-72d6fb57-a2e9-46da-a23b-6b7d12129417 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2269.325743] env[63279]: DEBUG oslo_vmware.rw_handles [None req-a019c069-31ac-44de-94e8-2f9e51596f90 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Lease: (returnval){ [ 2269.325743] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]5281b78d-2ea0-254d-b256-8a9da703eb90" [ 2269.325743] env[63279]: _type = "HttpNfcLease" [ 2269.325743] env[63279]: } obtained for exporting VM: (result){ [ 2269.325743] env[63279]: value = "vm-427758" [ 2269.325743] env[63279]: _type = "VirtualMachine" [ 2269.325743] env[63279]: }. {{(pid=63279) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 2269.326028] env[63279]: DEBUG oslo_vmware.api [None req-a019c069-31ac-44de-94e8-2f9e51596f90 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Waiting for the lease: (returnval){ [ 2269.326028] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]5281b78d-2ea0-254d-b256-8a9da703eb90" [ 2269.326028] env[63279]: _type = "HttpNfcLease" [ 2269.326028] env[63279]: } to be ready. {{(pid=63279) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 2269.336084] env[63279]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 2269.336084] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]5281b78d-2ea0-254d-b256-8a9da703eb90" [ 2269.336084] env[63279]: _type = "HttpNfcLease" [ 2269.336084] env[63279]: } is initializing. 
{{(pid=63279) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 2269.404635] env[63279]: DEBUG oslo_concurrency.lockutils [None req-a1749bde-6a19-48fd-a4cc-beb180cafb1c tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] Lock "9b98a316-71da-45fb-b895-553f179fe7d9" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 8.296s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2269.492224] env[63279]: DEBUG nova.network.neutron [None req-284fb117-637f-4fa4-b022-b8d4dd30d5e2 tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] [instance: 19e10ee4-99d1-44b9-9354-4c162d541a1f] Successfully updated port: 15cdfe62-d983-4e01-beb9-1947d51443e0 {{(pid=63279) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2269.548116] env[63279]: DEBUG nova.compute.manager [req-069d5eb8-8e82-4945-ac89-6d2b150dae0a req-8e2cdcf0-9022-49b9-884f-1e8278c597c5 service nova] [instance: d0b8c9dd-c683-4f3a-b819-d9d57d96636b] Received event network-vif-plugged-ee08f9b0-ce61-4fa5-bf67-3a97acafc55e {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2269.548116] env[63279]: DEBUG oslo_concurrency.lockutils [req-069d5eb8-8e82-4945-ac89-6d2b150dae0a req-8e2cdcf0-9022-49b9-884f-1e8278c597c5 service nova] Acquiring lock "d0b8c9dd-c683-4f3a-b819-d9d57d96636b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2269.548116] env[63279]: DEBUG oslo_concurrency.lockutils [req-069d5eb8-8e82-4945-ac89-6d2b150dae0a req-8e2cdcf0-9022-49b9-884f-1e8278c597c5 service nova] Lock "d0b8c9dd-c683-4f3a-b819-d9d57d96636b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2269.548312] env[63279]: DEBUG oslo_concurrency.lockutils [req-069d5eb8-8e82-4945-ac89-6d2b150dae0a req-8e2cdcf0-9022-49b9-884f-1e8278c597c5 service nova] Lock "d0b8c9dd-c683-4f3a-b819-d9d57d96636b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2269.548515] env[63279]: DEBUG nova.compute.manager [req-069d5eb8-8e82-4945-ac89-6d2b150dae0a req-8e2cdcf0-9022-49b9-884f-1e8278c597c5 service nova] [instance: d0b8c9dd-c683-4f3a-b819-d9d57d96636b] No waiting events found dispatching network-vif-plugged-ee08f9b0-ce61-4fa5-bf67-3a97acafc55e {{(pid=63279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 2269.548699] env[63279]: WARNING nova.compute.manager [req-069d5eb8-8e82-4945-ac89-6d2b150dae0a req-8e2cdcf0-9022-49b9-884f-1e8278c597c5 service nova] [instance: d0b8c9dd-c683-4f3a-b819-d9d57d96636b] Received unexpected event network-vif-plugged-ee08f9b0-ce61-4fa5-bf67-3a97acafc55e for instance with vm_state building and task_state spawning. 
[ 2269.616121] env[63279]: DEBUG oslo_concurrency.lockutils [None req-7abee42c-e6b2-4b7c-9763-4983fc7c6675 tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] Acquiring lock "21aca39e-8513-49bd-93e9-0d101cee591f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2269.616121] env[63279]: DEBUG oslo_concurrency.lockutils [None req-7abee42c-e6b2-4b7c-9763-4983fc7c6675 tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] Lock "21aca39e-8513-49bd-93e9-0d101cee591f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2269.616121] env[63279]: DEBUG oslo_concurrency.lockutils [None req-7abee42c-e6b2-4b7c-9763-4983fc7c6675 tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] Acquiring lock "21aca39e-8513-49bd-93e9-0d101cee591f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2269.616121] env[63279]: DEBUG oslo_concurrency.lockutils [None req-7abee42c-e6b2-4b7c-9763-4983fc7c6675 tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] Lock "21aca39e-8513-49bd-93e9-0d101cee591f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2269.616121] env[63279]: DEBUG oslo_concurrency.lockutils [None req-7abee42c-e6b2-4b7c-9763-4983fc7c6675 tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] Lock "21aca39e-8513-49bd-93e9-0d101cee591f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2269.618149] env[63279]: INFO nova.compute.manager [None req-7abee42c-e6b2-4b7c-9763-4983fc7c6675 tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] [instance: 21aca39e-8513-49bd-93e9-0d101cee591f] Terminating instance [ 2269.704484] env[63279]: WARNING nova.network.neutron [None req-1efaec50-9d70-4953-a0fc-80de35ec2073 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] [instance: 9fd3ea14-3e25-47f4-ae84-1b1fb8b1bbf6] e0e614b7-de4b-485e-8824-582faae8febd already exists in list: networks containing: ['e0e614b7-de4b-485e-8824-582faae8febd']. ignoring it [ 2269.766175] env[63279]: DEBUG nova.network.neutron [-] [instance: ecec02e8-8ddf-4997-9547-ccee0db1938b] Updating instance_info_cache with network_info: [] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2269.780751] env[63279]: DEBUG oslo_vmware.api [None req-f46fd062-ccb3-4cde-81f0-368a25124d21 tempest-ServersAdmin275Test-1209233635 tempest-ServersAdmin275Test-1209233635-project-member] Task: {'id': task-2087913, 'name': ReconfigVM_Task, 'duration_secs': 0.289749} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2269.781054] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-f46fd062-ccb3-4cde-81f0-368a25124d21 tempest-ServersAdmin275Test-1209233635 tempest-ServersAdmin275Test-1209233635-project-member] [instance: 69c88844-84c7-4272-a2c4-051f1499df84] Reconfigured VM instance instance-00000063 to attach disk [datastore1] 69c88844-84c7-4272-a2c4-051f1499df84/69c88844-84c7-4272-a2c4-051f1499df84.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2269.781892] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-5f9c2490-601e-4483-b119-1535e09d79bc {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2269.789465] env[63279]: DEBUG oslo_vmware.api [None req-f46fd062-ccb3-4cde-81f0-368a25124d21 tempest-ServersAdmin275Test-1209233635 tempest-ServersAdmin275Test-1209233635-project-member] Waiting for the task: (returnval){ [ 2269.789465] env[63279]: value = "task-2087917" [ 2269.789465] env[63279]: _type = "Task" [ 2269.789465] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2269.798826] env[63279]: DEBUG oslo_concurrency.lockutils [None req-4c6686f2-b4a3-48f1-a3b8-2f0ad9d067b3 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Acquiring lock "refresh_cache-d0b8c9dd-c683-4f3a-b819-d9d57d96636b" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2269.799069] env[63279]: DEBUG oslo_concurrency.lockutils [None req-4c6686f2-b4a3-48f1-a3b8-2f0ad9d067b3 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Acquired lock "refresh_cache-d0b8c9dd-c683-4f3a-b819-d9d57d96636b" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2269.799168] env[63279]: DEBUG nova.network.neutron [None req-4c6686f2-b4a3-48f1-a3b8-2f0ad9d067b3 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: d0b8c9dd-c683-4f3a-b819-d9d57d96636b] Building network info cache for instance {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2269.802112] env[63279]: DEBUG oslo_vmware.api [None req-f46fd062-ccb3-4cde-81f0-368a25124d21 tempest-ServersAdmin275Test-1209233635 tempest-ServersAdmin275Test-1209233635-project-member] Task: {'id': task-2087917, 'name': Rename_Task} progress is 5%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2269.835481] env[63279]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 2269.835481] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]5281b78d-2ea0-254d-b256-8a9da703eb90" [ 2269.835481] env[63279]: _type = "HttpNfcLease" [ 2269.835481] env[63279]: } is ready. 
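The lease lifecycle above (ExportVm, lease `is initializing`, lease `is ready`, then a VMDK URL) is how the linked-clone image gets streamed out of vCenter for upload to Glance. A rough sketch of the first half of that flow with oslo.vmware, assuming an already established `session` and a clone `vm_ref`; reading the lease info and the HTTP transfer of disk-0.vmdk (rw_handles) are left out:

def start_export(session, vm_ref):
    # Mirrors the "Invoking VirtualMachine.ExportVm" and
    # "wait_for_lease_ready" entries in the log; the download of the
    # exported VMDK that follows is not shown here.
    #
    # ExportVm returns an HttpNfcLease managed object reference.
    lease = session.invoke_api(session.vim, "ExportVm", vm_ref)
    # Polls the lease state until it leaves "initializing"; raises if
    # the lease ends up in the error state.
    session.wait_for_lease_ready(lease)
    return lease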
{{(pid=63279) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 2269.836117] env[63279]: DEBUG oslo_vmware.rw_handles [None req-a019c069-31ac-44de-94e8-2f9e51596f90 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 2269.836117] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]5281b78d-2ea0-254d-b256-8a9da703eb90" [ 2269.836117] env[63279]: _type = "HttpNfcLease" [ 2269.836117] env[63279]: }. {{(pid=63279) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 2269.836900] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c68be4d5-6b7c-46ee-8ead-93b83de046a4 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2269.845447] env[63279]: DEBUG oslo_vmware.rw_handles [None req-a019c069-31ac-44de-94e8-2f9e51596f90 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/523519bc-a7f0-1e26-ec10-3b4df4f3a3a1/disk-0.vmdk from lease info. {{(pid=63279) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 2269.845671] env[63279]: DEBUG oslo_vmware.rw_handles [None req-a019c069-31ac-44de-94e8-2f9e51596f90 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/523519bc-a7f0-1e26-ec10-3b4df4f3a3a1/disk-0.vmdk for reading. {{(pid=63279) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 2269.950668] env[63279]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-5570bfa7-e46c-4c43-b510-aaf8d10d3a21 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2269.996809] env[63279]: DEBUG oslo_concurrency.lockutils [None req-284fb117-637f-4fa4-b022-b8d4dd30d5e2 tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Acquiring lock "refresh_cache-19e10ee4-99d1-44b9-9354-4c162d541a1f" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2269.996966] env[63279]: DEBUG oslo_concurrency.lockutils [None req-284fb117-637f-4fa4-b022-b8d4dd30d5e2 tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Acquired lock "refresh_cache-19e10ee4-99d1-44b9-9354-4c162d541a1f" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2269.997550] env[63279]: DEBUG nova.network.neutron [None req-284fb117-637f-4fa4-b022-b8d4dd30d5e2 tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] [instance: 19e10ee4-99d1-44b9-9354-4c162d541a1f] Building network info cache for instance {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2270.080799] env[63279]: DEBUG nova.network.neutron [None req-1efaec50-9d70-4953-a0fc-80de35ec2073 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] [instance: 9fd3ea14-3e25-47f4-ae84-1b1fb8b1bbf6] Updating instance_info_cache with network_info: [{"id": 
"0bd52488-d960-42c9-8077-fc8fe9eda956", "address": "fa:16:3e:16:45:3c", "network": {"id": "e0e614b7-de4b-485e-8824-582faae8febd", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1102025073-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.241", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b5b21bc5072e4945a19a782dd9561709", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "41278529-8bd2-44a1-97c8-03967faa3ff7", "external-id": "nsx-vlan-transportzone-749", "segmentation_id": 749, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0bd52488-d9", "ovs_interfaceid": "0bd52488-d960-42c9-8077-fc8fe9eda956", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "f3d164be-d41c-4e5a-97f5-a50cb539a06a", "address": "fa:16:3e:35:3a:40", "network": {"id": "e0e614b7-de4b-485e-8824-582faae8febd", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1102025073-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b5b21bc5072e4945a19a782dd9561709", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "41278529-8bd2-44a1-97c8-03967faa3ff7", "external-id": "nsx-vlan-transportzone-749", "segmentation_id": 749, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf3d164be-d4", "ovs_interfaceid": "f3d164be-d41c-4e5a-97f5-a50cb539a06a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2270.121566] env[63279]: DEBUG nova.compute.manager [None req-7abee42c-e6b2-4b7c-9763-4983fc7c6675 tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] [instance: 21aca39e-8513-49bd-93e9-0d101cee591f] Start destroying the instance on the hypervisor. 
{{(pid=63279) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2270.121879] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-7abee42c-e6b2-4b7c-9763-4983fc7c6675 tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] [instance: 21aca39e-8513-49bd-93e9-0d101cee591f] Destroying instance {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2270.122956] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-437a9fd2-0526-4641-aab2-df8064b31422 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2270.131252] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-7abee42c-e6b2-4b7c-9763-4983fc7c6675 tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] [instance: 21aca39e-8513-49bd-93e9-0d101cee591f] Powering off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2270.131515] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-86bae485-c5ab-4196-bceb-57a0ad07979d {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2270.138479] env[63279]: DEBUG oslo_vmware.api [None req-7abee42c-e6b2-4b7c-9763-4983fc7c6675 tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] Waiting for the task: (returnval){ [ 2270.138479] env[63279]: value = "task-2087918" [ 2270.138479] env[63279]: _type = "Task" [ 2270.138479] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2270.146924] env[63279]: DEBUG oslo_vmware.api [None req-7abee42c-e6b2-4b7c-9763-4983fc7c6675 tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] Task: {'id': task-2087918, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2270.208259] env[63279]: DEBUG oslo_concurrency.lockutils [None req-ee33c015-17af-469f-8aaf-fbc4c245a33d tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] Acquiring lock "9b98a316-71da-45fb-b895-553f179fe7d9" by "nova.compute.manager.ComputeManager.detach_volume.<locals>.do_detach_volume" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2270.208611] env[63279]: DEBUG oslo_concurrency.lockutils [None req-ee33c015-17af-469f-8aaf-fbc4c245a33d tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] Lock "9b98a316-71da-45fb-b895-553f179fe7d9" acquired by "nova.compute.manager.ComputeManager.detach_volume.<locals>.do_detach_volume" :: waited 0.001s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2270.271689] env[63279]: INFO nova.compute.manager [-] [instance: ecec02e8-8ddf-4997-9547-ccee0db1938b] Took 1.32 seconds to deallocate network for instance. [ 2270.300763] env[63279]: DEBUG oslo_vmware.api [None req-f46fd062-ccb3-4cde-81f0-368a25124d21 tempest-ServersAdmin275Test-1209233635 tempest-ServersAdmin275Test-1209233635-project-member] Task: {'id': task-2087917, 'name': Rename_Task, 'duration_secs': 0.168545} completed successfully.
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2270.301444] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-f46fd062-ccb3-4cde-81f0-368a25124d21 tempest-ServersAdmin275Test-1209233635 tempest-ServersAdmin275Test-1209233635-project-member] [instance: 69c88844-84c7-4272-a2c4-051f1499df84] Powering on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2270.303866] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-882000db-dfaa-4cf8-bd9a-8198e26c9744 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2270.311240] env[63279]: DEBUG oslo_vmware.api [None req-f46fd062-ccb3-4cde-81f0-368a25124d21 tempest-ServersAdmin275Test-1209233635 tempest-ServersAdmin275Test-1209233635-project-member] Waiting for the task: (returnval){ [ 2270.311240] env[63279]: value = "task-2087919" [ 2270.311240] env[63279]: _type = "Task" [ 2270.311240] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2270.321918] env[63279]: DEBUG oslo_vmware.api [None req-f46fd062-ccb3-4cde-81f0-368a25124d21 tempest-ServersAdmin275Test-1209233635 tempest-ServersAdmin275Test-1209233635-project-member] Task: {'id': task-2087919, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2270.338056] env[63279]: DEBUG nova.network.neutron [None req-4c6686f2-b4a3-48f1-a3b8-2f0ad9d067b3 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: d0b8c9dd-c683-4f3a-b819-d9d57d96636b] Instance cache missing network info. 
{{(pid=63279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2270.494889] env[63279]: DEBUG nova.network.neutron [None req-4c6686f2-b4a3-48f1-a3b8-2f0ad9d067b3 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: d0b8c9dd-c683-4f3a-b819-d9d57d96636b] Updating instance_info_cache with network_info: [{"id": "ee08f9b0-ce61-4fa5-bf67-3a97acafc55e", "address": "fa:16:3e:16:9a:ab", "network": {"id": "e2728625-1c28-407c-946b-97923d57b409", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1735124510-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a5a719a21fe248c49d0d0151d218866b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a9abd00f-2cea-40f8-9804-a56b6431192d", "external-id": "nsx-vlan-transportzone-639", "segmentation_id": 639, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapee08f9b0-ce", "ovs_interfaceid": "ee08f9b0-ce61-4fa5-bf67-3a97acafc55e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2270.534928] env[63279]: DEBUG nova.network.neutron [None req-284fb117-637f-4fa4-b022-b8d4dd30d5e2 tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] [instance: 19e10ee4-99d1-44b9-9354-4c162d541a1f] Instance cache missing network info. 
{{(pid=63279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2270.584979] env[63279]: DEBUG oslo_concurrency.lockutils [None req-1efaec50-9d70-4953-a0fc-80de35ec2073 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Releasing lock "refresh_cache-9fd3ea14-3e25-47f4-ae84-1b1fb8b1bbf6" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2270.585668] env[63279]: DEBUG oslo_concurrency.lockutils [None req-1efaec50-9d70-4953-a0fc-80de35ec2073 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Acquiring lock "9fd3ea14-3e25-47f4-ae84-1b1fb8b1bbf6" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2270.585932] env[63279]: DEBUG oslo_concurrency.lockutils [None req-1efaec50-9d70-4953-a0fc-80de35ec2073 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Acquired lock "9fd3ea14-3e25-47f4-ae84-1b1fb8b1bbf6" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2270.586805] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc37658a-4dee-4c03-94bd-c91c9262f4c6 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2270.614667] env[63279]: DEBUG nova.virt.hardware [None req-1efaec50-9d70-4953-a0fc-80de35ec2073 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-13T17:30:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=<?>,container_format=<?>,created_at=<?>,direct_url=<?>,disk_format=<?>,id=<?>,min_disk=<?>,min_ram=<?>,name=<?>,owner=<?>,properties=ImageMetaProps,protected=<?>,size=<?>,status=<?>,tags=<?>,updated_at=<?>,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2270.615137] env[63279]: DEBUG nova.virt.hardware [None req-1efaec50-9d70-4953-a0fc-80de35ec2073 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Flavor limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2270.615550] env[63279]: DEBUG nova.virt.hardware [None req-1efaec50-9d70-4953-a0fc-80de35ec2073 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Image limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2270.615777] env[63279]: DEBUG nova.virt.hardware [None req-1efaec50-9d70-4953-a0fc-80de35ec2073 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Flavor pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2270.615938] env[63279]: DEBUG nova.virt.hardware [None req-1efaec50-9d70-4953-a0fc-80de35ec2073 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Image pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints
/opt/stack/nova/nova/virt/hardware.py:396}} [ 2270.616243] env[63279]: DEBUG nova.virt.hardware [None req-1efaec50-9d70-4953-a0fc-80de35ec2073 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2270.616575] env[63279]: DEBUG nova.virt.hardware [None req-1efaec50-9d70-4953-a0fc-80de35ec2073 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 2270.616912] env[63279]: DEBUG nova.virt.hardware [None req-1efaec50-9d70-4953-a0fc-80de35ec2073 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 2270.616983] env[63279]: DEBUG nova.virt.hardware [None req-1efaec50-9d70-4953-a0fc-80de35ec2073 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Got 1 possible topologies {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2270.617280] env[63279]: DEBUG nova.virt.hardware [None req-1efaec50-9d70-4953-a0fc-80de35ec2073 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2270.617559] env[63279]: DEBUG nova.virt.hardware [None req-1efaec50-9d70-4953-a0fc-80de35ec2073 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2270.624452] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-1efaec50-9d70-4953-a0fc-80de35ec2073 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] [instance: 9fd3ea14-3e25-47f4-ae84-1b1fb8b1bbf6] Reconfiguring VM to attach interface {{(pid=63279) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1923}} [ 2270.627698] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a0a94e81-f7c8-4327-98cd-fcd07e3ebf1e {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2270.657899] env[63279]: DEBUG oslo_vmware.api [None req-7abee42c-e6b2-4b7c-9763-4983fc7c6675 tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] Task: {'id': task-2087918, 'name': PowerOffVM_Task, 'duration_secs': 0.303887} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2270.658603] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-7abee42c-e6b2-4b7c-9763-4983fc7c6675 tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] [instance: 21aca39e-8513-49bd-93e9-0d101cee591f] Powered off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2270.658797] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-7abee42c-e6b2-4b7c-9763-4983fc7c6675 tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] [instance: 21aca39e-8513-49bd-93e9-0d101cee591f] Unregistering the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2270.659160] env[63279]: DEBUG oslo_vmware.api [None req-1efaec50-9d70-4953-a0fc-80de35ec2073 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Waiting for the task: (returnval){ [ 2270.659160] env[63279]: value = "task-2087920" [ 2270.659160] env[63279]: _type = "Task" [ 2270.659160] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2270.659588] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-dc5d899d-402b-4064-a4c2-3dc05e6a9334 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2270.670495] env[63279]: DEBUG oslo_vmware.api [None req-1efaec50-9d70-4953-a0fc-80de35ec2073 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Task: {'id': task-2087920, 'name': ReconfigVM_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2270.711734] env[63279]: INFO nova.compute.manager [None req-ee33c015-17af-469f-8aaf-fbc4c245a33d tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] [instance: 9b98a316-71da-45fb-b895-553f179fe7d9] Detaching volume abfd7860-7a69-49db-b638-22de4f3a1b94 [ 2270.722836] env[63279]: DEBUG nova.network.neutron [None req-284fb117-637f-4fa4-b022-b8d4dd30d5e2 tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] [instance: 19e10ee4-99d1-44b9-9354-4c162d541a1f] Updating instance_info_cache with network_info: [{"id": "15cdfe62-d983-4e01-beb9-1947d51443e0", "address": "fa:16:3e:a4:65:02", "network": {"id": "f7000655-b20b-461d-9d08-f4cb8a85522e", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-686033866-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7674483744fe490b8cbe75532dfad77c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cbe1725d-6711-4e92-9a4e-d4802651e7d0", "external-id": "nsx-vlan-transportzone-679", "segmentation_id": 679, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap15cdfe62-d9", "ovs_interfaceid": "15cdfe62-d983-4e01-beb9-1947d51443e0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2270.741919] env[63279]: DEBUG oslo_concurrency.lockutils [None req-6d3f6bc6-99d6-42fc-8d00-639d1e794324 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Acquiring lock "6e452711-a79c-4560-b38f-9414c87e6683" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2270.742454] env[63279]: DEBUG oslo_concurrency.lockutils [None req-6d3f6bc6-99d6-42fc-8d00-639d1e794324 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Lock "6e452711-a79c-4560-b38f-9414c87e6683" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2270.746368] env[63279]: INFO nova.virt.block_device [None req-ee33c015-17af-469f-8aaf-fbc4c245a33d tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] [instance: 9b98a316-71da-45fb-b895-553f179fe7d9] Attempting to driver detach volume abfd7860-7a69-49db-b638-22de4f3a1b94 from mountpoint /dev/sdb [ 2270.747139] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-ee33c015-17af-469f-8aaf-fbc4c245a33d tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] [instance: 9b98a316-71da-45fb-b895-553f179fe7d9] Volume detach.
Driver type: vmdk {{(pid=63279) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 2270.747457] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-ee33c015-17af-469f-8aaf-fbc4c245a33d tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] [instance: 9b98a316-71da-45fb-b895-553f179fe7d9] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-427756', 'volume_id': 'abfd7860-7a69-49db-b638-22de4f3a1b94', 'name': 'volume-abfd7860-7a69-49db-b638-22de4f3a1b94', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '9b98a316-71da-45fb-b895-553f179fe7d9', 'attached_at': '', 'detached_at': '', 'volume_id': 'abfd7860-7a69-49db-b638-22de4f3a1b94', 'serial': 'abfd7860-7a69-49db-b638-22de4f3a1b94'} {{(pid=63279) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 2270.748943] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25968ca1-71ad-4ed7-a649-fc1df59f805e {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2270.774596] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de6aa1c8-28b4-49e7-8ed2-0c58af491a8e {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2270.778390] env[63279]: DEBUG oslo_concurrency.lockutils [None req-d1fd8bb8-3b35-4608-ada3-9d7b5c154b26 tempest-ImagesNegativeTestJSON-354022530 tempest-ImagesNegativeTestJSON-354022530-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2270.778716] env[63279]: DEBUG oslo_concurrency.lockutils [None req-d1fd8bb8-3b35-4608-ada3-9d7b5c154b26 tempest-ImagesNegativeTestJSON-354022530 tempest-ImagesNegativeTestJSON-354022530-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2270.779008] env[63279]: DEBUG nova.objects.instance [None req-d1fd8bb8-3b35-4608-ada3-9d7b5c154b26 tempest-ImagesNegativeTestJSON-354022530 tempest-ImagesNegativeTestJSON-354022530-project-member] Lazy-loading 'resources' on Instance uuid ecec02e8-8ddf-4997-9547-ccee0db1938b {{(pid=63279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2270.785465] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f300a90-5742-4cf5-ac2e-94651fb76db6 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2270.806982] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8363d698-0325-4d64-aaec-ed018f3eac42 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2270.822793] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-ee33c015-17af-469f-8aaf-fbc4c245a33d tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] The volume has not been displaced from its original location: 
[datastore1] volume-abfd7860-7a69-49db-b638-22de4f3a1b94/volume-abfd7860-7a69-49db-b638-22de4f3a1b94.vmdk. No consolidation needed. {{(pid=63279) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 2270.827973] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-ee33c015-17af-469f-8aaf-fbc4c245a33d tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] [instance: 9b98a316-71da-45fb-b895-553f179fe7d9] Reconfiguring VM instance instance-00000054 to detach disk 2001 {{(pid=63279) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 2270.831608] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1266eb66-e9a5-40fb-8676-05b029e8cbd7 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2270.851844] env[63279]: DEBUG oslo_vmware.api [None req-f46fd062-ccb3-4cde-81f0-368a25124d21 tempest-ServersAdmin275Test-1209233635 tempest-ServersAdmin275Test-1209233635-project-member] Task: {'id': task-2087919, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2270.853429] env[63279]: DEBUG oslo_vmware.api [None req-ee33c015-17af-469f-8aaf-fbc4c245a33d tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] Waiting for the task: (returnval){ [ 2270.853429] env[63279]: value = "task-2087922" [ 2270.853429] env[63279]: _type = "Task" [ 2270.853429] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2270.861958] env[63279]: DEBUG oslo_vmware.api [None req-ee33c015-17af-469f-8aaf-fbc4c245a33d tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] Task: {'id': task-2087922, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2270.999138] env[63279]: DEBUG oslo_concurrency.lockutils [None req-4c6686f2-b4a3-48f1-a3b8-2f0ad9d067b3 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Releasing lock "refresh_cache-d0b8c9dd-c683-4f3a-b819-d9d57d96636b" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2270.999138] env[63279]: DEBUG nova.compute.manager [None req-4c6686f2-b4a3-48f1-a3b8-2f0ad9d067b3 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: d0b8c9dd-c683-4f3a-b819-d9d57d96636b] Instance network_info: |[{"id": "ee08f9b0-ce61-4fa5-bf67-3a97acafc55e", "address": "fa:16:3e:16:9a:ab", "network": {"id": "e2728625-1c28-407c-946b-97923d57b409", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1735124510-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a5a719a21fe248c49d0d0151d218866b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a9abd00f-2cea-40f8-9804-a56b6431192d", "external-id": "nsx-vlan-transportzone-639", "segmentation_id": 639, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapee08f9b0-ce", "ovs_interfaceid": "ee08f9b0-ce61-4fa5-bf67-3a97acafc55e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 2270.999562] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-4c6686f2-b4a3-48f1-a3b8-2f0ad9d067b3 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: d0b8c9dd-c683-4f3a-b819-d9d57d96636b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:16:9a:ab', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a9abd00f-2cea-40f8-9804-a56b6431192d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ee08f9b0-ce61-4fa5-bf67-3a97acafc55e', 'vif_model': 'vmxnet3'}] {{(pid=63279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2271.007717] env[63279]: DEBUG oslo.service.loopingcall [None req-4c6686f2-b4a3-48f1-a3b8-2f0ad9d067b3 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2271.008552] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d0b8c9dd-c683-4f3a-b819-d9d57d96636b] Creating VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2271.008552] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-dd7efe7c-b61f-4ed8-b24b-794e4201900b {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2271.030062] env[63279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2271.030062] env[63279]: value = "task-2087923" [ 2271.030062] env[63279]: _type = "Task" [ 2271.030062] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2271.038907] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087923, 'name': CreateVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2271.071239] env[63279]: DEBUG nova.compute.manager [req-184b818f-7418-4485-a9d9-906dd34981f8 req-58943257-9780-4f37-a31f-83d8f7b033b9 service nova] [instance: 9fd3ea14-3e25-47f4-ae84-1b1fb8b1bbf6] Received event network-changed-f3d164be-d41c-4e5a-97f5-a50cb539a06a {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2271.071553] env[63279]: DEBUG nova.compute.manager [req-184b818f-7418-4485-a9d9-906dd34981f8 req-58943257-9780-4f37-a31f-83d8f7b033b9 service nova] [instance: 9fd3ea14-3e25-47f4-ae84-1b1fb8b1bbf6] Refreshing instance network info cache due to event network-changed-f3d164be-d41c-4e5a-97f5-a50cb539a06a. {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11655}} [ 2271.071895] env[63279]: DEBUG oslo_concurrency.lockutils [req-184b818f-7418-4485-a9d9-906dd34981f8 req-58943257-9780-4f37-a31f-83d8f7b033b9 service nova] Acquiring lock "refresh_cache-9fd3ea14-3e25-47f4-ae84-1b1fb8b1bbf6" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2271.072147] env[63279]: DEBUG oslo_concurrency.lockutils [req-184b818f-7418-4485-a9d9-906dd34981f8 req-58943257-9780-4f37-a31f-83d8f7b033b9 service nova] Acquired lock "refresh_cache-9fd3ea14-3e25-47f4-ae84-1b1fb8b1bbf6" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2271.072362] env[63279]: DEBUG nova.network.neutron [req-184b818f-7418-4485-a9d9-906dd34981f8 req-58943257-9780-4f37-a31f-83d8f7b033b9 service nova] [instance: 9fd3ea14-3e25-47f4-ae84-1b1fb8b1bbf6] Refreshing network info cache for port f3d164be-d41c-4e5a-97f5-a50cb539a06a {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2271.170749] env[63279]: DEBUG oslo_vmware.api [None req-1efaec50-9d70-4953-a0fc-80de35ec2073 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Task: {'id': task-2087920, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2271.201232] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-7abee42c-e6b2-4b7c-9763-4983fc7c6675 tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] [instance: 21aca39e-8513-49bd-93e9-0d101cee591f] Unregistered the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2271.201563] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-7abee42c-e6b2-4b7c-9763-4983fc7c6675 tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] [instance: 21aca39e-8513-49bd-93e9-0d101cee591f] Deleting contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2271.201828] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-7abee42c-e6b2-4b7c-9763-4983fc7c6675 tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] Deleting the datastore file [datastore1] 21aca39e-8513-49bd-93e9-0d101cee591f {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2271.202252] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-273e7b48-af5d-482d-8d55-4f533e0f36e5 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2271.208874] env[63279]: DEBUG oslo_vmware.api [None req-7abee42c-e6b2-4b7c-9763-4983fc7c6675 tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] Waiting for the task: (returnval){ [ 2271.208874] env[63279]: value = "task-2087924" [ 2271.208874] env[63279]: _type = "Task" [ 2271.208874] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2271.217388] env[63279]: DEBUG oslo_vmware.api [None req-7abee42c-e6b2-4b7c-9763-4983fc7c6675 tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] Task: {'id': task-2087924, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2271.226021] env[63279]: DEBUG oslo_concurrency.lockutils [None req-284fb117-637f-4fa4-b022-b8d4dd30d5e2 tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Releasing lock "refresh_cache-19e10ee4-99d1-44b9-9354-4c162d541a1f" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2271.226776] env[63279]: DEBUG nova.compute.manager [None req-284fb117-637f-4fa4-b022-b8d4dd30d5e2 tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] [instance: 19e10ee4-99d1-44b9-9354-4c162d541a1f] Instance network_info: |[{"id": "15cdfe62-d983-4e01-beb9-1947d51443e0", "address": "fa:16:3e:a4:65:02", "network": {"id": "f7000655-b20b-461d-9d08-f4cb8a85522e", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-686033866-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7674483744fe490b8cbe75532dfad77c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cbe1725d-6711-4e92-9a4e-d4802651e7d0", "external-id": "nsx-vlan-transportzone-679", "segmentation_id": 679, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap15cdfe62-d9", "ovs_interfaceid": "15cdfe62-d983-4e01-beb9-1947d51443e0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 2271.226922] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-284fb117-637f-4fa4-b022-b8d4dd30d5e2 tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] [instance: 19e10ee4-99d1-44b9-9354-4c162d541a1f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a4:65:02', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'cbe1725d-6711-4e92-9a4e-d4802651e7d0', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '15cdfe62-d983-4e01-beb9-1947d51443e0', 'vif_model': 'vmxnet3'}] {{(pid=63279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2271.234380] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-284fb117-637f-4fa4-b022-b8d4dd30d5e2 tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Creating folder: Project (7674483744fe490b8cbe75532dfad77c). Parent ref: group-v427491. {{(pid=63279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 2271.235806] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-71bd0c4e-aaa6-4750-bc50-0a64e879c295 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2271.244611] env[63279]: DEBUG nova.compute.manager [None req-6d3f6bc6-99d6-42fc-8d00-639d1e794324 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] [instance: 6e452711-a79c-4560-b38f-9414c87e6683] Starting instance... 
{{(pid=63279) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 2271.248733] env[63279]: INFO nova.virt.vmwareapi.vm_util [None req-284fb117-637f-4fa4-b022-b8d4dd30d5e2 tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Created folder: Project (7674483744fe490b8cbe75532dfad77c) in parent group-v427491. [ 2271.248915] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-284fb117-637f-4fa4-b022-b8d4dd30d5e2 tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Creating folder: Instances. Parent ref: group-v427765. {{(pid=63279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 2271.249439] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-98ab9869-521c-4f7a-bd21-07841c871970 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2271.258567] env[63279]: INFO nova.virt.vmwareapi.vm_util [None req-284fb117-637f-4fa4-b022-b8d4dd30d5e2 tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Created folder: Instances in parent group-v427765. [ 2271.258872] env[63279]: DEBUG oslo.service.loopingcall [None req-284fb117-637f-4fa4-b022-b8d4dd30d5e2 tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2271.259107] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 19e10ee4-99d1-44b9-9354-4c162d541a1f] Creating VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2271.259387] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b3ad25f7-0fad-43c9-b6e7-0a2fea1b7dfd {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2271.280141] env[63279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2271.280141] env[63279]: value = "task-2087927" [ 2271.280141] env[63279]: _type = "Task" [ 2271.280141] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2271.293881] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087927, 'name': CreateVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2271.334672] env[63279]: DEBUG oslo_vmware.api [None req-f46fd062-ccb3-4cde-81f0-368a25124d21 tempest-ServersAdmin275Test-1209233635 tempest-ServersAdmin275Test-1209233635-project-member] Task: {'id': task-2087919, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2271.365711] env[63279]: DEBUG oslo_vmware.api [None req-ee33c015-17af-469f-8aaf-fbc4c245a33d tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] Task: {'id': task-2087922, 'name': ReconfigVM_Task, 'duration_secs': 0.212558} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2271.366090] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-ee33c015-17af-469f-8aaf-fbc4c245a33d tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] [instance: 9b98a316-71da-45fb-b895-553f179fe7d9] Reconfigured VM instance instance-00000054 to detach disk 2001 {{(pid=63279) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 2271.374080] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0861c94b-3f7c-4d1c-8191-0d7b161744c5 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2271.394972] env[63279]: DEBUG oslo_vmware.api [None req-ee33c015-17af-469f-8aaf-fbc4c245a33d tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] Waiting for the task: (returnval){ [ 2271.394972] env[63279]: value = "task-2087928" [ 2271.394972] env[63279]: _type = "Task" [ 2271.394972] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2271.408815] env[63279]: DEBUG oslo_vmware.api [None req-ee33c015-17af-469f-8aaf-fbc4c245a33d tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] Task: {'id': task-2087928, 'name': ReconfigVM_Task} progress is 10%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2271.539581] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087923, 'name': CreateVM_Task, 'duration_secs': 0.443557} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2271.543499] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d0b8c9dd-c683-4f3a-b819-d9d57d96636b] Created VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2271.545026] env[63279]: DEBUG oslo_concurrency.lockutils [None req-4c6686f2-b4a3-48f1-a3b8-2f0ad9d067b3 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2271.545026] env[63279]: DEBUG oslo_concurrency.lockutils [None req-4c6686f2-b4a3-48f1-a3b8-2f0ad9d067b3 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2271.547031] env[63279]: DEBUG oslo_concurrency.lockutils [None req-4c6686f2-b4a3-48f1-a3b8-2f0ad9d067b3 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2271.547031] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2d016f45-3434-4548-bc9a-d10da97e026f {{(pid=63279) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2271.551112] env[63279]: DEBUG oslo_vmware.api [None req-4c6686f2-b4a3-48f1-a3b8-2f0ad9d067b3 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Waiting for the task: (returnval){ [ 2271.551112] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52564716-f105-74a2-20f6-56e3a1fe02fb" [ 2271.551112] env[63279]: _type = "Task" [ 2271.551112] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2271.564150] env[63279]: DEBUG oslo_vmware.api [None req-4c6686f2-b4a3-48f1-a3b8-2f0ad9d067b3 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52564716-f105-74a2-20f6-56e3a1fe02fb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2271.580014] env[63279]: DEBUG nova.compute.manager [req-80261eb2-9c15-42c3-884c-ecb5031a78bf req-5fd687f6-921e-4cbd-81e2-ded7fd846249 service nova] [instance: d0b8c9dd-c683-4f3a-b819-d9d57d96636b] Received event network-changed-ee08f9b0-ce61-4fa5-bf67-3a97acafc55e {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2271.580014] env[63279]: DEBUG nova.compute.manager [req-80261eb2-9c15-42c3-884c-ecb5031a78bf req-5fd687f6-921e-4cbd-81e2-ded7fd846249 service nova] [instance: d0b8c9dd-c683-4f3a-b819-d9d57d96636b] Refreshing instance network info cache due to event network-changed-ee08f9b0-ce61-4fa5-bf67-3a97acafc55e. {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11655}} [ 2271.580014] env[63279]: DEBUG oslo_concurrency.lockutils [req-80261eb2-9c15-42c3-884c-ecb5031a78bf req-5fd687f6-921e-4cbd-81e2-ded7fd846249 service nova] Acquiring lock "refresh_cache-d0b8c9dd-c683-4f3a-b819-d9d57d96636b" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2271.580014] env[63279]: DEBUG oslo_concurrency.lockutils [req-80261eb2-9c15-42c3-884c-ecb5031a78bf req-5fd687f6-921e-4cbd-81e2-ded7fd846249 service nova] Acquired lock "refresh_cache-d0b8c9dd-c683-4f3a-b819-d9d57d96636b" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2271.580014] env[63279]: DEBUG nova.network.neutron [req-80261eb2-9c15-42c3-884c-ecb5031a78bf req-5fd687f6-921e-4cbd-81e2-ded7fd846249 service nova] [instance: d0b8c9dd-c683-4f3a-b819-d9d57d96636b] Refreshing network info cache for port ee08f9b0-ce61-4fa5-bf67-3a97acafc55e {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2271.584704] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8704224-8844-462f-8c13-9f5e4cfd4a89 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2271.593780] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26b56dc4-717d-404e-942d-f8eb1660ce94 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2271.633044] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-569fac88-9cd7-47a7-8ca2-ba02a2d38312 {{(pid=63279) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2271.641268] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91d5ca77-a32e-4e3c-aefc-e09020cabafd {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2271.655994] env[63279]: DEBUG nova.compute.provider_tree [None req-d1fd8bb8-3b35-4608-ada3-9d7b5c154b26 tempest-ImagesNegativeTestJSON-354022530 tempest-ImagesNegativeTestJSON-354022530-project-member] Inventory has not changed in ProviderTree for provider: 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2271.671637] env[63279]: DEBUG oslo_vmware.api [None req-1efaec50-9d70-4953-a0fc-80de35ec2073 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Task: {'id': task-2087920, 'name': ReconfigVM_Task, 'duration_secs': 0.889885} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2271.672208] env[63279]: DEBUG oslo_concurrency.lockutils [None req-1efaec50-9d70-4953-a0fc-80de35ec2073 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Releasing lock "9fd3ea14-3e25-47f4-ae84-1b1fb8b1bbf6" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2271.672459] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-1efaec50-9d70-4953-a0fc-80de35ec2073 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] [instance: 9fd3ea14-3e25-47f4-ae84-1b1fb8b1bbf6] Reconfigured VM to attach interface {{(pid=63279) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1937}} [ 2271.719376] env[63279]: DEBUG oslo_vmware.api [None req-7abee42c-e6b2-4b7c-9763-4983fc7c6675 tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] Task: {'id': task-2087924, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.216085} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2271.719654] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-7abee42c-e6b2-4b7c-9763-4983fc7c6675 tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] Deleted the datastore file {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2271.719846] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-7abee42c-e6b2-4b7c-9763-4983fc7c6675 tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] [instance: 21aca39e-8513-49bd-93e9-0d101cee591f] Deleted contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2271.720128] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-7abee42c-e6b2-4b7c-9763-4983fc7c6675 tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] [instance: 21aca39e-8513-49bd-93e9-0d101cee591f] Instance destroyed {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2271.720340] env[63279]: INFO nova.compute.manager [None req-7abee42c-e6b2-4b7c-9763-4983fc7c6675 tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] [instance: 21aca39e-8513-49bd-93e9-0d101cee591f] Took 1.60 seconds to destroy the instance on the hypervisor. [ 2271.720579] env[63279]: DEBUG oslo.service.loopingcall [None req-7abee42c-e6b2-4b7c-9763-4983fc7c6675 tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2271.720854] env[63279]: DEBUG nova.compute.manager [-] [instance: 21aca39e-8513-49bd-93e9-0d101cee591f] Deallocating network for instance {{(pid=63279) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2271.720854] env[63279]: DEBUG nova.network.neutron [-] [instance: 21aca39e-8513-49bd-93e9-0d101cee591f] deallocate_for_instance() {{(pid=63279) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2271.767158] env[63279]: DEBUG oslo_concurrency.lockutils [None req-6d3f6bc6-99d6-42fc-8d00-639d1e794324 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2271.789954] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087927, 'name': CreateVM_Task} progress is 99%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2271.830504] env[63279]: DEBUG nova.network.neutron [req-184b818f-7418-4485-a9d9-906dd34981f8 req-58943257-9780-4f37-a31f-83d8f7b033b9 service nova] [instance: 9fd3ea14-3e25-47f4-ae84-1b1fb8b1bbf6] Updated VIF entry in instance network info cache for port f3d164be-d41c-4e5a-97f5-a50cb539a06a.
{{(pid=63279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2271.830504] env[63279]: DEBUG nova.network.neutron [req-184b818f-7418-4485-a9d9-906dd34981f8 req-58943257-9780-4f37-a31f-83d8f7b033b9 service nova] [instance: 9fd3ea14-3e25-47f4-ae84-1b1fb8b1bbf6] Updating instance_info_cache with network_info: [{"id": "0bd52488-d960-42c9-8077-fc8fe9eda956", "address": "fa:16:3e:16:45:3c", "network": {"id": "e0e614b7-de4b-485e-8824-582faae8febd", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1102025073-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.241", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b5b21bc5072e4945a19a782dd9561709", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "41278529-8bd2-44a1-97c8-03967faa3ff7", "external-id": "nsx-vlan-transportzone-749", "segmentation_id": 749, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0bd52488-d9", "ovs_interfaceid": "0bd52488-d960-42c9-8077-fc8fe9eda956", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "f3d164be-d41c-4e5a-97f5-a50cb539a06a", "address": "fa:16:3e:35:3a:40", "network": {"id": "e0e614b7-de4b-485e-8824-582faae8febd", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1102025073-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b5b21bc5072e4945a19a782dd9561709", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "41278529-8bd2-44a1-97c8-03967faa3ff7", "external-id": "nsx-vlan-transportzone-749", "segmentation_id": 749, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf3d164be-d4", "ovs_interfaceid": "f3d164be-d41c-4e5a-97f5-a50cb539a06a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2271.837375] env[63279]: DEBUG oslo_vmware.api [None req-f46fd062-ccb3-4cde-81f0-368a25124d21 tempest-ServersAdmin275Test-1209233635 tempest-ServersAdmin275Test-1209233635-project-member] Task: {'id': task-2087919, 'name': PowerOnVM_Task, 'duration_secs': 1.238783} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2271.837677] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-f46fd062-ccb3-4cde-81f0-368a25124d21 tempest-ServersAdmin275Test-1209233635 tempest-ServersAdmin275Test-1209233635-project-member] [instance: 69c88844-84c7-4272-a2c4-051f1499df84] Powered on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2271.837931] env[63279]: INFO nova.compute.manager [None req-f46fd062-ccb3-4cde-81f0-368a25124d21 tempest-ServersAdmin275Test-1209233635 tempest-ServersAdmin275Test-1209233635-project-member] [instance: 69c88844-84c7-4272-a2c4-051f1499df84] Took 5.89 seconds to spawn the instance on the hypervisor. [ 2271.838582] env[63279]: DEBUG nova.compute.manager [None req-f46fd062-ccb3-4cde-81f0-368a25124d21 tempest-ServersAdmin275Test-1209233635 tempest-ServersAdmin275Test-1209233635-project-member] [instance: 69c88844-84c7-4272-a2c4-051f1499df84] Checking state {{(pid=63279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2271.839269] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-485ecc92-4d4a-4c41-ba53-47d1a26706a2 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2271.905069] env[63279]: DEBUG oslo_vmware.api [None req-ee33c015-17af-469f-8aaf-fbc4c245a33d tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] Task: {'id': task-2087928, 'name': ReconfigVM_Task, 'duration_secs': 0.168513} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2271.905422] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-ee33c015-17af-469f-8aaf-fbc4c245a33d tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] [instance: 9b98a316-71da-45fb-b895-553f179fe7d9] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-427756', 'volume_id': 'abfd7860-7a69-49db-b638-22de4f3a1b94', 'name': 'volume-abfd7860-7a69-49db-b638-22de4f3a1b94', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '9b98a316-71da-45fb-b895-553f179fe7d9', 'attached_at': '', 'detached_at': '', 'volume_id': 'abfd7860-7a69-49db-b638-22de4f3a1b94', 'serial': 'abfd7860-7a69-49db-b638-22de4f3a1b94'} {{(pid=63279) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 2272.065060] env[63279]: DEBUG oslo_vmware.api [None req-4c6686f2-b4a3-48f1-a3b8-2f0ad9d067b3 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52564716-f105-74a2-20f6-56e3a1fe02fb, 'name': SearchDatastore_Task, 'duration_secs': 0.015379} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2272.065060] env[63279]: DEBUG oslo_concurrency.lockutils [None req-4c6686f2-b4a3-48f1-a3b8-2f0ad9d067b3 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2272.065060] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-4c6686f2-b4a3-48f1-a3b8-2f0ad9d067b3 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: d0b8c9dd-c683-4f3a-b819-d9d57d96636b] Processing image 30887889-e45b-4f67-8b3c-16216e594a90 {{(pid=63279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2272.065060] env[63279]: DEBUG oslo_concurrency.lockutils [None req-4c6686f2-b4a3-48f1-a3b8-2f0ad9d067b3 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2272.065478] env[63279]: DEBUG oslo_concurrency.lockutils [None req-4c6686f2-b4a3-48f1-a3b8-2f0ad9d067b3 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2272.065540] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-4c6686f2-b4a3-48f1-a3b8-2f0ad9d067b3 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2272.065823] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-48a90197-4270-4123-bf41-98e013c3ce18 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2272.077000] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-4c6686f2-b4a3-48f1-a3b8-2f0ad9d067b3 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2272.077264] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-4c6686f2-b4a3-48f1-a3b8-2f0ad9d067b3 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2272.077987] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c36f10e9-18ab-4992-8ca6-043b60131864 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2272.085328] env[63279]: DEBUG oslo_vmware.api [None req-4c6686f2-b4a3-48f1-a3b8-2f0ad9d067b3 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Waiting for the task: (returnval){ [ 2272.085328] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52eb0914-5517-c2cb-8720-988a7380f2f1" [ 2272.085328] env[63279]: _type = "Task" [ 2272.085328] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2272.092576] env[63279]: DEBUG oslo_vmware.api [None req-4c6686f2-b4a3-48f1-a3b8-2f0ad9d067b3 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52eb0914-5517-c2cb-8720-988a7380f2f1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2272.160838] env[63279]: DEBUG nova.scheduler.client.report [None req-d1fd8bb8-3b35-4608-ada3-9d7b5c154b26 tempest-ImagesNegativeTestJSON-354022530 tempest-ImagesNegativeTestJSON-354022530-project-member] Inventory has not changed for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2272.177597] env[63279]: DEBUG oslo_concurrency.lockutils [None req-1efaec50-9d70-4953-a0fc-80de35ec2073 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Lock "interface-9fd3ea14-3e25-47f4-ae84-1b1fb8b1bbf6-None" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 14.550s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2272.281630] env[63279]: DEBUG nova.network.neutron [req-80261eb2-9c15-42c3-884c-ecb5031a78bf req-5fd687f6-921e-4cbd-81e2-ded7fd846249 service nova] [instance: d0b8c9dd-c683-4f3a-b819-d9d57d96636b] Updated VIF entry in instance network info cache for port ee08f9b0-ce61-4fa5-bf67-3a97acafc55e. 
{{(pid=63279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2272.282038] env[63279]: DEBUG nova.network.neutron [req-80261eb2-9c15-42c3-884c-ecb5031a78bf req-5fd687f6-921e-4cbd-81e2-ded7fd846249 service nova] [instance: d0b8c9dd-c683-4f3a-b819-d9d57d96636b] Updating instance_info_cache with network_info: [{"id": "ee08f9b0-ce61-4fa5-bf67-3a97acafc55e", "address": "fa:16:3e:16:9a:ab", "network": {"id": "e2728625-1c28-407c-946b-97923d57b409", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1735124510-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a5a719a21fe248c49d0d0151d218866b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a9abd00f-2cea-40f8-9804-a56b6431192d", "external-id": "nsx-vlan-transportzone-639", "segmentation_id": 639, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapee08f9b0-ce", "ovs_interfaceid": "ee08f9b0-ce61-4fa5-bf67-3a97acafc55e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2272.293096] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087927, 'name': CreateVM_Task, 'duration_secs': 0.620445} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2272.293948] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 19e10ee4-99d1-44b9-9354-4c162d541a1f] Created VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2272.294620] env[63279]: DEBUG oslo_concurrency.lockutils [None req-284fb117-637f-4fa4-b022-b8d4dd30d5e2 tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2272.294788] env[63279]: DEBUG oslo_concurrency.lockutils [None req-284fb117-637f-4fa4-b022-b8d4dd30d5e2 tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2272.295128] env[63279]: DEBUG oslo_concurrency.lockutils [None req-284fb117-637f-4fa4-b022-b8d4dd30d5e2 tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2272.295625] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9fe13504-5b7b-4eab-931c-7cdff3f9477a {{(pid=63279) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2272.299958] env[63279]: DEBUG oslo_vmware.api [None req-284fb117-637f-4fa4-b022-b8d4dd30d5e2 tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Waiting for the task: (returnval){ [ 2272.299958] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]5233538b-17bf-bfab-a1d0-0bded9609dff" [ 2272.299958] env[63279]: _type = "Task" [ 2272.299958] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2272.307837] env[63279]: DEBUG oslo_vmware.api [None req-284fb117-637f-4fa4-b022-b8d4dd30d5e2 tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]5233538b-17bf-bfab-a1d0-0bded9609dff, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2272.333670] env[63279]: DEBUG oslo_concurrency.lockutils [req-184b818f-7418-4485-a9d9-906dd34981f8 req-58943257-9780-4f37-a31f-83d8f7b033b9 service nova] Releasing lock "refresh_cache-9fd3ea14-3e25-47f4-ae84-1b1fb8b1bbf6" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2272.333961] env[63279]: DEBUG nova.compute.manager [req-184b818f-7418-4485-a9d9-906dd34981f8 req-58943257-9780-4f37-a31f-83d8f7b033b9 service nova] [instance: ecec02e8-8ddf-4997-9547-ccee0db1938b] Received event network-vif-deleted-488726d1-1ee0-451b-be4a-b5de44f13dda {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2272.334163] env[63279]: DEBUG nova.compute.manager [req-184b818f-7418-4485-a9d9-906dd34981f8 req-58943257-9780-4f37-a31f-83d8f7b033b9 service nova] [instance: 19e10ee4-99d1-44b9-9354-4c162d541a1f] Received event network-vif-plugged-15cdfe62-d983-4e01-beb9-1947d51443e0 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2272.334389] env[63279]: DEBUG oslo_concurrency.lockutils [req-184b818f-7418-4485-a9d9-906dd34981f8 req-58943257-9780-4f37-a31f-83d8f7b033b9 service nova] Acquiring lock "19e10ee4-99d1-44b9-9354-4c162d541a1f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2272.334610] env[63279]: DEBUG oslo_concurrency.lockutils [req-184b818f-7418-4485-a9d9-906dd34981f8 req-58943257-9780-4f37-a31f-83d8f7b033b9 service nova] Lock "19e10ee4-99d1-44b9-9354-4c162d541a1f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2272.334782] env[63279]: DEBUG oslo_concurrency.lockutils [req-184b818f-7418-4485-a9d9-906dd34981f8 req-58943257-9780-4f37-a31f-83d8f7b033b9 service nova] Lock "19e10ee4-99d1-44b9-9354-4c162d541a1f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2272.334951] env[63279]: DEBUG nova.compute.manager [req-184b818f-7418-4485-a9d9-906dd34981f8 req-58943257-9780-4f37-a31f-83d8f7b033b9 service nova] [instance: 19e10ee4-99d1-44b9-9354-4c162d541a1f] No waiting events found dispatching 
network-vif-plugged-15cdfe62-d983-4e01-beb9-1947d51443e0 {{(pid=63279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 2272.335137] env[63279]: WARNING nova.compute.manager [req-184b818f-7418-4485-a9d9-906dd34981f8 req-58943257-9780-4f37-a31f-83d8f7b033b9 service nova] [instance: 19e10ee4-99d1-44b9-9354-4c162d541a1f] Received unexpected event network-vif-plugged-15cdfe62-d983-4e01-beb9-1947d51443e0 for instance with vm_state building and task_state spawning. [ 2272.335310] env[63279]: DEBUG nova.compute.manager [req-184b818f-7418-4485-a9d9-906dd34981f8 req-58943257-9780-4f37-a31f-83d8f7b033b9 service nova] [instance: 19e10ee4-99d1-44b9-9354-4c162d541a1f] Received event network-changed-15cdfe62-d983-4e01-beb9-1947d51443e0 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2272.335471] env[63279]: DEBUG nova.compute.manager [req-184b818f-7418-4485-a9d9-906dd34981f8 req-58943257-9780-4f37-a31f-83d8f7b033b9 service nova] [instance: 19e10ee4-99d1-44b9-9354-4c162d541a1f] Refreshing instance network info cache due to event network-changed-15cdfe62-d983-4e01-beb9-1947d51443e0. {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11655}} [ 2272.335654] env[63279]: DEBUG oslo_concurrency.lockutils [req-184b818f-7418-4485-a9d9-906dd34981f8 req-58943257-9780-4f37-a31f-83d8f7b033b9 service nova] Acquiring lock "refresh_cache-19e10ee4-99d1-44b9-9354-4c162d541a1f" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2272.335793] env[63279]: DEBUG oslo_concurrency.lockutils [req-184b818f-7418-4485-a9d9-906dd34981f8 req-58943257-9780-4f37-a31f-83d8f7b033b9 service nova] Acquired lock "refresh_cache-19e10ee4-99d1-44b9-9354-4c162d541a1f" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2272.335953] env[63279]: DEBUG nova.network.neutron [req-184b818f-7418-4485-a9d9-906dd34981f8 req-58943257-9780-4f37-a31f-83d8f7b033b9 service nova] [instance: 19e10ee4-99d1-44b9-9354-4c162d541a1f] Refreshing network info cache for port 15cdfe62-d983-4e01-beb9-1947d51443e0 {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2272.356802] env[63279]: INFO nova.compute.manager [None req-f46fd062-ccb3-4cde-81f0-368a25124d21 tempest-ServersAdmin275Test-1209233635 tempest-ServersAdmin275Test-1209233635-project-member] [instance: 69c88844-84c7-4272-a2c4-051f1499df84] Took 13.32 seconds to build instance. [ 2272.448783] env[63279]: DEBUG nova.objects.instance [None req-ee33c015-17af-469f-8aaf-fbc4c245a33d tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] Lazy-loading 'flavor' on Instance uuid 9b98a316-71da-45fb-b895-553f179fe7d9 {{(pid=63279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2272.481224] env[63279]: DEBUG nova.network.neutron [-] [instance: 21aca39e-8513-49bd-93e9-0d101cee591f] Updating instance_info_cache with network_info: [] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2272.596441] env[63279]: DEBUG oslo_vmware.api [None req-4c6686f2-b4a3-48f1-a3b8-2f0ad9d067b3 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52eb0914-5517-c2cb-8720-988a7380f2f1, 'name': SearchDatastore_Task, 'duration_secs': 0.023838} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2272.597282] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e85b7473-44b7-4b16-ab93-18cf95a9f054 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2272.602450] env[63279]: DEBUG oslo_vmware.api [None req-4c6686f2-b4a3-48f1-a3b8-2f0ad9d067b3 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Waiting for the task: (returnval){ [ 2272.602450] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]525318ba-fb43-4e07-197b-707f51a9b036" [ 2272.602450] env[63279]: _type = "Task" [ 2272.602450] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2272.610256] env[63279]: DEBUG oslo_vmware.api [None req-4c6686f2-b4a3-48f1-a3b8-2f0ad9d067b3 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]525318ba-fb43-4e07-197b-707f51a9b036, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2272.666241] env[63279]: DEBUG oslo_concurrency.lockutils [None req-d1fd8bb8-3b35-4608-ada3-9d7b5c154b26 tempest-ImagesNegativeTestJSON-354022530 tempest-ImagesNegativeTestJSON-354022530-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.887s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2272.668861] env[63279]: DEBUG oslo_concurrency.lockutils [None req-6d3f6bc6-99d6-42fc-8d00-639d1e794324 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.902s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2272.670504] env[63279]: INFO nova.compute.claims [None req-6d3f6bc6-99d6-42fc-8d00-639d1e794324 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] [instance: 6e452711-a79c-4560-b38f-9414c87e6683] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2272.691390] env[63279]: INFO nova.scheduler.client.report [None req-d1fd8bb8-3b35-4608-ada3-9d7b5c154b26 tempest-ImagesNegativeTestJSON-354022530 tempest-ImagesNegativeTestJSON-354022530-project-member] Deleted allocations for instance ecec02e8-8ddf-4997-9547-ccee0db1938b [ 2272.787319] env[63279]: DEBUG oslo_concurrency.lockutils [req-80261eb2-9c15-42c3-884c-ecb5031a78bf req-5fd687f6-921e-4cbd-81e2-ded7fd846249 service nova] Releasing lock "refresh_cache-d0b8c9dd-c683-4f3a-b819-d9d57d96636b" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2272.811317] env[63279]: DEBUG oslo_vmware.api [None req-284fb117-637f-4fa4-b022-b8d4dd30d5e2 tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]5233538b-17bf-bfab-a1d0-0bded9609dff, 'name': SearchDatastore_Task, 'duration_secs': 0.051712} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2272.811622] env[63279]: DEBUG oslo_concurrency.lockutils [None req-284fb117-637f-4fa4-b022-b8d4dd30d5e2 tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2272.811857] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-284fb117-637f-4fa4-b022-b8d4dd30d5e2 tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] [instance: 19e10ee4-99d1-44b9-9354-4c162d541a1f] Processing image 30887889-e45b-4f67-8b3c-16216e594a90 {{(pid=63279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2272.812080] env[63279]: DEBUG oslo_concurrency.lockutils [None req-284fb117-637f-4fa4-b022-b8d4dd30d5e2 tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2272.858451] env[63279]: DEBUG oslo_concurrency.lockutils [None req-f46fd062-ccb3-4cde-81f0-368a25124d21 tempest-ServersAdmin275Test-1209233635 tempest-ServersAdmin275Test-1209233635-project-member] Lock "69c88844-84c7-4272-a2c4-051f1499df84" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 14.826s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2272.984120] env[63279]: INFO nova.compute.manager [-] [instance: 21aca39e-8513-49bd-93e9-0d101cee591f] Took 1.26 seconds to deallocate network for instance. [ 2273.029633] env[63279]: INFO nova.compute.manager [None req-54619c35-9f4c-41b9-970e-d144dae4e3d2 tempest-ServersAdmin275Test-1209233635 tempest-ServersAdmin275Test-1209233635-project-member] [instance: 69c88844-84c7-4272-a2c4-051f1499df84] Rebuilding instance [ 2273.044985] env[63279]: DEBUG nova.network.neutron [req-184b818f-7418-4485-a9d9-906dd34981f8 req-58943257-9780-4f37-a31f-83d8f7b033b9 service nova] [instance: 19e10ee4-99d1-44b9-9354-4c162d541a1f] Updated VIF entry in instance network info cache for port 15cdfe62-d983-4e01-beb9-1947d51443e0. 
{{(pid=63279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2273.045369] env[63279]: DEBUG nova.network.neutron [req-184b818f-7418-4485-a9d9-906dd34981f8 req-58943257-9780-4f37-a31f-83d8f7b033b9 service nova] [instance: 19e10ee4-99d1-44b9-9354-4c162d541a1f] Updating instance_info_cache with network_info: [{"id": "15cdfe62-d983-4e01-beb9-1947d51443e0", "address": "fa:16:3e:a4:65:02", "network": {"id": "f7000655-b20b-461d-9d08-f4cb8a85522e", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-686033866-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7674483744fe490b8cbe75532dfad77c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cbe1725d-6711-4e92-9a4e-d4802651e7d0", "external-id": "nsx-vlan-transportzone-679", "segmentation_id": 679, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap15cdfe62-d9", "ovs_interfaceid": "15cdfe62-d983-4e01-beb9-1947d51443e0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2273.071218] env[63279]: DEBUG nova.compute.manager [None req-54619c35-9f4c-41b9-970e-d144dae4e3d2 tempest-ServersAdmin275Test-1209233635 tempest-ServersAdmin275Test-1209233635-project-member] [instance: 69c88844-84c7-4272-a2c4-051f1499df84] Checking state {{(pid=63279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2273.071889] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b8141c2-8d7a-4a9c-9726-6f6f7ae5d623 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2273.113618] env[63279]: DEBUG oslo_vmware.api [None req-4c6686f2-b4a3-48f1-a3b8-2f0ad9d067b3 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]525318ba-fb43-4e07-197b-707f51a9b036, 'name': SearchDatastore_Task, 'duration_secs': 0.02683} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2273.113881] env[63279]: DEBUG oslo_concurrency.lockutils [None req-4c6686f2-b4a3-48f1-a3b8-2f0ad9d067b3 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2273.114152] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-4c6686f2-b4a3-48f1-a3b8-2f0ad9d067b3 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] d0b8c9dd-c683-4f3a-b819-d9d57d96636b/d0b8c9dd-c683-4f3a-b819-d9d57d96636b.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2273.114423] env[63279]: DEBUG oslo_concurrency.lockutils [None req-284fb117-637f-4fa4-b022-b8d4dd30d5e2 tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2273.114603] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-284fb117-637f-4fa4-b022-b8d4dd30d5e2 tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2273.114810] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c4b30410-fff0-414f-8636-68044c49773d {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2273.116691] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-13db37e8-dc36-4b4a-ac03-9f65337963ef {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2273.123941] env[63279]: DEBUG oslo_vmware.api [None req-4c6686f2-b4a3-48f1-a3b8-2f0ad9d067b3 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Waiting for the task: (returnval){ [ 2273.123941] env[63279]: value = "task-2087930" [ 2273.123941] env[63279]: _type = "Task" [ 2273.123941] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2273.125048] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-284fb117-637f-4fa4-b022-b8d4dd30d5e2 tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2273.125264] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-284fb117-637f-4fa4-b022-b8d4dd30d5e2 tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2273.128609] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c341930b-3bd1-45a6-9605-f8afcf22807b {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2273.135737] env[63279]: DEBUG oslo_vmware.api [None req-4c6686f2-b4a3-48f1-a3b8-2f0ad9d067b3 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Task: {'id': task-2087930, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2273.136952] env[63279]: DEBUG oslo_vmware.api [None req-284fb117-637f-4fa4-b022-b8d4dd30d5e2 tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Waiting for the task: (returnval){ [ 2273.136952] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52b28d44-f0f5-e681-a325-7615d95a48c7" [ 2273.136952] env[63279]: _type = "Task" [ 2273.136952] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2273.144480] env[63279]: DEBUG oslo_vmware.api [None req-284fb117-637f-4fa4-b022-b8d4dd30d5e2 tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52b28d44-f0f5-e681-a325-7615d95a48c7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2273.198753] env[63279]: DEBUG oslo_concurrency.lockutils [None req-d1fd8bb8-3b35-4608-ada3-9d7b5c154b26 tempest-ImagesNegativeTestJSON-354022530 tempest-ImagesNegativeTestJSON-354022530-project-member] Lock "ecec02e8-8ddf-4997-9547-ccee0db1938b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 11.890s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2273.458741] env[63279]: DEBUG oslo_concurrency.lockutils [None req-ee33c015-17af-469f-8aaf-fbc4c245a33d tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] Lock "9b98a316-71da-45fb-b895-553f179fe7d9" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.250s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2273.490287] env[63279]: DEBUG oslo_concurrency.lockutils [None req-7abee42c-e6b2-4b7c-9763-4983fc7c6675 tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2273.549733] env[63279]: DEBUG oslo_concurrency.lockutils [req-184b818f-7418-4485-a9d9-906dd34981f8 req-58943257-9780-4f37-a31f-83d8f7b033b9 service nova] Releasing lock "refresh_cache-19e10ee4-99d1-44b9-9354-4c162d541a1f" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2273.615187] env[63279]: DEBUG nova.compute.manager [req-e5d91347-7a90-4c5c-8369-357053031025 req-e8f68545-23d9-4ee0-8da5-71e50e0d0b11 service nova] [instance: 
21aca39e-8513-49bd-93e9-0d101cee591f] Received event network-vif-deleted-d6d02bd7-d44f-434c-8e3f-5bdd56aed938 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2273.643259] env[63279]: DEBUG oslo_vmware.api [None req-4c6686f2-b4a3-48f1-a3b8-2f0ad9d067b3 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Task: {'id': task-2087930, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.505454} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2273.649714] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-4c6686f2-b4a3-48f1-a3b8-2f0ad9d067b3 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] d0b8c9dd-c683-4f3a-b819-d9d57d96636b/d0b8c9dd-c683-4f3a-b819-d9d57d96636b.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2273.649987] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-4c6686f2-b4a3-48f1-a3b8-2f0ad9d067b3 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: d0b8c9dd-c683-4f3a-b819-d9d57d96636b] Extending root virtual disk to 1048576 {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2273.650901] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-dde9d475-5889-4e5c-87c8-72a308deac56 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2273.666258] env[63279]: DEBUG oslo_vmware.api [None req-284fb117-637f-4fa4-b022-b8d4dd30d5e2 tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52b28d44-f0f5-e681-a325-7615d95a48c7, 'name': SearchDatastore_Task, 'duration_secs': 0.015711} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2273.666258] env[63279]: DEBUG oslo_vmware.api [None req-4c6686f2-b4a3-48f1-a3b8-2f0ad9d067b3 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Waiting for the task: (returnval){ [ 2273.666258] env[63279]: value = "task-2087931" [ 2273.666258] env[63279]: _type = "Task" [ 2273.666258] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2273.666258] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-abdab839-b043-421c-b1b6-f357a4d37a55 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2273.674861] env[63279]: DEBUG oslo_vmware.api [None req-284fb117-637f-4fa4-b022-b8d4dd30d5e2 tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Waiting for the task: (returnval){ [ 2273.674861] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]5264dec1-8f4b-1f30-ba74-45a12b31bcab" [ 2273.674861] env[63279]: _type = "Task" [ 2273.674861] env[63279]: } to complete. 
{{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2273.677970] env[63279]: DEBUG oslo_vmware.api [None req-4c6686f2-b4a3-48f1-a3b8-2f0ad9d067b3 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Task: {'id': task-2087931, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2273.694209] env[63279]: DEBUG oslo_vmware.api [None req-284fb117-637f-4fa4-b022-b8d4dd30d5e2 tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]5264dec1-8f4b-1f30-ba74-45a12b31bcab, 'name': SearchDatastore_Task, 'duration_secs': 0.01224} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2273.694769] env[63279]: DEBUG oslo_concurrency.lockutils [None req-284fb117-637f-4fa4-b022-b8d4dd30d5e2 tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2273.695224] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-284fb117-637f-4fa4-b022-b8d4dd30d5e2 tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] 19e10ee4-99d1-44b9-9354-4c162d541a1f/19e10ee4-99d1-44b9-9354-4c162d541a1f.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2273.695604] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8bfadb2a-206a-4a0b-a0a1-29dcf782e0ab {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2273.702352] env[63279]: DEBUG oslo_vmware.api [None req-284fb117-637f-4fa4-b022-b8d4dd30d5e2 tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Waiting for the task: (returnval){ [ 2273.702352] env[63279]: value = "task-2087932" [ 2273.702352] env[63279]: _type = "Task" [ 2273.702352] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2273.711037] env[63279]: DEBUG oslo_vmware.api [None req-284fb117-637f-4fa4-b022-b8d4dd30d5e2 tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Task: {'id': task-2087932, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2273.956062] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33e6636c-fa06-475f-8569-51be4d818416 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2273.964791] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1589cc9a-c053-4a02-929f-9f89995298db {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2274.001038] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14c91595-3dcc-4d6e-9a2b-1f48a7750806 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2274.007205] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0ff2f11-79be-48cf-8e8d-fea6a169fdb8 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2274.028190] env[63279]: DEBUG nova.compute.provider_tree [None req-6d3f6bc6-99d6-42fc-8d00-639d1e794324 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Inventory has not changed in ProviderTree for provider: 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2274.047250] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-5c88ffbb-5156-4ee9-8ee2-94ea57b56588 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] [instance: 2fba60ba-7ef0-4e61-9f7d-147cad8ab7ed] Volume attach. 
Driver type: vmdk {{(pid=63279) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 2274.047523] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-5c88ffbb-5156-4ee9-8ee2-94ea57b56588 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] [instance: 2fba60ba-7ef0-4e61-9f7d-147cad8ab7ed] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-427763', 'volume_id': '34b70c8e-1ea7-4e6f-a6c7-bdb6b24134a9', 'name': 'volume-34b70c8e-1ea7-4e6f-a6c7-bdb6b24134a9', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '2fba60ba-7ef0-4e61-9f7d-147cad8ab7ed', 'attached_at': '', 'detached_at': '', 'volume_id': '34b70c8e-1ea7-4e6f-a6c7-bdb6b24134a9', 'serial': '34b70c8e-1ea7-4e6f-a6c7-bdb6b24134a9'} {{(pid=63279) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 2274.048481] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5cfa612-d789-4364-9856-6a56ac50cd2d {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2274.068935] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2aa5a622-fd4d-4039-86dc-ecc361af7e7e {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2274.088930] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-54619c35-9f4c-41b9-970e-d144dae4e3d2 tempest-ServersAdmin275Test-1209233635 tempest-ServersAdmin275Test-1209233635-project-member] [instance: 69c88844-84c7-4272-a2c4-051f1499df84] Powering off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2274.096598] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-5c88ffbb-5156-4ee9-8ee2-94ea57b56588 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] [instance: 2fba60ba-7ef0-4e61-9f7d-147cad8ab7ed] Reconfiguring VM instance instance-00000053 to attach disk [datastore1] volume-34b70c8e-1ea7-4e6f-a6c7-bdb6b24134a9/volume-34b70c8e-1ea7-4e6f-a6c7-bdb6b24134a9.vmdk or device None with type thin {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2274.096979] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b2bdfad8-5ef4-4b53-83f8-534fc9d3c0d8 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2274.099334] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-243db0cc-1ce6-471a-a410-f1391872b2d9 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2274.122883] env[63279]: DEBUG oslo_vmware.api [None req-54619c35-9f4c-41b9-970e-d144dae4e3d2 tempest-ServersAdmin275Test-1209233635 tempest-ServersAdmin275Test-1209233635-project-member] Waiting for the task: (returnval){ [ 2274.122883] env[63279]: value = "task-2087933" [ 2274.122883] env[63279]: _type = "Task" [ 2274.122883] env[63279]: } to complete. 
{{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2274.122883] env[63279]: DEBUG oslo_vmware.api [None req-5c88ffbb-5156-4ee9-8ee2-94ea57b56588 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Waiting for the task: (returnval){ [ 2274.122883] env[63279]: value = "task-2087934" [ 2274.122883] env[63279]: _type = "Task" [ 2274.122883] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2274.132398] env[63279]: DEBUG oslo_vmware.api [None req-54619c35-9f4c-41b9-970e-d144dae4e3d2 tempest-ServersAdmin275Test-1209233635 tempest-ServersAdmin275Test-1209233635-project-member] Task: {'id': task-2087933, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2274.136436] env[63279]: DEBUG oslo_vmware.api [None req-5c88ffbb-5156-4ee9-8ee2-94ea57b56588 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Task: {'id': task-2087934, 'name': ReconfigVM_Task} progress is 10%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2274.176736] env[63279]: DEBUG oslo_vmware.api [None req-4c6686f2-b4a3-48f1-a3b8-2f0ad9d067b3 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Task: {'id': task-2087931, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.070047} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2274.177187] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-4c6686f2-b4a3-48f1-a3b8-2f0ad9d067b3 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: d0b8c9dd-c683-4f3a-b819-d9d57d96636b] Extended root virtual disk {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2274.178746] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a8515bf-e372-4406-ac9b-136c4555f176 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2274.208275] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-4c6686f2-b4a3-48f1-a3b8-2f0ad9d067b3 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: d0b8c9dd-c683-4f3a-b819-d9d57d96636b] Reconfiguring VM instance instance-00000064 to attach disk [datastore1] d0b8c9dd-c683-4f3a-b819-d9d57d96636b/d0b8c9dd-c683-4f3a-b819-d9d57d96636b.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2274.208275] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2494ff02-ca89-4b63-8358-4619e683b638 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2274.233949] env[63279]: DEBUG oslo_vmware.api [None req-284fb117-637f-4fa4-b022-b8d4dd30d5e2 tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Task: {'id': task-2087932, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2274.240074] env[63279]: DEBUG oslo_vmware.api [None req-4c6686f2-b4a3-48f1-a3b8-2f0ad9d067b3 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Waiting for the task: (returnval){ [ 2274.240074] env[63279]: value = "task-2087935" [ 2274.240074] env[63279]: _type = "Task" [ 2274.240074] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2274.246438] env[63279]: DEBUG oslo_vmware.api [None req-4c6686f2-b4a3-48f1-a3b8-2f0ad9d067b3 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Task: {'id': task-2087935, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2274.325216] env[63279]: DEBUG oslo_concurrency.lockutils [None req-f3d444d6-0789-4e27-be51-caf00e0a98ed tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] Acquiring lock "3de52a94-c1de-4b37-985c-9101417260e1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2274.325537] env[63279]: DEBUG oslo_concurrency.lockutils [None req-f3d444d6-0789-4e27-be51-caf00e0a98ed tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] Lock "3de52a94-c1de-4b37-985c-9101417260e1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2274.439270] env[63279]: DEBUG oslo_concurrency.lockutils [None req-16d82225-da8f-4b23-ae90-64edd2b706db tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Acquiring lock "interface-9fd3ea14-3e25-47f4-ae84-1b1fb8b1bbf6-34ef38ab-d2fc-4fee-9782-e01851e13c39" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2274.439555] env[63279]: DEBUG oslo_concurrency.lockutils [None req-16d82225-da8f-4b23-ae90-64edd2b706db tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Lock "interface-9fd3ea14-3e25-47f4-ae84-1b1fb8b1bbf6-34ef38ab-d2fc-4fee-9782-e01851e13c39" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.001s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2274.439926] env[63279]: DEBUG nova.objects.instance [None req-16d82225-da8f-4b23-ae90-64edd2b706db tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Lazy-loading 'flavor' on Instance uuid 9fd3ea14-3e25-47f4-ae84-1b1fb8b1bbf6 {{(pid=63279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2274.532582] env[63279]: DEBUG nova.scheduler.client.report [None req-6d3f6bc6-99d6-42fc-8d00-639d1e794324 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Inventory has not changed for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 based on inventory data: {'VCPU': 
{'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2274.641206] env[63279]: DEBUG oslo_vmware.api [None req-5c88ffbb-5156-4ee9-8ee2-94ea57b56588 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Task: {'id': task-2087934, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2274.641644] env[63279]: DEBUG oslo_vmware.api [None req-54619c35-9f4c-41b9-970e-d144dae4e3d2 tempest-ServersAdmin275Test-1209233635 tempest-ServersAdmin275Test-1209233635-project-member] Task: {'id': task-2087933, 'name': PowerOffVM_Task, 'duration_secs': 0.155074} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2274.641970] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-54619c35-9f4c-41b9-970e-d144dae4e3d2 tempest-ServersAdmin275Test-1209233635 tempest-ServersAdmin275Test-1209233635-project-member] [instance: 69c88844-84c7-4272-a2c4-051f1499df84] Powered off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2274.642228] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-54619c35-9f4c-41b9-970e-d144dae4e3d2 tempest-ServersAdmin275Test-1209233635 tempest-ServersAdmin275Test-1209233635-project-member] [instance: 69c88844-84c7-4272-a2c4-051f1499df84] Destroying instance {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2274.642980] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b938750-8863-4fd0-b8cf-5f6f94b90dc2 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2274.650629] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-54619c35-9f4c-41b9-970e-d144dae4e3d2 tempest-ServersAdmin275Test-1209233635 tempest-ServersAdmin275Test-1209233635-project-member] [instance: 69c88844-84c7-4272-a2c4-051f1499df84] Unregistering the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2274.650908] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1e60c892-d903-486a-b5cd-4ddb281ea6a5 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2274.677335] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-54619c35-9f4c-41b9-970e-d144dae4e3d2 tempest-ServersAdmin275Test-1209233635 tempest-ServersAdmin275Test-1209233635-project-member] [instance: 69c88844-84c7-4272-a2c4-051f1499df84] Unregistered the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2274.677573] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-54619c35-9f4c-41b9-970e-d144dae4e3d2 tempest-ServersAdmin275Test-1209233635 tempest-ServersAdmin275Test-1209233635-project-member] [instance: 69c88844-84c7-4272-a2c4-051f1499df84] Deleting contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2274.677759] 
env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-54619c35-9f4c-41b9-970e-d144dae4e3d2 tempest-ServersAdmin275Test-1209233635 tempest-ServersAdmin275Test-1209233635-project-member] Deleting the datastore file [datastore1] 69c88844-84c7-4272-a2c4-051f1499df84 {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2274.678894] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-352b5c39-5612-46b5-8201-876543bf1101 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2274.687394] env[63279]: DEBUG oslo_vmware.api [None req-54619c35-9f4c-41b9-970e-d144dae4e3d2 tempest-ServersAdmin275Test-1209233635 tempest-ServersAdmin275Test-1209233635-project-member] Waiting for the task: (returnval){ [ 2274.687394] env[63279]: value = "task-2087937" [ 2274.687394] env[63279]: _type = "Task" [ 2274.687394] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2274.697948] env[63279]: DEBUG oslo_vmware.api [None req-54619c35-9f4c-41b9-970e-d144dae4e3d2 tempest-ServersAdmin275Test-1209233635 tempest-ServersAdmin275Test-1209233635-project-member] Task: {'id': task-2087937, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2274.718362] env[63279]: DEBUG oslo_vmware.api [None req-284fb117-637f-4fa4-b022-b8d4dd30d5e2 tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Task: {'id': task-2087932, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2274.747277] env[63279]: DEBUG oslo_vmware.api [None req-4c6686f2-b4a3-48f1-a3b8-2f0ad9d067b3 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Task: {'id': task-2087935, 'name': ReconfigVM_Task, 'duration_secs': 0.310241} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2274.747638] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-4c6686f2-b4a3-48f1-a3b8-2f0ad9d067b3 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: d0b8c9dd-c683-4f3a-b819-d9d57d96636b] Reconfigured VM instance instance-00000064 to attach disk [datastore1] d0b8c9dd-c683-4f3a-b819-d9d57d96636b/d0b8c9dd-c683-4f3a-b819-d9d57d96636b.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2274.748374] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-db32c7fa-719b-43d5-9709-10a4e3fa69a7 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2274.756522] env[63279]: DEBUG oslo_vmware.api [None req-4c6686f2-b4a3-48f1-a3b8-2f0ad9d067b3 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Waiting for the task: (returnval){ [ 2274.756522] env[63279]: value = "task-2087938" [ 2274.756522] env[63279]: _type = "Task" [ 2274.756522] env[63279]: } to complete. 
{{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2274.765856] env[63279]: DEBUG oslo_vmware.api [None req-4c6686f2-b4a3-48f1-a3b8-2f0ad9d067b3 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Task: {'id': task-2087938, 'name': Rename_Task} progress is 5%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2274.828638] env[63279]: DEBUG nova.compute.manager [None req-f3d444d6-0789-4e27-be51-caf00e0a98ed tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] [instance: 3de52a94-c1de-4b37-985c-9101417260e1] Starting instance... {{(pid=63279) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 2275.038593] env[63279]: DEBUG oslo_concurrency.lockutils [None req-6d3f6bc6-99d6-42fc-8d00-639d1e794324 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.370s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2275.039152] env[63279]: DEBUG nova.compute.manager [None req-6d3f6bc6-99d6-42fc-8d00-639d1e794324 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] [instance: 6e452711-a79c-4560-b38f-9414c87e6683] Start building networks asynchronously for instance. {{(pid=63279) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 2275.042049] env[63279]: DEBUG oslo_concurrency.lockutils [None req-7abee42c-e6b2-4b7c-9763-4983fc7c6675 tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.552s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2275.042294] env[63279]: DEBUG nova.objects.instance [None req-7abee42c-e6b2-4b7c-9763-4983fc7c6675 tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] Lazy-loading 'resources' on Instance uuid 21aca39e-8513-49bd-93e9-0d101cee591f {{(pid=63279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2275.115942] env[63279]: DEBUG nova.objects.instance [None req-16d82225-da8f-4b23-ae90-64edd2b706db tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Lazy-loading 'pci_requests' on Instance uuid 9fd3ea14-3e25-47f4-ae84-1b1fb8b1bbf6 {{(pid=63279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2275.140601] env[63279]: DEBUG oslo_vmware.api [None req-5c88ffbb-5156-4ee9-8ee2-94ea57b56588 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Task: {'id': task-2087934, 'name': ReconfigVM_Task, 'duration_secs': 0.620633} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2275.140601] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-5c88ffbb-5156-4ee9-8ee2-94ea57b56588 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] [instance: 2fba60ba-7ef0-4e61-9f7d-147cad8ab7ed] Reconfigured VM instance instance-00000053 to attach disk [datastore1] volume-34b70c8e-1ea7-4e6f-a6c7-bdb6b24134a9/volume-34b70c8e-1ea7-4e6f-a6c7-bdb6b24134a9.vmdk or device None with type thin {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2275.145534] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-228d07a2-e4c8-48bb-b92b-7ff11355dc53 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2275.163338] env[63279]: DEBUG oslo_vmware.api [None req-5c88ffbb-5156-4ee9-8ee2-94ea57b56588 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Waiting for the task: (returnval){ [ 2275.163338] env[63279]: value = "task-2087939" [ 2275.163338] env[63279]: _type = "Task" [ 2275.163338] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2275.171939] env[63279]: DEBUG oslo_vmware.api [None req-5c88ffbb-5156-4ee9-8ee2-94ea57b56588 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Task: {'id': task-2087939, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2275.198821] env[63279]: DEBUG oslo_vmware.api [None req-54619c35-9f4c-41b9-970e-d144dae4e3d2 tempest-ServersAdmin275Test-1209233635 tempest-ServersAdmin275Test-1209233635-project-member] Task: {'id': task-2087937, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.206299} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2275.199151] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-54619c35-9f4c-41b9-970e-d144dae4e3d2 tempest-ServersAdmin275Test-1209233635 tempest-ServersAdmin275Test-1209233635-project-member] Deleted the datastore file {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2275.199364] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-54619c35-9f4c-41b9-970e-d144dae4e3d2 tempest-ServersAdmin275Test-1209233635 tempest-ServersAdmin275Test-1209233635-project-member] [instance: 69c88844-84c7-4272-a2c4-051f1499df84] Deleted contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2275.199561] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-54619c35-9f4c-41b9-970e-d144dae4e3d2 tempest-ServersAdmin275Test-1209233635 tempest-ServersAdmin275Test-1209233635-project-member] [instance: 69c88844-84c7-4272-a2c4-051f1499df84] Instance destroyed {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2275.217100] env[63279]: DEBUG oslo_vmware.api [None req-284fb117-637f-4fa4-b022-b8d4dd30d5e2 tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Task: {'id': task-2087932, 'name': CopyVirtualDisk_Task} progress is 100%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2275.267679] env[63279]: DEBUG oslo_vmware.api [None req-4c6686f2-b4a3-48f1-a3b8-2f0ad9d067b3 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Task: {'id': task-2087938, 'name': Rename_Task, 'duration_secs': 0.254626} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2275.267975] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-4c6686f2-b4a3-48f1-a3b8-2f0ad9d067b3 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: d0b8c9dd-c683-4f3a-b819-d9d57d96636b] Powering on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2275.268286] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0cfdecbd-564b-45ab-ad96-0485ce1deff3 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2275.275316] env[63279]: DEBUG oslo_vmware.api [None req-4c6686f2-b4a3-48f1-a3b8-2f0ad9d067b3 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Waiting for the task: (returnval){ [ 2275.275316] env[63279]: value = "task-2087940" [ 2275.275316] env[63279]: _type = "Task" [ 2275.275316] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2275.284136] env[63279]: DEBUG oslo_vmware.api [None req-4c6686f2-b4a3-48f1-a3b8-2f0ad9d067b3 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Task: {'id': task-2087940, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2275.352816] env[63279]: DEBUG oslo_concurrency.lockutils [None req-f3d444d6-0789-4e27-be51-caf00e0a98ed tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2275.546171] env[63279]: DEBUG nova.compute.utils [None req-6d3f6bc6-99d6-42fc-8d00-639d1e794324 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Using /dev/sd instead of None {{(pid=63279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2275.552078] env[63279]: DEBUG nova.compute.manager [None req-6d3f6bc6-99d6-42fc-8d00-639d1e794324 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] [instance: 6e452711-a79c-4560-b38f-9414c87e6683] Allocating IP information in the background. 
{{(pid=63279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 2275.552078] env[63279]: DEBUG nova.network.neutron [None req-6d3f6bc6-99d6-42fc-8d00-639d1e794324 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] [instance: 6e452711-a79c-4560-b38f-9414c87e6683] allocate_for_instance() {{(pid=63279) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2275.606134] env[63279]: DEBUG nova.policy [None req-6d3f6bc6-99d6-42fc-8d00-639d1e794324 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '55199c771de04067a936194078ef99f6', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4fcef39e334249afb9636455802059c5', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63279) authorize /opt/stack/nova/nova/policy.py:192}} [ 2275.618191] env[63279]: DEBUG nova.objects.base [None req-16d82225-da8f-4b23-ae90-64edd2b706db tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Object Instance<9fd3ea14-3e25-47f4-ae84-1b1fb8b1bbf6> lazy-loaded attributes: flavor,pci_requests {{(pid=63279) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 2275.618414] env[63279]: DEBUG nova.network.neutron [None req-16d82225-da8f-4b23-ae90-64edd2b706db tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] [instance: 9fd3ea14-3e25-47f4-ae84-1b1fb8b1bbf6] allocate_for_instance() {{(pid=63279) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2275.677416] env[63279]: DEBUG oslo_vmware.api [None req-5c88ffbb-5156-4ee9-8ee2-94ea57b56588 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Task: {'id': task-2087939, 'name': ReconfigVM_Task, 'duration_secs': 0.162035} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2275.677416] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-5c88ffbb-5156-4ee9-8ee2-94ea57b56588 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] [instance: 2fba60ba-7ef0-4e61-9f7d-147cad8ab7ed] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-427763', 'volume_id': '34b70c8e-1ea7-4e6f-a6c7-bdb6b24134a9', 'name': 'volume-34b70c8e-1ea7-4e6f-a6c7-bdb6b24134a9', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '2fba60ba-7ef0-4e61-9f7d-147cad8ab7ed', 'attached_at': '', 'detached_at': '', 'volume_id': '34b70c8e-1ea7-4e6f-a6c7-bdb6b24134a9', 'serial': '34b70c8e-1ea7-4e6f-a6c7-bdb6b24134a9'} {{(pid=63279) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 2275.720901] env[63279]: DEBUG oslo_vmware.api [None req-284fb117-637f-4fa4-b022-b8d4dd30d5e2 tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Task: {'id': task-2087932, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.565789} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2275.724654] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-284fb117-637f-4fa4-b022-b8d4dd30d5e2 tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] 19e10ee4-99d1-44b9-9354-4c162d541a1f/19e10ee4-99d1-44b9-9354-4c162d541a1f.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2275.724916] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-284fb117-637f-4fa4-b022-b8d4dd30d5e2 tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] [instance: 19e10ee4-99d1-44b9-9354-4c162d541a1f] Extending root virtual disk to 1048576 {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2275.728708] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-8a156d3a-f023-45cd-a19a-c88f844453fb {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2275.741889] env[63279]: DEBUG oslo_vmware.api [None req-284fb117-637f-4fa4-b022-b8d4dd30d5e2 tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Waiting for the task: (returnval){ [ 2275.741889] env[63279]: value = "task-2087941" [ 2275.741889] env[63279]: _type = "Task" [ 2275.741889] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2275.753396] env[63279]: DEBUG oslo_vmware.api [None req-284fb117-637f-4fa4-b022-b8d4dd30d5e2 tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Task: {'id': task-2087941, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2275.789502] env[63279]: DEBUG oslo_vmware.api [None req-4c6686f2-b4a3-48f1-a3b8-2f0ad9d067b3 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Task: {'id': task-2087940, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2275.846950] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56143548-f3e5-4365-b46a-9272608704ae {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2275.860030] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31619c4a-0492-4341-892d-a91eb4f84ef0 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2275.905061] env[63279]: DEBUG nova.network.neutron [None req-6d3f6bc6-99d6-42fc-8d00-639d1e794324 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] [instance: 6e452711-a79c-4560-b38f-9414c87e6683] Successfully created port: fffc97fe-3975-45bb-8c80-f12056750eb4 {{(pid=63279) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2275.908040] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d51dfae1-2b81-498e-84e5-47168ade081f {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2275.916746] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-375280e7-8d59-419f-adb1-f04b6995406d {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2275.934362] env[63279]: DEBUG nova.compute.provider_tree [None req-7abee42c-e6b2-4b7c-9763-4983fc7c6675 tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] Inventory has not changed in ProviderTree for provider: 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2275.969192] env[63279]: DEBUG nova.policy [None req-16d82225-da8f-4b23-ae90-64edd2b706db tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6343b207f7294f5fa2a8111940083fb0', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b5b21bc5072e4945a19a782dd9561709', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63279) authorize /opt/stack/nova/nova/policy.py:192}} [ 2276.052165] env[63279]: DEBUG nova.compute.manager [None req-6d3f6bc6-99d6-42fc-8d00-639d1e794324 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] [instance: 6e452711-a79c-4560-b38f-9414c87e6683] Start building block device mappings for instance. 
{{(pid=63279) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 2276.247667] env[63279]: DEBUG nova.virt.hardware [None req-54619c35-9f4c-41b9-970e-d144dae4e3d2 tempest-ServersAdmin275Test-1209233635 tempest-ServersAdmin275Test-1209233635-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-13T17:30:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-13T17:30:01Z,direct_url=,disk_format='vmdk',id=30887889-e45b-4f67-8b3c-16216e594a90,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a935e86eee0a4c38adfe0367d2097a61',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-13T17:30:02Z,virtual_size=,visibility=), allow threads: False {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2276.247667] env[63279]: DEBUG nova.virt.hardware [None req-54619c35-9f4c-41b9-970e-d144dae4e3d2 tempest-ServersAdmin275Test-1209233635 tempest-ServersAdmin275Test-1209233635-project-member] Flavor limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2276.247828] env[63279]: DEBUG nova.virt.hardware [None req-54619c35-9f4c-41b9-970e-d144dae4e3d2 tempest-ServersAdmin275Test-1209233635 tempest-ServersAdmin275Test-1209233635-project-member] Image limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2276.248032] env[63279]: DEBUG nova.virt.hardware [None req-54619c35-9f4c-41b9-970e-d144dae4e3d2 tempest-ServersAdmin275Test-1209233635 tempest-ServersAdmin275Test-1209233635-project-member] Flavor pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2276.248177] env[63279]: DEBUG nova.virt.hardware [None req-54619c35-9f4c-41b9-970e-d144dae4e3d2 tempest-ServersAdmin275Test-1209233635 tempest-ServersAdmin275Test-1209233635-project-member] Image pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2276.248327] env[63279]: DEBUG nova.virt.hardware [None req-54619c35-9f4c-41b9-970e-d144dae4e3d2 tempest-ServersAdmin275Test-1209233635 tempest-ServersAdmin275Test-1209233635-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2276.248541] env[63279]: DEBUG nova.virt.hardware [None req-54619c35-9f4c-41b9-970e-d144dae4e3d2 tempest-ServersAdmin275Test-1209233635 tempest-ServersAdmin275Test-1209233635-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 2276.248705] env[63279]: DEBUG nova.virt.hardware [None req-54619c35-9f4c-41b9-970e-d144dae4e3d2 tempest-ServersAdmin275Test-1209233635 tempest-ServersAdmin275Test-1209233635-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 2276.248870] env[63279]: DEBUG nova.virt.hardware [None req-54619c35-9f4c-41b9-970e-d144dae4e3d2 
tempest-ServersAdmin275Test-1209233635 tempest-ServersAdmin275Test-1209233635-project-member] Got 1 possible topologies {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2276.249055] env[63279]: DEBUG nova.virt.hardware [None req-54619c35-9f4c-41b9-970e-d144dae4e3d2 tempest-ServersAdmin275Test-1209233635 tempest-ServersAdmin275Test-1209233635-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2276.249238] env[63279]: DEBUG nova.virt.hardware [None req-54619c35-9f4c-41b9-970e-d144dae4e3d2 tempest-ServersAdmin275Test-1209233635 tempest-ServersAdmin275Test-1209233635-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2276.250385] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3aabffae-2483-44e1-9fda-390984815e8b {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2276.259045] env[63279]: DEBUG oslo_vmware.api [None req-284fb117-637f-4fa4-b022-b8d4dd30d5e2 tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Task: {'id': task-2087941, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.093973} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2276.261315] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-284fb117-637f-4fa4-b022-b8d4dd30d5e2 tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] [instance: 19e10ee4-99d1-44b9-9354-4c162d541a1f] Extended root virtual disk {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2276.262313] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c207d0d-070a-46a7-a1b7-f380e9d89cb6 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2276.265935] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-198a0fc3-0630-4116-8c87-e3e93a1d3bb9 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2276.289888] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-284fb117-637f-4fa4-b022-b8d4dd30d5e2 tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] [instance: 19e10ee4-99d1-44b9-9354-4c162d541a1f] Reconfiguring VM instance instance-00000062 to attach disk [datastore1] 19e10ee4-99d1-44b9-9354-4c162d541a1f/19e10ee4-99d1-44b9-9354-4c162d541a1f.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2276.302517] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c32999e5-06cf-447b-aec8-15ce748482ee {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2276.316605] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-54619c35-9f4c-41b9-970e-d144dae4e3d2 tempest-ServersAdmin275Test-1209233635 tempest-ServersAdmin275Test-1209233635-project-member] [instance: 69c88844-84c7-4272-a2c4-051f1499df84] Instance VIF info [] 
{{(pid=63279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2276.322364] env[63279]: DEBUG oslo.service.loopingcall [None req-54619c35-9f4c-41b9-970e-d144dae4e3d2 tempest-ServersAdmin275Test-1209233635 tempest-ServersAdmin275Test-1209233635-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2276.322984] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 69c88844-84c7-4272-a2c4-051f1499df84] Creating VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2276.323952] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c4f9b5be-4dfc-4c59-a4d9-082e4eb099fb {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2276.341074] env[63279]: DEBUG oslo_vmware.api [None req-4c6686f2-b4a3-48f1-a3b8-2f0ad9d067b3 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Task: {'id': task-2087940, 'name': PowerOnVM_Task, 'duration_secs': 0.519672} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2276.341074] env[63279]: DEBUG oslo_vmware.api [None req-284fb117-637f-4fa4-b022-b8d4dd30d5e2 tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Waiting for the task: (returnval){ [ 2276.341074] env[63279]: value = "task-2087942" [ 2276.341074] env[63279]: _type = "Task" [ 2276.341074] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2276.341074] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-4c6686f2-b4a3-48f1-a3b8-2f0ad9d067b3 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: d0b8c9dd-c683-4f3a-b819-d9d57d96636b] Powered on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2276.341074] env[63279]: INFO nova.compute.manager [None req-4c6686f2-b4a3-48f1-a3b8-2f0ad9d067b3 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: d0b8c9dd-c683-4f3a-b819-d9d57d96636b] Took 8.06 seconds to spawn the instance on the hypervisor. [ 2276.341074] env[63279]: DEBUG nova.compute.manager [None req-4c6686f2-b4a3-48f1-a3b8-2f0ad9d067b3 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: d0b8c9dd-c683-4f3a-b819-d9d57d96636b] Checking state {{(pid=63279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2276.341734] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1b9a49d-3ead-497d-911a-dea6f1652447 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2276.347867] env[63279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2276.347867] env[63279]: value = "task-2087943" [ 2276.347867] env[63279]: _type = "Task" [ 2276.347867] env[63279]: } to complete. 
{{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2276.355437] env[63279]: DEBUG oslo_vmware.api [None req-284fb117-637f-4fa4-b022-b8d4dd30d5e2 tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Task: {'id': task-2087942, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2276.366860] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087943, 'name': CreateVM_Task} progress is 6%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2276.438024] env[63279]: DEBUG nova.scheduler.client.report [None req-7abee42c-e6b2-4b7c-9763-4983fc7c6675 tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] Inventory has not changed for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2276.720522] env[63279]: DEBUG nova.objects.instance [None req-5c88ffbb-5156-4ee9-8ee2-94ea57b56588 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Lazy-loading 'flavor' on Instance uuid 2fba60ba-7ef0-4e61-9f7d-147cad8ab7ed {{(pid=63279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2276.851241] env[63279]: DEBUG oslo_vmware.api [None req-284fb117-637f-4fa4-b022-b8d4dd30d5e2 tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Task: {'id': task-2087942, 'name': ReconfigVM_Task, 'duration_secs': 0.302563} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2276.854369] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-284fb117-637f-4fa4-b022-b8d4dd30d5e2 tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] [instance: 19e10ee4-99d1-44b9-9354-4c162d541a1f] Reconfigured VM instance instance-00000062 to attach disk [datastore1] 19e10ee4-99d1-44b9-9354-4c162d541a1f/19e10ee4-99d1-44b9-9354-4c162d541a1f.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2276.854996] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-49776b5a-aaab-482a-b70c-427ad0d20ea1 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2276.865858] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087943, 'name': CreateVM_Task, 'duration_secs': 0.350175} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2276.867284] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 69c88844-84c7-4272-a2c4-051f1499df84] Created VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2276.867624] env[63279]: DEBUG oslo_vmware.api [None req-284fb117-637f-4fa4-b022-b8d4dd30d5e2 tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Waiting for the task: (returnval){ [ 2276.867624] env[63279]: value = "task-2087944" [ 2276.867624] env[63279]: _type = "Task" [ 2276.867624] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2276.869417] env[63279]: DEBUG oslo_concurrency.lockutils [None req-54619c35-9f4c-41b9-970e-d144dae4e3d2 tempest-ServersAdmin275Test-1209233635 tempest-ServersAdmin275Test-1209233635-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2276.869585] env[63279]: DEBUG oslo_concurrency.lockutils [None req-54619c35-9f4c-41b9-970e-d144dae4e3d2 tempest-ServersAdmin275Test-1209233635 tempest-ServersAdmin275Test-1209233635-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2276.869895] env[63279]: DEBUG oslo_concurrency.lockutils [None req-54619c35-9f4c-41b9-970e-d144dae4e3d2 tempest-ServersAdmin275Test-1209233635 tempest-ServersAdmin275Test-1209233635-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2276.870486] env[63279]: INFO nova.compute.manager [None req-4c6686f2-b4a3-48f1-a3b8-2f0ad9d067b3 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: d0b8c9dd-c683-4f3a-b819-d9d57d96636b] Took 16.66 seconds to build instance. [ 2276.871341] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e840d52e-4685-4a3e-9822-a1aef3fd0225 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2276.879570] env[63279]: DEBUG oslo_vmware.api [None req-54619c35-9f4c-41b9-970e-d144dae4e3d2 tempest-ServersAdmin275Test-1209233635 tempest-ServersAdmin275Test-1209233635-project-member] Waiting for the task: (returnval){ [ 2276.879570] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52b66a78-da42-1519-5282-860b3776a770" [ 2276.879570] env[63279]: _type = "Task" [ 2276.879570] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2276.883229] env[63279]: DEBUG oslo_vmware.api [None req-284fb117-637f-4fa4-b022-b8d4dd30d5e2 tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Task: {'id': task-2087944, 'name': Rename_Task} progress is 14%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2276.892469] env[63279]: DEBUG oslo_vmware.api [None req-54619c35-9f4c-41b9-970e-d144dae4e3d2 tempest-ServersAdmin275Test-1209233635 tempest-ServersAdmin275Test-1209233635-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52b66a78-da42-1519-5282-860b3776a770, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2276.943283] env[63279]: DEBUG oslo_concurrency.lockutils [None req-7abee42c-e6b2-4b7c-9763-4983fc7c6675 tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.901s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2276.945784] env[63279]: DEBUG oslo_concurrency.lockutils [None req-f3d444d6-0789-4e27-be51-caf00e0a98ed tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.593s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2276.947449] env[63279]: INFO nova.compute.claims [None req-f3d444d6-0789-4e27-be51-caf00e0a98ed tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] [instance: 3de52a94-c1de-4b37-985c-9101417260e1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2276.966033] env[63279]: INFO nova.scheduler.client.report [None req-7abee42c-e6b2-4b7c-9763-4983fc7c6675 tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] Deleted allocations for instance 21aca39e-8513-49bd-93e9-0d101cee591f [ 2277.067234] env[63279]: DEBUG nova.compute.manager [None req-6d3f6bc6-99d6-42fc-8d00-639d1e794324 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] [instance: 6e452711-a79c-4560-b38f-9414c87e6683] Start spawning the instance on the hypervisor. 
{{(pid=63279) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 2277.096196] env[63279]: DEBUG nova.virt.hardware [None req-6d3f6bc6-99d6-42fc-8d00-639d1e794324 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-13T17:30:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-13T17:30:01Z,direct_url=,disk_format='vmdk',id=30887889-e45b-4f67-8b3c-16216e594a90,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a935e86eee0a4c38adfe0367d2097a61',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-13T17:30:02Z,virtual_size=,visibility=), allow threads: False {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2277.097025] env[63279]: DEBUG nova.virt.hardware [None req-6d3f6bc6-99d6-42fc-8d00-639d1e794324 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Flavor limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2277.097025] env[63279]: DEBUG nova.virt.hardware [None req-6d3f6bc6-99d6-42fc-8d00-639d1e794324 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Image limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2277.097025] env[63279]: DEBUG nova.virt.hardware [None req-6d3f6bc6-99d6-42fc-8d00-639d1e794324 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Flavor pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2277.097025] env[63279]: DEBUG nova.virt.hardware [None req-6d3f6bc6-99d6-42fc-8d00-639d1e794324 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Image pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2277.097414] env[63279]: DEBUG nova.virt.hardware [None req-6d3f6bc6-99d6-42fc-8d00-639d1e794324 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2277.097772] env[63279]: DEBUG nova.virt.hardware [None req-6d3f6bc6-99d6-42fc-8d00-639d1e794324 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 2277.097823] env[63279]: DEBUG nova.virt.hardware [None req-6d3f6bc6-99d6-42fc-8d00-639d1e794324 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 2277.098036] env[63279]: DEBUG nova.virt.hardware [None req-6d3f6bc6-99d6-42fc-8d00-639d1e794324 tempest-ServersTestJSON-1420611653 
tempest-ServersTestJSON-1420611653-project-member] Got 1 possible topologies {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2277.098208] env[63279]: DEBUG nova.virt.hardware [None req-6d3f6bc6-99d6-42fc-8d00-639d1e794324 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2277.098397] env[63279]: DEBUG nova.virt.hardware [None req-6d3f6bc6-99d6-42fc-8d00-639d1e794324 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2277.099383] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5b4f3fd-5fe8-4e4b-835d-9120e641de82 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2277.107584] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb7b49be-4553-4f1c-a97e-cc0e9050e29c {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2277.225608] env[63279]: DEBUG oslo_concurrency.lockutils [None req-5c88ffbb-5156-4ee9-8ee2-94ea57b56588 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Lock "2fba60ba-7ef0-4e61-9f7d-147cad8ab7ed" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 8.292s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2277.376937] env[63279]: DEBUG oslo_concurrency.lockutils [None req-4c6686f2-b4a3-48f1-a3b8-2f0ad9d067b3 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Lock "d0b8c9dd-c683-4f3a-b819-d9d57d96636b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 18.172s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2277.381582] env[63279]: DEBUG oslo_vmware.api [None req-284fb117-637f-4fa4-b022-b8d4dd30d5e2 tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Task: {'id': task-2087944, 'name': Rename_Task, 'duration_secs': 0.143104} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2277.382242] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-284fb117-637f-4fa4-b022-b8d4dd30d5e2 tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] [instance: 19e10ee4-99d1-44b9-9354-4c162d541a1f] Powering on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2277.382602] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-dcc03e7c-acb1-4a15-8b66-eeaff6ae54cf {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2277.394393] env[63279]: DEBUG oslo_vmware.api [None req-54619c35-9f4c-41b9-970e-d144dae4e3d2 tempest-ServersAdmin275Test-1209233635 tempest-ServersAdmin275Test-1209233635-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52b66a78-da42-1519-5282-860b3776a770, 'name': SearchDatastore_Task, 'duration_secs': 0.01499} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2277.395857] env[63279]: DEBUG oslo_concurrency.lockutils [None req-54619c35-9f4c-41b9-970e-d144dae4e3d2 tempest-ServersAdmin275Test-1209233635 tempest-ServersAdmin275Test-1209233635-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2277.396159] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-54619c35-9f4c-41b9-970e-d144dae4e3d2 tempest-ServersAdmin275Test-1209233635 tempest-ServersAdmin275Test-1209233635-project-member] [instance: 69c88844-84c7-4272-a2c4-051f1499df84] Processing image 30887889-e45b-4f67-8b3c-16216e594a90 {{(pid=63279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2277.396574] env[63279]: DEBUG oslo_concurrency.lockutils [None req-54619c35-9f4c-41b9-970e-d144dae4e3d2 tempest-ServersAdmin275Test-1209233635 tempest-ServersAdmin275Test-1209233635-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2277.396778] env[63279]: DEBUG oslo_concurrency.lockutils [None req-54619c35-9f4c-41b9-970e-d144dae4e3d2 tempest-ServersAdmin275Test-1209233635 tempest-ServersAdmin275Test-1209233635-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2277.397018] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-54619c35-9f4c-41b9-970e-d144dae4e3d2 tempest-ServersAdmin275Test-1209233635 tempest-ServersAdmin275Test-1209233635-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2277.397382] env[63279]: DEBUG oslo_vmware.api [None req-284fb117-637f-4fa4-b022-b8d4dd30d5e2 tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Waiting for the task: (returnval){ [ 2277.397382] env[63279]: value = "task-2087945" [ 2277.397382] env[63279]: _type = "Task" [ 2277.397382] env[63279]: } to 
complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2277.397576] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c30d81a4-bbaf-4868-9f91-46e7dc61d652 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2277.410329] env[63279]: DEBUG oslo_vmware.api [None req-284fb117-637f-4fa4-b022-b8d4dd30d5e2 tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Task: {'id': task-2087945, 'name': PowerOnVM_Task} progress is 33%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2277.411591] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-54619c35-9f4c-41b9-970e-d144dae4e3d2 tempest-ServersAdmin275Test-1209233635 tempest-ServersAdmin275Test-1209233635-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2277.411773] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-54619c35-9f4c-41b9-970e-d144dae4e3d2 tempest-ServersAdmin275Test-1209233635 tempest-ServersAdmin275Test-1209233635-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2277.412661] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bf50f32d-66da-4969-ab04-7571df78b126 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2277.418244] env[63279]: DEBUG oslo_vmware.api [None req-54619c35-9f4c-41b9-970e-d144dae4e3d2 tempest-ServersAdmin275Test-1209233635 tempest-ServersAdmin275Test-1209233635-project-member] Waiting for the task: (returnval){ [ 2277.418244] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]529b527c-2dea-a44a-22d3-75fd15adc168" [ 2277.418244] env[63279]: _type = "Task" [ 2277.418244] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2277.424501] env[63279]: DEBUG nova.network.neutron [None req-6d3f6bc6-99d6-42fc-8d00-639d1e794324 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] [instance: 6e452711-a79c-4560-b38f-9414c87e6683] Successfully updated port: fffc97fe-3975-45bb-8c80-f12056750eb4 {{(pid=63279) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2277.430263] env[63279]: DEBUG oslo_vmware.api [None req-54619c35-9f4c-41b9-970e-d144dae4e3d2 tempest-ServersAdmin275Test-1209233635 tempest-ServersAdmin275Test-1209233635-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]529b527c-2dea-a44a-22d3-75fd15adc168, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2277.473442] env[63279]: DEBUG oslo_concurrency.lockutils [None req-7abee42c-e6b2-4b7c-9763-4983fc7c6675 tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] Lock "21aca39e-8513-49bd-93e9-0d101cee591f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.858s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2277.498314] env[63279]: DEBUG nova.compute.manager [req-811e2bd8-38b9-40a0-8600-3f537a8ba893 req-c0b3a671-a1bc-4893-8187-508b4ab4a8c8 service nova] [instance: 6e452711-a79c-4560-b38f-9414c87e6683] Received event network-vif-plugged-fffc97fe-3975-45bb-8c80-f12056750eb4 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2277.498632] env[63279]: DEBUG oslo_concurrency.lockutils [req-811e2bd8-38b9-40a0-8600-3f537a8ba893 req-c0b3a671-a1bc-4893-8187-508b4ab4a8c8 service nova] Acquiring lock "6e452711-a79c-4560-b38f-9414c87e6683-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2277.498817] env[63279]: DEBUG oslo_concurrency.lockutils [req-811e2bd8-38b9-40a0-8600-3f537a8ba893 req-c0b3a671-a1bc-4893-8187-508b4ab4a8c8 service nova] Lock "6e452711-a79c-4560-b38f-9414c87e6683-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2277.498975] env[63279]: DEBUG oslo_concurrency.lockutils [req-811e2bd8-38b9-40a0-8600-3f537a8ba893 req-c0b3a671-a1bc-4893-8187-508b4ab4a8c8 service nova] Lock "6e452711-a79c-4560-b38f-9414c87e6683-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2277.499177] env[63279]: DEBUG nova.compute.manager [req-811e2bd8-38b9-40a0-8600-3f537a8ba893 req-c0b3a671-a1bc-4893-8187-508b4ab4a8c8 service nova] [instance: 6e452711-a79c-4560-b38f-9414c87e6683] No waiting events found dispatching network-vif-plugged-fffc97fe-3975-45bb-8c80-f12056750eb4 {{(pid=63279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 2277.499377] env[63279]: WARNING nova.compute.manager [req-811e2bd8-38b9-40a0-8600-3f537a8ba893 req-c0b3a671-a1bc-4893-8187-508b4ab4a8c8 service nova] [instance: 6e452711-a79c-4560-b38f-9414c87e6683] Received unexpected event network-vif-plugged-fffc97fe-3975-45bb-8c80-f12056750eb4 for instance with vm_state building and task_state spawning. 
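The entries above show the external-event handshake this log keeps exercising: the compute manager registers the events it expects for an instance, and when Neutron reports network-vif-plugged the event is popped and dispatched; if nothing is waiting (as here, because the instance is still building), the event is logged as unexpected. Below is a minimal sketch of that wait/dispatch pattern built from plain threading primitives; it is an illustration only, not Nova's InstanceEvents code, and the class and method names are hypothetical.

    # Illustrative sketch (not Nova source): a "wait for network-vif-plugged"
    # style handshake built from threading primitives.
    import threading

    class ExpectedEvents:
        """Tracks events a build thread expects before it continues."""

        def __init__(self):
            self._lock = threading.Lock()
            # (instance_uuid, event_name) -> threading.Event
            self._expected = {}

        def prepare(self, instance_uuid, event_name):
            # Called by the thread that will later block on the event.
            with self._lock:
                ev = threading.Event()
                self._expected[(instance_uuid, event_name)] = ev
                return ev

        def dispatch(self, instance_uuid, event_name):
            # Called when the external notification arrives; returns True if
            # a waiter was found, False if the event was unexpected.
            with self._lock:
                ev = self._expected.pop((instance_uuid, event_name), None)
            if ev is None:
                print("WARNING: received unexpected event %s for %s"
                      % (event_name, instance_uuid))
                return False
            ev.set()
            return True

A build thread would call prepare() before triggering the port binding, then block on the returned event with ev.wait(timeout=...); the notification handler calls dispatch() when the plug event arrives, and an arrival with no registered waiter produces the "unexpected event" warning seen in the log.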
[ 2277.506018] env[63279]: INFO nova.compute.manager [None req-cda03fef-d08c-4df2-9f4f-bf02004d87f1 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] [instance: 2fba60ba-7ef0-4e61-9f7d-147cad8ab7ed] Rescuing [ 2277.506324] env[63279]: DEBUG oslo_concurrency.lockutils [None req-cda03fef-d08c-4df2-9f4f-bf02004d87f1 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Acquiring lock "refresh_cache-2fba60ba-7ef0-4e61-9f7d-147cad8ab7ed" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2277.506501] env[63279]: DEBUG oslo_concurrency.lockutils [None req-cda03fef-d08c-4df2-9f4f-bf02004d87f1 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Acquired lock "refresh_cache-2fba60ba-7ef0-4e61-9f7d-147cad8ab7ed" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2277.506671] env[63279]: DEBUG nova.network.neutron [None req-cda03fef-d08c-4df2-9f4f-bf02004d87f1 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] [instance: 2fba60ba-7ef0-4e61-9f7d-147cad8ab7ed] Building network info cache for instance {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2277.632394] env[63279]: DEBUG nova.compute.manager [req-6210be34-6998-4049-a1f9-1410c648847f req-f904fc9b-4e14-4cda-be75-dfc636f0908b service nova] [instance: 9fd3ea14-3e25-47f4-ae84-1b1fb8b1bbf6] Received event network-vif-plugged-34ef38ab-d2fc-4fee-9782-e01851e13c39 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2277.632609] env[63279]: DEBUG oslo_concurrency.lockutils [req-6210be34-6998-4049-a1f9-1410c648847f req-f904fc9b-4e14-4cda-be75-dfc636f0908b service nova] Acquiring lock "9fd3ea14-3e25-47f4-ae84-1b1fb8b1bbf6-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2277.632827] env[63279]: DEBUG oslo_concurrency.lockutils [req-6210be34-6998-4049-a1f9-1410c648847f req-f904fc9b-4e14-4cda-be75-dfc636f0908b service nova] Lock "9fd3ea14-3e25-47f4-ae84-1b1fb8b1bbf6-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2277.633026] env[63279]: DEBUG oslo_concurrency.lockutils [req-6210be34-6998-4049-a1f9-1410c648847f req-f904fc9b-4e14-4cda-be75-dfc636f0908b service nova] Lock "9fd3ea14-3e25-47f4-ae84-1b1fb8b1bbf6-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2277.636479] env[63279]: DEBUG nova.compute.manager [req-6210be34-6998-4049-a1f9-1410c648847f req-f904fc9b-4e14-4cda-be75-dfc636f0908b service nova] [instance: 9fd3ea14-3e25-47f4-ae84-1b1fb8b1bbf6] No waiting events found dispatching network-vif-plugged-34ef38ab-d2fc-4fee-9782-e01851e13c39 {{(pid=63279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 2277.636797] env[63279]: WARNING nova.compute.manager [req-6210be34-6998-4049-a1f9-1410c648847f req-f904fc9b-4e14-4cda-be75-dfc636f0908b service nova] [instance: 
9fd3ea14-3e25-47f4-ae84-1b1fb8b1bbf6] Received unexpected event network-vif-plugged-34ef38ab-d2fc-4fee-9782-e01851e13c39 for instance with vm_state active and task_state None. [ 2277.680658] env[63279]: DEBUG nova.network.neutron [None req-16d82225-da8f-4b23-ae90-64edd2b706db tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] [instance: 9fd3ea14-3e25-47f4-ae84-1b1fb8b1bbf6] Successfully updated port: 34ef38ab-d2fc-4fee-9782-e01851e13c39 {{(pid=63279) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2277.909499] env[63279]: DEBUG oslo_vmware.api [None req-284fb117-637f-4fa4-b022-b8d4dd30d5e2 tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Task: {'id': task-2087945, 'name': PowerOnVM_Task, 'duration_secs': 0.459302} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2277.909808] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-284fb117-637f-4fa4-b022-b8d4dd30d5e2 tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] [instance: 19e10ee4-99d1-44b9-9354-4c162d541a1f] Powered on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2277.910020] env[63279]: INFO nova.compute.manager [None req-284fb117-637f-4fa4-b022-b8d4dd30d5e2 tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] [instance: 19e10ee4-99d1-44b9-9354-4c162d541a1f] Took 14.31 seconds to spawn the instance on the hypervisor. [ 2277.910235] env[63279]: DEBUG nova.compute.manager [None req-284fb117-637f-4fa4-b022-b8d4dd30d5e2 tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] [instance: 19e10ee4-99d1-44b9-9354-4c162d541a1f] Checking state {{(pid=63279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2277.911046] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e7e0573-1282-495f-b640-e87ed3e74506 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2277.928406] env[63279]: DEBUG oslo_vmware.api [None req-54619c35-9f4c-41b9-970e-d144dae4e3d2 tempest-ServersAdmin275Test-1209233635 tempest-ServersAdmin275Test-1209233635-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]529b527c-2dea-a44a-22d3-75fd15adc168, 'name': SearchDatastore_Task, 'duration_secs': 0.019535} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2277.929204] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1ca1c85f-db10-4930-84b7-cfbdeb652269 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2277.931780] env[63279]: DEBUG oslo_concurrency.lockutils [None req-6d3f6bc6-99d6-42fc-8d00-639d1e794324 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Acquiring lock "refresh_cache-6e452711-a79c-4560-b38f-9414c87e6683" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2277.931922] env[63279]: DEBUG oslo_concurrency.lockutils [None req-6d3f6bc6-99d6-42fc-8d00-639d1e794324 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Acquired lock "refresh_cache-6e452711-a79c-4560-b38f-9414c87e6683" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2277.932087] env[63279]: DEBUG nova.network.neutron [None req-6d3f6bc6-99d6-42fc-8d00-639d1e794324 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] [instance: 6e452711-a79c-4560-b38f-9414c87e6683] Building network info cache for instance {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2277.936509] env[63279]: DEBUG oslo_vmware.api [None req-54619c35-9f4c-41b9-970e-d144dae4e3d2 tempest-ServersAdmin275Test-1209233635 tempest-ServersAdmin275Test-1209233635-project-member] Waiting for the task: (returnval){ [ 2277.936509] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52dd5451-b815-070d-d3f0-2c7ae53b8d2a" [ 2277.936509] env[63279]: _type = "Task" [ 2277.936509] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2277.944939] env[63279]: DEBUG oslo_vmware.api [None req-54619c35-9f4c-41b9-970e-d144dae4e3d2 tempest-ServersAdmin275Test-1209233635 tempest-ServersAdmin275Test-1209233635-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52dd5451-b815-070d-d3f0-2c7ae53b8d2a, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2277.954158] env[63279]: DEBUG nova.compute.manager [None req-60e19b22-fe2d-4d7d-8d6e-d53eab73e551 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: d0b8c9dd-c683-4f3a-b819-d9d57d96636b] Stashing vm_state: active {{(pid=63279) _prep_resize /opt/stack/nova/nova/compute/manager.py:6136}} [ 2278.184232] env[63279]: DEBUG oslo_concurrency.lockutils [None req-16d82225-da8f-4b23-ae90-64edd2b706db tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Acquiring lock "refresh_cache-9fd3ea14-3e25-47f4-ae84-1b1fb8b1bbf6" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2278.184469] env[63279]: DEBUG oslo_concurrency.lockutils [None req-16d82225-da8f-4b23-ae90-64edd2b706db tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Acquired lock "refresh_cache-9fd3ea14-3e25-47f4-ae84-1b1fb8b1bbf6" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2278.184660] env[63279]: DEBUG nova.network.neutron [None req-16d82225-da8f-4b23-ae90-64edd2b706db tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] [instance: 9fd3ea14-3e25-47f4-ae84-1b1fb8b1bbf6] Building network info cache for instance {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2278.204426] env[63279]: DEBUG oslo_concurrency.lockutils [None req-1e61b069-7c67-4b0d-bc91-a8ce70644860 tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] Acquiring lock "59ad6180-e561-41e3-86e4-37fc20819578" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2278.204677] env[63279]: DEBUG oslo_concurrency.lockutils [None req-1e61b069-7c67-4b0d-bc91-a8ce70644860 tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] Lock "59ad6180-e561-41e3-86e4-37fc20819578" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2278.204896] env[63279]: DEBUG oslo_concurrency.lockutils [None req-1e61b069-7c67-4b0d-bc91-a8ce70644860 tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] Acquiring lock "59ad6180-e561-41e3-86e4-37fc20819578-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2278.205138] env[63279]: DEBUG oslo_concurrency.lockutils [None req-1e61b069-7c67-4b0d-bc91-a8ce70644860 tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] Lock "59ad6180-e561-41e3-86e4-37fc20819578-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2278.205399] env[63279]: DEBUG oslo_concurrency.lockutils [None req-1e61b069-7c67-4b0d-bc91-a8ce70644860 
tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] Lock "59ad6180-e561-41e3-86e4-37fc20819578-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2278.207722] env[63279]: INFO nova.compute.manager [None req-1e61b069-7c67-4b0d-bc91-a8ce70644860 tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] [instance: 59ad6180-e561-41e3-86e4-37fc20819578] Terminating instance [ 2278.228282] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50342852-e058-4bc9-8857-d7d04cc0b122 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2278.238578] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2ce4be2-7f09-4704-b427-f9c53797b4f7 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2278.274852] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e22848a2-9bfd-4f6e-b357-0cee7a890f68 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2278.283551] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ea958d8-6b38-4837-a172-8fb16d908185 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2278.298192] env[63279]: DEBUG nova.compute.provider_tree [None req-f3d444d6-0789-4e27-be51-caf00e0a98ed tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] Inventory has not changed in ProviderTree for provider: 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2278.329668] env[63279]: DEBUG nova.network.neutron [None req-cda03fef-d08c-4df2-9f4f-bf02004d87f1 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] [instance: 2fba60ba-7ef0-4e61-9f7d-147cad8ab7ed] Updating instance_info_cache with network_info: [{"id": "8f12bb0d-eec1-4c21-b319-372b37e319ca", "address": "fa:16:3e:68:12:c8", "network": {"id": "c1d7406d-6852-47cd-a4a3-de7373d03ab4", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1990733857-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.181", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1fc9b60ae304455097b8be9a276796fa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0549820d-5649-40bc-ad6e-9ae27b384d90", "external-id": "nsx-vlan-transportzone-434", "segmentation_id": 434, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8f12bb0d-ee", "ovs_interfaceid": "8f12bb0d-eec1-4c21-b319-372b37e319ca", "qbh_params": null, "qbg_params": null, 
"active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2278.432838] env[63279]: INFO nova.compute.manager [None req-284fb117-637f-4fa4-b022-b8d4dd30d5e2 tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] [instance: 19e10ee4-99d1-44b9-9354-4c162d541a1f] Took 19.71 seconds to build instance. [ 2278.450405] env[63279]: DEBUG oslo_vmware.api [None req-54619c35-9f4c-41b9-970e-d144dae4e3d2 tempest-ServersAdmin275Test-1209233635 tempest-ServersAdmin275Test-1209233635-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52dd5451-b815-070d-d3f0-2c7ae53b8d2a, 'name': SearchDatastore_Task, 'duration_secs': 0.015761} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2278.450726] env[63279]: DEBUG oslo_concurrency.lockutils [None req-54619c35-9f4c-41b9-970e-d144dae4e3d2 tempest-ServersAdmin275Test-1209233635 tempest-ServersAdmin275Test-1209233635-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2278.451017] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-54619c35-9f4c-41b9-970e-d144dae4e3d2 tempest-ServersAdmin275Test-1209233635 tempest-ServersAdmin275Test-1209233635-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] 69c88844-84c7-4272-a2c4-051f1499df84/69c88844-84c7-4272-a2c4-051f1499df84.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2278.451339] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d6ade06f-7603-4b0a-9db4-e5c1841a41b9 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2278.462023] env[63279]: DEBUG oslo_vmware.api [None req-54619c35-9f4c-41b9-970e-d144dae4e3d2 tempest-ServersAdmin275Test-1209233635 tempest-ServersAdmin275Test-1209233635-project-member] Waiting for the task: (returnval){ [ 2278.462023] env[63279]: value = "task-2087946" [ 2278.462023] env[63279]: _type = "Task" [ 2278.462023] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2278.468656] env[63279]: DEBUG nova.network.neutron [None req-6d3f6bc6-99d6-42fc-8d00-639d1e794324 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] [instance: 6e452711-a79c-4560-b38f-9414c87e6683] Instance cache missing network info. {{(pid=63279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2278.476358] env[63279]: DEBUG oslo_vmware.api [None req-54619c35-9f4c-41b9-970e-d144dae4e3d2 tempest-ServersAdmin275Test-1209233635 tempest-ServersAdmin275Test-1209233635-project-member] Task: {'id': task-2087946, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2278.477472] env[63279]: DEBUG oslo_concurrency.lockutils [None req-60e19b22-fe2d-4d7d-8d6e-d53eab73e551 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2278.669657] env[63279]: DEBUG nova.network.neutron [None req-6d3f6bc6-99d6-42fc-8d00-639d1e794324 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] [instance: 6e452711-a79c-4560-b38f-9414c87e6683] Updating instance_info_cache with network_info: [{"id": "fffc97fe-3975-45bb-8c80-f12056750eb4", "address": "fa:16:3e:c1:43:29", "network": {"id": "2caeac4f-4d6f-49f6-ad75-055171bad9b0", "bridge": "br-int", "label": "tempest-ServersTestJSON-1264030443-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4fcef39e334249afb9636455802059c5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9fa933df-d66f-485e-8cf9-eda7f1a7f283", "external-id": "nsx-vlan-transportzone-87", "segmentation_id": 87, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfffc97fe-39", "ovs_interfaceid": "fffc97fe-3975-45bb-8c80-f12056750eb4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2278.712078] env[63279]: DEBUG nova.compute.manager [None req-1e61b069-7c67-4b0d-bc91-a8ce70644860 tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] [instance: 59ad6180-e561-41e3-86e4-37fc20819578] Start destroying the instance on the hypervisor. 
{{(pid=63279) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2278.712456] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-1e61b069-7c67-4b0d-bc91-a8ce70644860 tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] [instance: 59ad6180-e561-41e3-86e4-37fc20819578] Destroying instance {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2278.713653] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f51eb79-b89f-4f40-af2a-c243039c629b {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2278.724556] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-1e61b069-7c67-4b0d-bc91-a8ce70644860 tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] [instance: 59ad6180-e561-41e3-86e4-37fc20819578] Powering off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2278.724934] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-46a757c5-edb3-47ab-9c1e-daf8e73383c5 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2278.732595] env[63279]: DEBUG oslo_vmware.api [None req-1e61b069-7c67-4b0d-bc91-a8ce70644860 tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] Waiting for the task: (returnval){ [ 2278.732595] env[63279]: value = "task-2087947" [ 2278.732595] env[63279]: _type = "Task" [ 2278.732595] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2278.742745] env[63279]: DEBUG oslo_vmware.api [None req-1e61b069-7c67-4b0d-bc91-a8ce70644860 tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] Task: {'id': task-2087947, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2278.744608] env[63279]: WARNING nova.network.neutron [None req-16d82225-da8f-4b23-ae90-64edd2b706db tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] [instance: 9fd3ea14-3e25-47f4-ae84-1b1fb8b1bbf6] e0e614b7-de4b-485e-8824-582faae8febd already exists in list: networks containing: ['e0e614b7-de4b-485e-8824-582faae8febd']. ignoring it [ 2278.744801] env[63279]: WARNING nova.network.neutron [None req-16d82225-da8f-4b23-ae90-64edd2b706db tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] [instance: 9fd3ea14-3e25-47f4-ae84-1b1fb8b1bbf6] e0e614b7-de4b-485e-8824-582faae8febd already exists in list: networks containing: ['e0e614b7-de4b-485e-8824-582faae8febd']. 
ignoring it [ 2278.802094] env[63279]: DEBUG nova.scheduler.client.report [None req-f3d444d6-0789-4e27-be51-caf00e0a98ed tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] Inventory has not changed for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2278.831231] env[63279]: DEBUG oslo_concurrency.lockutils [None req-cda03fef-d08c-4df2-9f4f-bf02004d87f1 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Releasing lock "refresh_cache-2fba60ba-7ef0-4e61-9f7d-147cad8ab7ed" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2278.937504] env[63279]: DEBUG oslo_concurrency.lockutils [None req-284fb117-637f-4fa4-b022-b8d4dd30d5e2 tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Lock "19e10ee4-99d1-44b9-9354-4c162d541a1f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 21.222s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2278.974758] env[63279]: DEBUG oslo_vmware.api [None req-54619c35-9f4c-41b9-970e-d144dae4e3d2 tempest-ServersAdmin275Test-1209233635 tempest-ServersAdmin275Test-1209233635-project-member] Task: {'id': task-2087946, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2279.172921] env[63279]: DEBUG oslo_concurrency.lockutils [None req-6d3f6bc6-99d6-42fc-8d00-639d1e794324 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Releasing lock "refresh_cache-6e452711-a79c-4560-b38f-9414c87e6683" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2279.173499] env[63279]: DEBUG nova.compute.manager [None req-6d3f6bc6-99d6-42fc-8d00-639d1e794324 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] [instance: 6e452711-a79c-4560-b38f-9414c87e6683] Instance network_info: |[{"id": "fffc97fe-3975-45bb-8c80-f12056750eb4", "address": "fa:16:3e:c1:43:29", "network": {"id": "2caeac4f-4d6f-49f6-ad75-055171bad9b0", "bridge": "br-int", "label": "tempest-ServersTestJSON-1264030443-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4fcef39e334249afb9636455802059c5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9fa933df-d66f-485e-8cf9-eda7f1a7f283", "external-id": "nsx-vlan-transportzone-87", "segmentation_id": 87, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfffc97fe-39", "ovs_interfaceid": "fffc97fe-3975-45bb-8c80-f12056750eb4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 2279.174017] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-6d3f6bc6-99d6-42fc-8d00-639d1e794324 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] [instance: 6e452711-a79c-4560-b38f-9414c87e6683] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c1:43:29', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '9fa933df-d66f-485e-8cf9-eda7f1a7f283', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'fffc97fe-3975-45bb-8c80-f12056750eb4', 'vif_model': 'vmxnet3'}] {{(pid=63279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2279.182171] env[63279]: DEBUG oslo.service.loopingcall [None req-6d3f6bc6-99d6-42fc-8d00-639d1e794324 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2279.182658] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6e452711-a79c-4560-b38f-9414c87e6683] Creating VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2279.182968] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-98346354-e073-4f11-b057-337e820d0b9b {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2279.207773] env[63279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2279.207773] env[63279]: value = "task-2087948" [ 2279.207773] env[63279]: _type = "Task" [ 2279.207773] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2279.215193] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087948, 'name': CreateVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2279.242266] env[63279]: DEBUG oslo_vmware.api [None req-1e61b069-7c67-4b0d-bc91-a8ce70644860 tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] Task: {'id': task-2087947, 'name': PowerOffVM_Task, 'duration_secs': 0.352244} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2279.244849] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-1e61b069-7c67-4b0d-bc91-a8ce70644860 tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] [instance: 59ad6180-e561-41e3-86e4-37fc20819578] Powered off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2279.245111] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-1e61b069-7c67-4b0d-bc91-a8ce70644860 tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] [instance: 59ad6180-e561-41e3-86e4-37fc20819578] Unregistering the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2279.245416] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e0609842-cdcd-491c-a0fa-96c8c556c21c {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2279.310934] env[63279]: DEBUG oslo_concurrency.lockutils [None req-f3d444d6-0789-4e27-be51-caf00e0a98ed tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.365s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2279.311950] env[63279]: DEBUG nova.compute.manager [None req-f3d444d6-0789-4e27-be51-caf00e0a98ed tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] [instance: 3de52a94-c1de-4b37-985c-9101417260e1] Start building networks asynchronously for instance. 
{{(pid=63279) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 2279.317120] env[63279]: DEBUG oslo_concurrency.lockutils [None req-60e19b22-fe2d-4d7d-8d6e-d53eab73e551 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 0.840s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2279.329241] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-1e61b069-7c67-4b0d-bc91-a8ce70644860 tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] [instance: 59ad6180-e561-41e3-86e4-37fc20819578] Unregistered the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2279.329241] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-1e61b069-7c67-4b0d-bc91-a8ce70644860 tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] [instance: 59ad6180-e561-41e3-86e4-37fc20819578] Deleting contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2279.329241] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-1e61b069-7c67-4b0d-bc91-a8ce70644860 tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] Deleting the datastore file [datastore1] 59ad6180-e561-41e3-86e4-37fc20819578 {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2279.329514] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d1a57a78-bbe0-4ed3-84e6-639b7e6a5807 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2279.336745] env[63279]: DEBUG oslo_vmware.api [None req-1e61b069-7c67-4b0d-bc91-a8ce70644860 tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] Waiting for the task: (returnval){ [ 2279.336745] env[63279]: value = "task-2087950" [ 2279.336745] env[63279]: _type = "Task" [ 2279.336745] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2279.345921] env[63279]: DEBUG oslo_vmware.api [None req-1e61b069-7c67-4b0d-bc91-a8ce70644860 tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] Task: {'id': task-2087950, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2279.368515] env[63279]: DEBUG nova.network.neutron [None req-16d82225-da8f-4b23-ae90-64edd2b706db tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] [instance: 9fd3ea14-3e25-47f4-ae84-1b1fb8b1bbf6] Updating instance_info_cache with network_info: [{"id": "0bd52488-d960-42c9-8077-fc8fe9eda956", "address": "fa:16:3e:16:45:3c", "network": {"id": "e0e614b7-de4b-485e-8824-582faae8febd", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1102025073-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.241", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b5b21bc5072e4945a19a782dd9561709", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "41278529-8bd2-44a1-97c8-03967faa3ff7", "external-id": "nsx-vlan-transportzone-749", "segmentation_id": 749, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0bd52488-d9", "ovs_interfaceid": "0bd52488-d960-42c9-8077-fc8fe9eda956", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "f3d164be-d41c-4e5a-97f5-a50cb539a06a", "address": "fa:16:3e:35:3a:40", "network": {"id": "e0e614b7-de4b-485e-8824-582faae8febd", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1102025073-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b5b21bc5072e4945a19a782dd9561709", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "41278529-8bd2-44a1-97c8-03967faa3ff7", "external-id": "nsx-vlan-transportzone-749", "segmentation_id": 749, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf3d164be-d4", "ovs_interfaceid": "f3d164be-d41c-4e5a-97f5-a50cb539a06a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "34ef38ab-d2fc-4fee-9782-e01851e13c39", "address": "fa:16:3e:13:bf:29", "network": {"id": "e0e614b7-de4b-485e-8824-582faae8febd", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1102025073-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b5b21bc5072e4945a19a782dd9561709", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": 
"ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "41278529-8bd2-44a1-97c8-03967faa3ff7", "external-id": "nsx-vlan-transportzone-749", "segmentation_id": 749, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap34ef38ab-d2", "ovs_interfaceid": "34ef38ab-d2fc-4fee-9782-e01851e13c39", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2279.473617] env[63279]: DEBUG oslo_vmware.api [None req-54619c35-9f4c-41b9-970e-d144dae4e3d2 tempest-ServersAdmin275Test-1209233635 tempest-ServersAdmin275Test-1209233635-project-member] Task: {'id': task-2087946, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.649395} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2279.474020] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-54619c35-9f4c-41b9-970e-d144dae4e3d2 tempest-ServersAdmin275Test-1209233635 tempest-ServersAdmin275Test-1209233635-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] 69c88844-84c7-4272-a2c4-051f1499df84/69c88844-84c7-4272-a2c4-051f1499df84.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2279.474334] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-54619c35-9f4c-41b9-970e-d144dae4e3d2 tempest-ServersAdmin275Test-1209233635 tempest-ServersAdmin275Test-1209233635-project-member] [instance: 69c88844-84c7-4272-a2c4-051f1499df84] Extending root virtual disk to 1048576 {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2279.474668] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-55b3e81f-459f-421a-8cc1-31699b4221d1 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2279.481949] env[63279]: DEBUG oslo_vmware.api [None req-54619c35-9f4c-41b9-970e-d144dae4e3d2 tempest-ServersAdmin275Test-1209233635 tempest-ServersAdmin275Test-1209233635-project-member] Waiting for the task: (returnval){ [ 2279.481949] env[63279]: value = "task-2087951" [ 2279.481949] env[63279]: _type = "Task" [ 2279.481949] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2279.492576] env[63279]: DEBUG oslo_vmware.api [None req-54619c35-9f4c-41b9-970e-d144dae4e3d2 tempest-ServersAdmin275Test-1209233635 tempest-ServersAdmin275Test-1209233635-project-member] Task: {'id': task-2087951, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2279.563831] env[63279]: DEBUG oslo_vmware.rw_handles [None req-a019c069-31ac-44de-94e8-2f9e51596f90 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/523519bc-a7f0-1e26-ec10-3b4df4f3a3a1/disk-0.vmdk. 
{{(pid=63279) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 2279.564869] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2f1bc52-2f6a-422e-bd5c-9479c7e639ab {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2279.571208] env[63279]: DEBUG oslo_vmware.rw_handles [None req-a019c069-31ac-44de-94e8-2f9e51596f90 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/523519bc-a7f0-1e26-ec10-3b4df4f3a3a1/disk-0.vmdk is in state: ready. {{(pid=63279) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 2279.571371] env[63279]: ERROR oslo_vmware.rw_handles [None req-a019c069-31ac-44de-94e8-2f9e51596f90 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/523519bc-a7f0-1e26-ec10-3b4df4f3a3a1/disk-0.vmdk due to incomplete transfer. [ 2279.571594] env[63279]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-0a621738-2977-4f1b-97b0-4ac166c4733e {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2279.578613] env[63279]: DEBUG oslo_vmware.rw_handles [None req-a019c069-31ac-44de-94e8-2f9e51596f90 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/523519bc-a7f0-1e26-ec10-3b4df4f3a3a1/disk-0.vmdk. {{(pid=63279) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 2279.578820] env[63279]: DEBUG nova.virt.vmwareapi.images [None req-a019c069-31ac-44de-94e8-2f9e51596f90 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 686a7ce2-2d07-411e-91d6-0471c55c3728] Uploaded image a8552abc-0ec1-42e4-85be-a54106036a05 to the Glance image server {{(pid=63279) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 2279.581194] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-a019c069-31ac-44de-94e8-2f9e51596f90 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 686a7ce2-2d07-411e-91d6-0471c55c3728] Destroying the VM {{(pid=63279) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 2279.581388] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-04bec809-22be-4e23-8df2-b4de521a6373 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2279.587431] env[63279]: DEBUG oslo_vmware.api [None req-a019c069-31ac-44de-94e8-2f9e51596f90 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Waiting for the task: (returnval){ [ 2279.587431] env[63279]: value = "task-2087952" [ 2279.587431] env[63279]: _type = "Task" [ 2279.587431] env[63279]: } to complete. 
{{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2279.594669] env[63279]: DEBUG nova.compute.manager [req-6deca7e9-8ff1-42e6-992a-3892524bd339 req-64e01dbb-9a0a-40df-a7f1-80ae8c25e212 service nova] [instance: 6e452711-a79c-4560-b38f-9414c87e6683] Received event network-changed-fffc97fe-3975-45bb-8c80-f12056750eb4 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2279.594938] env[63279]: DEBUG nova.compute.manager [req-6deca7e9-8ff1-42e6-992a-3892524bd339 req-64e01dbb-9a0a-40df-a7f1-80ae8c25e212 service nova] [instance: 6e452711-a79c-4560-b38f-9414c87e6683] Refreshing instance network info cache due to event network-changed-fffc97fe-3975-45bb-8c80-f12056750eb4. {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11655}} [ 2279.595222] env[63279]: DEBUG oslo_concurrency.lockutils [req-6deca7e9-8ff1-42e6-992a-3892524bd339 req-64e01dbb-9a0a-40df-a7f1-80ae8c25e212 service nova] Acquiring lock "refresh_cache-6e452711-a79c-4560-b38f-9414c87e6683" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2279.595436] env[63279]: DEBUG oslo_concurrency.lockutils [req-6deca7e9-8ff1-42e6-992a-3892524bd339 req-64e01dbb-9a0a-40df-a7f1-80ae8c25e212 service nova] Acquired lock "refresh_cache-6e452711-a79c-4560-b38f-9414c87e6683" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2279.595629] env[63279]: DEBUG nova.network.neutron [req-6deca7e9-8ff1-42e6-992a-3892524bd339 req-64e01dbb-9a0a-40df-a7f1-80ae8c25e212 service nova] [instance: 6e452711-a79c-4560-b38f-9414c87e6683] Refreshing network info cache for port fffc97fe-3975-45bb-8c80-f12056750eb4 {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2279.602281] env[63279]: DEBUG oslo_vmware.api [None req-a019c069-31ac-44de-94e8-2f9e51596f90 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Task: {'id': task-2087952, 'name': Destroy_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2279.661545] env[63279]: DEBUG nova.compute.manager [req-527525b3-85c1-4104-85a3-b64bb202ea10 req-28ef8648-5dfb-4770-9dbb-497f8ea3da11 service nova] [instance: 9fd3ea14-3e25-47f4-ae84-1b1fb8b1bbf6] Received event network-changed-34ef38ab-d2fc-4fee-9782-e01851e13c39 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2279.661729] env[63279]: DEBUG nova.compute.manager [req-527525b3-85c1-4104-85a3-b64bb202ea10 req-28ef8648-5dfb-4770-9dbb-497f8ea3da11 service nova] [instance: 9fd3ea14-3e25-47f4-ae84-1b1fb8b1bbf6] Refreshing instance network info cache due to event network-changed-34ef38ab-d2fc-4fee-9782-e01851e13c39. {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11655}} [ 2279.661929] env[63279]: DEBUG oslo_concurrency.lockutils [req-527525b3-85c1-4104-85a3-b64bb202ea10 req-28ef8648-5dfb-4770-9dbb-497f8ea3da11 service nova] Acquiring lock "refresh_cache-9fd3ea14-3e25-47f4-ae84-1b1fb8b1bbf6" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2279.716962] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087948, 'name': CreateVM_Task} progress is 25%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2279.820540] env[63279]: DEBUG nova.compute.utils [None req-f3d444d6-0789-4e27-be51-caf00e0a98ed tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] Using /dev/sd instead of None {{(pid=63279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2279.823954] env[63279]: INFO nova.compute.claims [None req-60e19b22-fe2d-4d7d-8d6e-d53eab73e551 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: d0b8c9dd-c683-4f3a-b819-d9d57d96636b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2279.827259] env[63279]: DEBUG nova.compute.manager [None req-f3d444d6-0789-4e27-be51-caf00e0a98ed tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] [instance: 3de52a94-c1de-4b37-985c-9101417260e1] Allocating IP information in the background. {{(pid=63279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 2279.827510] env[63279]: DEBUG nova.network.neutron [None req-f3d444d6-0789-4e27-be51-caf00e0a98ed tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] [instance: 3de52a94-c1de-4b37-985c-9101417260e1] allocate_for_instance() {{(pid=63279) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2279.846873] env[63279]: DEBUG oslo_vmware.api [None req-1e61b069-7c67-4b0d-bc91-a8ce70644860 tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] Task: {'id': task-2087950, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2279.877694] env[63279]: DEBUG oslo_concurrency.lockutils [None req-16d82225-da8f-4b23-ae90-64edd2b706db tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Releasing lock "refresh_cache-9fd3ea14-3e25-47f4-ae84-1b1fb8b1bbf6" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2279.878417] env[63279]: DEBUG oslo_concurrency.lockutils [None req-16d82225-da8f-4b23-ae90-64edd2b706db tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Acquiring lock "9fd3ea14-3e25-47f4-ae84-1b1fb8b1bbf6" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2279.878573] env[63279]: DEBUG oslo_concurrency.lockutils [None req-16d82225-da8f-4b23-ae90-64edd2b706db tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Acquired lock "9fd3ea14-3e25-47f4-ae84-1b1fb8b1bbf6" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2279.880826] env[63279]: DEBUG oslo_concurrency.lockutils [req-527525b3-85c1-4104-85a3-b64bb202ea10 req-28ef8648-5dfb-4770-9dbb-497f8ea3da11 service nova] Acquired lock "refresh_cache-9fd3ea14-3e25-47f4-ae84-1b1fb8b1bbf6" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2279.881035] env[63279]: DEBUG nova.network.neutron [req-527525b3-85c1-4104-85a3-b64bb202ea10 req-28ef8648-5dfb-4770-9dbb-497f8ea3da11 service nova] [instance: 9fd3ea14-3e25-47f4-ae84-1b1fb8b1bbf6] Refreshing network info cache for port 
34ef38ab-d2fc-4fee-9782-e01851e13c39 {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2279.883390] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0aded88b-e50b-4fc6-b6a5-3df4e9e9b325 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2279.887853] env[63279]: DEBUG nova.policy [None req-f3d444d6-0789-4e27-be51-caf00e0a98ed tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '591c2211a5f24ffd8d624f966b4ec858', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3d9c0a9c34ba408c829c0b50f3592bb2', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63279) authorize /opt/stack/nova/nova/policy.py:192}} [ 2279.906967] env[63279]: DEBUG nova.virt.hardware [None req-16d82225-da8f-4b23-ae90-64edd2b706db tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-13T17:30:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=,min_ram=,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2279.907262] env[63279]: DEBUG nova.virt.hardware [None req-16d82225-da8f-4b23-ae90-64edd2b706db tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Flavor limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2279.907455] env[63279]: DEBUG nova.virt.hardware [None req-16d82225-da8f-4b23-ae90-64edd2b706db tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Image limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2279.907688] env[63279]: DEBUG nova.virt.hardware [None req-16d82225-da8f-4b23-ae90-64edd2b706db tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Flavor pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2279.907876] env[63279]: DEBUG nova.virt.hardware [None req-16d82225-da8f-4b23-ae90-64edd2b706db tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Image pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2279.908071] env[63279]: DEBUG nova.virt.hardware [None req-16d82225-da8f-4b23-ae90-64edd2b706db tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63279) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2279.908337] env[63279]: DEBUG nova.virt.hardware [None req-16d82225-da8f-4b23-ae90-64edd2b706db tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 2279.908531] env[63279]: DEBUG nova.virt.hardware [None req-16d82225-da8f-4b23-ae90-64edd2b706db tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 2279.908728] env[63279]: DEBUG nova.virt.hardware [None req-16d82225-da8f-4b23-ae90-64edd2b706db tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Got 1 possible topologies {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2279.908919] env[63279]: DEBUG nova.virt.hardware [None req-16d82225-da8f-4b23-ae90-64edd2b706db tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2279.909122] env[63279]: DEBUG nova.virt.hardware [None req-16d82225-da8f-4b23-ae90-64edd2b706db tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2279.915876] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-16d82225-da8f-4b23-ae90-64edd2b706db tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] [instance: 9fd3ea14-3e25-47f4-ae84-1b1fb8b1bbf6] Reconfiguring VM to attach interface {{(pid=63279) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1923}} [ 2279.916563] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3d68d5da-4dc3-49e1-946c-f988e1c4c8af {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2279.934301] env[63279]: DEBUG oslo_vmware.api [None req-16d82225-da8f-4b23-ae90-64edd2b706db tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Waiting for the task: (returnval){ [ 2279.934301] env[63279]: value = "task-2087953" [ 2279.934301] env[63279]: _type = "Task" [ 2279.934301] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2279.942558] env[63279]: DEBUG oslo_vmware.api [None req-16d82225-da8f-4b23-ae90-64edd2b706db tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Task: {'id': task-2087953, 'name': ReconfigVM_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2279.992419] env[63279]: DEBUG oslo_vmware.api [None req-54619c35-9f4c-41b9-970e-d144dae4e3d2 tempest-ServersAdmin275Test-1209233635 tempest-ServersAdmin275Test-1209233635-project-member] Task: {'id': task-2087951, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.069387} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2279.992693] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-54619c35-9f4c-41b9-970e-d144dae4e3d2 tempest-ServersAdmin275Test-1209233635 tempest-ServersAdmin275Test-1209233635-project-member] [instance: 69c88844-84c7-4272-a2c4-051f1499df84] Extended root virtual disk {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2279.993469] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2263dae1-4f5e-4d11-bd1a-66da1e78f665 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2280.015121] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-54619c35-9f4c-41b9-970e-d144dae4e3d2 tempest-ServersAdmin275Test-1209233635 tempest-ServersAdmin275Test-1209233635-project-member] [instance: 69c88844-84c7-4272-a2c4-051f1499df84] Reconfiguring VM instance instance-00000063 to attach disk [datastore1] 69c88844-84c7-4272-a2c4-051f1499df84/69c88844-84c7-4272-a2c4-051f1499df84.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2280.015452] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8d4c2bd2-d04a-41f7-bd2b-bfe1fb14ae75 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2280.037783] env[63279]: DEBUG oslo_vmware.api [None req-54619c35-9f4c-41b9-970e-d144dae4e3d2 tempest-ServersAdmin275Test-1209233635 tempest-ServersAdmin275Test-1209233635-project-member] Waiting for the task: (returnval){ [ 2280.037783] env[63279]: value = "task-2087954" [ 2280.037783] env[63279]: _type = "Task" [ 2280.037783] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2280.048307] env[63279]: DEBUG oslo_vmware.api [None req-54619c35-9f4c-41b9-970e-d144dae4e3d2 tempest-ServersAdmin275Test-1209233635 tempest-ServersAdmin275Test-1209233635-project-member] Task: {'id': task-2087954, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2280.098019] env[63279]: DEBUG oslo_vmware.api [None req-a019c069-31ac-44de-94e8-2f9e51596f90 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Task: {'id': task-2087952, 'name': Destroy_Task, 'duration_secs': 0.39837} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2280.100731] env[63279]: INFO nova.virt.vmwareapi.vm_util [None req-a019c069-31ac-44de-94e8-2f9e51596f90 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 686a7ce2-2d07-411e-91d6-0471c55c3728] Destroyed the VM [ 2280.100982] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-a019c069-31ac-44de-94e8-2f9e51596f90 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 686a7ce2-2d07-411e-91d6-0471c55c3728] Deleting Snapshot of the VM instance {{(pid=63279) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 2280.101301] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-f62ed11b-23d9-414f-8e65-e6eabd9e6bdf {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2280.108866] env[63279]: DEBUG oslo_vmware.api [None req-a019c069-31ac-44de-94e8-2f9e51596f90 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Waiting for the task: (returnval){ [ 2280.108866] env[63279]: value = "task-2087955" [ 2280.108866] env[63279]: _type = "Task" [ 2280.108866] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2280.118682] env[63279]: DEBUG oslo_vmware.api [None req-a019c069-31ac-44de-94e8-2f9e51596f90 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Task: {'id': task-2087955, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2280.217514] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087948, 'name': CreateVM_Task, 'duration_secs': 0.795316} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2280.217716] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6e452711-a79c-4560-b38f-9414c87e6683] Created VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2280.218618] env[63279]: DEBUG oslo_concurrency.lockutils [None req-6d3f6bc6-99d6-42fc-8d00-639d1e794324 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2280.218842] env[63279]: DEBUG oslo_concurrency.lockutils [None req-6d3f6bc6-99d6-42fc-8d00-639d1e794324 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2280.219191] env[63279]: DEBUG oslo_concurrency.lockutils [None req-6d3f6bc6-99d6-42fc-8d00-639d1e794324 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2280.219450] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fab25a5c-dc6f-4ae6-b4d0-7a6b3958bb1d {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2280.225742] env[63279]: DEBUG nova.network.neutron [None req-f3d444d6-0789-4e27-be51-caf00e0a98ed tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] [instance: 3de52a94-c1de-4b37-985c-9101417260e1] Successfully created port: 949bba7c-664a-44a6-bd3b-635bd99e3501 {{(pid=63279) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2280.231018] env[63279]: DEBUG oslo_vmware.api [None req-6d3f6bc6-99d6-42fc-8d00-639d1e794324 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Waiting for the task: (returnval){ [ 2280.231018] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]529d13dd-7204-c6b1-931b-4e911a95f8bb" [ 2280.231018] env[63279]: _type = "Task" [ 2280.231018] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2280.236085] env[63279]: DEBUG oslo_vmware.api [None req-6d3f6bc6-99d6-42fc-8d00-639d1e794324 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]529d13dd-7204-c6b1-931b-4e911a95f8bb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2280.327819] env[63279]: DEBUG nova.network.neutron [req-6deca7e9-8ff1-42e6-992a-3892524bd339 req-64e01dbb-9a0a-40df-a7f1-80ae8c25e212 service nova] [instance: 6e452711-a79c-4560-b38f-9414c87e6683] Updated VIF entry in instance network info cache for port fffc97fe-3975-45bb-8c80-f12056750eb4. 
{{(pid=63279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2280.328249] env[63279]: DEBUG nova.network.neutron [req-6deca7e9-8ff1-42e6-992a-3892524bd339 req-64e01dbb-9a0a-40df-a7f1-80ae8c25e212 service nova] [instance: 6e452711-a79c-4560-b38f-9414c87e6683] Updating instance_info_cache with network_info: [{"id": "fffc97fe-3975-45bb-8c80-f12056750eb4", "address": "fa:16:3e:c1:43:29", "network": {"id": "2caeac4f-4d6f-49f6-ad75-055171bad9b0", "bridge": "br-int", "label": "tempest-ServersTestJSON-1264030443-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4fcef39e334249afb9636455802059c5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9fa933df-d66f-485e-8cf9-eda7f1a7f283", "external-id": "nsx-vlan-transportzone-87", "segmentation_id": 87, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfffc97fe-39", "ovs_interfaceid": "fffc97fe-3975-45bb-8c80-f12056750eb4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2280.333042] env[63279]: INFO nova.compute.resource_tracker [None req-60e19b22-fe2d-4d7d-8d6e-d53eab73e551 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: d0b8c9dd-c683-4f3a-b819-d9d57d96636b] Updating resource usage from migration 060bd61a-428e-4028-bc25-f9b4d6b85fbb [ 2280.333922] env[63279]: DEBUG nova.compute.manager [None req-f3d444d6-0789-4e27-be51-caf00e0a98ed tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] [instance: 3de52a94-c1de-4b37-985c-9101417260e1] Start building block device mappings for instance. {{(pid=63279) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 2280.352134] env[63279]: DEBUG oslo_vmware.api [None req-1e61b069-7c67-4b0d-bc91-a8ce70644860 tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] Task: {'id': task-2087950, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.58197} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2280.352477] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-1e61b069-7c67-4b0d-bc91-a8ce70644860 tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] Deleted the datastore file {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2280.352800] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-1e61b069-7c67-4b0d-bc91-a8ce70644860 tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] [instance: 59ad6180-e561-41e3-86e4-37fc20819578] Deleted contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2280.352936] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-1e61b069-7c67-4b0d-bc91-a8ce70644860 tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] [instance: 59ad6180-e561-41e3-86e4-37fc20819578] Instance destroyed {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2280.353187] env[63279]: INFO nova.compute.manager [None req-1e61b069-7c67-4b0d-bc91-a8ce70644860 tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] [instance: 59ad6180-e561-41e3-86e4-37fc20819578] Took 1.64 seconds to destroy the instance on the hypervisor. [ 2280.353450] env[63279]: DEBUG oslo.service.loopingcall [None req-1e61b069-7c67-4b0d-bc91-a8ce70644860 tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2280.353657] env[63279]: DEBUG nova.compute.manager [-] [instance: 59ad6180-e561-41e3-86e4-37fc20819578] Deallocating network for instance {{(pid=63279) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2280.353756] env[63279]: DEBUG nova.network.neutron [-] [instance: 59ad6180-e561-41e3-86e4-37fc20819578] deallocate_for_instance() {{(pid=63279) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2280.397014] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-cda03fef-d08c-4df2-9f4f-bf02004d87f1 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] [instance: 2fba60ba-7ef0-4e61-9f7d-147cad8ab7ed] Powering off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2280.397371] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-056256e4-2c0a-4d2a-8324-226944738139 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2280.405950] env[63279]: DEBUG oslo_vmware.api [None req-cda03fef-d08c-4df2-9f4f-bf02004d87f1 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Waiting for the task: (returnval){ [ 2280.405950] env[63279]: value = "task-2087956" [ 2280.405950] env[63279]: _type = "Task" [ 2280.405950] env[63279]: } to complete. 
{{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2280.420569] env[63279]: DEBUG oslo_vmware.api [None req-cda03fef-d08c-4df2-9f4f-bf02004d87f1 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Task: {'id': task-2087956, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2280.451209] env[63279]: DEBUG oslo_vmware.api [None req-16d82225-da8f-4b23-ae90-64edd2b706db tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Task: {'id': task-2087953, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2280.548879] env[63279]: DEBUG oslo_vmware.api [None req-54619c35-9f4c-41b9-970e-d144dae4e3d2 tempest-ServersAdmin275Test-1209233635 tempest-ServersAdmin275Test-1209233635-project-member] Task: {'id': task-2087954, 'name': ReconfigVM_Task} progress is 99%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2280.623214] env[63279]: DEBUG oslo_vmware.api [None req-a019c069-31ac-44de-94e8-2f9e51596f90 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Task: {'id': task-2087955, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2280.629927] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3a4a065-e39a-4417-80c8-7fa7bfa15e9c {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2280.637645] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2196dd3b-0419-4680-a276-f5b5148aa7c2 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2280.686093] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70b784df-9244-43f5-aaa7-43c53c035b5a {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2280.693330] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df4c4227-0a3c-4f86-85c9-293fedbb0afd {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2280.710902] env[63279]: DEBUG nova.compute.provider_tree [None req-60e19b22-fe2d-4d7d-8d6e-d53eab73e551 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Inventory has not changed in ProviderTree for provider: 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2280.739932] env[63279]: DEBUG oslo_vmware.api [None req-6d3f6bc6-99d6-42fc-8d00-639d1e794324 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]529d13dd-7204-c6b1-931b-4e911a95f8bb, 'name': SearchDatastore_Task, 'duration_secs': 0.021495} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2280.740348] env[63279]: DEBUG oslo_concurrency.lockutils [None req-6d3f6bc6-99d6-42fc-8d00-639d1e794324 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2280.740590] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-6d3f6bc6-99d6-42fc-8d00-639d1e794324 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] [instance: 6e452711-a79c-4560-b38f-9414c87e6683] Processing image 30887889-e45b-4f67-8b3c-16216e594a90 {{(pid=63279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2280.740832] env[63279]: DEBUG oslo_concurrency.lockutils [None req-6d3f6bc6-99d6-42fc-8d00-639d1e794324 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2280.740982] env[63279]: DEBUG oslo_concurrency.lockutils [None req-6d3f6bc6-99d6-42fc-8d00-639d1e794324 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2280.741203] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-6d3f6bc6-99d6-42fc-8d00-639d1e794324 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2280.741483] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8fe81aeb-dd84-482f-a5d8-0b1a46fd78d3 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2280.751388] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-6d3f6bc6-99d6-42fc-8d00-639d1e794324 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2280.751388] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-6d3f6bc6-99d6-42fc-8d00-639d1e794324 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2280.751889] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cb74e652-67af-4578-9200-69b42353be1a {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2280.757502] env[63279]: DEBUG oslo_vmware.api [None req-6d3f6bc6-99d6-42fc-8d00-639d1e794324 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Waiting for the task: (returnval){ [ 2280.757502] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]5225bdd7-c0a2-7e2e-95cd-bb8126063a4c" [ 2280.757502] env[63279]: _type = "Task" [ 2280.757502] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2280.765687] env[63279]: DEBUG oslo_vmware.api [None req-6d3f6bc6-99d6-42fc-8d00-639d1e794324 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]5225bdd7-c0a2-7e2e-95cd-bb8126063a4c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2280.831460] env[63279]: DEBUG oslo_concurrency.lockutils [req-6deca7e9-8ff1-42e6-992a-3892524bd339 req-64e01dbb-9a0a-40df-a7f1-80ae8c25e212 service nova] Releasing lock "refresh_cache-6e452711-a79c-4560-b38f-9414c87e6683" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2280.831866] env[63279]: DEBUG nova.compute.manager [req-6deca7e9-8ff1-42e6-992a-3892524bd339 req-64e01dbb-9a0a-40df-a7f1-80ae8c25e212 service nova] [instance: 19e10ee4-99d1-44b9-9354-4c162d541a1f] Received event network-changed-15cdfe62-d983-4e01-beb9-1947d51443e0 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2280.832119] env[63279]: DEBUG nova.compute.manager [req-6deca7e9-8ff1-42e6-992a-3892524bd339 req-64e01dbb-9a0a-40df-a7f1-80ae8c25e212 service nova] [instance: 19e10ee4-99d1-44b9-9354-4c162d541a1f] Refreshing instance network info cache due to event network-changed-15cdfe62-d983-4e01-beb9-1947d51443e0. 
{{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11655}} [ 2280.832369] env[63279]: DEBUG oslo_concurrency.lockutils [req-6deca7e9-8ff1-42e6-992a-3892524bd339 req-64e01dbb-9a0a-40df-a7f1-80ae8c25e212 service nova] Acquiring lock "refresh_cache-19e10ee4-99d1-44b9-9354-4c162d541a1f" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2280.832565] env[63279]: DEBUG oslo_concurrency.lockutils [req-6deca7e9-8ff1-42e6-992a-3892524bd339 req-64e01dbb-9a0a-40df-a7f1-80ae8c25e212 service nova] Acquired lock "refresh_cache-19e10ee4-99d1-44b9-9354-4c162d541a1f" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2280.832709] env[63279]: DEBUG nova.network.neutron [req-6deca7e9-8ff1-42e6-992a-3892524bd339 req-64e01dbb-9a0a-40df-a7f1-80ae8c25e212 service nova] [instance: 19e10ee4-99d1-44b9-9354-4c162d541a1f] Refreshing network info cache for port 15cdfe62-d983-4e01-beb9-1947d51443e0 {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2280.873566] env[63279]: DEBUG nova.network.neutron [req-527525b3-85c1-4104-85a3-b64bb202ea10 req-28ef8648-5dfb-4770-9dbb-497f8ea3da11 service nova] [instance: 9fd3ea14-3e25-47f4-ae84-1b1fb8b1bbf6] Updated VIF entry in instance network info cache for port 34ef38ab-d2fc-4fee-9782-e01851e13c39. {{(pid=63279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2280.874113] env[63279]: DEBUG nova.network.neutron [req-527525b3-85c1-4104-85a3-b64bb202ea10 req-28ef8648-5dfb-4770-9dbb-497f8ea3da11 service nova] [instance: 9fd3ea14-3e25-47f4-ae84-1b1fb8b1bbf6] Updating instance_info_cache with network_info: [{"id": "0bd52488-d960-42c9-8077-fc8fe9eda956", "address": "fa:16:3e:16:45:3c", "network": {"id": "e0e614b7-de4b-485e-8824-582faae8febd", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1102025073-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.241", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b5b21bc5072e4945a19a782dd9561709", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "41278529-8bd2-44a1-97c8-03967faa3ff7", "external-id": "nsx-vlan-transportzone-749", "segmentation_id": 749, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0bd52488-d9", "ovs_interfaceid": "0bd52488-d960-42c9-8077-fc8fe9eda956", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "f3d164be-d41c-4e5a-97f5-a50cb539a06a", "address": "fa:16:3e:35:3a:40", "network": {"id": "e0e614b7-de4b-485e-8824-582faae8febd", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1102025073-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], 
"meta": {"injected": false, "tenant_id": "b5b21bc5072e4945a19a782dd9561709", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "41278529-8bd2-44a1-97c8-03967faa3ff7", "external-id": "nsx-vlan-transportzone-749", "segmentation_id": 749, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf3d164be-d4", "ovs_interfaceid": "f3d164be-d41c-4e5a-97f5-a50cb539a06a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "34ef38ab-d2fc-4fee-9782-e01851e13c39", "address": "fa:16:3e:13:bf:29", "network": {"id": "e0e614b7-de4b-485e-8824-582faae8febd", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1102025073-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b5b21bc5072e4945a19a782dd9561709", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "41278529-8bd2-44a1-97c8-03967faa3ff7", "external-id": "nsx-vlan-transportzone-749", "segmentation_id": 749, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap34ef38ab-d2", "ovs_interfaceid": "34ef38ab-d2fc-4fee-9782-e01851e13c39", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2280.916440] env[63279]: DEBUG oslo_vmware.api [None req-cda03fef-d08c-4df2-9f4f-bf02004d87f1 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Task: {'id': task-2087956, 'name': PowerOffVM_Task, 'duration_secs': 0.226798} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2280.916768] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-cda03fef-d08c-4df2-9f4f-bf02004d87f1 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] [instance: 2fba60ba-7ef0-4e61-9f7d-147cad8ab7ed] Powered off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2280.917602] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b864a0f6-0123-439d-8f52-dd44eee4a938 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2280.943145] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34bf4e52-4ad2-48c2-9bdb-a6deedd8963f {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2280.951129] env[63279]: DEBUG oslo_vmware.api [None req-16d82225-da8f-4b23-ae90-64edd2b706db tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Task: {'id': task-2087953, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2280.978784] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-cda03fef-d08c-4df2-9f4f-bf02004d87f1 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] [instance: 2fba60ba-7ef0-4e61-9f7d-147cad8ab7ed] Powering off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2280.979156] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-31438bfc-5cd4-44e6-96dc-007dae6e5e17 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2280.985533] env[63279]: DEBUG oslo_vmware.api [None req-cda03fef-d08c-4df2-9f4f-bf02004d87f1 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Waiting for the task: (returnval){ [ 2280.985533] env[63279]: value = "task-2087957" [ 2280.985533] env[63279]: _type = "Task" [ 2280.985533] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2280.993283] env[63279]: DEBUG oslo_vmware.api [None req-cda03fef-d08c-4df2-9f4f-bf02004d87f1 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Task: {'id': task-2087957, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2281.048381] env[63279]: DEBUG oslo_vmware.api [None req-54619c35-9f4c-41b9-970e-d144dae4e3d2 tempest-ServersAdmin275Test-1209233635 tempest-ServersAdmin275Test-1209233635-project-member] Task: {'id': task-2087954, 'name': ReconfigVM_Task, 'duration_secs': 0.529598} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2281.048711] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-54619c35-9f4c-41b9-970e-d144dae4e3d2 tempest-ServersAdmin275Test-1209233635 tempest-ServersAdmin275Test-1209233635-project-member] [instance: 69c88844-84c7-4272-a2c4-051f1499df84] Reconfigured VM instance instance-00000063 to attach disk [datastore1] 69c88844-84c7-4272-a2c4-051f1499df84/69c88844-84c7-4272-a2c4-051f1499df84.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2281.049373] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-69ab6864-8a8b-4063-967e-6d7d0718fc3a {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2281.056098] env[63279]: DEBUG oslo_vmware.api [None req-54619c35-9f4c-41b9-970e-d144dae4e3d2 tempest-ServersAdmin275Test-1209233635 tempest-ServersAdmin275Test-1209233635-project-member] Waiting for the task: (returnval){ [ 2281.056098] env[63279]: value = "task-2087958" [ 2281.056098] env[63279]: _type = "Task" [ 2281.056098] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2281.064381] env[63279]: DEBUG oslo_vmware.api [None req-54619c35-9f4c-41b9-970e-d144dae4e3d2 tempest-ServersAdmin275Test-1209233635 tempest-ServersAdmin275Test-1209233635-project-member] Task: {'id': task-2087958, 'name': Rename_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2281.119110] env[63279]: DEBUG oslo_vmware.api [None req-a019c069-31ac-44de-94e8-2f9e51596f90 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Task: {'id': task-2087955, 'name': RemoveSnapshot_Task, 'duration_secs': 0.576538} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2281.119397] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-a019c069-31ac-44de-94e8-2f9e51596f90 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 686a7ce2-2d07-411e-91d6-0471c55c3728] Deleted Snapshot of the VM instance {{(pid=63279) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 2281.119626] env[63279]: INFO nova.compute.manager [None req-a019c069-31ac-44de-94e8-2f9e51596f90 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 686a7ce2-2d07-411e-91d6-0471c55c3728] Took 17.51 seconds to snapshot the instance on the hypervisor. [ 2281.187245] env[63279]: DEBUG nova.network.neutron [-] [instance: 59ad6180-e561-41e3-86e4-37fc20819578] Updating instance_info_cache with network_info: [] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2281.215025] env[63279]: DEBUG nova.scheduler.client.report [None req-60e19b22-fe2d-4d7d-8d6e-d53eab73e551 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Inventory has not changed for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2281.269292] env[63279]: DEBUG oslo_vmware.api [None req-6d3f6bc6-99d6-42fc-8d00-639d1e794324 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]5225bdd7-c0a2-7e2e-95cd-bb8126063a4c, 'name': SearchDatastore_Task, 'duration_secs': 0.010957} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2281.270205] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a02458c4-b4f7-4e53-9e8d-aad9e708d88c {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2281.276331] env[63279]: DEBUG oslo_vmware.api [None req-6d3f6bc6-99d6-42fc-8d00-639d1e794324 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Waiting for the task: (returnval){ [ 2281.276331] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52bbde56-17ce-372a-02b7-c92b8cc05c4d" [ 2281.276331] env[63279]: _type = "Task" [ 2281.276331] env[63279]: } to complete. 
{{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2281.284377] env[63279]: DEBUG oslo_vmware.api [None req-6d3f6bc6-99d6-42fc-8d00-639d1e794324 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52bbde56-17ce-372a-02b7-c92b8cc05c4d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2281.342975] env[63279]: DEBUG nova.compute.manager [None req-f3d444d6-0789-4e27-be51-caf00e0a98ed tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] [instance: 3de52a94-c1de-4b37-985c-9101417260e1] Start spawning the instance on the hypervisor. {{(pid=63279) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 2281.367643] env[63279]: DEBUG nova.virt.hardware [None req-f3d444d6-0789-4e27-be51-caf00e0a98ed tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-13T17:30:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-13T17:30:01Z,direct_url=,disk_format='vmdk',id=30887889-e45b-4f67-8b3c-16216e594a90,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a935e86eee0a4c38adfe0367d2097a61',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-13T17:30:02Z,virtual_size=,visibility=), allow threads: False {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2281.367959] env[63279]: DEBUG nova.virt.hardware [None req-f3d444d6-0789-4e27-be51-caf00e0a98ed tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] Flavor limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2281.368205] env[63279]: DEBUG nova.virt.hardware [None req-f3d444d6-0789-4e27-be51-caf00e0a98ed tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] Image limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2281.368390] env[63279]: DEBUG nova.virt.hardware [None req-f3d444d6-0789-4e27-be51-caf00e0a98ed tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] Flavor pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2281.368563] env[63279]: DEBUG nova.virt.hardware [None req-f3d444d6-0789-4e27-be51-caf00e0a98ed tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] Image pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2281.368745] env[63279]: DEBUG nova.virt.hardware [None req-f3d444d6-0789-4e27-be51-caf00e0a98ed tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63279) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2281.369025] env[63279]: DEBUG nova.virt.hardware [None req-f3d444d6-0789-4e27-be51-caf00e0a98ed tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 2281.369216] env[63279]: DEBUG nova.virt.hardware [None req-f3d444d6-0789-4e27-be51-caf00e0a98ed tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 2281.369419] env[63279]: DEBUG nova.virt.hardware [None req-f3d444d6-0789-4e27-be51-caf00e0a98ed tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] Got 1 possible topologies {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2281.369639] env[63279]: DEBUG nova.virt.hardware [None req-f3d444d6-0789-4e27-be51-caf00e0a98ed tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2281.369840] env[63279]: DEBUG nova.virt.hardware [None req-f3d444d6-0789-4e27-be51-caf00e0a98ed tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2281.370861] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34817175-a559-497c-9dc4-a1f5084bb9f5 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2281.377519] env[63279]: DEBUG oslo_concurrency.lockutils [req-527525b3-85c1-4104-85a3-b64bb202ea10 req-28ef8648-5dfb-4770-9dbb-497f8ea3da11 service nova] Releasing lock "refresh_cache-9fd3ea14-3e25-47f4-ae84-1b1fb8b1bbf6" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2281.382051] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5c89b18-7427-44fb-9ba0-4ad8bbeeb864 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2281.450892] env[63279]: DEBUG oslo_vmware.api [None req-16d82225-da8f-4b23-ae90-64edd2b706db tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Task: {'id': task-2087953, 'name': ReconfigVM_Task, 'duration_secs': 1.018001} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2281.451450] env[63279]: DEBUG oslo_concurrency.lockutils [None req-16d82225-da8f-4b23-ae90-64edd2b706db tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Releasing lock "9fd3ea14-3e25-47f4-ae84-1b1fb8b1bbf6" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2281.451970] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-16d82225-da8f-4b23-ae90-64edd2b706db tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] [instance: 9fd3ea14-3e25-47f4-ae84-1b1fb8b1bbf6] Reconfigured VM to attach interface {{(pid=63279) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1937}} [ 2281.497547] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-cda03fef-d08c-4df2-9f4f-bf02004d87f1 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] [instance: 2fba60ba-7ef0-4e61-9f7d-147cad8ab7ed] VM already powered off {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 2281.497766] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-cda03fef-d08c-4df2-9f4f-bf02004d87f1 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] [instance: 2fba60ba-7ef0-4e61-9f7d-147cad8ab7ed] Processing image 30887889-e45b-4f67-8b3c-16216e594a90 {{(pid=63279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2281.498434] env[63279]: DEBUG oslo_concurrency.lockutils [None req-cda03fef-d08c-4df2-9f4f-bf02004d87f1 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2281.567120] env[63279]: DEBUG oslo_vmware.api [None req-54619c35-9f4c-41b9-970e-d144dae4e3d2 tempest-ServersAdmin275Test-1209233635 tempest-ServersAdmin275Test-1209233635-project-member] Task: {'id': task-2087958, 'name': Rename_Task, 'duration_secs': 0.353359} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2281.567408] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-54619c35-9f4c-41b9-970e-d144dae4e3d2 tempest-ServersAdmin275Test-1209233635 tempest-ServersAdmin275Test-1209233635-project-member] [instance: 69c88844-84c7-4272-a2c4-051f1499df84] Powering on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2281.567662] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a5e993b1-00e3-4c0b-91ef-6f188a2efa71 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2281.573456] env[63279]: DEBUG oslo_vmware.api [None req-54619c35-9f4c-41b9-970e-d144dae4e3d2 tempest-ServersAdmin275Test-1209233635 tempest-ServersAdmin275Test-1209233635-project-member] Waiting for the task: (returnval){ [ 2281.573456] env[63279]: value = "task-2087959" [ 2281.573456] env[63279]: _type = "Task" [ 2281.573456] env[63279]: } to complete. 
{{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2281.580931] env[63279]: DEBUG oslo_vmware.api [None req-54619c35-9f4c-41b9-970e-d144dae4e3d2 tempest-ServersAdmin275Test-1209233635 tempest-ServersAdmin275Test-1209233635-project-member] Task: {'id': task-2087959, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2281.622649] env[63279]: DEBUG nova.network.neutron [req-6deca7e9-8ff1-42e6-992a-3892524bd339 req-64e01dbb-9a0a-40df-a7f1-80ae8c25e212 service nova] [instance: 19e10ee4-99d1-44b9-9354-4c162d541a1f] Updated VIF entry in instance network info cache for port 15cdfe62-d983-4e01-beb9-1947d51443e0. {{(pid=63279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2281.623156] env[63279]: DEBUG nova.network.neutron [req-6deca7e9-8ff1-42e6-992a-3892524bd339 req-64e01dbb-9a0a-40df-a7f1-80ae8c25e212 service nova] [instance: 19e10ee4-99d1-44b9-9354-4c162d541a1f] Updating instance_info_cache with network_info: [{"id": "15cdfe62-d983-4e01-beb9-1947d51443e0", "address": "fa:16:3e:a4:65:02", "network": {"id": "f7000655-b20b-461d-9d08-f4cb8a85522e", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-686033866-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.230", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7674483744fe490b8cbe75532dfad77c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cbe1725d-6711-4e92-9a4e-d4802651e7d0", "external-id": "nsx-vlan-transportzone-679", "segmentation_id": 679, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap15cdfe62-d9", "ovs_interfaceid": "15cdfe62-d983-4e01-beb9-1947d51443e0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2281.663342] env[63279]: DEBUG nova.compute.manager [None req-a019c069-31ac-44de-94e8-2f9e51596f90 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 686a7ce2-2d07-411e-91d6-0471c55c3728] Found 2 images (rotation: 2) {{(pid=63279) _rotate_backups /opt/stack/nova/nova/compute/manager.py:5017}} [ 2281.690416] env[63279]: INFO nova.compute.manager [-] [instance: 59ad6180-e561-41e3-86e4-37fc20819578] Took 1.34 seconds to deallocate network for instance. 
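A minimal sketch, for orientation only, of the invoke-then-poll pattern that produces the recurring "Invoking <Method>_Task ... / Waiting for the task ... / Task: {...} progress is N%" records in this section (task-2087953, task-2087954, task-2087960, ...). It uses only the public oslo.vmware session API (invoke_api, wait_for_task); the reconfigure_vm helper and its arguments are hypothetical and not Nova's actual code path.

# Illustrative sketch only, not Nova's implementation: start a vSphere *_Task
# method through the oslo.vmware session and poll it to completion.
from oslo_vmware import api


def reconfigure_vm(session: api.VMwareAPISession, vm_ref, config_spec):
    """Start ReconfigVM_Task on vm_ref and block until vCenter finishes it."""
    # invoke_api() returns a task moref (e.g. 'task-2087953'); wait_for_task()
    # polls that task, emitting the periodic progress DEBUG lines seen in the
    # log, and returns the task info on success or raises on error.
    task = session.invoke_api(session.vim, 'ReconfigVM_Task',
                              vm_ref, spec=config_spec)
    return session.wait_for_task(task)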
[ 2281.719365] env[63279]: DEBUG oslo_concurrency.lockutils [None req-60e19b22-fe2d-4d7d-8d6e-d53eab73e551 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.402s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2281.720181] env[63279]: INFO nova.compute.manager [None req-60e19b22-fe2d-4d7d-8d6e-d53eab73e551 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: d0b8c9dd-c683-4f3a-b819-d9d57d96636b] Migrating [ 2281.790346] env[63279]: DEBUG oslo_vmware.api [None req-6d3f6bc6-99d6-42fc-8d00-639d1e794324 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52bbde56-17ce-372a-02b7-c92b8cc05c4d, 'name': SearchDatastore_Task, 'duration_secs': 0.010769} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2281.790785] env[63279]: DEBUG oslo_concurrency.lockutils [None req-6d3f6bc6-99d6-42fc-8d00-639d1e794324 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2281.791107] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-6d3f6bc6-99d6-42fc-8d00-639d1e794324 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] 6e452711-a79c-4560-b38f-9414c87e6683/6e452711-a79c-4560-b38f-9414c87e6683.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2281.791498] env[63279]: DEBUG oslo_concurrency.lockutils [None req-cda03fef-d08c-4df2-9f4f-bf02004d87f1 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2281.791765] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-cda03fef-d08c-4df2-9f4f-bf02004d87f1 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2281.792049] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4e651c1b-0d6c-4af7-ac2e-0b6c7ec722d5 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2281.794651] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9f6bb717-4850-4655-9f5f-a2079ddb8474 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2281.802739] env[63279]: DEBUG oslo_vmware.api [None req-6d3f6bc6-99d6-42fc-8d00-639d1e794324 
tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Waiting for the task: (returnval){ [ 2281.802739] env[63279]: value = "task-2087960" [ 2281.802739] env[63279]: _type = "Task" [ 2281.802739] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2281.808933] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-cda03fef-d08c-4df2-9f4f-bf02004d87f1 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2281.809252] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-cda03fef-d08c-4df2-9f4f-bf02004d87f1 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2281.810697] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-750efa35-5ffc-40c4-89ef-064ae4fc41c9 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2281.819616] env[63279]: DEBUG oslo_vmware.api [None req-6d3f6bc6-99d6-42fc-8d00-639d1e794324 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Task: {'id': task-2087960, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2281.823218] env[63279]: DEBUG oslo_vmware.api [None req-cda03fef-d08c-4df2-9f4f-bf02004d87f1 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Waiting for the task: (returnval){ [ 2281.823218] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52e1762e-42b9-c83e-796c-69332e5b2f74" [ 2281.823218] env[63279]: _type = "Task" [ 2281.823218] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2281.834453] env[63279]: DEBUG oslo_vmware.api [None req-cda03fef-d08c-4df2-9f4f-bf02004d87f1 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52e1762e-42b9-c83e-796c-69332e5b2f74, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2281.908583] env[63279]: DEBUG nova.compute.manager [req-bdd28c02-085c-41cf-8856-3bbd7b062b8a req-6b9b5a9a-844d-497e-8ef9-5be4511a1640 service nova] [instance: 59ad6180-e561-41e3-86e4-37fc20819578] Received event network-vif-deleted-c8b3dd94-c21a-424e-85d2-ea499e510f55 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2281.956347] env[63279]: DEBUG oslo_concurrency.lockutils [None req-16d82225-da8f-4b23-ae90-64edd2b706db tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Lock "interface-9fd3ea14-3e25-47f4-ae84-1b1fb8b1bbf6-34ef38ab-d2fc-4fee-9782-e01851e13c39" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 7.517s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2282.083571] env[63279]: DEBUG oslo_vmware.api [None req-54619c35-9f4c-41b9-970e-d144dae4e3d2 tempest-ServersAdmin275Test-1209233635 tempest-ServersAdmin275Test-1209233635-project-member] Task: {'id': task-2087959, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2282.109099] env[63279]: DEBUG nova.network.neutron [None req-f3d444d6-0789-4e27-be51-caf00e0a98ed tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] [instance: 3de52a94-c1de-4b37-985c-9101417260e1] Successfully updated port: 949bba7c-664a-44a6-bd3b-635bd99e3501 {{(pid=63279) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2282.127416] env[63279]: DEBUG oslo_concurrency.lockutils [req-6deca7e9-8ff1-42e6-992a-3892524bd339 req-64e01dbb-9a0a-40df-a7f1-80ae8c25e212 service nova] Releasing lock "refresh_cache-19e10ee4-99d1-44b9-9354-4c162d541a1f" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2282.198469] env[63279]: DEBUG oslo_concurrency.lockutils [None req-1e61b069-7c67-4b0d-bc91-a8ce70644860 tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2282.199127] env[63279]: DEBUG oslo_concurrency.lockutils [None req-1e61b069-7c67-4b0d-bc91-a8ce70644860 tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2282.200484] env[63279]: DEBUG nova.objects.instance [None req-1e61b069-7c67-4b0d-bc91-a8ce70644860 tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] Lazy-loading 'resources' on Instance uuid 59ad6180-e561-41e3-86e4-37fc20819578 {{(pid=63279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2282.237095] env[63279]: DEBUG oslo_concurrency.lockutils [None req-60e19b22-fe2d-4d7d-8d6e-d53eab73e551 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Acquiring lock "refresh_cache-d0b8c9dd-c683-4f3a-b819-d9d57d96636b" {{(pid=63279) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2282.237095] env[63279]: DEBUG oslo_concurrency.lockutils [None req-60e19b22-fe2d-4d7d-8d6e-d53eab73e551 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Acquired lock "refresh_cache-d0b8c9dd-c683-4f3a-b819-d9d57d96636b" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2282.237095] env[63279]: DEBUG nova.network.neutron [None req-60e19b22-fe2d-4d7d-8d6e-d53eab73e551 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: d0b8c9dd-c683-4f3a-b819-d9d57d96636b] Building network info cache for instance {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2282.300601] env[63279]: DEBUG nova.compute.manager [req-2d8d7d96-a3ec-433d-963a-c4cf822b7c4d req-e1f2464d-4247-4661-a40c-ddc09976dd9a service nova] [instance: 3de52a94-c1de-4b37-985c-9101417260e1] Received event network-vif-plugged-949bba7c-664a-44a6-bd3b-635bd99e3501 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2282.300601] env[63279]: DEBUG oslo_concurrency.lockutils [req-2d8d7d96-a3ec-433d-963a-c4cf822b7c4d req-e1f2464d-4247-4661-a40c-ddc09976dd9a service nova] Acquiring lock "3de52a94-c1de-4b37-985c-9101417260e1-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2282.300601] env[63279]: DEBUG oslo_concurrency.lockutils [req-2d8d7d96-a3ec-433d-963a-c4cf822b7c4d req-e1f2464d-4247-4661-a40c-ddc09976dd9a service nova] Lock "3de52a94-c1de-4b37-985c-9101417260e1-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2282.300916] env[63279]: DEBUG oslo_concurrency.lockutils [req-2d8d7d96-a3ec-433d-963a-c4cf822b7c4d req-e1f2464d-4247-4661-a40c-ddc09976dd9a service nova] Lock "3de52a94-c1de-4b37-985c-9101417260e1-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2282.300962] env[63279]: DEBUG nova.compute.manager [req-2d8d7d96-a3ec-433d-963a-c4cf822b7c4d req-e1f2464d-4247-4661-a40c-ddc09976dd9a service nova] [instance: 3de52a94-c1de-4b37-985c-9101417260e1] No waiting events found dispatching network-vif-plugged-949bba7c-664a-44a6-bd3b-635bd99e3501 {{(pid=63279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 2282.301226] env[63279]: WARNING nova.compute.manager [req-2d8d7d96-a3ec-433d-963a-c4cf822b7c4d req-e1f2464d-4247-4661-a40c-ddc09976dd9a service nova] [instance: 3de52a94-c1de-4b37-985c-9101417260e1] Received unexpected event network-vif-plugged-949bba7c-664a-44a6-bd3b-635bd99e3501 for instance with vm_state building and task_state spawning. [ 2282.313899] env[63279]: DEBUG oslo_vmware.api [None req-6d3f6bc6-99d6-42fc-8d00-639d1e794324 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Task: {'id': task-2087960, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2282.335753] env[63279]: DEBUG oslo_vmware.api [None req-cda03fef-d08c-4df2-9f4f-bf02004d87f1 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52e1762e-42b9-c83e-796c-69332e5b2f74, 'name': SearchDatastore_Task, 'duration_secs': 0.009221} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2282.336427] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a047e277-6620-40d5-8b78-425e649b91fe {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2282.343118] env[63279]: DEBUG oslo_vmware.api [None req-cda03fef-d08c-4df2-9f4f-bf02004d87f1 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Waiting for the task: (returnval){ [ 2282.343118] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]525d5391-1a22-ff99-b625-507a7f9d4bdc" [ 2282.343118] env[63279]: _type = "Task" [ 2282.343118] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2282.352257] env[63279]: DEBUG oslo_vmware.api [None req-cda03fef-d08c-4df2-9f4f-bf02004d87f1 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]525d5391-1a22-ff99-b625-507a7f9d4bdc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2282.387510] env[63279]: DEBUG nova.compute.manager [None req-a601b1b4-f84a-458b-8eca-889fd94aa5bd tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 686a7ce2-2d07-411e-91d6-0471c55c3728] Checking state {{(pid=63279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2282.388579] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5ee77d4-1973-40c5-ade2-16ac13ac3afb {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2282.583417] env[63279]: DEBUG oslo_vmware.api [None req-54619c35-9f4c-41b9-970e-d144dae4e3d2 tempest-ServersAdmin275Test-1209233635 tempest-ServersAdmin275Test-1209233635-project-member] Task: {'id': task-2087959, 'name': PowerOnVM_Task, 'duration_secs': 0.695665} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2282.583709] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-54619c35-9f4c-41b9-970e-d144dae4e3d2 tempest-ServersAdmin275Test-1209233635 tempest-ServersAdmin275Test-1209233635-project-member] [instance: 69c88844-84c7-4272-a2c4-051f1499df84] Powered on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2282.583915] env[63279]: DEBUG nova.compute.manager [None req-54619c35-9f4c-41b9-970e-d144dae4e3d2 tempest-ServersAdmin275Test-1209233635 tempest-ServersAdmin275Test-1209233635-project-member] [instance: 69c88844-84c7-4272-a2c4-051f1499df84] Checking state {{(pid=63279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2282.585064] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84fe127a-b4de-42fc-b7f2-788e4a02878f {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2282.612893] env[63279]: DEBUG oslo_concurrency.lockutils [None req-f3d444d6-0789-4e27-be51-caf00e0a98ed tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] Acquiring lock "refresh_cache-3de52a94-c1de-4b37-985c-9101417260e1" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2282.613183] env[63279]: DEBUG oslo_concurrency.lockutils [None req-f3d444d6-0789-4e27-be51-caf00e0a98ed tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] Acquired lock "refresh_cache-3de52a94-c1de-4b37-985c-9101417260e1" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2282.613425] env[63279]: DEBUG nova.network.neutron [None req-f3d444d6-0789-4e27-be51-caf00e0a98ed tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] [instance: 3de52a94-c1de-4b37-985c-9101417260e1] Building network info cache for instance {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2282.813440] env[63279]: DEBUG oslo_vmware.api [None req-6d3f6bc6-99d6-42fc-8d00-639d1e794324 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Task: {'id': task-2087960, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2282.854657] env[63279]: DEBUG oslo_vmware.api [None req-cda03fef-d08c-4df2-9f4f-bf02004d87f1 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]525d5391-1a22-ff99-b625-507a7f9d4bdc, 'name': SearchDatastore_Task, 'duration_secs': 0.049264} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2282.856808] env[63279]: DEBUG oslo_concurrency.lockutils [None req-cda03fef-d08c-4df2-9f4f-bf02004d87f1 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2282.857100] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-cda03fef-d08c-4df2-9f4f-bf02004d87f1 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Copying virtual disk from [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] 2fba60ba-7ef0-4e61-9f7d-147cad8ab7ed/30887889-e45b-4f67-8b3c-16216e594a90-rescue.vmdk. {{(pid=63279) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 2282.857369] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-33f4530c-509e-4884-91fe-d8ae45a9c684 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2282.866705] env[63279]: DEBUG oslo_vmware.api [None req-cda03fef-d08c-4df2-9f4f-bf02004d87f1 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Waiting for the task: (returnval){ [ 2282.866705] env[63279]: value = "task-2087961" [ 2282.866705] env[63279]: _type = "Task" [ 2282.866705] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2282.876451] env[63279]: DEBUG oslo_vmware.api [None req-cda03fef-d08c-4df2-9f4f-bf02004d87f1 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Task: {'id': task-2087961, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2282.899737] env[63279]: INFO nova.compute.manager [None req-a601b1b4-f84a-458b-8eca-889fd94aa5bd tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 686a7ce2-2d07-411e-91d6-0471c55c3728] instance snapshotting [ 2282.900322] env[63279]: DEBUG nova.objects.instance [None req-a601b1b4-f84a-458b-8eca-889fd94aa5bd tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Lazy-loading 'flavor' on Instance uuid 686a7ce2-2d07-411e-91d6-0471c55c3728 {{(pid=63279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2282.924367] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75443641-b7e6-4ad1-bd4f-c2cc33acf5f9 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2282.931893] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed76eeef-5ad0-47e9-aafe-08f846e946d4 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2282.964128] env[63279]: DEBUG nova.network.neutron [None req-60e19b22-fe2d-4d7d-8d6e-d53eab73e551 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: d0b8c9dd-c683-4f3a-b819-d9d57d96636b] Updating instance_info_cache with network_info: [{"id": "ee08f9b0-ce61-4fa5-bf67-3a97acafc55e", "address": "fa:16:3e:16:9a:ab", "network": {"id": "e2728625-1c28-407c-946b-97923d57b409", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1735124510-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a5a719a21fe248c49d0d0151d218866b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a9abd00f-2cea-40f8-9804-a56b6431192d", "external-id": "nsx-vlan-transportzone-639", "segmentation_id": 639, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapee08f9b0-ce", "ovs_interfaceid": "ee08f9b0-ce61-4fa5-bf67-3a97acafc55e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2282.965792] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48960215-611b-4634-bde5-95da8e79403d {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2282.973722] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cace11cc-5aa4-478d-85ed-8167dfe53b5c {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2282.987896] env[63279]: DEBUG nova.compute.provider_tree [None req-1e61b069-7c67-4b0d-bc91-a8ce70644860 tempest-ServerRescueTestJSON-987826405 
tempest-ServerRescueTestJSON-987826405-project-member] Inventory has not changed in ProviderTree for provider: 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2283.100202] env[63279]: DEBUG oslo_concurrency.lockutils [None req-54619c35-9f4c-41b9-970e-d144dae4e3d2 tempest-ServersAdmin275Test-1209233635 tempest-ServersAdmin275Test-1209233635-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2283.144574] env[63279]: DEBUG nova.network.neutron [None req-f3d444d6-0789-4e27-be51-caf00e0a98ed tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] [instance: 3de52a94-c1de-4b37-985c-9101417260e1] Instance cache missing network info. {{(pid=63279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2283.314209] env[63279]: DEBUG oslo_vmware.api [None req-6d3f6bc6-99d6-42fc-8d00-639d1e794324 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Task: {'id': task-2087960, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2283.327553] env[63279]: DEBUG nova.network.neutron [None req-f3d444d6-0789-4e27-be51-caf00e0a98ed tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] [instance: 3de52a94-c1de-4b37-985c-9101417260e1] Updating instance_info_cache with network_info: [{"id": "949bba7c-664a-44a6-bd3b-635bd99e3501", "address": "fa:16:3e:6a:d2:56", "network": {"id": "7b4d0149-03c5-4c40-ba16-d705499cd558", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-1026068065-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3d9c0a9c34ba408c829c0b50f3592bb2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aaf1b231-3660-4453-b4f3-44d825b9a5dd", "external-id": "nsx-vlan-transportzone-6", "segmentation_id": 6, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap949bba7c-66", "ovs_interfaceid": "949bba7c-664a-44a6-bd3b-635bd99e3501", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2283.377483] env[63279]: DEBUG oslo_vmware.api [None req-cda03fef-d08c-4df2-9f4f-bf02004d87f1 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Task: {'id': task-2087961, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2283.408335] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43fba57a-8ad0-4ae1-aa54-34f72519bda4 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2283.427516] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1fb142c8-69c8-451b-ab08-24878eebcdf2 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2283.469731] env[63279]: DEBUG oslo_concurrency.lockutils [None req-60e19b22-fe2d-4d7d-8d6e-d53eab73e551 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Releasing lock "refresh_cache-d0b8c9dd-c683-4f3a-b819-d9d57d96636b" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2283.490507] env[63279]: DEBUG nova.scheduler.client.report [None req-1e61b069-7c67-4b0d-bc91-a8ce70644860 tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] Inventory has not changed for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2283.816647] env[63279]: DEBUG oslo_vmware.api [None req-6d3f6bc6-99d6-42fc-8d00-639d1e794324 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Task: {'id': task-2087960, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.708746} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2283.817068] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-6d3f6bc6-99d6-42fc-8d00-639d1e794324 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] 6e452711-a79c-4560-b38f-9414c87e6683/6e452711-a79c-4560-b38f-9414c87e6683.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2283.817317] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-6d3f6bc6-99d6-42fc-8d00-639d1e794324 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] [instance: 6e452711-a79c-4560-b38f-9414c87e6683] Extending root virtual disk to 1048576 {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2283.817628] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ce076163-7771-47cb-8745-0bae3fb100a1 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2283.820378] env[63279]: DEBUG oslo_concurrency.lockutils [None req-70fb686c-6512-468a-bd20-7f0c9ad16dab tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Acquiring lock "interface-9fd3ea14-3e25-47f4-ae84-1b1fb8b1bbf6-f3d164be-d41c-4e5a-97f5-a50cb539a06a" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2283.820647] env[63279]: DEBUG oslo_concurrency.lockutils [None req-70fb686c-6512-468a-bd20-7f0c9ad16dab tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Lock "interface-9fd3ea14-3e25-47f4-ae84-1b1fb8b1bbf6-f3d164be-d41c-4e5a-97f5-a50cb539a06a" acquired by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2283.827485] env[63279]: DEBUG oslo_vmware.api [None req-6d3f6bc6-99d6-42fc-8d00-639d1e794324 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Waiting for the task: (returnval){ [ 2283.827485] env[63279]: value = "task-2087962" [ 2283.827485] env[63279]: _type = "Task" [ 2283.827485] env[63279]: } to complete. 
{{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2283.832428] env[63279]: DEBUG oslo_concurrency.lockutils [None req-f3d444d6-0789-4e27-be51-caf00e0a98ed tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] Releasing lock "refresh_cache-3de52a94-c1de-4b37-985c-9101417260e1" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2283.832690] env[63279]: DEBUG nova.compute.manager [None req-f3d444d6-0789-4e27-be51-caf00e0a98ed tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] [instance: 3de52a94-c1de-4b37-985c-9101417260e1] Instance network_info: |[{"id": "949bba7c-664a-44a6-bd3b-635bd99e3501", "address": "fa:16:3e:6a:d2:56", "network": {"id": "7b4d0149-03c5-4c40-ba16-d705499cd558", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-1026068065-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3d9c0a9c34ba408c829c0b50f3592bb2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aaf1b231-3660-4453-b4f3-44d825b9a5dd", "external-id": "nsx-vlan-transportzone-6", "segmentation_id": 6, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap949bba7c-66", "ovs_interfaceid": "949bba7c-664a-44a6-bd3b-635bd99e3501", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 2283.833101] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-f3d444d6-0789-4e27-be51-caf00e0a98ed tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] [instance: 3de52a94-c1de-4b37-985c-9101417260e1] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:6a:d2:56', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'aaf1b231-3660-4453-b4f3-44d825b9a5dd', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '949bba7c-664a-44a6-bd3b-635bd99e3501', 'vif_model': 'vmxnet3'}] {{(pid=63279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2283.841035] env[63279]: DEBUG oslo.service.loopingcall [None req-f3d444d6-0789-4e27-be51-caf00e0a98ed tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2283.842211] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3de52a94-c1de-4b37-985c-9101417260e1] Creating VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2283.842503] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-929be6fd-07da-43a1-8f0f-93df8d694f8b {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2283.862182] env[63279]: DEBUG oslo_vmware.api [None req-6d3f6bc6-99d6-42fc-8d00-639d1e794324 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Task: {'id': task-2087962, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2283.867836] env[63279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2283.867836] env[63279]: value = "task-2087963" [ 2283.867836] env[63279]: _type = "Task" [ 2283.867836] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2283.882502] env[63279]: DEBUG oslo_vmware.api [None req-cda03fef-d08c-4df2-9f4f-bf02004d87f1 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Task: {'id': task-2087961, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2283.882704] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087963, 'name': CreateVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2283.939930] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-a601b1b4-f84a-458b-8eca-889fd94aa5bd tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 686a7ce2-2d07-411e-91d6-0471c55c3728] Creating Snapshot of the VM instance {{(pid=63279) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 2283.940468] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-6c9adf88-61c6-4acf-9718-e68a22a5b595 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2283.948342] env[63279]: DEBUG oslo_vmware.api [None req-a601b1b4-f84a-458b-8eca-889fd94aa5bd tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Waiting for the task: (returnval){ [ 2283.948342] env[63279]: value = "task-2087964" [ 2283.948342] env[63279]: _type = "Task" [ 2283.948342] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2283.957125] env[63279]: DEBUG oslo_vmware.api [None req-a601b1b4-f84a-458b-8eca-889fd94aa5bd tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Task: {'id': task-2087964, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2283.996088] env[63279]: DEBUG oslo_concurrency.lockutils [None req-1e61b069-7c67-4b0d-bc91-a8ce70644860 tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.796s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2283.997391] env[63279]: DEBUG oslo_concurrency.lockutils [None req-54619c35-9f4c-41b9-970e-d144dae4e3d2 tempest-ServersAdmin275Test-1209233635 tempest-ServersAdmin275Test-1209233635-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 0.897s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2283.997586] env[63279]: DEBUG nova.objects.instance [None req-54619c35-9f4c-41b9-970e-d144dae4e3d2 tempest-ServersAdmin275Test-1209233635 tempest-ServersAdmin275Test-1209233635-project-member] [instance: 69c88844-84c7-4272-a2c4-051f1499df84] Trying to apply a migration context that does not seem to be set for this instance {{(pid=63279) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 2284.020215] env[63279]: INFO nova.scheduler.client.report [None req-1e61b069-7c67-4b0d-bc91-a8ce70644860 tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] Deleted allocations for instance 59ad6180-e561-41e3-86e4-37fc20819578 [ 2284.067070] env[63279]: DEBUG oslo_concurrency.lockutils [None req-d9e80deb-245d-4ccf-a360-8692dd051097 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Acquiring lock "a15141bc-424d-48ca-a6d5-c859a3639a0b" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2284.067361] env[63279]: DEBUG oslo_concurrency.lockutils [None req-d9e80deb-245d-4ccf-a360-8692dd051097 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Lock "a15141bc-424d-48ca-a6d5-c859a3639a0b" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2284.323908] env[63279]: DEBUG oslo_concurrency.lockutils [None req-70fb686c-6512-468a-bd20-7f0c9ad16dab tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Acquiring lock "9fd3ea14-3e25-47f4-ae84-1b1fb8b1bbf6" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2284.324120] env[63279]: DEBUG oslo_concurrency.lockutils [None req-70fb686c-6512-468a-bd20-7f0c9ad16dab tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Acquired lock "9fd3ea14-3e25-47f4-ae84-1b1fb8b1bbf6" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2284.325288] env[63279]: DEBUG nova.compute.manager [req-e45c2255-5245-45c8-a57d-930ece3bf5f3 req-4a51fe58-835b-4535-8793-38ea01a165d0 service nova] [instance: 3de52a94-c1de-4b37-985c-9101417260e1] Received event network-changed-949bba7c-664a-44a6-bd3b-635bd99e3501 
{{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2284.325467] env[63279]: DEBUG nova.compute.manager [req-e45c2255-5245-45c8-a57d-930ece3bf5f3 req-4a51fe58-835b-4535-8793-38ea01a165d0 service nova] [instance: 3de52a94-c1de-4b37-985c-9101417260e1] Refreshing instance network info cache due to event network-changed-949bba7c-664a-44a6-bd3b-635bd99e3501. {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11655}} [ 2284.325666] env[63279]: DEBUG oslo_concurrency.lockutils [req-e45c2255-5245-45c8-a57d-930ece3bf5f3 req-4a51fe58-835b-4535-8793-38ea01a165d0 service nova] Acquiring lock "refresh_cache-3de52a94-c1de-4b37-985c-9101417260e1" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2284.325812] env[63279]: DEBUG oslo_concurrency.lockutils [req-e45c2255-5245-45c8-a57d-930ece3bf5f3 req-4a51fe58-835b-4535-8793-38ea01a165d0 service nova] Acquired lock "refresh_cache-3de52a94-c1de-4b37-985c-9101417260e1" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2284.325972] env[63279]: DEBUG nova.network.neutron [req-e45c2255-5245-45c8-a57d-930ece3bf5f3 req-4a51fe58-835b-4535-8793-38ea01a165d0 service nova] [instance: 3de52a94-c1de-4b37-985c-9101417260e1] Refreshing network info cache for port 949bba7c-664a-44a6-bd3b-635bd99e3501 {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2284.327926] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7849dcaa-ca36-4b2b-815f-0bdcccceabbd {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2284.342277] env[63279]: DEBUG oslo_vmware.api [None req-6d3f6bc6-99d6-42fc-8d00-639d1e794324 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Task: {'id': task-2087962, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.139402} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2284.354856] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-6d3f6bc6-99d6-42fc-8d00-639d1e794324 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] [instance: 6e452711-a79c-4560-b38f-9414c87e6683] Extended root virtual disk {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2284.356189] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c6056b9-194e-4bd6-b08b-819939ad5d7d {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2284.359046] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6aebcffa-7840-4f98-a470-45df6a03e48a {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2284.395992] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-70fb686c-6512-468a-bd20-7f0c9ad16dab tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] [instance: 9fd3ea14-3e25-47f4-ae84-1b1fb8b1bbf6] Reconfiguring VM to detach interface {{(pid=63279) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1968}} [ 2284.404344] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-6d3f6bc6-99d6-42fc-8d00-639d1e794324 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] [instance: 6e452711-a79c-4560-b38f-9414c87e6683] Reconfiguring VM instance instance-00000065 to attach disk [datastore1] 6e452711-a79c-4560-b38f-9414c87e6683/6e452711-a79c-4560-b38f-9414c87e6683.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2284.410019] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-adce9af5-87d3-449f-9051-386f7c186ed7 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2284.423261] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0d234c55-411b-4d2f-9ffe-8041140a1571 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2284.442112] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087963, 'name': CreateVM_Task, 'duration_secs': 0.500576} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2284.446539] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3de52a94-c1de-4b37-985c-9101417260e1] Created VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2284.446954] env[63279]: DEBUG oslo_vmware.api [None req-6d3f6bc6-99d6-42fc-8d00-639d1e794324 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Waiting for the task: (returnval){ [ 2284.446954] env[63279]: value = "task-2087965" [ 2284.446954] env[63279]: _type = "Task" [ 2284.446954] env[63279]: } to complete. 
{{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2284.447123] env[63279]: DEBUG oslo_vmware.api [None req-70fb686c-6512-468a-bd20-7f0c9ad16dab tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Waiting for the task: (returnval){ [ 2284.447123] env[63279]: value = "task-2087966" [ 2284.447123] env[63279]: _type = "Task" [ 2284.447123] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2284.448832] env[63279]: DEBUG oslo_vmware.api [None req-cda03fef-d08c-4df2-9f4f-bf02004d87f1 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Task: {'id': task-2087961, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.092048} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2284.448832] env[63279]: DEBUG oslo_concurrency.lockutils [None req-f3d444d6-0789-4e27-be51-caf00e0a98ed tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2284.448832] env[63279]: DEBUG oslo_concurrency.lockutils [None req-f3d444d6-0789-4e27-be51-caf00e0a98ed tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2284.448832] env[63279]: DEBUG oslo_concurrency.lockutils [None req-f3d444d6-0789-4e27-be51-caf00e0a98ed tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2284.448832] env[63279]: INFO nova.virt.vmwareapi.ds_util [None req-cda03fef-d08c-4df2-9f4f-bf02004d87f1 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Copied virtual disk from [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] 2fba60ba-7ef0-4e61-9f7d-147cad8ab7ed/30887889-e45b-4f67-8b3c-16216e594a90-rescue.vmdk. [ 2284.449069] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dde55a10-3b86-4899-88f1-ee7fcafcc225 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2284.457065] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da7e5b18-5532-40e2-8383-66c3f28bc3e8 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2284.469869] env[63279]: DEBUG oslo_vmware.api [None req-70fb686c-6512-468a-bd20-7f0c9ad16dab tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Task: {'id': task-2087966, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2284.470249] env[63279]: DEBUG oslo_vmware.api [None req-6d3f6bc6-99d6-42fc-8d00-639d1e794324 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Task: {'id': task-2087965, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2284.491621] env[63279]: INFO nova.compute.manager [None req-f0c628bc-84f4-449b-bd4f-e7a1b56e90cd tempest-ServersAdmin275Test-329467495 tempest-ServersAdmin275Test-329467495-project-admin] [instance: 69c88844-84c7-4272-a2c4-051f1499df84] Rebuilding instance [ 2284.503581] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-cda03fef-d08c-4df2-9f4f-bf02004d87f1 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] [instance: 2fba60ba-7ef0-4e61-9f7d-147cad8ab7ed] Reconfiguring VM instance instance-00000053 to attach disk [datastore1] 2fba60ba-7ef0-4e61-9f7d-147cad8ab7ed/30887889-e45b-4f67-8b3c-16216e594a90-rescue.vmdk or device None with type thin {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2284.503991] env[63279]: DEBUG oslo_vmware.api [None req-f3d444d6-0789-4e27-be51-caf00e0a98ed tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] Waiting for the task: (returnval){ [ 2284.503991] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]526c44e1-fe7d-8671-2fec-7b89e14d8260" [ 2284.503991] env[63279]: _type = "Task" [ 2284.503991] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2284.504259] env[63279]: DEBUG oslo_vmware.api [None req-a601b1b4-f84a-458b-8eca-889fd94aa5bd tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Task: {'id': task-2087964, 'name': CreateSnapshot_Task} progress is 100%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2284.508551] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f7c3a39a-0a53-4b36-a190-4d52bb9d5b06 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2284.527274] env[63279]: DEBUG oslo_concurrency.lockutils [None req-54619c35-9f4c-41b9-970e-d144dae4e3d2 tempest-ServersAdmin275Test-1209233635 tempest-ServersAdmin275Test-1209233635-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 0.530s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2284.534456] env[63279]: DEBUG oslo_concurrency.lockutils [None req-1e61b069-7c67-4b0d-bc91-a8ce70644860 tempest-ServerRescueTestJSON-987826405 tempest-ServerRescueTestJSON-987826405-project-member] Lock "59ad6180-e561-41e3-86e4-37fc20819578" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.330s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2284.543124] env[63279]: DEBUG oslo_vmware.api [None req-f3d444d6-0789-4e27-be51-caf00e0a98ed tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]526c44e1-fe7d-8671-2fec-7b89e14d8260, 'name': SearchDatastore_Task, 'duration_secs': 0.009392} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2284.548480] env[63279]: DEBUG oslo_concurrency.lockutils [None req-f3d444d6-0789-4e27-be51-caf00e0a98ed tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2284.548738] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-f3d444d6-0789-4e27-be51-caf00e0a98ed tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] [instance: 3de52a94-c1de-4b37-985c-9101417260e1] Processing image 30887889-e45b-4f67-8b3c-16216e594a90 {{(pid=63279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2284.548973] env[63279]: DEBUG oslo_concurrency.lockutils [None req-f3d444d6-0789-4e27-be51-caf00e0a98ed tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2284.549162] env[63279]: DEBUG oslo_concurrency.lockutils [None req-f3d444d6-0789-4e27-be51-caf00e0a98ed tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2284.549349] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-f3d444d6-0789-4e27-be51-caf00e0a98ed 
tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2284.549674] env[63279]: DEBUG oslo_vmware.api [None req-cda03fef-d08c-4df2-9f4f-bf02004d87f1 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Waiting for the task: (returnval){ [ 2284.549674] env[63279]: value = "task-2087967" [ 2284.549674] env[63279]: _type = "Task" [ 2284.549674] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2284.550810] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-25e2da98-2c0e-4ce4-b653-0f7459df44d4 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2284.562375] env[63279]: DEBUG oslo_vmware.api [None req-cda03fef-d08c-4df2-9f4f-bf02004d87f1 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Task: {'id': task-2087967, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2284.567962] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-f3d444d6-0789-4e27-be51-caf00e0a98ed tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2284.568170] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-f3d444d6-0789-4e27-be51-caf00e0a98ed tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2284.569106] env[63279]: DEBUG nova.compute.manager [None req-f0c628bc-84f4-449b-bd4f-e7a1b56e90cd tempest-ServersAdmin275Test-329467495 tempest-ServersAdmin275Test-329467495-project-admin] [instance: 69c88844-84c7-4272-a2c4-051f1499df84] Checking state {{(pid=63279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2284.569389] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e31a30b3-79cf-4a2c-a44f-60d31df3fa36 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2284.572249] env[63279]: INFO nova.compute.manager [None req-d9e80deb-245d-4ccf-a360-8692dd051097 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] [instance: a15141bc-424d-48ca-a6d5-c859a3639a0b] Detaching volume d14dfff0-b332-4aa4-94bb-62b564929c56 [ 2284.574300] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b0131ca-3874-4cc2-bbbb-ab7c61baa8d4 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2284.580453] env[63279]: DEBUG oslo_vmware.api [None req-f3d444d6-0789-4e27-be51-caf00e0a98ed tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] Waiting for the task: (returnval){ [ 2284.580453] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52a9717e-83f0-19b1-67d9-3b3b64aee4d1" [ 2284.580453] env[63279]: _type = "Task" [ 2284.580453] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2284.596016] env[63279]: DEBUG oslo_vmware.api [None req-f3d444d6-0789-4e27-be51-caf00e0a98ed tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52a9717e-83f0-19b1-67d9-3b3b64aee4d1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2284.619916] env[63279]: INFO nova.virt.block_device [None req-d9e80deb-245d-4ccf-a360-8692dd051097 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] [instance: a15141bc-424d-48ca-a6d5-c859a3639a0b] Attempting to driver detach volume d14dfff0-b332-4aa4-94bb-62b564929c56 from mountpoint /dev/sdb [ 2284.620260] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-d9e80deb-245d-4ccf-a360-8692dd051097 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] [instance: a15141bc-424d-48ca-a6d5-c859a3639a0b] Volume detach. 
Driver type: vmdk {{(pid=63279) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 2284.620735] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-d9e80deb-245d-4ccf-a360-8692dd051097 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] [instance: a15141bc-424d-48ca-a6d5-c859a3639a0b] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-427738', 'volume_id': 'd14dfff0-b332-4aa4-94bb-62b564929c56', 'name': 'volume-d14dfff0-b332-4aa4-94bb-62b564929c56', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'attached', 'instance': 'a15141bc-424d-48ca-a6d5-c859a3639a0b', 'attached_at': '', 'detached_at': '', 'volume_id': 'd14dfff0-b332-4aa4-94bb-62b564929c56', 'serial': 'd14dfff0-b332-4aa4-94bb-62b564929c56'} {{(pid=63279) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 2284.621512] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3bd950d7-84df-475e-9d91-b649b1908016 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2284.645267] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67b84a0e-d733-4b42-86b3-78fb13242563 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2284.652430] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4710428-a377-41b7-9ef8-0680aecd1890 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2284.673018] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd558c15-3b32-4681-81bf-3910db3448c1 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2284.692144] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-d9e80deb-245d-4ccf-a360-8692dd051097 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] The volume has not been displaced from its original location: [datastore1] volume-d14dfff0-b332-4aa4-94bb-62b564929c56/volume-d14dfff0-b332-4aa4-94bb-62b564929c56.vmdk. No consolidation needed. 
{{(pid=63279) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 2284.699020] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-d9e80deb-245d-4ccf-a360-8692dd051097 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] [instance: a15141bc-424d-48ca-a6d5-c859a3639a0b] Reconfiguring VM instance instance-00000040 to detach disk 2001 {{(pid=63279) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 2284.699020] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bff1863e-9c24-48d3-be5f-3a380ba90f65 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2284.717650] env[63279]: DEBUG oslo_vmware.api [None req-d9e80deb-245d-4ccf-a360-8692dd051097 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Waiting for the task: (returnval){ [ 2284.717650] env[63279]: value = "task-2087968" [ 2284.717650] env[63279]: _type = "Task" [ 2284.717650] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2284.726058] env[63279]: DEBUG oslo_vmware.api [None req-d9e80deb-245d-4ccf-a360-8692dd051097 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Task: {'id': task-2087968, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2284.970595] env[63279]: DEBUG oslo_vmware.api [None req-70fb686c-6512-468a-bd20-7f0c9ad16dab tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Task: {'id': task-2087966, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2284.970663] env[63279]: DEBUG oslo_vmware.api [None req-6d3f6bc6-99d6-42fc-8d00-639d1e794324 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Task: {'id': task-2087965, 'name': ReconfigVM_Task, 'duration_secs': 0.47773} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2284.970853] env[63279]: DEBUG oslo_vmware.api [None req-a601b1b4-f84a-458b-8eca-889fd94aa5bd tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Task: {'id': task-2087964, 'name': CreateSnapshot_Task, 'duration_secs': 0.667604} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2284.971125] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-6d3f6bc6-99d6-42fc-8d00-639d1e794324 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] [instance: 6e452711-a79c-4560-b38f-9414c87e6683] Reconfigured VM instance instance-00000065 to attach disk [datastore1] 6e452711-a79c-4560-b38f-9414c87e6683/6e452711-a79c-4560-b38f-9414c87e6683.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2284.971777] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-a601b1b4-f84a-458b-8eca-889fd94aa5bd tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 686a7ce2-2d07-411e-91d6-0471c55c3728] Created Snapshot of the VM instance {{(pid=63279) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 2284.972015] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-86938ee3-bf89-43da-ada7-02bea11a2d68 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2284.973971] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70ea08aa-6534-409d-b759-a4f3616ab02a {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2284.984343] env[63279]: DEBUG oslo_vmware.api [None req-6d3f6bc6-99d6-42fc-8d00-639d1e794324 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Waiting for the task: (returnval){ [ 2284.984343] env[63279]: value = "task-2087969" [ 2284.984343] env[63279]: _type = "Task" [ 2284.984343] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2284.994226] env[63279]: DEBUG oslo_vmware.api [None req-6d3f6bc6-99d6-42fc-8d00-639d1e794324 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Task: {'id': task-2087969, 'name': Rename_Task} progress is 6%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2285.009307] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b69cec4-5982-455d-b356-ed7313005dac {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2285.027638] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-60e19b22-fe2d-4d7d-8d6e-d53eab73e551 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: d0b8c9dd-c683-4f3a-b819-d9d57d96636b] Updating instance 'd0b8c9dd-c683-4f3a-b819-d9d57d96636b' progress to 0 {{(pid=63279) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2285.062673] env[63279]: DEBUG oslo_vmware.api [None req-cda03fef-d08c-4df2-9f4f-bf02004d87f1 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Task: {'id': task-2087967, 'name': ReconfigVM_Task, 'duration_secs': 0.350008} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2285.062965] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-cda03fef-d08c-4df2-9f4f-bf02004d87f1 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] [instance: 2fba60ba-7ef0-4e61-9f7d-147cad8ab7ed] Reconfigured VM instance instance-00000053 to attach disk [datastore1] 2fba60ba-7ef0-4e61-9f7d-147cad8ab7ed/30887889-e45b-4f67-8b3c-16216e594a90-rescue.vmdk or device None with type thin {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2285.063840] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2dcf4793-7961-4b69-aa56-f35f7f0476f4 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2285.096513] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-de735169-d158-42eb-9a5c-be53f1160c11 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2285.118063] env[63279]: DEBUG oslo_vmware.api [None req-f3d444d6-0789-4e27-be51-caf00e0a98ed tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52a9717e-83f0-19b1-67d9-3b3b64aee4d1, 'name': SearchDatastore_Task, 'duration_secs': 0.016631} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2285.120074] env[63279]: DEBUG oslo_vmware.api [None req-cda03fef-d08c-4df2-9f4f-bf02004d87f1 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Waiting for the task: (returnval){ [ 2285.120074] env[63279]: value = "task-2087970" [ 2285.120074] env[63279]: _type = "Task" [ 2285.120074] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2285.120310] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6deea7b1-8950-442a-8da2-e30195f9332f {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2285.130054] env[63279]: DEBUG oslo_vmware.api [None req-f3d444d6-0789-4e27-be51-caf00e0a98ed tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] Waiting for the task: (returnval){ [ 2285.130054] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52b02558-f90d-c637-c5e5-734204e85c1b" [ 2285.130054] env[63279]: _type = "Task" [ 2285.130054] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2285.133330] env[63279]: DEBUG oslo_vmware.api [None req-cda03fef-d08c-4df2-9f4f-bf02004d87f1 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Task: {'id': task-2087970, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2285.141351] env[63279]: DEBUG oslo_vmware.api [None req-f3d444d6-0789-4e27-be51-caf00e0a98ed tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52b02558-f90d-c637-c5e5-734204e85c1b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2285.228298] env[63279]: DEBUG oslo_vmware.api [None req-d9e80deb-245d-4ccf-a360-8692dd051097 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Task: {'id': task-2087968, 'name': ReconfigVM_Task, 'duration_secs': 0.228205} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2285.228558] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-d9e80deb-245d-4ccf-a360-8692dd051097 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] [instance: a15141bc-424d-48ca-a6d5-c859a3639a0b] Reconfigured VM instance instance-00000040 to detach disk 2001 {{(pid=63279) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 2285.233288] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e5e50d86-c130-433b-99bf-26bcb2ab90f8 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2285.248229] env[63279]: DEBUG oslo_vmware.api [None req-d9e80deb-245d-4ccf-a360-8692dd051097 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Waiting for the task: (returnval){ [ 2285.248229] env[63279]: value = "task-2087971" [ 2285.248229] env[63279]: _type = "Task" [ 2285.248229] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2285.256640] env[63279]: DEBUG oslo_vmware.api [None req-d9e80deb-245d-4ccf-a360-8692dd051097 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Task: {'id': task-2087971, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2285.419599] env[63279]: DEBUG nova.network.neutron [req-e45c2255-5245-45c8-a57d-930ece3bf5f3 req-4a51fe58-835b-4535-8793-38ea01a165d0 service nova] [instance: 3de52a94-c1de-4b37-985c-9101417260e1] Updated VIF entry in instance network info cache for port 949bba7c-664a-44a6-bd3b-635bd99e3501. 
{{(pid=63279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2285.420179] env[63279]: DEBUG nova.network.neutron [req-e45c2255-5245-45c8-a57d-930ece3bf5f3 req-4a51fe58-835b-4535-8793-38ea01a165d0 service nova] [instance: 3de52a94-c1de-4b37-985c-9101417260e1] Updating instance_info_cache with network_info: [{"id": "949bba7c-664a-44a6-bd3b-635bd99e3501", "address": "fa:16:3e:6a:d2:56", "network": {"id": "7b4d0149-03c5-4c40-ba16-d705499cd558", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-1026068065-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3d9c0a9c34ba408c829c0b50f3592bb2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aaf1b231-3660-4453-b4f3-44d825b9a5dd", "external-id": "nsx-vlan-transportzone-6", "segmentation_id": 6, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap949bba7c-66", "ovs_interfaceid": "949bba7c-664a-44a6-bd3b-635bd99e3501", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2285.465747] env[63279]: DEBUG oslo_vmware.api [None req-70fb686c-6512-468a-bd20-7f0c9ad16dab tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Task: {'id': task-2087966, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2285.494364] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-a601b1b4-f84a-458b-8eca-889fd94aa5bd tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 686a7ce2-2d07-411e-91d6-0471c55c3728] Creating linked-clone VM from snapshot {{(pid=63279) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 2285.494725] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-f35bcbce-4cd8-458b-b4b3-ce7e0d971b82 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2285.507589] env[63279]: DEBUG oslo_vmware.api [None req-6d3f6bc6-99d6-42fc-8d00-639d1e794324 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Task: {'id': task-2087969, 'name': Rename_Task, 'duration_secs': 0.404427} completed successfully. 
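The update_instance_cache_with_nw_info entry above stores one dictionary per VIF. A small illustrative sketch (not part of the log) of reading that structure, using a trimmed copy of the logged entry; fields not read below are dropped:

    # Trimmed excerpt of the cached network_info entry logged above.
    network_info = [{
        "id": "949bba7c-664a-44a6-bd3b-635bd99e3501",
        "address": "fa:16:3e:6a:d2:56",
        "network": {
            "bridge": "br-int",
            "subnets": [{
                "cidr": "192.168.128.0/28",
                "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4}],
            }],
        },
        "type": "ovs",
        "devname": "tap949bba7c-66",
        "active": True,
    }]

    # One cache entry per VIF: port UUID, MAC address, and the fixed IPs
    # nested under network -> subnets -> ips.
    for vif in network_info:
        fixed_ips = [ip["address"]
                     for subnet in vif["network"]["subnets"]
                     for ip in subnet["ips"]]
        print(vif["id"], vif["address"], fixed_ips)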
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2285.509666] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-6d3f6bc6-99d6-42fc-8d00-639d1e794324 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] [instance: 6e452711-a79c-4560-b38f-9414c87e6683] Powering on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2285.509986] env[63279]: DEBUG oslo_vmware.api [None req-a601b1b4-f84a-458b-8eca-889fd94aa5bd tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Waiting for the task: (returnval){ [ 2285.509986] env[63279]: value = "task-2087972" [ 2285.509986] env[63279]: _type = "Task" [ 2285.509986] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2285.510260] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-803ad7cc-d911-445a-82d1-9e28faad88c0 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2285.520365] env[63279]: DEBUG oslo_vmware.api [None req-6d3f6bc6-99d6-42fc-8d00-639d1e794324 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Waiting for the task: (returnval){ [ 2285.520365] env[63279]: value = "task-2087973" [ 2285.520365] env[63279]: _type = "Task" [ 2285.520365] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2285.524192] env[63279]: DEBUG oslo_vmware.api [None req-a601b1b4-f84a-458b-8eca-889fd94aa5bd tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Task: {'id': task-2087972, 'name': CloneVM_Task} progress is 5%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2285.535100] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-60e19b22-fe2d-4d7d-8d6e-d53eab73e551 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: d0b8c9dd-c683-4f3a-b819-d9d57d96636b] Powering off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2285.535535] env[63279]: DEBUG oslo_vmware.api [None req-6d3f6bc6-99d6-42fc-8d00-639d1e794324 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Task: {'id': task-2087973, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2285.535862] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6532adf0-8591-4c14-84ab-3ae22e871a23 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2285.544550] env[63279]: DEBUG oslo_vmware.api [None req-60e19b22-fe2d-4d7d-8d6e-d53eab73e551 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Waiting for the task: (returnval){ [ 2285.544550] env[63279]: value = "task-2087974" [ 2285.544550] env[63279]: _type = "Task" [ 2285.544550] env[63279]: } to complete. 
{{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2285.553054] env[63279]: DEBUG oslo_vmware.api [None req-60e19b22-fe2d-4d7d-8d6e-d53eab73e551 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Task: {'id': task-2087974, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2285.609827] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-f0c628bc-84f4-449b-bd4f-e7a1b56e90cd tempest-ServersAdmin275Test-329467495 tempest-ServersAdmin275Test-329467495-project-admin] [instance: 69c88844-84c7-4272-a2c4-051f1499df84] Powering off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2285.610333] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-416c557e-b35d-4198-ac67-a71bebea12c4 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2285.618131] env[63279]: DEBUG oslo_vmware.api [None req-f0c628bc-84f4-449b-bd4f-e7a1b56e90cd tempest-ServersAdmin275Test-329467495 tempest-ServersAdmin275Test-329467495-project-admin] Waiting for the task: (returnval){ [ 2285.618131] env[63279]: value = "task-2087975" [ 2285.618131] env[63279]: _type = "Task" [ 2285.618131] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2285.631460] env[63279]: DEBUG oslo_vmware.api [None req-f0c628bc-84f4-449b-bd4f-e7a1b56e90cd tempest-ServersAdmin275Test-329467495 tempest-ServersAdmin275Test-329467495-project-admin] Task: {'id': task-2087975, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2285.637168] env[63279]: DEBUG oslo_vmware.api [None req-cda03fef-d08c-4df2-9f4f-bf02004d87f1 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Task: {'id': task-2087970, 'name': ReconfigVM_Task, 'duration_secs': 0.352193} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2285.640820] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-cda03fef-d08c-4df2-9f4f-bf02004d87f1 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] [instance: 2fba60ba-7ef0-4e61-9f7d-147cad8ab7ed] Powering on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2285.641377] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1dbc16c8-8ee2-449f-bfbd-75e460c74c03 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2285.649095] env[63279]: DEBUG oslo_vmware.api [None req-f3d444d6-0789-4e27-be51-caf00e0a98ed tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52b02558-f90d-c637-c5e5-734204e85c1b, 'name': SearchDatastore_Task, 'duration_secs': 0.023492} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2285.650868] env[63279]: DEBUG oslo_concurrency.lockutils [None req-f3d444d6-0789-4e27-be51-caf00e0a98ed tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2285.651256] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-f3d444d6-0789-4e27-be51-caf00e0a98ed tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] 3de52a94-c1de-4b37-985c-9101417260e1/3de52a94-c1de-4b37-985c-9101417260e1.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2285.651658] env[63279]: DEBUG oslo_vmware.api [None req-cda03fef-d08c-4df2-9f4f-bf02004d87f1 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Waiting for the task: (returnval){ [ 2285.651658] env[63279]: value = "task-2087976" [ 2285.651658] env[63279]: _type = "Task" [ 2285.651658] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2285.651907] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a9c6bede-80bf-4fdd-a206-294c987e11f6 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2285.664921] env[63279]: DEBUG oslo_vmware.api [None req-cda03fef-d08c-4df2-9f4f-bf02004d87f1 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Task: {'id': task-2087976, 'name': PowerOnVM_Task} progress is 33%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2285.667260] env[63279]: DEBUG oslo_vmware.api [None req-f3d444d6-0789-4e27-be51-caf00e0a98ed tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] Waiting for the task: (returnval){ [ 2285.667260] env[63279]: value = "task-2087977" [ 2285.667260] env[63279]: _type = "Task" [ 2285.667260] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2285.679127] env[63279]: DEBUG oslo_vmware.api [None req-f3d444d6-0789-4e27-be51-caf00e0a98ed tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] Task: {'id': task-2087977, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2285.760211] env[63279]: DEBUG oslo_vmware.api [None req-d9e80deb-245d-4ccf-a360-8692dd051097 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Task: {'id': task-2087971, 'name': ReconfigVM_Task, 'duration_secs': 0.203435} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2285.761587] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-d9e80deb-245d-4ccf-a360-8692dd051097 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] [instance: a15141bc-424d-48ca-a6d5-c859a3639a0b] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-427738', 'volume_id': 'd14dfff0-b332-4aa4-94bb-62b564929c56', 'name': 'volume-d14dfff0-b332-4aa4-94bb-62b564929c56', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'attached', 'instance': 'a15141bc-424d-48ca-a6d5-c859a3639a0b', 'attached_at': '', 'detached_at': '', 'volume_id': 'd14dfff0-b332-4aa4-94bb-62b564929c56', 'serial': 'd14dfff0-b332-4aa4-94bb-62b564929c56'} {{(pid=63279) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 2285.925596] env[63279]: DEBUG oslo_concurrency.lockutils [req-e45c2255-5245-45c8-a57d-930ece3bf5f3 req-4a51fe58-835b-4535-8793-38ea01a165d0 service nova] Releasing lock "refresh_cache-3de52a94-c1de-4b37-985c-9101417260e1" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2285.960997] env[63279]: DEBUG oslo_vmware.api [None req-70fb686c-6512-468a-bd20-7f0c9ad16dab tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Task: {'id': task-2087966, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2286.026616] env[63279]: DEBUG oslo_vmware.api [None req-a601b1b4-f84a-458b-8eca-889fd94aa5bd tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Task: {'id': task-2087972, 'name': CloneVM_Task} progress is 94%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2286.036174] env[63279]: DEBUG oslo_vmware.api [None req-6d3f6bc6-99d6-42fc-8d00-639d1e794324 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Task: {'id': task-2087973, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2286.055069] env[63279]: DEBUG oslo_vmware.api [None req-60e19b22-fe2d-4d7d-8d6e-d53eab73e551 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Task: {'id': task-2087974, 'name': PowerOffVM_Task, 'duration_secs': 0.255874} completed successfully. 
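The 'Detached VMDK' entry above dumps the volume connection info that drove the detach. An illustrative sketch (not part of the log) of that structure, trimmed to the two fields the flow hinges on, the driver type and the Cinder volume ID:

    # Trimmed excerpt of the connection info shown in the 'Detached VMDK'
    # entry above; keys not read here are dropped.
    connection_info = {
        'driver_volume_type': 'vmdk',
        'data': {
            'volume': 'vm-427738',
            'volume_id': 'd14dfff0-b332-4aa4-94bb-62b564929c56',
            'name': 'volume-d14dfff0-b332-4aa4-94bb-62b564929c56',
            'access_mode': 'rw',
            'encrypted': False,
        },
        'serial': 'd14dfff0-b332-4aa4-94bb-62b564929c56',
    }

    # The driver type selects the detach path (_detach_volume_vmdk in this
    # log), and the Cinder volume is identified by data['volume_id'].
    assert connection_info['driver_volume_type'] == 'vmdk'
    volume_id = connection_info['data']['volume_id']
    print(volume_id)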
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2286.055391] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-60e19b22-fe2d-4d7d-8d6e-d53eab73e551 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: d0b8c9dd-c683-4f3a-b819-d9d57d96636b] Powered off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2286.055587] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-60e19b22-fe2d-4d7d-8d6e-d53eab73e551 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: d0b8c9dd-c683-4f3a-b819-d9d57d96636b] Updating instance 'd0b8c9dd-c683-4f3a-b819-d9d57d96636b' progress to 17 {{(pid=63279) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2286.134418] env[63279]: DEBUG oslo_vmware.api [None req-f0c628bc-84f4-449b-bd4f-e7a1b56e90cd tempest-ServersAdmin275Test-329467495 tempest-ServersAdmin275Test-329467495-project-admin] Task: {'id': task-2087975, 'name': PowerOffVM_Task, 'duration_secs': 0.176077} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2286.135032] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-f0c628bc-84f4-449b-bd4f-e7a1b56e90cd tempest-ServersAdmin275Test-329467495 tempest-ServersAdmin275Test-329467495-project-admin] [instance: 69c88844-84c7-4272-a2c4-051f1499df84] Powered off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2286.135542] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-f0c628bc-84f4-449b-bd4f-e7a1b56e90cd tempest-ServersAdmin275Test-329467495 tempest-ServersAdmin275Test-329467495-project-admin] [instance: 69c88844-84c7-4272-a2c4-051f1499df84] Destroying instance {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2286.136888] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-522f09d2-2c1f-4a2c-ad4e-049810966b2f {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2286.147869] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-f0c628bc-84f4-449b-bd4f-e7a1b56e90cd tempest-ServersAdmin275Test-329467495 tempest-ServersAdmin275Test-329467495-project-admin] [instance: 69c88844-84c7-4272-a2c4-051f1499df84] Unregistering the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2286.148229] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f115732b-80f0-410b-a29f-8ce2c3cc1924 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2286.170034] env[63279]: DEBUG oslo_vmware.api [None req-cda03fef-d08c-4df2-9f4f-bf02004d87f1 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Task: {'id': task-2087976, 'name': PowerOnVM_Task} progress is 88%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2286.175797] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-f0c628bc-84f4-449b-bd4f-e7a1b56e90cd tempest-ServersAdmin275Test-329467495 tempest-ServersAdmin275Test-329467495-project-admin] [instance: 69c88844-84c7-4272-a2c4-051f1499df84] Unregistered the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2286.176369] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-f0c628bc-84f4-449b-bd4f-e7a1b56e90cd tempest-ServersAdmin275Test-329467495 tempest-ServersAdmin275Test-329467495-project-admin] [instance: 69c88844-84c7-4272-a2c4-051f1499df84] Deleting contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2286.176502] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-f0c628bc-84f4-449b-bd4f-e7a1b56e90cd tempest-ServersAdmin275Test-329467495 tempest-ServersAdmin275Test-329467495-project-admin] Deleting the datastore file [datastore1] 69c88844-84c7-4272-a2c4-051f1499df84 {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2286.176795] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-20b0127c-665e-49bf-ac6b-6db31c4512c3 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2286.183069] env[63279]: DEBUG oslo_vmware.api [None req-f3d444d6-0789-4e27-be51-caf00e0a98ed tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] Task: {'id': task-2087977, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2286.188922] env[63279]: DEBUG oslo_vmware.api [None req-f0c628bc-84f4-449b-bd4f-e7a1b56e90cd tempest-ServersAdmin275Test-329467495 tempest-ServersAdmin275Test-329467495-project-admin] Waiting for the task: (returnval){ [ 2286.188922] env[63279]: value = "task-2087979" [ 2286.188922] env[63279]: _type = "Task" [ 2286.188922] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2286.198091] env[63279]: DEBUG oslo_vmware.api [None req-f0c628bc-84f4-449b-bd4f-e7a1b56e90cd tempest-ServersAdmin275Test-329467495 tempest-ServersAdmin275Test-329467495-project-admin] Task: {'id': task-2087979, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2286.315175] env[63279]: DEBUG nova.objects.instance [None req-d9e80deb-245d-4ccf-a360-8692dd051097 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Lazy-loading 'flavor' on Instance uuid a15141bc-424d-48ca-a6d5-c859a3639a0b {{(pid=63279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2286.322665] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2286.322665] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2286.461280] env[63279]: DEBUG oslo_vmware.api [None req-70fb686c-6512-468a-bd20-7f0c9ad16dab tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Task: {'id': task-2087966, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2286.525491] env[63279]: DEBUG oslo_vmware.api [None req-a601b1b4-f84a-458b-8eca-889fd94aa5bd tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Task: {'id': task-2087972, 'name': CloneVM_Task} progress is 94%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2286.536584] env[63279]: DEBUG oslo_vmware.api [None req-6d3f6bc6-99d6-42fc-8d00-639d1e794324 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Task: {'id': task-2087973, 'name': PowerOnVM_Task, 'duration_secs': 0.917029} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2286.536815] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-6d3f6bc6-99d6-42fc-8d00-639d1e794324 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] [instance: 6e452711-a79c-4560-b38f-9414c87e6683] Powered on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2286.537033] env[63279]: INFO nova.compute.manager [None req-6d3f6bc6-99d6-42fc-8d00-639d1e794324 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] [instance: 6e452711-a79c-4560-b38f-9414c87e6683] Took 9.47 seconds to spawn the instance on the hypervisor. 
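The power-on sequence above (PowerOnVM_Task invoked, the task polled at 0% and 66%, then 'Powered on the VM') is the oslo.vmware invoke-and-wait pattern. A minimal sketch of that pattern, assuming a reachable vCenter; the endpoint, credentials, and the vm-12345 moref are placeholders, not values from this log:

    from oslo_vmware import api
    from oslo_vmware import vim_util

    # Placeholder endpoint and credentials; retry/poll settings are
    # illustrative, not this cloud's configuration.
    session = api.VMwareAPISession('vcenter.example.org', 'admin', 'secret',
                                   api_retry_count=10, task_poll_interval=0.5)

    # Placeholder managed-object reference; nova resolves the real one from
    # the instance UUID via the vCenter inventory.
    vm_ref = vim_util.get_moref('vm-12345', 'VirtualMachine')

    # Start the asynchronous power-on and block until vCenter reports
    # success. wait_for_task() is the poller behind the "Waiting for the
    # task" / "progress is N%" / "completed successfully" lines above.
    task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
    session.wait_for_task(task)

    # The subsequent "Checking state" entries read a single property back,
    # which surfaces as PropertyCollector.RetrievePropertiesEx in the log.
    power_state = session.invoke_api(vim_util, 'get_object_property',
                                     session.vim, vm_ref, 'runtime.powerState')
    print(power_state)  # e.g. 'poweredOn'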
[ 2286.537274] env[63279]: DEBUG nova.compute.manager [None req-6d3f6bc6-99d6-42fc-8d00-639d1e794324 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] [instance: 6e452711-a79c-4560-b38f-9414c87e6683] Checking state {{(pid=63279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2286.538100] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45f6cec3-9c50-461b-9f95-f1844dbd334b {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2286.563978] env[63279]: DEBUG nova.virt.hardware [None req-60e19b22-fe2d-4d7d-8d6e-d53eab73e551 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-13T17:30:21Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=30887889-e45b-4f67-8b3c-16216e594a90,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2286.563978] env[63279]: DEBUG nova.virt.hardware [None req-60e19b22-fe2d-4d7d-8d6e-d53eab73e551 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Flavor limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2286.563978] env[63279]: DEBUG nova.virt.hardware [None req-60e19b22-fe2d-4d7d-8d6e-d53eab73e551 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Image limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2286.564383] env[63279]: DEBUG nova.virt.hardware [None req-60e19b22-fe2d-4d7d-8d6e-d53eab73e551 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Flavor pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2286.564383] env[63279]: DEBUG nova.virt.hardware [None req-60e19b22-fe2d-4d7d-8d6e-d53eab73e551 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Image pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2286.565561] env[63279]: DEBUG nova.virt.hardware [None req-60e19b22-fe2d-4d7d-8d6e-d53eab73e551 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2286.565727] env[63279]: DEBUG nova.virt.hardware [None req-60e19b22-fe2d-4d7d-8d6e-d53eab73e551 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 2286.566184] env[63279]: DEBUG 
nova.virt.hardware [None req-60e19b22-fe2d-4d7d-8d6e-d53eab73e551 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 2286.566184] env[63279]: DEBUG nova.virt.hardware [None req-60e19b22-fe2d-4d7d-8d6e-d53eab73e551 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Got 1 possible topologies {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2286.566285] env[63279]: DEBUG nova.virt.hardware [None req-60e19b22-fe2d-4d7d-8d6e-d53eab73e551 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2286.567023] env[63279]: DEBUG nova.virt.hardware [None req-60e19b22-fe2d-4d7d-8d6e-d53eab73e551 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2286.575024] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9fc029d6-25b7-4639-b758-4e85c55c55db {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2286.594784] env[63279]: DEBUG oslo_vmware.api [None req-60e19b22-fe2d-4d7d-8d6e-d53eab73e551 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Waiting for the task: (returnval){ [ 2286.594784] env[63279]: value = "task-2087980" [ 2286.594784] env[63279]: _type = "Task" [ 2286.594784] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2286.608737] env[63279]: DEBUG oslo_vmware.api [None req-60e19b22-fe2d-4d7d-8d6e-d53eab73e551 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Task: {'id': task-2087980, 'name': ReconfigVM_Task} progress is 10%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2286.666252] env[63279]: DEBUG oslo_vmware.api [None req-cda03fef-d08c-4df2-9f4f-bf02004d87f1 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Task: {'id': task-2087976, 'name': PowerOnVM_Task, 'duration_secs': 0.766688} completed successfully. 
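The hardware.py entries above enumerate CPU topologies for a 1-vCPU flavor under 65536-wide limits and find exactly one candidate. A simplified sketch of just that counting step; nova.virt.hardware applies further preferences and image constraints that are not reproduced here:

    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536,
                            max_threads=65536):
        """Yield (sockets, cores, threads) whose product is exactly vcpus."""
        for sockets in range(1, min(vcpus, max_sockets) + 1):
            if vcpus % sockets:
                continue
            per_socket = vcpus // sockets
            for cores in range(1, min(per_socket, max_cores) + 1):
                if per_socket % cores:
                    continue
                threads = per_socket // cores
                if threads <= max_threads:
                    yield sockets, cores, threads

    # With the 1-vCPU flavors and 65536 limits shown above there is exactly
    # one candidate, matching "Got 1 possible topologies".
    print(list(possible_topologies(1)))   # [(1, 1, 1)]
    print(list(possible_topologies(4)))   # several candidates for 4 vCPUs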
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2286.666912] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-cda03fef-d08c-4df2-9f4f-bf02004d87f1 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] [instance: 2fba60ba-7ef0-4e61-9f7d-147cad8ab7ed] Powered on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2286.669784] env[63279]: DEBUG nova.compute.manager [None req-cda03fef-d08c-4df2-9f4f-bf02004d87f1 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] [instance: 2fba60ba-7ef0-4e61-9f7d-147cad8ab7ed] Checking state {{(pid=63279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2286.672154] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a57c569d-a78e-43b7-ae22-4e9ebdfba7be {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2286.692251] env[63279]: DEBUG oslo_vmware.api [None req-f3d444d6-0789-4e27-be51-caf00e0a98ed tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] Task: {'id': task-2087977, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.647503} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2286.695921] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-f3d444d6-0789-4e27-be51-caf00e0a98ed tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] 3de52a94-c1de-4b37-985c-9101417260e1/3de52a94-c1de-4b37-985c-9101417260e1.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2286.696224] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-f3d444d6-0789-4e27-be51-caf00e0a98ed tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] [instance: 3de52a94-c1de-4b37-985c-9101417260e1] Extending root virtual disk to 1048576 {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2286.699049] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-46493ccd-c7a5-4fef-9cba-47392e6e5e58 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2286.706183] env[63279]: DEBUG oslo_vmware.api [None req-f0c628bc-84f4-449b-bd4f-e7a1b56e90cd tempest-ServersAdmin275Test-329467495 tempest-ServersAdmin275Test-329467495-project-admin] Task: {'id': task-2087979, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.205052} completed successfully. 
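The entries above copy the cached image disk into the instance directory and then extend it to the flavor's root size (CopyVirtualDisk_Task followed by ExtendVirtualDisk_Task). A hedged sketch of those two VirtualDiskManager calls; 'session' is assumed to be an existing oslo_vmware VMwareAPISession and 'dc_ref' the datacenter moref, while the datastore paths are copied from the log:

    disk_mgr = session.vim.service_content.virtualDiskManager

    src = ('[datastore1] devstack-image-cache_base/'
           '30887889-e45b-4f67-8b3c-16216e594a90/'
           '30887889-e45b-4f67-8b3c-16216e594a90.vmdk')
    dst = ('[datastore1] 3de52a94-c1de-4b37-985c-9101417260e1/'
           '3de52a94-c1de-4b37-985c-9101417260e1.vmdk')

    # Copy the cached image disk to the instance directory...
    copy_task = session.invoke_api(session.vim, 'CopyVirtualDisk_Task',
                                   disk_mgr,
                                   sourceName=src, sourceDatacenter=dc_ref,
                                   destName=dst, destDatacenter=dc_ref)
    session.wait_for_task(copy_task)

    # ...then grow it to the flavor's root size. The logged value 1048576 is
    # in KB, i.e. a 1 GiB root disk for these root_gb=1 flavors.
    extend_task = session.invoke_api(session.vim, 'ExtendVirtualDisk_Task',
                                     disk_mgr, name=dst, datacenter=dc_ref,
                                     newCapacityKb=1048576, eagerZero=False)
    session.wait_for_task(extend_task)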
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2286.707481] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-f0c628bc-84f4-449b-bd4f-e7a1b56e90cd tempest-ServersAdmin275Test-329467495 tempest-ServersAdmin275Test-329467495-project-admin] Deleted the datastore file {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2286.707693] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-f0c628bc-84f4-449b-bd4f-e7a1b56e90cd tempest-ServersAdmin275Test-329467495 tempest-ServersAdmin275Test-329467495-project-admin] [instance: 69c88844-84c7-4272-a2c4-051f1499df84] Deleted contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2286.708530] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-f0c628bc-84f4-449b-bd4f-e7a1b56e90cd tempest-ServersAdmin275Test-329467495 tempest-ServersAdmin275Test-329467495-project-admin] [instance: 69c88844-84c7-4272-a2c4-051f1499df84] Instance destroyed {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2286.711479] env[63279]: DEBUG oslo_vmware.api [None req-f3d444d6-0789-4e27-be51-caf00e0a98ed tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] Waiting for the task: (returnval){ [ 2286.711479] env[63279]: value = "task-2087981" [ 2286.711479] env[63279]: _type = "Task" [ 2286.711479] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2286.720187] env[63279]: DEBUG oslo_vmware.api [None req-f3d444d6-0789-4e27-be51-caf00e0a98ed tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] Task: {'id': task-2087981, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2286.830254] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2286.830254] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Starting heal instance info cache {{(pid=63279) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10447}} [ 2286.830254] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Rebuilding the list of instances to heal {{(pid=63279) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10451}} [ 2286.963194] env[63279]: DEBUG oslo_vmware.api [None req-70fb686c-6512-468a-bd20-7f0c9ad16dab tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Task: {'id': task-2087966, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2287.028853] env[63279]: DEBUG oslo_vmware.api [None req-a601b1b4-f84a-458b-8eca-889fd94aa5bd tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Task: {'id': task-2087972, 'name': CloneVM_Task} progress is 94%. 
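The 'Running periodic task' entries above come from oslo.service driving ComputeManager methods such as _heal_instance_info_cache on a timer. A minimal sketch of that pattern; the manager class and the 60-second spacing are illustrative placeholders, not nova's configuration:

    from oslo_config import cfg
    from oslo_service import periodic_task

    CONF = cfg.CONF


    class Manager(periodic_task.PeriodicTasks):
        def __init__(self):
            super().__init__(CONF)

        # In nova the spacing comes from configuration; 60 seconds is only a
        # placeholder here.
        @periodic_task.periodic_task(spacing=60)
        def _heal_instance_info_cache(self, context):
            # Refresh the network info cache for one instance per run.
            pass


    mgr = Manager()
    mgr.run_periodic_tasks(context=None)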
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2287.058987] env[63279]: INFO nova.compute.manager [None req-6d3f6bc6-99d6-42fc-8d00-639d1e794324 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] [instance: 6e452711-a79c-4560-b38f-9414c87e6683] Took 15.31 seconds to build instance. [ 2287.103991] env[63279]: DEBUG oslo_vmware.api [None req-60e19b22-fe2d-4d7d-8d6e-d53eab73e551 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Task: {'id': task-2087980, 'name': ReconfigVM_Task, 'duration_secs': 0.429488} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2287.104725] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-60e19b22-fe2d-4d7d-8d6e-d53eab73e551 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: d0b8c9dd-c683-4f3a-b819-d9d57d96636b] Updating instance 'd0b8c9dd-c683-4f3a-b819-d9d57d96636b' progress to 33 {{(pid=63279) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2287.225887] env[63279]: DEBUG oslo_vmware.api [None req-f3d444d6-0789-4e27-be51-caf00e0a98ed tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] Task: {'id': task-2087981, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.273988} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2287.226110] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-f3d444d6-0789-4e27-be51-caf00e0a98ed tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] [instance: 3de52a94-c1de-4b37-985c-9101417260e1] Extended root virtual disk {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2287.226779] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4506b9a1-26fb-48f2-8484-4887356e5ec3 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2287.256285] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-f3d444d6-0789-4e27-be51-caf00e0a98ed tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] [instance: 3de52a94-c1de-4b37-985c-9101417260e1] Reconfiguring VM instance instance-00000066 to attach disk [datastore1] 3de52a94-c1de-4b37-985c-9101417260e1/3de52a94-c1de-4b37-985c-9101417260e1.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2287.257298] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-eb148489-d3b0-4ffc-a440-f3fd06a6eb3f {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2287.278440] env[63279]: DEBUG oslo_vmware.api [None req-f3d444d6-0789-4e27-be51-caf00e0a98ed tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] Waiting for the task: (returnval){ [ 2287.278440] env[63279]: value = "task-2087982" [ 2287.278440] env[63279]: _type = "Task" [ 2287.278440] env[63279]: } to complete. 
{{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2287.288772] env[63279]: DEBUG oslo_vmware.api [None req-f3d444d6-0789-4e27-be51-caf00e0a98ed tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] Task: {'id': task-2087982, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2287.321756] env[63279]: DEBUG oslo_concurrency.lockutils [None req-d9e80deb-245d-4ccf-a360-8692dd051097 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Lock "a15141bc-424d-48ca-a6d5-c859a3639a0b" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.254s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2287.336731] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] [instance: 3de52a94-c1de-4b37-985c-9101417260e1] Skipping network cache update for instance because it is Building. {{(pid=63279) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10460}} [ 2287.364710] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Acquiring lock "refresh_cache-a15141bc-424d-48ca-a6d5-c859a3639a0b" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2287.364873] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Acquired lock "refresh_cache-a15141bc-424d-48ca-a6d5-c859a3639a0b" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2287.365043] env[63279]: DEBUG nova.network.neutron [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] [instance: a15141bc-424d-48ca-a6d5-c859a3639a0b] Forcefully refreshing network info cache for instance {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}} [ 2287.365215] env[63279]: DEBUG nova.objects.instance [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Lazy-loading 'info_cache' on Instance uuid a15141bc-424d-48ca-a6d5-c859a3639a0b {{(pid=63279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2287.462666] env[63279]: DEBUG oslo_vmware.api [None req-70fb686c-6512-468a-bd20-7f0c9ad16dab tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Task: {'id': task-2087966, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2287.526691] env[63279]: DEBUG oslo_vmware.api [None req-a601b1b4-f84a-458b-8eca-889fd94aa5bd tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Task: {'id': task-2087972, 'name': CloneVM_Task} progress is 94%. 
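The entries above show lockutils timing a per-instance lock ('held 3.254s' around do_detach_volume) and then taking a refresh_cache-<uuid> lock before forcefully refreshing the network info cache. A minimal sketch of that oslo.concurrency pattern, with the instance UUID taken from the log and the lock bodies left as placeholders:

    from oslo_concurrency import lockutils

    instance_uuid = 'a15141bc-424d-48ca-a6d5-c859a3639a0b'

    # Context-manager form, matching the refresh_cache-<uuid> lock names
    # above. The 'waited'/'held' timings in the log are emitted on acquire
    # and release.
    with lockutils.lock('refresh_cache-%s' % instance_uuid):
        # Only one worker refreshes this instance's network info cache at a
        # time.
        pass

    # Decorator form; nova serializes per-instance operations (for example
    # do_detach_volume and do_terminate_instance in this log) with a thin
    # wrapper around the same primitive.
    @lockutils.synchronized(instance_uuid)
    def do_detach_volume():
        pass

    do_detach_volume()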
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2287.561686] env[63279]: DEBUG oslo_concurrency.lockutils [None req-6d3f6bc6-99d6-42fc-8d00-639d1e794324 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Lock "6e452711-a79c-4560-b38f-9414c87e6683" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 16.819s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2287.610971] env[63279]: DEBUG nova.virt.hardware [None req-60e19b22-fe2d-4d7d-8d6e-d53eab73e551 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-13T17:30:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=30887889-e45b-4f67-8b3c-16216e594a90,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2287.611282] env[63279]: DEBUG nova.virt.hardware [None req-60e19b22-fe2d-4d7d-8d6e-d53eab73e551 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Flavor limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2287.611456] env[63279]: DEBUG nova.virt.hardware [None req-60e19b22-fe2d-4d7d-8d6e-d53eab73e551 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Image limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2287.611642] env[63279]: DEBUG nova.virt.hardware [None req-60e19b22-fe2d-4d7d-8d6e-d53eab73e551 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Flavor pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2287.611793] env[63279]: DEBUG nova.virt.hardware [None req-60e19b22-fe2d-4d7d-8d6e-d53eab73e551 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Image pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2287.611943] env[63279]: DEBUG nova.virt.hardware [None req-60e19b22-fe2d-4d7d-8d6e-d53eab73e551 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2287.612169] env[63279]: DEBUG nova.virt.hardware [None req-60e19b22-fe2d-4d7d-8d6e-d53eab73e551 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 2287.612334] env[63279]: DEBUG nova.virt.hardware [None 
req-60e19b22-fe2d-4d7d-8d6e-d53eab73e551 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 2287.612508] env[63279]: DEBUG nova.virt.hardware [None req-60e19b22-fe2d-4d7d-8d6e-d53eab73e551 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Got 1 possible topologies {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2287.612687] env[63279]: DEBUG nova.virt.hardware [None req-60e19b22-fe2d-4d7d-8d6e-d53eab73e551 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2287.612884] env[63279]: DEBUG nova.virt.hardware [None req-60e19b22-fe2d-4d7d-8d6e-d53eab73e551 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2287.618476] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-60e19b22-fe2d-4d7d-8d6e-d53eab73e551 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: d0b8c9dd-c683-4f3a-b819-d9d57d96636b] Reconfiguring VM instance instance-00000064 to detach disk 2000 {{(pid=63279) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 2287.618778] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1a9c56e3-be87-428f-adf9-f3525920a6ad {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2287.638049] env[63279]: DEBUG oslo_vmware.api [None req-60e19b22-fe2d-4d7d-8d6e-d53eab73e551 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Waiting for the task: (returnval){ [ 2287.638049] env[63279]: value = "task-2087983" [ 2287.638049] env[63279]: _type = "Task" [ 2287.638049] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2287.645903] env[63279]: DEBUG oslo_vmware.api [None req-60e19b22-fe2d-4d7d-8d6e-d53eab73e551 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Task: {'id': task-2087983, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2287.650384] env[63279]: INFO nova.compute.manager [None req-89bf0608-c6f2-4105-8fd2-68c736831127 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] [instance: 2fba60ba-7ef0-4e61-9f7d-147cad8ab7ed] Unrescuing [ 2287.650619] env[63279]: DEBUG oslo_concurrency.lockutils [None req-89bf0608-c6f2-4105-8fd2-68c736831127 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Acquiring lock "refresh_cache-2fba60ba-7ef0-4e61-9f7d-147cad8ab7ed" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2287.651114] env[63279]: DEBUG oslo_concurrency.lockutils [None req-89bf0608-c6f2-4105-8fd2-68c736831127 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Acquired lock "refresh_cache-2fba60ba-7ef0-4e61-9f7d-147cad8ab7ed" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2287.651114] env[63279]: DEBUG nova.network.neutron [None req-89bf0608-c6f2-4105-8fd2-68c736831127 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] [instance: 2fba60ba-7ef0-4e61-9f7d-147cad8ab7ed] Building network info cache for instance {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2287.735884] env[63279]: DEBUG oslo_concurrency.lockutils [None req-14f74f74-0446-4405-97e3-a1859425c751 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Acquiring lock "6e452711-a79c-4560-b38f-9414c87e6683" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2287.736161] env[63279]: DEBUG oslo_concurrency.lockutils [None req-14f74f74-0446-4405-97e3-a1859425c751 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Lock "6e452711-a79c-4560-b38f-9414c87e6683" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2287.736381] env[63279]: DEBUG oslo_concurrency.lockutils [None req-14f74f74-0446-4405-97e3-a1859425c751 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Acquiring lock "6e452711-a79c-4560-b38f-9414c87e6683-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2287.736559] env[63279]: DEBUG oslo_concurrency.lockutils [None req-14f74f74-0446-4405-97e3-a1859425c751 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Lock "6e452711-a79c-4560-b38f-9414c87e6683-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2287.736730] env[63279]: DEBUG oslo_concurrency.lockutils [None req-14f74f74-0446-4405-97e3-a1859425c751 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Lock 
"6e452711-a79c-4560-b38f-9414c87e6683-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2287.742848] env[63279]: INFO nova.compute.manager [None req-14f74f74-0446-4405-97e3-a1859425c751 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] [instance: 6e452711-a79c-4560-b38f-9414c87e6683] Terminating instance [ 2287.751417] env[63279]: DEBUG nova.virt.hardware [None req-f0c628bc-84f4-449b-bd4f-e7a1b56e90cd tempest-ServersAdmin275Test-329467495 tempest-ServersAdmin275Test-329467495-project-admin] Getting desirable topologies for flavor Flavor(created_at=2025-01-13T17:30:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-13T17:30:01Z,direct_url=,disk_format='vmdk',id=30887889-e45b-4f67-8b3c-16216e594a90,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a935e86eee0a4c38adfe0367d2097a61',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-13T17:30:02Z,virtual_size=,visibility=), allow threads: False {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2287.751417] env[63279]: DEBUG nova.virt.hardware [None req-f0c628bc-84f4-449b-bd4f-e7a1b56e90cd tempest-ServersAdmin275Test-329467495 tempest-ServersAdmin275Test-329467495-project-admin] Flavor limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2287.751417] env[63279]: DEBUG nova.virt.hardware [None req-f0c628bc-84f4-449b-bd4f-e7a1b56e90cd tempest-ServersAdmin275Test-329467495 tempest-ServersAdmin275Test-329467495-project-admin] Image limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2287.751660] env[63279]: DEBUG nova.virt.hardware [None req-f0c628bc-84f4-449b-bd4f-e7a1b56e90cd tempest-ServersAdmin275Test-329467495 tempest-ServersAdmin275Test-329467495-project-admin] Flavor pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2287.751660] env[63279]: DEBUG nova.virt.hardware [None req-f0c628bc-84f4-449b-bd4f-e7a1b56e90cd tempest-ServersAdmin275Test-329467495 tempest-ServersAdmin275Test-329467495-project-admin] Image pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2287.751816] env[63279]: DEBUG nova.virt.hardware [None req-f0c628bc-84f4-449b-bd4f-e7a1b56e90cd tempest-ServersAdmin275Test-329467495 tempest-ServersAdmin275Test-329467495-project-admin] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2287.752070] env[63279]: DEBUG nova.virt.hardware [None req-f0c628bc-84f4-449b-bd4f-e7a1b56e90cd tempest-ServersAdmin275Test-329467495 tempest-ServersAdmin275Test-329467495-project-admin] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 
2287.752282] env[63279]: DEBUG nova.virt.hardware [None req-f0c628bc-84f4-449b-bd4f-e7a1b56e90cd tempest-ServersAdmin275Test-329467495 tempest-ServersAdmin275Test-329467495-project-admin] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 2287.752532] env[63279]: DEBUG nova.virt.hardware [None req-f0c628bc-84f4-449b-bd4f-e7a1b56e90cd tempest-ServersAdmin275Test-329467495 tempest-ServersAdmin275Test-329467495-project-admin] Got 1 possible topologies {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2287.752726] env[63279]: DEBUG nova.virt.hardware [None req-f0c628bc-84f4-449b-bd4f-e7a1b56e90cd tempest-ServersAdmin275Test-329467495 tempest-ServersAdmin275Test-329467495-project-admin] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2287.752955] env[63279]: DEBUG nova.virt.hardware [None req-f0c628bc-84f4-449b-bd4f-e7a1b56e90cd tempest-ServersAdmin275Test-329467495 tempest-ServersAdmin275Test-329467495-project-admin] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2287.753867] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2259739-b3b7-4bfe-bd48-b663c20a4778 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2287.763288] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8818f488-5dda-4984-9426-c52f95963295 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2287.778573] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-f0c628bc-84f4-449b-bd4f-e7a1b56e90cd tempest-ServersAdmin275Test-329467495 tempest-ServersAdmin275Test-329467495-project-admin] [instance: 69c88844-84c7-4272-a2c4-051f1499df84] Instance VIF info [] {{(pid=63279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2287.784082] env[63279]: DEBUG oslo.service.loopingcall [None req-f0c628bc-84f4-449b-bd4f-e7a1b56e90cd tempest-ServersAdmin275Test-329467495 tempest-ServersAdmin275Test-329467495-project-admin] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2287.784400] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 69c88844-84c7-4272-a2c4-051f1499df84] Creating VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2287.787626] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-73c30965-38a4-4982-851e-d623841f5b08 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2287.804712] env[63279]: DEBUG oslo_vmware.api [None req-f3d444d6-0789-4e27-be51-caf00e0a98ed tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] Task: {'id': task-2087982, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2287.806383] env[63279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2287.806383] env[63279]: value = "task-2087984" [ 2287.806383] env[63279]: _type = "Task" [ 2287.806383] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2287.813499] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087984, 'name': CreateVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2287.963642] env[63279]: DEBUG oslo_vmware.api [None req-70fb686c-6512-468a-bd20-7f0c9ad16dab tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Task: {'id': task-2087966, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2288.025421] env[63279]: DEBUG oslo_vmware.api [None req-a601b1b4-f84a-458b-8eca-889fd94aa5bd tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Task: {'id': task-2087972, 'name': CloneVM_Task} progress is 94%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2288.151280] env[63279]: DEBUG oslo_vmware.api [None req-60e19b22-fe2d-4d7d-8d6e-d53eab73e551 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Task: {'id': task-2087983, 'name': ReconfigVM_Task, 'duration_secs': 0.183244} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2288.151611] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-60e19b22-fe2d-4d7d-8d6e-d53eab73e551 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: d0b8c9dd-c683-4f3a-b819-d9d57d96636b] Reconfigured VM instance instance-00000064 to detach disk 2000 {{(pid=63279) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 2288.152326] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de81bebe-b397-4f5d-ae7d-2ef73163dc9a {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2288.177727] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-60e19b22-fe2d-4d7d-8d6e-d53eab73e551 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: d0b8c9dd-c683-4f3a-b819-d9d57d96636b] Reconfiguring VM instance instance-00000064 to attach disk [datastore1] d0b8c9dd-c683-4f3a-b819-d9d57d96636b/d0b8c9dd-c683-4f3a-b819-d9d57d96636b.vmdk or device None with type thin {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2288.178157] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2c6f7c16-f981-4348-9880-a6839a19c0fe {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2288.197825] env[63279]: DEBUG oslo_vmware.api [None req-60e19b22-fe2d-4d7d-8d6e-d53eab73e551 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Waiting for the task: (returnval){ [ 2288.197825] env[63279]: value = "task-2087985" [ 2288.197825] env[63279]: _type 
= "Task" [ 2288.197825] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2288.205240] env[63279]: DEBUG oslo_vmware.api [None req-60e19b22-fe2d-4d7d-8d6e-d53eab73e551 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Task: {'id': task-2087985, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2288.246985] env[63279]: DEBUG nova.compute.manager [None req-14f74f74-0446-4405-97e3-a1859425c751 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] [instance: 6e452711-a79c-4560-b38f-9414c87e6683] Start destroying the instance on the hypervisor. {{(pid=63279) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2288.247131] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-14f74f74-0446-4405-97e3-a1859425c751 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] [instance: 6e452711-a79c-4560-b38f-9414c87e6683] Destroying instance {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2288.248489] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-717184bc-6022-42a4-9d47-c4cd45249ef2 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2288.255905] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-14f74f74-0446-4405-97e3-a1859425c751 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] [instance: 6e452711-a79c-4560-b38f-9414c87e6683] Powering off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2288.256170] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-99ee17e0-c450-481f-848d-7a6f1a8d3bc2 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2288.262243] env[63279]: DEBUG oslo_vmware.api [None req-14f74f74-0446-4405-97e3-a1859425c751 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Waiting for the task: (returnval){ [ 2288.262243] env[63279]: value = "task-2087986" [ 2288.262243] env[63279]: _type = "Task" [ 2288.262243] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2288.270976] env[63279]: DEBUG oslo_vmware.api [None req-14f74f74-0446-4405-97e3-a1859425c751 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Task: {'id': task-2087986, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2288.293553] env[63279]: DEBUG oslo_vmware.api [None req-f3d444d6-0789-4e27-be51-caf00e0a98ed tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] Task: {'id': task-2087982, 'name': ReconfigVM_Task, 'duration_secs': 0.871461} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2288.296034] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-f3d444d6-0789-4e27-be51-caf00e0a98ed tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] [instance: 3de52a94-c1de-4b37-985c-9101417260e1] Reconfigured VM instance instance-00000066 to attach disk [datastore1] 3de52a94-c1de-4b37-985c-9101417260e1/3de52a94-c1de-4b37-985c-9101417260e1.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2288.296688] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-71552e7d-ed10-45a6-ba0a-2af07174d85b {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2288.303055] env[63279]: DEBUG oslo_vmware.api [None req-f3d444d6-0789-4e27-be51-caf00e0a98ed tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] Waiting for the task: (returnval){ [ 2288.303055] env[63279]: value = "task-2087987" [ 2288.303055] env[63279]: _type = "Task" [ 2288.303055] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2288.314016] env[63279]: DEBUG oslo_vmware.api [None req-f3d444d6-0789-4e27-be51-caf00e0a98ed tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] Task: {'id': task-2087987, 'name': Rename_Task} progress is 5%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2288.319570] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2087984, 'name': CreateVM_Task, 'duration_secs': 0.436501} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2288.319749] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 69c88844-84c7-4272-a2c4-051f1499df84] Created VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2288.320227] env[63279]: DEBUG oslo_concurrency.lockutils [None req-f0c628bc-84f4-449b-bd4f-e7a1b56e90cd tempest-ServersAdmin275Test-329467495 tempest-ServersAdmin275Test-329467495-project-admin] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2288.320392] env[63279]: DEBUG oslo_concurrency.lockutils [None req-f0c628bc-84f4-449b-bd4f-e7a1b56e90cd tempest-ServersAdmin275Test-329467495 tempest-ServersAdmin275Test-329467495-project-admin] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2288.320759] env[63279]: DEBUG oslo_concurrency.lockutils [None req-f0c628bc-84f4-449b-bd4f-e7a1b56e90cd tempest-ServersAdmin275Test-329467495 tempest-ServersAdmin275Test-329467495-project-admin] Acquired external semaphore "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2288.321070] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f04f8dbf-c460-4b46-bfbc-1252b7e2d825 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2288.325578] env[63279]: DEBUG oslo_vmware.api [None req-f0c628bc-84f4-449b-bd4f-e7a1b56e90cd tempest-ServersAdmin275Test-329467495 tempest-ServersAdmin275Test-329467495-project-admin] Waiting for the task: (returnval){ [ 2288.325578] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52c48c7d-5d80-ca41-9497-3448406595c6" [ 2288.325578] env[63279]: _type = "Task" [ 2288.325578] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2288.333674] env[63279]: DEBUG oslo_vmware.api [None req-f0c628bc-84f4-449b-bd4f-e7a1b56e90cd tempest-ServersAdmin275Test-329467495 tempest-ServersAdmin275Test-329467495-project-admin] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52c48c7d-5d80-ca41-9497-3448406595c6, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2288.381923] env[63279]: DEBUG oslo_concurrency.lockutils [None req-72b15d70-c5f2-4ad1-8e6e-7fb848b8b6ec tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Acquiring lock "a15141bc-424d-48ca-a6d5-c859a3639a0b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2288.382239] env[63279]: DEBUG oslo_concurrency.lockutils [None req-72b15d70-c5f2-4ad1-8e6e-7fb848b8b6ec tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Lock "a15141bc-424d-48ca-a6d5-c859a3639a0b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2288.382485] env[63279]: DEBUG oslo_concurrency.lockutils [None req-72b15d70-c5f2-4ad1-8e6e-7fb848b8b6ec tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Acquiring lock "a15141bc-424d-48ca-a6d5-c859a3639a0b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2288.382735] env[63279]: DEBUG oslo_concurrency.lockutils [None req-72b15d70-c5f2-4ad1-8e6e-7fb848b8b6ec tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Lock "a15141bc-424d-48ca-a6d5-c859a3639a0b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2288.382968] env[63279]: DEBUG oslo_concurrency.lockutils [None req-72b15d70-c5f2-4ad1-8e6e-7fb848b8b6ec tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Lock "a15141bc-424d-48ca-a6d5-c859a3639a0b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2288.385404] env[63279]: INFO nova.compute.manager [None req-72b15d70-c5f2-4ad1-8e6e-7fb848b8b6ec tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] [instance: a15141bc-424d-48ca-a6d5-c859a3639a0b] Terminating instance [ 2288.468878] env[63279]: DEBUG oslo_vmware.api [None req-70fb686c-6512-468a-bd20-7f0c9ad16dab tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Task: {'id': task-2087966, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2288.529214] env[63279]: DEBUG oslo_vmware.api [None req-a601b1b4-f84a-458b-8eca-889fd94aa5bd tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Task: {'id': task-2087972, 'name': CloneVM_Task} progress is 94%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2288.530314] env[63279]: DEBUG nova.network.neutron [None req-89bf0608-c6f2-4105-8fd2-68c736831127 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] [instance: 2fba60ba-7ef0-4e61-9f7d-147cad8ab7ed] Updating instance_info_cache with network_info: [{"id": "8f12bb0d-eec1-4c21-b319-372b37e319ca", "address": "fa:16:3e:68:12:c8", "network": {"id": "c1d7406d-6852-47cd-a4a3-de7373d03ab4", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1990733857-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.181", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1fc9b60ae304455097b8be9a276796fa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0549820d-5649-40bc-ad6e-9ae27b384d90", "external-id": "nsx-vlan-transportzone-434", "segmentation_id": 434, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8f12bb0d-ee", "ovs_interfaceid": "8f12bb0d-eec1-4c21-b319-372b37e319ca", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2288.708463] env[63279]: DEBUG oslo_vmware.api [None req-60e19b22-fe2d-4d7d-8d6e-d53eab73e551 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Task: {'id': task-2087985, 'name': ReconfigVM_Task} progress is 99%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2288.771983] env[63279]: DEBUG oslo_vmware.api [None req-14f74f74-0446-4405-97e3-a1859425c751 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Task: {'id': task-2087986, 'name': PowerOffVM_Task, 'duration_secs': 0.176527} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2288.772303] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-14f74f74-0446-4405-97e3-a1859425c751 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] [instance: 6e452711-a79c-4560-b38f-9414c87e6683] Powered off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2288.773585] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-14f74f74-0446-4405-97e3-a1859425c751 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] [instance: 6e452711-a79c-4560-b38f-9414c87e6683] Unregistering the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2288.773585] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-61e7fb96-e797-4ac4-89cd-4a0812b0f538 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2288.812182] env[63279]: DEBUG oslo_vmware.api [None req-f3d444d6-0789-4e27-be51-caf00e0a98ed tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] Task: {'id': task-2087987, 'name': Rename_Task, 'duration_secs': 0.133738} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2288.812474] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-f3d444d6-0789-4e27-be51-caf00e0a98ed tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] [instance: 3de52a94-c1de-4b37-985c-9101417260e1] Powering on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2288.812721] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-33ef33e9-ab47-4cdd-aeec-0362e3450049 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2288.819042] env[63279]: DEBUG oslo_vmware.api [None req-f3d444d6-0789-4e27-be51-caf00e0a98ed tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] Waiting for the task: (returnval){ [ 2288.819042] env[63279]: value = "task-2087989" [ 2288.819042] env[63279]: _type = "Task" [ 2288.819042] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2288.826694] env[63279]: DEBUG oslo_vmware.api [None req-f3d444d6-0789-4e27-be51-caf00e0a98ed tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] Task: {'id': task-2087989, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2288.837024] env[63279]: DEBUG oslo_vmware.api [None req-f0c628bc-84f4-449b-bd4f-e7a1b56e90cd tempest-ServersAdmin275Test-329467495 tempest-ServersAdmin275Test-329467495-project-admin] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52c48c7d-5d80-ca41-9497-3448406595c6, 'name': SearchDatastore_Task, 'duration_secs': 0.00959} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2288.837024] env[63279]: DEBUG oslo_concurrency.lockutils [None req-f0c628bc-84f4-449b-bd4f-e7a1b56e90cd tempest-ServersAdmin275Test-329467495 tempest-ServersAdmin275Test-329467495-project-admin] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2288.837024] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-f0c628bc-84f4-449b-bd4f-e7a1b56e90cd tempest-ServersAdmin275Test-329467495 tempest-ServersAdmin275Test-329467495-project-admin] [instance: 69c88844-84c7-4272-a2c4-051f1499df84] Processing image 30887889-e45b-4f67-8b3c-16216e594a90 {{(pid=63279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2288.837024] env[63279]: DEBUG oslo_concurrency.lockutils [None req-f0c628bc-84f4-449b-bd4f-e7a1b56e90cd tempest-ServersAdmin275Test-329467495 tempest-ServersAdmin275Test-329467495-project-admin] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2288.837346] env[63279]: DEBUG oslo_concurrency.lockutils [None req-f0c628bc-84f4-449b-bd4f-e7a1b56e90cd tempest-ServersAdmin275Test-329467495 tempest-ServersAdmin275Test-329467495-project-admin] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2288.837541] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-f0c628bc-84f4-449b-bd4f-e7a1b56e90cd tempest-ServersAdmin275Test-329467495 tempest-ServersAdmin275Test-329467495-project-admin] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2288.837874] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-438882a3-7842-4351-8f2c-814e46b081be {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2288.846998] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-f0c628bc-84f4-449b-bd4f-e7a1b56e90cd tempest-ServersAdmin275Test-329467495 tempest-ServersAdmin275Test-329467495-project-admin] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2288.847768] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-f0c628bc-84f4-449b-bd4f-e7a1b56e90cd tempest-ServersAdmin275Test-329467495 tempest-ServersAdmin275Test-329467495-project-admin] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2288.847921] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d257e968-4ffd-4494-b803-e8478409bcd6 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2288.853990] env[63279]: DEBUG oslo_vmware.api [None req-f0c628bc-84f4-449b-bd4f-e7a1b56e90cd tempest-ServersAdmin275Test-329467495 tempest-ServersAdmin275Test-329467495-project-admin] Waiting for the task: (returnval){ [ 2288.853990] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]523f0306-88a5-be86-94a2-0cfd9925f90a" [ 2288.853990] env[63279]: _type = "Task" [ 2288.853990] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2288.862962] env[63279]: DEBUG oslo_vmware.api [None req-f0c628bc-84f4-449b-bd4f-e7a1b56e90cd tempest-ServersAdmin275Test-329467495 tempest-ServersAdmin275Test-329467495-project-admin] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]523f0306-88a5-be86-94a2-0cfd9925f90a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2288.888923] env[63279]: DEBUG nova.compute.manager [None req-72b15d70-c5f2-4ad1-8e6e-7fb848b8b6ec tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] [instance: a15141bc-424d-48ca-a6d5-c859a3639a0b] Start destroying the instance on the hypervisor. {{(pid=63279) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2288.889179] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-72b15d70-c5f2-4ad1-8e6e-7fb848b8b6ec tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] [instance: a15141bc-424d-48ca-a6d5-c859a3639a0b] Destroying instance {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2288.890158] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a622b32-00eb-4cef-92eb-4a0e0adb8d59 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2288.897660] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-72b15d70-c5f2-4ad1-8e6e-7fb848b8b6ec tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] [instance: a15141bc-424d-48ca-a6d5-c859a3639a0b] Powering off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2288.898330] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-35b2e38f-e285-4344-b101-6a2c74918d63 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2288.903496] env[63279]: DEBUG oslo_vmware.api [None req-72b15d70-c5f2-4ad1-8e6e-7fb848b8b6ec tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Waiting for the task: (returnval){ [ 2288.903496] env[63279]: value = "task-2087990" [ 2288.903496] env[63279]: _type = "Task" [ 2288.903496] env[63279]: } to complete. 
{{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2288.911919] env[63279]: DEBUG oslo_vmware.api [None req-72b15d70-c5f2-4ad1-8e6e-7fb848b8b6ec tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Task: {'id': task-2087990, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2288.964412] env[63279]: DEBUG oslo_vmware.api [None req-70fb686c-6512-468a-bd20-7f0c9ad16dab tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Task: {'id': task-2087966, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2289.028305] env[63279]: DEBUG oslo_vmware.api [None req-a601b1b4-f84a-458b-8eca-889fd94aa5bd tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Task: {'id': task-2087972, 'name': CloneVM_Task} progress is 94%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2289.033287] env[63279]: DEBUG oslo_concurrency.lockutils [None req-89bf0608-c6f2-4105-8fd2-68c736831127 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Releasing lock "refresh_cache-2fba60ba-7ef0-4e61-9f7d-147cad8ab7ed" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2289.033988] env[63279]: DEBUG nova.objects.instance [None req-89bf0608-c6f2-4105-8fd2-68c736831127 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Lazy-loading 'flavor' on Instance uuid 2fba60ba-7ef0-4e61-9f7d-147cad8ab7ed {{(pid=63279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2289.204389] env[63279]: DEBUG nova.network.neutron [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] [instance: a15141bc-424d-48ca-a6d5-c859a3639a0b] Updating instance_info_cache with network_info: [{"id": "cd0a8ed6-d1ee-4266-8bde-e866ac2873ce", "address": "fa:16:3e:fe:5c:41", "network": {"id": "4f906777-9da9-42b1-9146-359f04c7c47f", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-923457018-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.193", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "131db3d2daa24712b6e11592cf789b33", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "abe48956-848a-4e1f-b1f1-a27baa5390b9", "external-id": "nsx-vlan-transportzone-238", "segmentation_id": 238, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcd0a8ed6-d1", "ovs_interfaceid": "cd0a8ed6-d1ee-4266-8bde-e866ac2873ce", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2289.211128] env[63279]: 
DEBUG oslo_vmware.api [None req-60e19b22-fe2d-4d7d-8d6e-d53eab73e551 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Task: {'id': task-2087985, 'name': ReconfigVM_Task} progress is 99%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2289.329166] env[63279]: DEBUG oslo_vmware.api [None req-f3d444d6-0789-4e27-be51-caf00e0a98ed tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] Task: {'id': task-2087989, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2289.364995] env[63279]: DEBUG oslo_vmware.api [None req-f0c628bc-84f4-449b-bd4f-e7a1b56e90cd tempest-ServersAdmin275Test-329467495 tempest-ServersAdmin275Test-329467495-project-admin] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]523f0306-88a5-be86-94a2-0cfd9925f90a, 'name': SearchDatastore_Task, 'duration_secs': 0.014236} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2289.365759] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-082598a8-f62e-4a14-8b2f-ac5d12423bbc {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2289.370794] env[63279]: DEBUG oslo_vmware.api [None req-f0c628bc-84f4-449b-bd4f-e7a1b56e90cd tempest-ServersAdmin275Test-329467495 tempest-ServersAdmin275Test-329467495-project-admin] Waiting for the task: (returnval){ [ 2289.370794] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]528e7746-9e27-20c3-232d-6e28282bbda2" [ 2289.370794] env[63279]: _type = "Task" [ 2289.370794] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2289.378349] env[63279]: DEBUG oslo_vmware.api [None req-f0c628bc-84f4-449b-bd4f-e7a1b56e90cd tempest-ServersAdmin275Test-329467495 tempest-ServersAdmin275Test-329467495-project-admin] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]528e7746-9e27-20c3-232d-6e28282bbda2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2289.412979] env[63279]: DEBUG oslo_vmware.api [None req-72b15d70-c5f2-4ad1-8e6e-7fb848b8b6ec tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Task: {'id': task-2087990, 'name': PowerOffVM_Task, 'duration_secs': 0.177227} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2289.413265] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-72b15d70-c5f2-4ad1-8e6e-7fb848b8b6ec tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] [instance: a15141bc-424d-48ca-a6d5-c859a3639a0b] Powered off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2289.413446] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-72b15d70-c5f2-4ad1-8e6e-7fb848b8b6ec tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] [instance: a15141bc-424d-48ca-a6d5-c859a3639a0b] Unregistering the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2289.413696] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-81a1ea0f-2cd4-4540-bf42-64937185d899 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2289.464348] env[63279]: DEBUG oslo_vmware.api [None req-70fb686c-6512-468a-bd20-7f0c9ad16dab tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Task: {'id': task-2087966, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2289.527626] env[63279]: DEBUG oslo_vmware.api [None req-a601b1b4-f84a-458b-8eca-889fd94aa5bd tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Task: {'id': task-2087972, 'name': CloneVM_Task} progress is 94%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2289.540793] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e9e82b8-4cc5-42d7-84d6-58963b20f773 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2289.568765] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-89bf0608-c6f2-4105-8fd2-68c736831127 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] [instance: 2fba60ba-7ef0-4e61-9f7d-147cad8ab7ed] Powering off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2289.568765] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d7ad9e52-c7e7-4189-a8e7-987ceea6aac7 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2289.574207] env[63279]: DEBUG oslo_vmware.api [None req-89bf0608-c6f2-4105-8fd2-68c736831127 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Waiting for the task: (returnval){ [ 2289.574207] env[63279]: value = "task-2087992" [ 2289.574207] env[63279]: _type = "Task" [ 2289.574207] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2289.584462] env[63279]: DEBUG oslo_vmware.api [None req-89bf0608-c6f2-4105-8fd2-68c736831127 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Task: {'id': task-2087992, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2289.706907] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Releasing lock "refresh_cache-a15141bc-424d-48ca-a6d5-c859a3639a0b" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2289.707145] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] [instance: a15141bc-424d-48ca-a6d5-c859a3639a0b] Updated the network info_cache for instance {{(pid=63279) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10518}} [ 2289.710679] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2289.710981] env[63279]: DEBUG oslo_vmware.api [None req-60e19b22-fe2d-4d7d-8d6e-d53eab73e551 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Task: {'id': task-2087985, 'name': ReconfigVM_Task, 'duration_secs': 1.316777} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2289.711214] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2289.711459] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-60e19b22-fe2d-4d7d-8d6e-d53eab73e551 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: d0b8c9dd-c683-4f3a-b819-d9d57d96636b] Reconfigured VM instance instance-00000064 to attach disk [datastore1] d0b8c9dd-c683-4f3a-b819-d9d57d96636b/d0b8c9dd-c683-4f3a-b819-d9d57d96636b.vmdk or device None with type thin {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2289.711767] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-60e19b22-fe2d-4d7d-8d6e-d53eab73e551 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: d0b8c9dd-c683-4f3a-b819-d9d57d96636b] Updating instance 'd0b8c9dd-c683-4f3a-b819-d9d57d96636b' progress to 50 {{(pid=63279) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2289.715394] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2289.715800] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2289.715966] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2289.716135] env[63279]: DEBUG 
oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2289.716269] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=63279) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11066}} [ 2289.716420] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager.update_available_resource {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2289.833555] env[63279]: DEBUG oslo_vmware.api [None req-f3d444d6-0789-4e27-be51-caf00e0a98ed tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] Task: {'id': task-2087989, 'name': PowerOnVM_Task, 'duration_secs': 0.792171} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2289.833872] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-f3d444d6-0789-4e27-be51-caf00e0a98ed tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] [instance: 3de52a94-c1de-4b37-985c-9101417260e1] Powered on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2289.834093] env[63279]: INFO nova.compute.manager [None req-f3d444d6-0789-4e27-be51-caf00e0a98ed tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] [instance: 3de52a94-c1de-4b37-985c-9101417260e1] Took 8.49 seconds to spawn the instance on the hypervisor. [ 2289.834282] env[63279]: DEBUG nova.compute.manager [None req-f3d444d6-0789-4e27-be51-caf00e0a98ed tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] [instance: 3de52a94-c1de-4b37-985c-9101417260e1] Checking state {{(pid=63279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2289.835083] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-306d991e-8a57-4530-8e95-21d1ed20860b {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2289.882347] env[63279]: DEBUG oslo_vmware.api [None req-f0c628bc-84f4-449b-bd4f-e7a1b56e90cd tempest-ServersAdmin275Test-329467495 tempest-ServersAdmin275Test-329467495-project-admin] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]528e7746-9e27-20c3-232d-6e28282bbda2, 'name': SearchDatastore_Task, 'duration_secs': 0.012449} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2289.882656] env[63279]: DEBUG oslo_concurrency.lockutils [None req-f0c628bc-84f4-449b-bd4f-e7a1b56e90cd tempest-ServersAdmin275Test-329467495 tempest-ServersAdmin275Test-329467495-project-admin] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2289.882916] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-f0c628bc-84f4-449b-bd4f-e7a1b56e90cd tempest-ServersAdmin275Test-329467495 tempest-ServersAdmin275Test-329467495-project-admin] Copying Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] 69c88844-84c7-4272-a2c4-051f1499df84/69c88844-84c7-4272-a2c4-051f1499df84.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2289.883195] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7c0465fa-5366-4e72-a5e2-7be4ff8438c8 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2289.889985] env[63279]: DEBUG oslo_vmware.api [None req-f0c628bc-84f4-449b-bd4f-e7a1b56e90cd tempest-ServersAdmin275Test-329467495 tempest-ServersAdmin275Test-329467495-project-admin] Waiting for the task: (returnval){ [ 2289.889985] env[63279]: value = "task-2087993" [ 2289.889985] env[63279]: _type = "Task" [ 2289.889985] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2289.899588] env[63279]: DEBUG oslo_vmware.api [None req-f0c628bc-84f4-449b-bd4f-e7a1b56e90cd tempest-ServersAdmin275Test-329467495 tempest-ServersAdmin275Test-329467495-project-admin] Task: {'id': task-2087993, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2289.968430] env[63279]: DEBUG oslo_vmware.api [None req-70fb686c-6512-468a-bd20-7f0c9ad16dab tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Task: {'id': task-2087966, 'name': ReconfigVM_Task} progress is 18%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2290.031261] env[63279]: DEBUG oslo_vmware.api [None req-a601b1b4-f84a-458b-8eca-889fd94aa5bd tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Task: {'id': task-2087972, 'name': CloneVM_Task} progress is 94%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2290.085582] env[63279]: DEBUG oslo_vmware.api [None req-89bf0608-c6f2-4105-8fd2-68c736831127 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Task: {'id': task-2087992, 'name': PowerOffVM_Task, 'duration_secs': 0.445841} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2290.086103] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-89bf0608-c6f2-4105-8fd2-68c736831127 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] [instance: 2fba60ba-7ef0-4e61-9f7d-147cad8ab7ed] Powered off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2290.091258] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-89bf0608-c6f2-4105-8fd2-68c736831127 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] [instance: 2fba60ba-7ef0-4e61-9f7d-147cad8ab7ed] Reconfiguring VM instance instance-00000053 to detach disk 2002 {{(pid=63279) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 2290.091648] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2fe3b0ca-dc71-4b47-b9c3-e265958a9053 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2290.111021] env[63279]: DEBUG oslo_vmware.api [None req-89bf0608-c6f2-4105-8fd2-68c736831127 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Waiting for the task: (returnval){ [ 2290.111021] env[63279]: value = "task-2087994" [ 2290.111021] env[63279]: _type = "Task" [ 2290.111021] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2290.122227] env[63279]: DEBUG oslo_vmware.api [None req-89bf0608-c6f2-4105-8fd2-68c736831127 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Task: {'id': task-2087994, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2290.219441] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2290.219741] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2290.219741] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2290.219808] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=63279) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2290.221198] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8fec99d3-6d59-45fa-bf79-d101e6861527 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2290.224685] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53726a78-4fa7-4a29-9a45-eeb795667276 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2290.246433] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf82ab13-a0a9-435b-b0d6-b99be40b283d {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2290.251568] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f1e562c-298c-4138-a433-cd0201e41148 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2290.270688] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-60e19b22-fe2d-4d7d-8d6e-d53eab73e551 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: d0b8c9dd-c683-4f3a-b819-d9d57d96636b] Updating instance 'd0b8c9dd-c683-4f3a-b819-d9d57d96636b' progress to 67 {{(pid=63279) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2290.283398] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e0250ce-8e4f-49fe-8218-a5362ea78b16 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2290.291203] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3bcd0931-0a8f-48bc-99aa-f40bb54d5963 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2290.324880] env[63279]: DEBUG 
nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=179271MB free_disk=169GB free_vcpus=48 pci_devices=None {{(pid=63279) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2290.325140] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2290.325302] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2290.353985] env[63279]: INFO nova.compute.manager [None req-f3d444d6-0789-4e27-be51-caf00e0a98ed tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] [instance: 3de52a94-c1de-4b37-985c-9101417260e1] Took 15.02 seconds to build instance. [ 2290.400051] env[63279]: DEBUG oslo_vmware.api [None req-f0c628bc-84f4-449b-bd4f-e7a1b56e90cd tempest-ServersAdmin275Test-329467495 tempest-ServersAdmin275Test-329467495-project-admin] Task: {'id': task-2087993, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2290.465432] env[63279]: DEBUG oslo_vmware.api [None req-70fb686c-6512-468a-bd20-7f0c9ad16dab tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Task: {'id': task-2087966, 'name': ReconfigVM_Task} progress is 18%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2290.530677] env[63279]: DEBUG oslo_vmware.api [None req-a601b1b4-f84a-458b-8eca-889fd94aa5bd tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Task: {'id': task-2087972, 'name': CloneVM_Task} progress is 94%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2290.621487] env[63279]: DEBUG oslo_vmware.api [None req-89bf0608-c6f2-4105-8fd2-68c736831127 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Task: {'id': task-2087994, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2290.819849] env[63279]: DEBUG nova.network.neutron [None req-60e19b22-fe2d-4d7d-8d6e-d53eab73e551 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: d0b8c9dd-c683-4f3a-b819-d9d57d96636b] Port ee08f9b0-ce61-4fa5-bf67-3a97acafc55e binding to destination host cpu-1 is already ACTIVE {{(pid=63279) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 2290.855856] env[63279]: DEBUG oslo_concurrency.lockutils [None req-f3d444d6-0789-4e27-be51-caf00e0a98ed tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] Lock "3de52a94-c1de-4b37-985c-9101417260e1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 16.530s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2290.900620] env[63279]: DEBUG oslo_vmware.api [None req-f0c628bc-84f4-449b-bd4f-e7a1b56e90cd tempest-ServersAdmin275Test-329467495 tempest-ServersAdmin275Test-329467495-project-admin] Task: {'id': task-2087993, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.546984} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2290.900897] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-f0c628bc-84f4-449b-bd4f-e7a1b56e90cd tempest-ServersAdmin275Test-329467495 tempest-ServersAdmin275Test-329467495-project-admin] Copied Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] 69c88844-84c7-4272-a2c4-051f1499df84/69c88844-84c7-4272-a2c4-051f1499df84.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2290.901155] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-f0c628bc-84f4-449b-bd4f-e7a1b56e90cd tempest-ServersAdmin275Test-329467495 tempest-ServersAdmin275Test-329467495-project-admin] [instance: 69c88844-84c7-4272-a2c4-051f1499df84] Extending root virtual disk to 1048576 {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2290.901426] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-1f05ea17-d338-4933-8612-5e47f1de74c1 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2290.907755] env[63279]: DEBUG oslo_vmware.api [None req-f0c628bc-84f4-449b-bd4f-e7a1b56e90cd tempest-ServersAdmin275Test-329467495 tempest-ServersAdmin275Test-329467495-project-admin] Waiting for the task: (returnval){ [ 2290.907755] env[63279]: value = "task-2087995" [ 2290.907755] env[63279]: _type = "Task" [ 2290.907755] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2290.915441] env[63279]: DEBUG oslo_vmware.api [None req-f0c628bc-84f4-449b-bd4f-e7a1b56e90cd tempest-ServersAdmin275Test-329467495 tempest-ServersAdmin275Test-329467495-project-admin] Task: {'id': task-2087995, 'name': ExtendVirtualDisk_Task} progress is 0%. 
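The 'Waiting for the task: (returnval){ value = "task-..." }' blocks and the repeated 'progress is N%' lines reflect a simple poll loop: re-read the task state on a fixed interval until it reports success or error. A generic sketch of that pattern; get_task_info, the state strings, and the interval are illustrative stand-ins, not the oslo.vmware internals:

    import time

    def wait_for_task(task_id, get_task_info, poll_interval=0.5):
        """Poll a long-running task until it finishes or fails."""
        while True:
            info = get_task_info(task_id)      # dict with 'state' and 'progress'
            if info["state"] == "success":
                return info
            if info["state"] == "error":
                raise RuntimeError(info.get("error", "task failed"))
            print(f"Task {task_id} progress is {info.get('progress', 0)}%.")
            time.sleep(poll_interval)

This is why the same task shows up once per poll interval: task-2087966's slow ReconfigVM_Task is reported at 18% many times above before it finally completes several seconds later.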
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2290.966064] env[63279]: DEBUG oslo_vmware.api [None req-70fb686c-6512-468a-bd20-7f0c9ad16dab tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Task: {'id': task-2087966, 'name': ReconfigVM_Task} progress is 18%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2291.030097] env[63279]: DEBUG oslo_vmware.api [None req-a601b1b4-f84a-458b-8eca-889fd94aa5bd tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Task: {'id': task-2087972, 'name': CloneVM_Task} progress is 94%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2291.125130] env[63279]: DEBUG oslo_vmware.api [None req-89bf0608-c6f2-4105-8fd2-68c736831127 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Task: {'id': task-2087994, 'name': ReconfigVM_Task, 'duration_secs': 0.537741} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2291.125975] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-89bf0608-c6f2-4105-8fd2-68c736831127 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] [instance: 2fba60ba-7ef0-4e61-9f7d-147cad8ab7ed] Reconfigured VM instance instance-00000053 to detach disk 2002 {{(pid=63279) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 2291.125975] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-89bf0608-c6f2-4105-8fd2-68c736831127 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] [instance: 2fba60ba-7ef0-4e61-9f7d-147cad8ab7ed] Powering on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2291.126294] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-08dd5926-ad5f-45ca-95b3-c8d9934edd79 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2291.133517] env[63279]: DEBUG oslo_vmware.api [None req-89bf0608-c6f2-4105-8fd2-68c736831127 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Waiting for the task: (returnval){ [ 2291.133517] env[63279]: value = "task-2087996" [ 2291.133517] env[63279]: _type = "Task" [ 2291.133517] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2291.145947] env[63279]: DEBUG oslo_vmware.api [None req-89bf0608-c6f2-4105-8fd2-68c736831127 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Task: {'id': task-2087996, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2291.338538] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Applying migration context for instance d0b8c9dd-c683-4f3a-b819-d9d57d96636b as it has an incoming, in-progress migration 060bd61a-428e-4028-bc25-f9b4d6b85fbb. 
Migration status is post-migrating {{(pid=63279) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1016}} [ 2291.340486] env[63279]: INFO nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] [instance: d0b8c9dd-c683-4f3a-b819-d9d57d96636b] Updating resource usage from migration 060bd61a-428e-4028-bc25-f9b4d6b85fbb [ 2291.405024] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Instance fcdd3076-2b53-4850-9730-2f877e2cabfd actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2291.405160] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Instance 4ce17bdb-4bed-4e06-af13-e4097b55e17d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2291.405306] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Instance a6758131-030e-4b33-a2c9-8864055a5bec actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2291.405337] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Instance 686a7ce2-2d07-411e-91d6-0471c55c3728 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2291.405500] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Instance 2fba60ba-7ef0-4e61-9f7d-147cad8ab7ed actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2291.405613] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Instance 9b98a316-71da-45fb-b895-553f179fe7d9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2291.405934] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Instance 27e2917d-3cd0-4ad3-ab65-f85f7d97225f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2291.405934] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Instance 9fd3ea14-3e25-47f4-ae84-1b1fb8b1bbf6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2291.405934] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Instance 69b3269a-2ba3-4f5f-a29c-62518c93da3d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2291.406078] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Instance a15141bc-424d-48ca-a6d5-c859a3639a0b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2291.407311] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Instance 1fca4e5c-fe2c-4b61-bed4-52c7770def7c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2291.407311] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Instance 19e10ee4-99d1-44b9-9354-4c162d541a1f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2291.407311] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Instance 69c88844-84c7-4272-a2c4-051f1499df84 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2291.407311] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Instance 6e452711-a79c-4560-b38f-9414c87e6683 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2291.407311] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Instance 3de52a94-c1de-4b37-985c-9101417260e1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2291.407311] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Migration 060bd61a-428e-4028-bc25-f9b4d6b85fbb is active on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1712}} [ 2291.407311] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Instance d0b8c9dd-c683-4f3a-b819-d9d57d96636b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=63279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2291.407311] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Total usable vcpus: 48, total allocated vcpus: 17 {{(pid=63279) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2291.407311] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=3840MB phys_disk=200GB used_disk=17GB total_vcpus=48 used_vcpus=17 pci_stats=[] {{(pid=63279) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2291.425643] env[63279]: DEBUG oslo_vmware.api [None req-f0c628bc-84f4-449b-bd4f-e7a1b56e90cd tempest-ServersAdmin275Test-329467495 tempest-ServersAdmin275Test-329467495-project-admin] Task: {'id': task-2087995, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.082579} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2291.429266] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-f0c628bc-84f4-449b-bd4f-e7a1b56e90cd tempest-ServersAdmin275Test-329467495 tempest-ServersAdmin275Test-329467495-project-admin] [instance: 69c88844-84c7-4272-a2c4-051f1499df84] Extended root virtual disk {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2291.430641] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f7c1f05-906a-4033-97df-eee5fe925170 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2291.456790] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-f0c628bc-84f4-449b-bd4f-e7a1b56e90cd tempest-ServersAdmin275Test-329467495 tempest-ServersAdmin275Test-329467495-project-admin] [instance: 69c88844-84c7-4272-a2c4-051f1499df84] Reconfiguring VM instance instance-00000063 to attach disk [datastore1] 69c88844-84c7-4272-a2c4-051f1499df84/69c88844-84c7-4272-a2c4-051f1499df84.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2291.460546] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a04fdf2c-0e98-4719-a48f-26b37e9e291c {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2291.485362] env[63279]: DEBUG oslo_vmware.api [None req-70fb686c-6512-468a-bd20-7f0c9ad16dab tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Task: {'id': task-2087966, 'name': ReconfigVM_Task} progress is 18%. 
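The 'Final resource view' figures follow from the per-instance allocations listed just above: sixteen allocations at {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1} (fifteen instances plus the in-progress migration) and one at 256 MB for the migrating instance. The 512 MB gap between that sum and used_ram=3840MB matches the MEMORY_MB 'reserved' value in the inventory reported a few entries below, which the tracker appears to count as used. A quick cross-check (the reserved-memory interpretation is an assumption drawn from those two log lines):

    # 15 instances + 1 migration at 192 MB, plus the migrating instance at 256 MB
    allocations_mb = [192] * 16 + [256]
    reserved_host_memory_mb = 512           # MEMORY_MB 'reserved' from the inventory below

    used_vcpus = len(allocations_mb)        # one VCPU per allocation  -> 17
    used_disk_gb = len(allocations_mb)      # one DISK_GB per allocation -> 17
    used_ram_mb = sum(allocations_mb) + reserved_host_memory_mb

    assert (used_vcpus, used_disk_gb, used_ram_mb) == (17, 17, 3840)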
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2291.489043] env[63279]: DEBUG oslo_vmware.api [None req-f0c628bc-84f4-449b-bd4f-e7a1b56e90cd tempest-ServersAdmin275Test-329467495 tempest-ServersAdmin275Test-329467495-project-admin] Waiting for the task: (returnval){ [ 2291.489043] env[63279]: value = "task-2087997" [ 2291.489043] env[63279]: _type = "Task" [ 2291.489043] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2291.497811] env[63279]: DEBUG oslo_vmware.api [None req-f0c628bc-84f4-449b-bd4f-e7a1b56e90cd tempest-ServersAdmin275Test-329467495 tempest-ServersAdmin275Test-329467495-project-admin] Task: {'id': task-2087997, 'name': ReconfigVM_Task} progress is 10%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2291.532592] env[63279]: DEBUG oslo_vmware.api [None req-a601b1b4-f84a-458b-8eca-889fd94aa5bd tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Task: {'id': task-2087972, 'name': CloneVM_Task} progress is 94%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2291.646046] env[63279]: DEBUG oslo_vmware.api [None req-89bf0608-c6f2-4105-8fd2-68c736831127 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Task: {'id': task-2087996, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2291.649795] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e228635f-37d6-4c90-b9ad-9f0b75d4e167 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2291.657023] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56ba125f-05d1-4991-9ba8-4a4878a67354 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2291.686263] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3223eb69-ac3a-45b5-9e90-ffe7b583a41b {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2291.693644] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d82fbe1-9c26-4975-aede-0704a26056ce {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2291.706697] env[63279]: DEBUG nova.compute.provider_tree [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Inventory has not changed in ProviderTree for provider: 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2291.848663] env[63279]: DEBUG oslo_concurrency.lockutils [None req-60e19b22-fe2d-4d7d-8d6e-d53eab73e551 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Acquiring lock "d0b8c9dd-c683-4f3a-b819-d9d57d96636b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2291.848734] env[63279]: DEBUG oslo_concurrency.lockutils 
[None req-60e19b22-fe2d-4d7d-8d6e-d53eab73e551 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Lock "d0b8c9dd-c683-4f3a-b819-d9d57d96636b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.001s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2291.848891] env[63279]: DEBUG oslo_concurrency.lockutils [None req-60e19b22-fe2d-4d7d-8d6e-d53eab73e551 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Lock "d0b8c9dd-c683-4f3a-b819-d9d57d96636b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2291.971293] env[63279]: DEBUG oslo_vmware.api [None req-70fb686c-6512-468a-bd20-7f0c9ad16dab tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Task: {'id': task-2087966, 'name': ReconfigVM_Task} progress is 18%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2291.998494] env[63279]: DEBUG oslo_vmware.api [None req-f0c628bc-84f4-449b-bd4f-e7a1b56e90cd tempest-ServersAdmin275Test-329467495 tempest-ServersAdmin275Test-329467495-project-admin] Task: {'id': task-2087997, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2292.031805] env[63279]: DEBUG oslo_vmware.api [None req-a601b1b4-f84a-458b-8eca-889fd94aa5bd tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Task: {'id': task-2087972, 'name': CloneVM_Task} progress is 94%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2292.144065] env[63279]: DEBUG oslo_vmware.api [None req-89bf0608-c6f2-4105-8fd2-68c736831127 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Task: {'id': task-2087996, 'name': PowerOnVM_Task, 'duration_secs': 0.770693} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2292.144199] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-89bf0608-c6f2-4105-8fd2-68c736831127 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] [instance: 2fba60ba-7ef0-4e61-9f7d-147cad8ab7ed] Powered on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2292.144347] env[63279]: DEBUG nova.compute.manager [None req-89bf0608-c6f2-4105-8fd2-68c736831127 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] [instance: 2fba60ba-7ef0-4e61-9f7d-147cad8ab7ed] Checking state {{(pid=63279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2292.146019] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ffcd797-52ae-401c-93f8-59c3d3ce5e76 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2292.209747] env[63279]: DEBUG nova.scheduler.client.report [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Inventory has not changed for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2292.476779] env[63279]: DEBUG oslo_vmware.api [None req-70fb686c-6512-468a-bd20-7f0c9ad16dab tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Task: {'id': task-2087966, 'name': ReconfigVM_Task} progress is 18%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2292.499762] env[63279]: DEBUG oslo_vmware.api [None req-f0c628bc-84f4-449b-bd4f-e7a1b56e90cd tempest-ServersAdmin275Test-329467495 tempest-ServersAdmin275Test-329467495-project-admin] Task: {'id': task-2087997, 'name': ReconfigVM_Task, 'duration_secs': 0.883995} completed successfully. 
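The inventory dict above is what the report client hands to placement for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137. Placement derives schedulable capacity per resource class as (total - reserved) * allocation_ratio, while max_unit caps any single allocation; that formula is standard placement behaviour summarized here, not quoted from the log. A small sketch using the logged values:

    inventory = {
        "VCPU":      {"total": 48,     "reserved": 0,   "allocation_ratio": 4.0, "max_unit": 16},
        "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0, "max_unit": 65530},
        "DISK_GB":   {"total": 400,    "reserved": 0,   "allocation_ratio": 1.0, "max_unit": 169},
    }

    for rc, inv in inventory.items():
        capacity = (inv["total"] - inv["reserved"]) * inv["allocation_ratio"]
        print(f"{rc}: schedulable capacity {capacity:g}, "
              f"largest single allocation {inv['max_unit']}")

With the 4.0 VCPU allocation ratio the 48 physical vCPUs advertise 192 schedulable vCPUs, which is why the 17 allocated vCPUs above leave plenty of headroom.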
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2292.500078] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-f0c628bc-84f4-449b-bd4f-e7a1b56e90cd tempest-ServersAdmin275Test-329467495 tempest-ServersAdmin275Test-329467495-project-admin] [instance: 69c88844-84c7-4272-a2c4-051f1499df84] Reconfigured VM instance instance-00000063 to attach disk [datastore1] 69c88844-84c7-4272-a2c4-051f1499df84/69c88844-84c7-4272-a2c4-051f1499df84.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2292.500744] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-62ec9a30-9959-40f7-bb21-730b29da1bbd {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2292.507695] env[63279]: DEBUG oslo_vmware.api [None req-f0c628bc-84f4-449b-bd4f-e7a1b56e90cd tempest-ServersAdmin275Test-329467495 tempest-ServersAdmin275Test-329467495-project-admin] Waiting for the task: (returnval){ [ 2292.507695] env[63279]: value = "task-2087998" [ 2292.507695] env[63279]: _type = "Task" [ 2292.507695] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2292.516550] env[63279]: DEBUG oslo_vmware.api [None req-f0c628bc-84f4-449b-bd4f-e7a1b56e90cd tempest-ServersAdmin275Test-329467495 tempest-ServersAdmin275Test-329467495-project-admin] Task: {'id': task-2087998, 'name': Rename_Task} progress is 5%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2292.530587] env[63279]: DEBUG oslo_vmware.api [None req-a601b1b4-f84a-458b-8eca-889fd94aa5bd tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Task: {'id': task-2087972, 'name': CloneVM_Task} progress is 94%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2292.714904] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=63279) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2292.715102] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.390s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2292.888483] env[63279]: DEBUG oslo_concurrency.lockutils [None req-60e19b22-fe2d-4d7d-8d6e-d53eab73e551 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Acquiring lock "refresh_cache-d0b8c9dd-c683-4f3a-b819-d9d57d96636b" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2292.888684] env[63279]: DEBUG oslo_concurrency.lockutils [None req-60e19b22-fe2d-4d7d-8d6e-d53eab73e551 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Acquired lock "refresh_cache-d0b8c9dd-c683-4f3a-b819-d9d57d96636b" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2292.888874] env[63279]: DEBUG nova.network.neutron [None req-60e19b22-fe2d-4d7d-8d6e-d53eab73e551 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: d0b8c9dd-c683-4f3a-b819-d9d57d96636b] Building network info cache for instance {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2292.970830] env[63279]: DEBUG oslo_concurrency.lockutils [None req-6f67b215-8f62-4129-ac71-2dac24e2ee13 tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] Acquiring lock "3de52a94-c1de-4b37-985c-9101417260e1" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2292.971070] env[63279]: DEBUG oslo_concurrency.lockutils [None req-6f67b215-8f62-4129-ac71-2dac24e2ee13 tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] Lock "3de52a94-c1de-4b37-985c-9101417260e1" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2292.971333] env[63279]: DEBUG oslo_concurrency.lockutils [None req-6f67b215-8f62-4129-ac71-2dac24e2ee13 tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] Acquiring lock "3de52a94-c1de-4b37-985c-9101417260e1-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2292.971552] env[63279]: DEBUG oslo_concurrency.lockutils [None req-6f67b215-8f62-4129-ac71-2dac24e2ee13 tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] Lock "3de52a94-c1de-4b37-985c-9101417260e1-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2292.971730] env[63279]: DEBUG oslo_concurrency.lockutils [None req-6f67b215-8f62-4129-ac71-2dac24e2ee13 tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] Lock "3de52a94-c1de-4b37-985c-9101417260e1-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2292.974027] env[63279]: INFO nova.compute.manager [None req-6f67b215-8f62-4129-ac71-2dac24e2ee13 tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] [instance: 3de52a94-c1de-4b37-985c-9101417260e1] Terminating instance [ 2292.979856] env[63279]: DEBUG oslo_vmware.api [None req-70fb686c-6512-468a-bd20-7f0c9ad16dab tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Task: {'id': task-2087966, 'name': ReconfigVM_Task} progress is 18%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2293.017737] env[63279]: DEBUG oslo_vmware.api [None req-f0c628bc-84f4-449b-bd4f-e7a1b56e90cd tempest-ServersAdmin275Test-329467495 tempest-ServersAdmin275Test-329467495-project-admin] Task: {'id': task-2087998, 'name': Rename_Task, 'duration_secs': 0.170133} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2293.018176] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-f0c628bc-84f4-449b-bd4f-e7a1b56e90cd tempest-ServersAdmin275Test-329467495 tempest-ServersAdmin275Test-329467495-project-admin] [instance: 69c88844-84c7-4272-a2c4-051f1499df84] Powering on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2293.018269] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f6762522-da1f-43d0-863e-9f574a51b1e6 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2293.027319] env[63279]: DEBUG oslo_vmware.api [None req-f0c628bc-84f4-449b-bd4f-e7a1b56e90cd tempest-ServersAdmin275Test-329467495 tempest-ServersAdmin275Test-329467495-project-admin] Waiting for the task: (returnval){ [ 2293.027319] env[63279]: value = "task-2087999" [ 2293.027319] env[63279]: _type = "Task" [ 2293.027319] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2293.034393] env[63279]: DEBUG oslo_vmware.api [None req-a601b1b4-f84a-458b-8eca-889fd94aa5bd tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Task: {'id': task-2087972, 'name': CloneVM_Task} progress is 94%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2293.039124] env[63279]: DEBUG oslo_vmware.api [None req-f0c628bc-84f4-449b-bd4f-e7a1b56e90cd tempest-ServersAdmin275Test-329467495 tempest-ServersAdmin275Test-329467495-project-admin] Task: {'id': task-2087999, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2293.474965] env[63279]: DEBUG oslo_vmware.api [None req-70fb686c-6512-468a-bd20-7f0c9ad16dab tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Task: {'id': task-2087966, 'name': ReconfigVM_Task} progress is 18%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2293.481798] env[63279]: DEBUG nova.compute.manager [None req-6f67b215-8f62-4129-ac71-2dac24e2ee13 tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] [instance: 3de52a94-c1de-4b37-985c-9101417260e1] Start destroying the instance on the hypervisor. {{(pid=63279) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2293.482097] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-6f67b215-8f62-4129-ac71-2dac24e2ee13 tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] [instance: 3de52a94-c1de-4b37-985c-9101417260e1] Destroying instance {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2293.482855] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4c89254-c519-4408-8e6b-bc790a2b3e84 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2293.490144] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-6f67b215-8f62-4129-ac71-2dac24e2ee13 tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] [instance: 3de52a94-c1de-4b37-985c-9101417260e1] Powering off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2293.490489] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1156a3eb-7da9-488e-9b15-4fc8b82346ee {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2293.497631] env[63279]: DEBUG oslo_vmware.api [None req-6f67b215-8f62-4129-ac71-2dac24e2ee13 tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] Waiting for the task: (returnval){ [ 2293.497631] env[63279]: value = "task-2088000" [ 2293.497631] env[63279]: _type = "Task" [ 2293.497631] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2293.511177] env[63279]: DEBUG oslo_vmware.api [None req-6f67b215-8f62-4129-ac71-2dac24e2ee13 tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] Task: {'id': task-2088000, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2293.528546] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-14f74f74-0446-4405-97e3-a1859425c751 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] [instance: 6e452711-a79c-4560-b38f-9414c87e6683] Unregistered the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2293.528773] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-14f74f74-0446-4405-97e3-a1859425c751 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] [instance: 6e452711-a79c-4560-b38f-9414c87e6683] Deleting contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2293.528878] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-14f74f74-0446-4405-97e3-a1859425c751 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Deleting the datastore file [datastore1] 6e452711-a79c-4560-b38f-9414c87e6683 {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2293.532543] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2a6711ea-ccfb-4d1a-bed3-ee0da8ba2e3e {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2293.539919] env[63279]: DEBUG oslo_vmware.api [None req-a601b1b4-f84a-458b-8eca-889fd94aa5bd tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Task: {'id': task-2087972, 'name': CloneVM_Task} progress is 94%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2293.542284] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-72b15d70-c5f2-4ad1-8e6e-7fb848b8b6ec tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] [instance: a15141bc-424d-48ca-a6d5-c859a3639a0b] Unregistered the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2293.542479] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-72b15d70-c5f2-4ad1-8e6e-7fb848b8b6ec tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] [instance: a15141bc-424d-48ca-a6d5-c859a3639a0b] Deleting contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2293.542694] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-72b15d70-c5f2-4ad1-8e6e-7fb848b8b6ec tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Deleting the datastore file [datastore1] a15141bc-424d-48ca-a6d5-c859a3639a0b {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2293.542999] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ff86fa58-446c-4f13-8e29-7ed3c6b0de5b {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2293.548269] env[63279]: DEBUG oslo_vmware.api [None req-f0c628bc-84f4-449b-bd4f-e7a1b56e90cd tempest-ServersAdmin275Test-329467495 tempest-ServersAdmin275Test-329467495-project-admin] Task: {'id': task-2087999, 'name': PowerOnVM_Task} progress is 88%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2293.549914] env[63279]: DEBUG oslo_vmware.api [None req-14f74f74-0446-4405-97e3-a1859425c751 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Waiting for the task: (returnval){ [ 2293.549914] env[63279]: value = "task-2088001" [ 2293.549914] env[63279]: _type = "Task" [ 2293.549914] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2293.554188] env[63279]: DEBUG oslo_vmware.api [None req-72b15d70-c5f2-4ad1-8e6e-7fb848b8b6ec tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Waiting for the task: (returnval){ [ 2293.554188] env[63279]: value = "task-2088002" [ 2293.554188] env[63279]: _type = "Task" [ 2293.554188] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2293.560947] env[63279]: DEBUG oslo_vmware.api [None req-14f74f74-0446-4405-97e3-a1859425c751 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Task: {'id': task-2088001, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2293.565702] env[63279]: DEBUG oslo_vmware.api [None req-72b15d70-c5f2-4ad1-8e6e-7fb848b8b6ec tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Task: {'id': task-2088002, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2293.719092] env[63279]: DEBUG nova.compute.manager [req-3b51312c-7e1b-442d-af55-d4e90ba20509 req-446a01f3-983f-44b3-95bb-aed2b3bb5fe8 service nova] [instance: 2fba60ba-7ef0-4e61-9f7d-147cad8ab7ed] Received event network-changed-8f12bb0d-eec1-4c21-b319-372b37e319ca {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2293.719452] env[63279]: DEBUG nova.compute.manager [req-3b51312c-7e1b-442d-af55-d4e90ba20509 req-446a01f3-983f-44b3-95bb-aed2b3bb5fe8 service nova] [instance: 2fba60ba-7ef0-4e61-9f7d-147cad8ab7ed] Refreshing instance network info cache due to event network-changed-8f12bb0d-eec1-4c21-b319-372b37e319ca. 
{{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11655}} [ 2293.719797] env[63279]: DEBUG oslo_concurrency.lockutils [req-3b51312c-7e1b-442d-af55-d4e90ba20509 req-446a01f3-983f-44b3-95bb-aed2b3bb5fe8 service nova] Acquiring lock "refresh_cache-2fba60ba-7ef0-4e61-9f7d-147cad8ab7ed" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2293.720064] env[63279]: DEBUG oslo_concurrency.lockutils [req-3b51312c-7e1b-442d-af55-d4e90ba20509 req-446a01f3-983f-44b3-95bb-aed2b3bb5fe8 service nova] Acquired lock "refresh_cache-2fba60ba-7ef0-4e61-9f7d-147cad8ab7ed" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2293.720354] env[63279]: DEBUG nova.network.neutron [req-3b51312c-7e1b-442d-af55-d4e90ba20509 req-446a01f3-983f-44b3-95bb-aed2b3bb5fe8 service nova] [instance: 2fba60ba-7ef0-4e61-9f7d-147cad8ab7ed] Refreshing network info cache for port 8f12bb0d-eec1-4c21-b319-372b37e319ca {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2293.762018] env[63279]: DEBUG nova.network.neutron [None req-60e19b22-fe2d-4d7d-8d6e-d53eab73e551 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: d0b8c9dd-c683-4f3a-b819-d9d57d96636b] Updating instance_info_cache with network_info: [{"id": "ee08f9b0-ce61-4fa5-bf67-3a97acafc55e", "address": "fa:16:3e:16:9a:ab", "network": {"id": "e2728625-1c28-407c-946b-97923d57b409", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1735124510-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a5a719a21fe248c49d0d0151d218866b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a9abd00f-2cea-40f8-9804-a56b6431192d", "external-id": "nsx-vlan-transportzone-639", "segmentation_id": 639, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapee08f9b0-ce", "ovs_interfaceid": "ee08f9b0-ce61-4fa5-bf67-3a97acafc55e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2293.976100] env[63279]: DEBUG oslo_vmware.api [None req-70fb686c-6512-468a-bd20-7f0c9ad16dab tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Task: {'id': task-2087966, 'name': ReconfigVM_Task, 'duration_secs': 9.139979} completed successfully. 
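The instance_info_cache entry logged above for d0b8c9dd-c683-4f3a-b819-d9d57d96636b is a list of VIF dicts, so pulling addresses out is plain traversal. A trimmed copy of that entry with only the fields used here (everything else from the cache line is dropped for brevity):

    vif = {
        "id": "ee08f9b0-ce61-4fa5-bf67-3a97acafc55e",
        "address": "fa:16:3e:16:9a:ab",
        "network": {
            "label": "tempest-DeleteServersTestJSON-1735124510-network",
            "subnets": [{
                "cidr": "192.168.128.0/28",
                "gateway": {"address": "192.168.128.1"},
                "ips": [{"address": "192.168.128.7", "floating_ips": []}],
            }],
        },
    }

    fixed_ips = [ip["address"]
                 for subnet in vif["network"]["subnets"]
                 for ip in subnet["ips"]]
    floating_ips = [fip["address"]
                    for subnet in vif["network"]["subnets"]
                    for ip in subnet["ips"]
                    for fip in ip.get("floating_ips", [])]
    print(fixed_ips, floating_ips)   # ['192.168.128.7'] []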
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2293.976418] env[63279]: DEBUG oslo_concurrency.lockutils [None req-70fb686c-6512-468a-bd20-7f0c9ad16dab tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Releasing lock "9fd3ea14-3e25-47f4-ae84-1b1fb8b1bbf6" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2293.976708] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-70fb686c-6512-468a-bd20-7f0c9ad16dab tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] [instance: 9fd3ea14-3e25-47f4-ae84-1b1fb8b1bbf6] Reconfigured VM to detach interface {{(pid=63279) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1978}} [ 2294.014216] env[63279]: DEBUG oslo_vmware.api [None req-6f67b215-8f62-4129-ac71-2dac24e2ee13 tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] Task: {'id': task-2088000, 'name': PowerOffVM_Task, 'duration_secs': 0.175322} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2294.014524] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-6f67b215-8f62-4129-ac71-2dac24e2ee13 tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] [instance: 3de52a94-c1de-4b37-985c-9101417260e1] Powered off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2294.014701] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-6f67b215-8f62-4129-ac71-2dac24e2ee13 tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] [instance: 3de52a94-c1de-4b37-985c-9101417260e1] Unregistering the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2294.014963] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6bce672a-202d-47b3-932d-88250e16f1f5 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2294.035632] env[63279]: DEBUG oslo_vmware.api [None req-a601b1b4-f84a-458b-8eca-889fd94aa5bd tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Task: {'id': task-2087972, 'name': CloneVM_Task} progress is 94%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2294.040717] env[63279]: DEBUG oslo_vmware.api [None req-f0c628bc-84f4-449b-bd4f-e7a1b56e90cd tempest-ServersAdmin275Test-329467495 tempest-ServersAdmin275Test-329467495-project-admin] Task: {'id': task-2087999, 'name': PowerOnVM_Task, 'duration_secs': 0.723008} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2294.041052] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-f0c628bc-84f4-449b-bd4f-e7a1b56e90cd tempest-ServersAdmin275Test-329467495 tempest-ServersAdmin275Test-329467495-project-admin] [instance: 69c88844-84c7-4272-a2c4-051f1499df84] Powered on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2294.041319] env[63279]: DEBUG nova.compute.manager [None req-f0c628bc-84f4-449b-bd4f-e7a1b56e90cd tempest-ServersAdmin275Test-329467495 tempest-ServersAdmin275Test-329467495-project-admin] [instance: 69c88844-84c7-4272-a2c4-051f1499df84] Checking state {{(pid=63279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2294.042120] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-225d2d68-fef9-4211-827c-ba13f11f18c4 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2294.061178] env[63279]: DEBUG oslo_vmware.api [None req-14f74f74-0446-4405-97e3-a1859425c751 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Task: {'id': task-2088001, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.145156} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2294.061828] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-14f74f74-0446-4405-97e3-a1859425c751 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Deleted the datastore file {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2294.062045] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-14f74f74-0446-4405-97e3-a1859425c751 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] [instance: 6e452711-a79c-4560-b38f-9414c87e6683] Deleted contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2294.062217] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-14f74f74-0446-4405-97e3-a1859425c751 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] [instance: 6e452711-a79c-4560-b38f-9414c87e6683] Instance destroyed {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2294.062396] env[63279]: INFO nova.compute.manager [None req-14f74f74-0446-4405-97e3-a1859425c751 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] [instance: 6e452711-a79c-4560-b38f-9414c87e6683] Took 5.82 seconds to destroy the instance on the hypervisor. [ 2294.062718] env[63279]: DEBUG oslo.service.loopingcall [None req-14f74f74-0446-4405-97e3-a1859425c751 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2294.062970] env[63279]: DEBUG nova.compute.manager [-] [instance: 6e452711-a79c-4560-b38f-9414c87e6683] Deallocating network for instance {{(pid=63279) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2294.063126] env[63279]: DEBUG nova.network.neutron [-] [instance: 6e452711-a79c-4560-b38f-9414c87e6683] deallocate_for_instance() {{(pid=63279) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2294.067894] env[63279]: DEBUG oslo_vmware.api [None req-72b15d70-c5f2-4ad1-8e6e-7fb848b8b6ec tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Task: {'id': task-2088002, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.149034} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2294.068432] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-72b15d70-c5f2-4ad1-8e6e-7fb848b8b6ec tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Deleted the datastore file {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2294.068624] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-72b15d70-c5f2-4ad1-8e6e-7fb848b8b6ec tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] [instance: a15141bc-424d-48ca-a6d5-c859a3639a0b] Deleted contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2294.068808] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-72b15d70-c5f2-4ad1-8e6e-7fb848b8b6ec tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] [instance: a15141bc-424d-48ca-a6d5-c859a3639a0b] Instance destroyed {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2294.068978] env[63279]: INFO nova.compute.manager [None req-72b15d70-c5f2-4ad1-8e6e-7fb848b8b6ec tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] [instance: a15141bc-424d-48ca-a6d5-c859a3639a0b] Took 5.18 seconds to destroy the instance on the hypervisor. [ 2294.069228] env[63279]: DEBUG oslo.service.loopingcall [None req-72b15d70-c5f2-4ad1-8e6e-7fb848b8b6ec tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2294.069460] env[63279]: DEBUG nova.compute.manager [-] [instance: a15141bc-424d-48ca-a6d5-c859a3639a0b] Deallocating network for instance {{(pid=63279) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2294.069527] env[63279]: DEBUG nova.network.neutron [-] [instance: a15141bc-424d-48ca-a6d5-c859a3639a0b] deallocate_for_instance() {{(pid=63279) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2294.264815] env[63279]: DEBUG oslo_concurrency.lockutils [None req-60e19b22-fe2d-4d7d-8d6e-d53eab73e551 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Releasing lock "refresh_cache-d0b8c9dd-c683-4f3a-b819-d9d57d96636b" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2294.287481] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-6f67b215-8f62-4129-ac71-2dac24e2ee13 tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] [instance: 3de52a94-c1de-4b37-985c-9101417260e1] Unregistered the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2294.287803] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-6f67b215-8f62-4129-ac71-2dac24e2ee13 tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] [instance: 3de52a94-c1de-4b37-985c-9101417260e1] Deleting contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2294.288047] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-6f67b215-8f62-4129-ac71-2dac24e2ee13 tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] Deleting the datastore file [datastore1] 3de52a94-c1de-4b37-985c-9101417260e1 {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2294.288334] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-21c74eca-7728-4be0-a525-4b84445f7e75 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2294.295220] env[63279]: DEBUG oslo_vmware.api [None req-6f67b215-8f62-4129-ac71-2dac24e2ee13 tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] Waiting for the task: (returnval){ [ 2294.295220] env[63279]: value = "task-2088004" [ 2294.295220] env[63279]: _type = "Task" [ 2294.295220] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2294.303881] env[63279]: DEBUG oslo_vmware.api [None req-6f67b215-8f62-4129-ac71-2dac24e2ee13 tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] Task: {'id': task-2088004, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2294.539612] env[63279]: DEBUG oslo_vmware.api [None req-a601b1b4-f84a-458b-8eca-889fd94aa5bd tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Task: {'id': task-2087972, 'name': CloneVM_Task} progress is 95%. 
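The teardown steps interleaved above follow a fixed ordering: power the VM off, unregister it, delete its datastore directory, then deallocate networking (instance 3de52a94 shows the full sequence in this stretch of the log). A compact sketch of that ordering; every helper below is an illustrative stub standing in for the vmops/neutron calls named in the log, not the real nova code:

    def power_off(vm):
        print(f"PowerOffVM_Task for {vm}")

    def unregister(vm):
        print(f"UnregisterVM for {vm}")

    def delete_datastore_dir(vm):
        print(f"DeleteDatastoreFile_Task for [datastore1] {vm}")

    def deallocate_network(vm):
        print(f"deallocate_for_instance() for {vm}")

    def destroy_instance(vm):
        # Same order as the log: the guest is stopped and removed from the
        # hypervisor before its files and ports are cleaned up.
        power_off(vm)
        unregister(vm)
        delete_datastore_dir(vm)
        deallocate_network(vm)

    destroy_instance("3de52a94-c1de-4b37-985c-9101417260e1")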
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2294.560029] env[63279]: DEBUG oslo_concurrency.lockutils [None req-f0c628bc-84f4-449b-bd4f-e7a1b56e90cd tempest-ServersAdmin275Test-329467495 tempest-ServersAdmin275Test-329467495-project-admin] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2294.560364] env[63279]: DEBUG oslo_concurrency.lockutils [None req-f0c628bc-84f4-449b-bd4f-e7a1b56e90cd tempest-ServersAdmin275Test-329467495 tempest-ServersAdmin275Test-329467495-project-admin] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2294.560853] env[63279]: DEBUG nova.objects.instance [None req-f0c628bc-84f4-449b-bd4f-e7a1b56e90cd tempest-ServersAdmin275Test-329467495 tempest-ServersAdmin275Test-329467495-project-admin] [instance: 69c88844-84c7-4272-a2c4-051f1499df84] Trying to apply a migration context that does not seem to be set for this instance {{(pid=63279) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 2294.670762] env[63279]: DEBUG nova.network.neutron [req-3b51312c-7e1b-442d-af55-d4e90ba20509 req-446a01f3-983f-44b3-95bb-aed2b3bb5fe8 service nova] [instance: 2fba60ba-7ef0-4e61-9f7d-147cad8ab7ed] Updated VIF entry in instance network info cache for port 8f12bb0d-eec1-4c21-b319-372b37e319ca. {{(pid=63279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2294.671187] env[63279]: DEBUG nova.network.neutron [req-3b51312c-7e1b-442d-af55-d4e90ba20509 req-446a01f3-983f-44b3-95bb-aed2b3bb5fe8 service nova] [instance: 2fba60ba-7ef0-4e61-9f7d-147cad8ab7ed] Updating instance_info_cache with network_info: [{"id": "8f12bb0d-eec1-4c21-b319-372b37e319ca", "address": "fa:16:3e:68:12:c8", "network": {"id": "c1d7406d-6852-47cd-a4a3-de7373d03ab4", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1990733857-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.181", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1fc9b60ae304455097b8be9a276796fa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0549820d-5649-40bc-ad6e-9ae27b384d90", "external-id": "nsx-vlan-transportzone-434", "segmentation_id": 434, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8f12bb0d-ee", "ovs_interfaceid": "8f12bb0d-eec1-4c21-b319-372b37e319ca", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2294.797167] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de2854ad-d373-452f-ac9b-1e9664653196 {{(pid=63279) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2294.808776] env[63279]: DEBUG oslo_vmware.api [None req-6f67b215-8f62-4129-ac71-2dac24e2ee13 tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] Task: {'id': task-2088004, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.163626} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2294.826534] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-6f67b215-8f62-4129-ac71-2dac24e2ee13 tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] Deleted the datastore file {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2294.826766] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-6f67b215-8f62-4129-ac71-2dac24e2ee13 tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] [instance: 3de52a94-c1de-4b37-985c-9101417260e1] Deleted contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2294.829198] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-6f67b215-8f62-4129-ac71-2dac24e2ee13 tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] [instance: 3de52a94-c1de-4b37-985c-9101417260e1] Instance destroyed {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2294.829198] env[63279]: INFO nova.compute.manager [None req-6f67b215-8f62-4129-ac71-2dac24e2ee13 tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] [instance: 3de52a94-c1de-4b37-985c-9101417260e1] Took 1.35 seconds to destroy the instance on the hypervisor. [ 2294.829198] env[63279]: DEBUG oslo.service.loopingcall [None req-6f67b215-8f62-4129-ac71-2dac24e2ee13 tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2294.829198] env[63279]: DEBUG nova.compute.manager [-] [instance: 3de52a94-c1de-4b37-985c-9101417260e1] Deallocating network for instance {{(pid=63279) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2294.829198] env[63279]: DEBUG nova.network.neutron [-] [instance: 3de52a94-c1de-4b37-985c-9101417260e1] deallocate_for_instance() {{(pid=63279) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2294.833055] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a52da60b-b515-4c86-9f54-0760dd04cd3a {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2294.839188] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-60e19b22-fe2d-4d7d-8d6e-d53eab73e551 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: d0b8c9dd-c683-4f3a-b819-d9d57d96636b] Updating instance 'd0b8c9dd-c683-4f3a-b819-d9d57d96636b' progress to 83 {{(pid=63279) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2294.850566] env[63279]: DEBUG nova.compute.manager [req-466a0a3a-cd06-4c13-ac31-87af6770c3b6 req-3b02c0c3-2d3f-4ce6-9446-445a126abcc5 service nova] [instance: a15141bc-424d-48ca-a6d5-c859a3639a0b] Received event network-vif-deleted-cd0a8ed6-d1ee-4266-8bde-e866ac2873ce {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2294.850761] env[63279]: INFO nova.compute.manager [req-466a0a3a-cd06-4c13-ac31-87af6770c3b6 req-3b02c0c3-2d3f-4ce6-9446-445a126abcc5 service nova] [instance: a15141bc-424d-48ca-a6d5-c859a3639a0b] Neutron deleted interface cd0a8ed6-d1ee-4266-8bde-e866ac2873ce; detaching it from the instance and deleting it from the info cache [ 2294.850934] env[63279]: DEBUG nova.network.neutron [req-466a0a3a-cd06-4c13-ac31-87af6770c3b6 req-3b02c0c3-2d3f-4ce6-9446-445a126abcc5 service nova] [instance: a15141bc-424d-48ca-a6d5-c859a3639a0b] Updating instance_info_cache with network_info: [] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2294.940093] env[63279]: DEBUG nova.network.neutron [-] [instance: 6e452711-a79c-4560-b38f-9414c87e6683] Updating instance_info_cache with network_info: [] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2295.034491] env[63279]: DEBUG oslo_vmware.api [None req-a601b1b4-f84a-458b-8eca-889fd94aa5bd tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Task: {'id': task-2087972, 'name': CloneVM_Task, 'duration_secs': 9.331695} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2295.034765] env[63279]: INFO nova.virt.vmwareapi.vmops [None req-a601b1b4-f84a-458b-8eca-889fd94aa5bd tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 686a7ce2-2d07-411e-91d6-0471c55c3728] Created linked-clone VM from snapshot [ 2295.035893] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-553e1c80-e20f-4040-b5f3-29d1e316e5c7 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2295.043673] env[63279]: DEBUG nova.virt.vmwareapi.images [None req-a601b1b4-f84a-458b-8eca-889fd94aa5bd tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 686a7ce2-2d07-411e-91d6-0471c55c3728] Uploading image c074a49b-943f-49a5-8e98-ef631d5709ab {{(pid=63279) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 2295.079077] env[63279]: DEBUG oslo_vmware.rw_handles [None req-a601b1b4-f84a-458b-8eca-889fd94aa5bd tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 2295.079077] env[63279]: value = "vm-427772" [ 2295.079077] env[63279]: _type = "VirtualMachine" [ 2295.079077] env[63279]: }. {{(pid=63279) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 2295.079569] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-5de6b29b-783a-4250-8d65-2e8f672ca15a {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2295.087829] env[63279]: DEBUG oslo_vmware.rw_handles [None req-a601b1b4-f84a-458b-8eca-889fd94aa5bd tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Lease: (returnval){ [ 2295.087829] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52b425d8-81ef-4408-77a1-cc025c640d98" [ 2295.087829] env[63279]: _type = "HttpNfcLease" [ 2295.087829] env[63279]: } obtained for exporting VM: (result){ [ 2295.087829] env[63279]: value = "vm-427772" [ 2295.087829] env[63279]: _type = "VirtualMachine" [ 2295.087829] env[63279]: }. {{(pid=63279) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 2295.088226] env[63279]: DEBUG oslo_vmware.api [None req-a601b1b4-f84a-458b-8eca-889fd94aa5bd tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Waiting for the lease: (returnval){ [ 2295.088226] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52b425d8-81ef-4408-77a1-cc025c640d98" [ 2295.088226] env[63279]: _type = "HttpNfcLease" [ 2295.088226] env[63279]: } to be ready. {{(pid=63279) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 2295.095227] env[63279]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 2295.095227] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52b425d8-81ef-4408-77a1-cc025c640d98" [ 2295.095227] env[63279]: _type = "HttpNfcLease" [ 2295.095227] env[63279]: } is initializing. 
{{(pid=63279) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 2295.174182] env[63279]: DEBUG oslo_concurrency.lockutils [req-3b51312c-7e1b-442d-af55-d4e90ba20509 req-446a01f3-983f-44b3-95bb-aed2b3bb5fe8 service nova] Releasing lock "refresh_cache-2fba60ba-7ef0-4e61-9f7d-147cad8ab7ed" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2295.279183] env[63279]: DEBUG nova.network.neutron [-] [instance: a15141bc-424d-48ca-a6d5-c859a3639a0b] Updating instance_info_cache with network_info: [] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2295.347488] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-60e19b22-fe2d-4d7d-8d6e-d53eab73e551 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: d0b8c9dd-c683-4f3a-b819-d9d57d96636b] Powering on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2295.347488] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-bca9fbe9-c215-4680-ad7d-ac8e3b5b8e3c {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2295.353577] env[63279]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-22c9f30e-73b6-4480-888c-779fcaa26046 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2295.356899] env[63279]: DEBUG oslo_concurrency.lockutils [None req-70fb686c-6512-468a-bd20-7f0c9ad16dab tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Acquiring lock "refresh_cache-9fd3ea14-3e25-47f4-ae84-1b1fb8b1bbf6" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2295.357084] env[63279]: DEBUG oslo_concurrency.lockutils [None req-70fb686c-6512-468a-bd20-7f0c9ad16dab tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Acquired lock "refresh_cache-9fd3ea14-3e25-47f4-ae84-1b1fb8b1bbf6" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2295.357266] env[63279]: DEBUG nova.network.neutron [None req-70fb686c-6512-468a-bd20-7f0c9ad16dab tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] [instance: 9fd3ea14-3e25-47f4-ae84-1b1fb8b1bbf6] Building network info cache for instance {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2295.358844] env[63279]: DEBUG oslo_vmware.api [None req-60e19b22-fe2d-4d7d-8d6e-d53eab73e551 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Waiting for the task: (returnval){ [ 2295.358844] env[63279]: value = "task-2088006" [ 2295.358844] env[63279]: _type = "Task" [ 2295.358844] env[63279]: } to complete. 
{{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2295.366755] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6c14e67-c510-4269-a947-6750f2365339 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2295.384804] env[63279]: DEBUG oslo_vmware.api [None req-60e19b22-fe2d-4d7d-8d6e-d53eab73e551 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Task: {'id': task-2088006, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2295.407501] env[63279]: DEBUG nova.compute.manager [req-466a0a3a-cd06-4c13-ac31-87af6770c3b6 req-3b02c0c3-2d3f-4ce6-9446-445a126abcc5 service nova] [instance: a15141bc-424d-48ca-a6d5-c859a3639a0b] Detach interface failed, port_id=cd0a8ed6-d1ee-4266-8bde-e866ac2873ce, reason: Instance a15141bc-424d-48ca-a6d5-c859a3639a0b could not be found. {{(pid=63279) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11484}} [ 2295.443244] env[63279]: INFO nova.compute.manager [-] [instance: 6e452711-a79c-4560-b38f-9414c87e6683] Took 1.38 seconds to deallocate network for instance. [ 2295.554222] env[63279]: DEBUG nova.network.neutron [-] [instance: 3de52a94-c1de-4b37-985c-9101417260e1] Updating instance_info_cache with network_info: [] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2295.568731] env[63279]: DEBUG oslo_concurrency.lockutils [None req-f0c628bc-84f4-449b-bd4f-e7a1b56e90cd tempest-ServersAdmin275Test-329467495 tempest-ServersAdmin275Test-329467495-project-admin] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.008s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2295.596333] env[63279]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 2295.596333] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52b425d8-81ef-4408-77a1-cc025c640d98" [ 2295.596333] env[63279]: _type = "HttpNfcLease" [ 2295.596333] env[63279]: } is ready. {{(pid=63279) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 2295.596824] env[63279]: DEBUG oslo_vmware.rw_handles [None req-a601b1b4-f84a-458b-8eca-889fd94aa5bd tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 2295.596824] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52b425d8-81ef-4408-77a1-cc025c640d98" [ 2295.596824] env[63279]: _type = "HttpNfcLease" [ 2295.596824] env[63279]: }. 
{{(pid=63279) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 2295.597601] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08df9967-550c-4ad8-acd2-40e7bb8d54c7 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2295.605656] env[63279]: DEBUG oslo_vmware.rw_handles [None req-a601b1b4-f84a-458b-8eca-889fd94aa5bd tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5253d3c7-08c4-6096-fa73-01a3a2d9adee/disk-0.vmdk from lease info. {{(pid=63279) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 2295.605844] env[63279]: DEBUG oslo_vmware.rw_handles [None req-a601b1b4-f84a-458b-8eca-889fd94aa5bd tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5253d3c7-08c4-6096-fa73-01a3a2d9adee/disk-0.vmdk for reading. {{(pid=63279) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 2295.756241] env[63279]: DEBUG nova.compute.manager [req-8f0c9705-ad0a-4764-af61-4379effd5107 req-3e5ad985-5c7a-4178-8cb3-27a91af6ec43 service nova] [instance: 2fba60ba-7ef0-4e61-9f7d-147cad8ab7ed] Received event network-changed-8f12bb0d-eec1-4c21-b319-372b37e319ca {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2295.756447] env[63279]: DEBUG nova.compute.manager [req-8f0c9705-ad0a-4764-af61-4379effd5107 req-3e5ad985-5c7a-4178-8cb3-27a91af6ec43 service nova] [instance: 2fba60ba-7ef0-4e61-9f7d-147cad8ab7ed] Refreshing instance network info cache due to event network-changed-8f12bb0d-eec1-4c21-b319-372b37e319ca. 
{{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11655}} [ 2295.756700] env[63279]: DEBUG oslo_concurrency.lockutils [req-8f0c9705-ad0a-4764-af61-4379effd5107 req-3e5ad985-5c7a-4178-8cb3-27a91af6ec43 service nova] Acquiring lock "refresh_cache-2fba60ba-7ef0-4e61-9f7d-147cad8ab7ed" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2295.756851] env[63279]: DEBUG oslo_concurrency.lockutils [req-8f0c9705-ad0a-4764-af61-4379effd5107 req-3e5ad985-5c7a-4178-8cb3-27a91af6ec43 service nova] Acquired lock "refresh_cache-2fba60ba-7ef0-4e61-9f7d-147cad8ab7ed" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2295.757029] env[63279]: DEBUG nova.network.neutron [req-8f0c9705-ad0a-4764-af61-4379effd5107 req-3e5ad985-5c7a-4178-8cb3-27a91af6ec43 service nova] [instance: 2fba60ba-7ef0-4e61-9f7d-147cad8ab7ed] Refreshing network info cache for port 8f12bb0d-eec1-4c21-b319-372b37e319ca {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2295.759335] env[63279]: DEBUG oslo_concurrency.lockutils [None req-7ed1fc96-927f-4b0b-9714-e77ae6e649d6 tempest-ServersAdmin275Test-1209233635 tempest-ServersAdmin275Test-1209233635-project-member] Acquiring lock "69c88844-84c7-4272-a2c4-051f1499df84" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2295.759964] env[63279]: DEBUG oslo_concurrency.lockutils [None req-7ed1fc96-927f-4b0b-9714-e77ae6e649d6 tempest-ServersAdmin275Test-1209233635 tempest-ServersAdmin275Test-1209233635-project-member] Lock "69c88844-84c7-4272-a2c4-051f1499df84" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2295.759964] env[63279]: DEBUG oslo_concurrency.lockutils [None req-7ed1fc96-927f-4b0b-9714-e77ae6e649d6 tempest-ServersAdmin275Test-1209233635 tempest-ServersAdmin275Test-1209233635-project-member] Acquiring lock "69c88844-84c7-4272-a2c4-051f1499df84-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2295.759964] env[63279]: DEBUG oslo_concurrency.lockutils [None req-7ed1fc96-927f-4b0b-9714-e77ae6e649d6 tempest-ServersAdmin275Test-1209233635 tempest-ServersAdmin275Test-1209233635-project-member] Lock "69c88844-84c7-4272-a2c4-051f1499df84-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2295.760304] env[63279]: DEBUG oslo_concurrency.lockutils [None req-7ed1fc96-927f-4b0b-9714-e77ae6e649d6 tempest-ServersAdmin275Test-1209233635 tempest-ServersAdmin275Test-1209233635-project-member] Lock "69c88844-84c7-4272-a2c4-051f1499df84-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2295.762083] env[63279]: INFO nova.compute.manager [None req-7ed1fc96-927f-4b0b-9714-e77ae6e649d6 tempest-ServersAdmin275Test-1209233635 
tempest-ServersAdmin275Test-1209233635-project-member] [instance: 69c88844-84c7-4272-a2c4-051f1499df84] Terminating instance [ 2295.775910] env[63279]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-3f2c5004-1d58-4211-a9f7-76f0b4f31995 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2295.781028] env[63279]: INFO nova.compute.manager [-] [instance: a15141bc-424d-48ca-a6d5-c859a3639a0b] Took 1.71 seconds to deallocate network for instance. [ 2295.872905] env[63279]: DEBUG oslo_vmware.api [None req-60e19b22-fe2d-4d7d-8d6e-d53eab73e551 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Task: {'id': task-2088006, 'name': PowerOnVM_Task, 'duration_secs': 0.379801} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2295.872905] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-60e19b22-fe2d-4d7d-8d6e-d53eab73e551 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: d0b8c9dd-c683-4f3a-b819-d9d57d96636b] Powered on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2295.872905] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-60e19b22-fe2d-4d7d-8d6e-d53eab73e551 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: d0b8c9dd-c683-4f3a-b819-d9d57d96636b] Updating instance 'd0b8c9dd-c683-4f3a-b819-d9d57d96636b' progress to 100 {{(pid=63279) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2295.950241] env[63279]: DEBUG oslo_concurrency.lockutils [None req-14f74f74-0446-4405-97e3-a1859425c751 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2295.950895] env[63279]: DEBUG oslo_concurrency.lockutils [None req-14f74f74-0446-4405-97e3-a1859425c751 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2295.950994] env[63279]: DEBUG nova.objects.instance [None req-14f74f74-0446-4405-97e3-a1859425c751 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Lazy-loading 'resources' on Instance uuid 6e452711-a79c-4560-b38f-9414c87e6683 {{(pid=63279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2296.056712] env[63279]: INFO nova.compute.manager [-] [instance: 3de52a94-c1de-4b37-985c-9101417260e1] Took 1.23 seconds to deallocate network for instance. 
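The lockutils entries around here report, for each caller, how long it waited for and then held the "compute_resources" lock (e.g. "acquired ... waited 0.001s", later "released ... held ..."). A minimal sketch of that waited/held bookkeeping, built on a plain threading.Lock rather than oslo_concurrency.lockutils; the names and output format below are illustrative only, not the library's implementation:

```python
import threading
import time
from contextlib import contextmanager

# Sketch of the waited/held timing reported by the lock messages in this log,
# using a plain threading.Lock (not oslo_concurrency.lockutils).
_locks = {}
_locks_guard = threading.Lock()

@contextmanager
def timed_lock(name, caller):
    with _locks_guard:
        lock = _locks.setdefault(name, threading.Lock())
    start = time.monotonic()
    lock.acquire()
    waited = time.monotonic() - start
    print(f'Lock "{name}" acquired by "{caller}" :: waited {waited:.3f}s')
    held_start = time.monotonic()
    try:
        yield
    finally:
        lock.release()
        held = time.monotonic() - held_start
        print(f'Lock "{name}" released by "{caller}" :: held {held:.3f}s')

if __name__ == "__main__":
    # Hypothetical caller name, mirroring the style of the log above.
    with timed_lock("compute_resources", "ResourceTracker.update_usage"):
        time.sleep(0.05)
```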
[ 2296.080569] env[63279]: DEBUG oslo_concurrency.lockutils [None req-da49d5ea-c1c3-44c1-94c6-61f1aa74c43d tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Acquiring lock "9fd3ea14-3e25-47f4-ae84-1b1fb8b1bbf6" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2296.080938] env[63279]: DEBUG oslo_concurrency.lockutils [None req-da49d5ea-c1c3-44c1-94c6-61f1aa74c43d tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Lock "9fd3ea14-3e25-47f4-ae84-1b1fb8b1bbf6" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2296.081206] env[63279]: DEBUG oslo_concurrency.lockutils [None req-da49d5ea-c1c3-44c1-94c6-61f1aa74c43d tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Acquiring lock "9fd3ea14-3e25-47f4-ae84-1b1fb8b1bbf6-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2296.081499] env[63279]: DEBUG oslo_concurrency.lockutils [None req-da49d5ea-c1c3-44c1-94c6-61f1aa74c43d tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Lock "9fd3ea14-3e25-47f4-ae84-1b1fb8b1bbf6-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2296.081840] env[63279]: DEBUG oslo_concurrency.lockutils [None req-da49d5ea-c1c3-44c1-94c6-61f1aa74c43d tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Lock "9fd3ea14-3e25-47f4-ae84-1b1fb8b1bbf6-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2296.085997] env[63279]: INFO nova.compute.manager [None req-da49d5ea-c1c3-44c1-94c6-61f1aa74c43d tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] [instance: 9fd3ea14-3e25-47f4-ae84-1b1fb8b1bbf6] Terminating instance [ 2296.120616] env[63279]: INFO nova.network.neutron [None req-70fb686c-6512-468a-bd20-7f0c9ad16dab tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] [instance: 9fd3ea14-3e25-47f4-ae84-1b1fb8b1bbf6] Port f3d164be-d41c-4e5a-97f5-a50cb539a06a from network info_cache is no longer associated with instance in Neutron. Removing from network info_cache. [ 2296.120904] env[63279]: INFO nova.network.neutron [None req-70fb686c-6512-468a-bd20-7f0c9ad16dab tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] [instance: 9fd3ea14-3e25-47f4-ae84-1b1fb8b1bbf6] Port 34ef38ab-d2fc-4fee-9782-e01851e13c39 from network info_cache is no longer associated with instance in Neutron. Removing from network info_cache. 
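The two entries just above note that ports f3d164be-… and 34ef38ab-… are no longer associated with instance 9fd3ea14-… in Neutron and are being removed from the network info cache, after which the refreshed cache (next record) holds only the remaining VIF. A minimal sketch of that pruning step, with VIF entries trimmed to their port ids; this is illustrative only, not Nova's network info cache code:

```python
# Cached network_info as a list of VIF dicts, reduced here to the port ids
# that appear in the log records above.
cached_network_info = [
    {"id": "0bd52488-d960-42c9-8077-fc8fe9eda956"},
    {"id": "f3d164be-d41c-4e5a-97f5-a50cb539a06a"},
    {"id": "34ef38ab-d2fc-4fee-9782-e01851e13c39"},
]

# Ports Neutron no longer reports for the instance.
stale_port_ids = {
    "f3d164be-d41c-4e5a-97f5-a50cb539a06a",
    "34ef38ab-d2fc-4fee-9782-e01851e13c39",
}

def prune_stale_vifs(network_info, stale_ids):
    """Drop cached VIF entries whose port id Neutron no longer knows about."""
    return [vif for vif in network_info if vif["id"] not in stale_ids]

print(prune_stale_vifs(cached_network_info, stale_port_ids))
# -> [{'id': '0bd52488-d960-42c9-8077-fc8fe9eda956'}]
```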
[ 2296.121416] env[63279]: DEBUG nova.network.neutron [None req-70fb686c-6512-468a-bd20-7f0c9ad16dab tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] [instance: 9fd3ea14-3e25-47f4-ae84-1b1fb8b1bbf6] Updating instance_info_cache with network_info: [{"id": "0bd52488-d960-42c9-8077-fc8fe9eda956", "address": "fa:16:3e:16:45:3c", "network": {"id": "e0e614b7-de4b-485e-8824-582faae8febd", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1102025073-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.241", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b5b21bc5072e4945a19a782dd9561709", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "41278529-8bd2-44a1-97c8-03967faa3ff7", "external-id": "nsx-vlan-transportzone-749", "segmentation_id": 749, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0bd52488-d9", "ovs_interfaceid": "0bd52488-d960-42c9-8077-fc8fe9eda956", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2296.267053] env[63279]: DEBUG oslo_concurrency.lockutils [None req-7ed1fc96-927f-4b0b-9714-e77ae6e649d6 tempest-ServersAdmin275Test-1209233635 tempest-ServersAdmin275Test-1209233635-project-member] Acquiring lock "refresh_cache-69c88844-84c7-4272-a2c4-051f1499df84" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2296.267347] env[63279]: DEBUG oslo_concurrency.lockutils [None req-7ed1fc96-927f-4b0b-9714-e77ae6e649d6 tempest-ServersAdmin275Test-1209233635 tempest-ServersAdmin275Test-1209233635-project-member] Acquired lock "refresh_cache-69c88844-84c7-4272-a2c4-051f1499df84" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2296.267616] env[63279]: DEBUG nova.network.neutron [None req-7ed1fc96-927f-4b0b-9714-e77ae6e649d6 tempest-ServersAdmin275Test-1209233635 tempest-ServersAdmin275Test-1209233635-project-member] [instance: 69c88844-84c7-4272-a2c4-051f1499df84] Building network info cache for instance {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2296.289560] env[63279]: DEBUG oslo_concurrency.lockutils [None req-72b15d70-c5f2-4ad1-8e6e-7fb848b8b6ec tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2296.499313] env[63279]: DEBUG nova.network.neutron [req-8f0c9705-ad0a-4764-af61-4379effd5107 req-3e5ad985-5c7a-4178-8cb3-27a91af6ec43 service nova] [instance: 2fba60ba-7ef0-4e61-9f7d-147cad8ab7ed] Updated VIF entry in instance network info cache for port 8f12bb0d-eec1-4c21-b319-372b37e319ca. 
{{(pid=63279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2296.499695] env[63279]: DEBUG nova.network.neutron [req-8f0c9705-ad0a-4764-af61-4379effd5107 req-3e5ad985-5c7a-4178-8cb3-27a91af6ec43 service nova] [instance: 2fba60ba-7ef0-4e61-9f7d-147cad8ab7ed] Updating instance_info_cache with network_info: [{"id": "8f12bb0d-eec1-4c21-b319-372b37e319ca", "address": "fa:16:3e:68:12:c8", "network": {"id": "c1d7406d-6852-47cd-a4a3-de7373d03ab4", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1990733857-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.181", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1fc9b60ae304455097b8be9a276796fa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0549820d-5649-40bc-ad6e-9ae27b384d90", "external-id": "nsx-vlan-transportzone-434", "segmentation_id": 434, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8f12bb0d-ee", "ovs_interfaceid": "8f12bb0d-eec1-4c21-b319-372b37e319ca", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2296.563639] env[63279]: DEBUG oslo_concurrency.lockutils [None req-6f67b215-8f62-4129-ac71-2dac24e2ee13 tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2296.590525] env[63279]: DEBUG nova.compute.manager [None req-da49d5ea-c1c3-44c1-94c6-61f1aa74c43d tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] [instance: 9fd3ea14-3e25-47f4-ae84-1b1fb8b1bbf6] Start destroying the instance on the hypervisor. 
{{(pid=63279) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2296.590840] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-da49d5ea-c1c3-44c1-94c6-61f1aa74c43d tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] [instance: 9fd3ea14-3e25-47f4-ae84-1b1fb8b1bbf6] Destroying instance {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2296.595219] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f966ad2-f14a-4216-9a06-68d0df2953bc {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2296.603967] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-da49d5ea-c1c3-44c1-94c6-61f1aa74c43d tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] [instance: 9fd3ea14-3e25-47f4-ae84-1b1fb8b1bbf6] Powering off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2296.604323] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5f5d5537-ff28-4b87-a9fc-5baade7798d0 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2296.611244] env[63279]: DEBUG oslo_vmware.api [None req-da49d5ea-c1c3-44c1-94c6-61f1aa74c43d tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Waiting for the task: (returnval){ [ 2296.611244] env[63279]: value = "task-2088007" [ 2296.611244] env[63279]: _type = "Task" [ 2296.611244] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2296.622564] env[63279]: DEBUG oslo_vmware.api [None req-da49d5ea-c1c3-44c1-94c6-61f1aa74c43d tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Task: {'id': task-2088007, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2296.626349] env[63279]: DEBUG oslo_concurrency.lockutils [None req-70fb686c-6512-468a-bd20-7f0c9ad16dab tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Releasing lock "refresh_cache-9fd3ea14-3e25-47f4-ae84-1b1fb8b1bbf6" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2296.707790] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99bc93d1-580a-4f77-ad22-99bc880ec301 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2296.718135] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-425cc4e0-7033-4280-bd5a-35600f47c1fc {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2296.756885] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83c8f818-8a74-402b-abff-84f37b6862e9 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2296.766996] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4969182-1e7c-470b-bbad-8cf25a7882dc {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2296.793409] env[63279]: DEBUG nova.compute.provider_tree [None req-14f74f74-0446-4405-97e3-a1859425c751 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Inventory has not changed in ProviderTree for provider: 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2296.802875] env[63279]: DEBUG nova.network.neutron [None req-7ed1fc96-927f-4b0b-9714-e77ae6e649d6 tempest-ServersAdmin275Test-1209233635 tempest-ServersAdmin275Test-1209233635-project-member] [instance: 69c88844-84c7-4272-a2c4-051f1499df84] Instance cache missing network info. 
{{(pid=63279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2296.872972] env[63279]: DEBUG nova.network.neutron [None req-7ed1fc96-927f-4b0b-9714-e77ae6e649d6 tempest-ServersAdmin275Test-1209233635 tempest-ServersAdmin275Test-1209233635-project-member] [instance: 69c88844-84c7-4272-a2c4-051f1499df84] Updating instance_info_cache with network_info: [] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2296.879200] env[63279]: DEBUG nova.compute.manager [req-e1d89130-0b5f-4565-baf9-bcaff4a50a4f req-5616b1b9-79bf-4e14-a44c-eb3b5f046f62 service nova] [instance: 3de52a94-c1de-4b37-985c-9101417260e1] Received event network-vif-deleted-949bba7c-664a-44a6-bd3b-635bd99e3501 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2297.002772] env[63279]: DEBUG oslo_concurrency.lockutils [req-8f0c9705-ad0a-4764-af61-4379effd5107 req-3e5ad985-5c7a-4178-8cb3-27a91af6ec43 service nova] Releasing lock "refresh_cache-2fba60ba-7ef0-4e61-9f7d-147cad8ab7ed" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2297.004252] env[63279]: DEBUG nova.compute.manager [req-8f0c9705-ad0a-4764-af61-4379effd5107 req-3e5ad985-5c7a-4178-8cb3-27a91af6ec43 service nova] [instance: 9fd3ea14-3e25-47f4-ae84-1b1fb8b1bbf6] Received event network-vif-deleted-f3d164be-d41c-4e5a-97f5-a50cb539a06a {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2297.004454] env[63279]: INFO nova.compute.manager [req-8f0c9705-ad0a-4764-af61-4379effd5107 req-3e5ad985-5c7a-4178-8cb3-27a91af6ec43 service nova] [instance: 9fd3ea14-3e25-47f4-ae84-1b1fb8b1bbf6] Neutron deleted interface f3d164be-d41c-4e5a-97f5-a50cb539a06a; detaching it from the instance and deleting it from the info cache [ 2297.004845] env[63279]: DEBUG nova.network.neutron [req-8f0c9705-ad0a-4764-af61-4379effd5107 req-3e5ad985-5c7a-4178-8cb3-27a91af6ec43 service nova] [instance: 9fd3ea14-3e25-47f4-ae84-1b1fb8b1bbf6] Updating instance_info_cache with network_info: [{"id": "0bd52488-d960-42c9-8077-fc8fe9eda956", "address": "fa:16:3e:16:45:3c", "network": {"id": "e0e614b7-de4b-485e-8824-582faae8febd", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1102025073-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.241", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b5b21bc5072e4945a19a782dd9561709", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "41278529-8bd2-44a1-97c8-03967faa3ff7", "external-id": "nsx-vlan-transportzone-749", "segmentation_id": 749, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0bd52488-d9", "ovs_interfaceid": "0bd52488-d960-42c9-8077-fc8fe9eda956", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "34ef38ab-d2fc-4fee-9782-e01851e13c39", "address": "fa:16:3e:13:bf:29", "network": {"id": "e0e614b7-de4b-485e-8824-582faae8febd", "bridge": "br-int", "label": 
"tempest-AttachInterfacesTestJSON-1102025073-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b5b21bc5072e4945a19a782dd9561709", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "41278529-8bd2-44a1-97c8-03967faa3ff7", "external-id": "nsx-vlan-transportzone-749", "segmentation_id": 749, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap34ef38ab-d2", "ovs_interfaceid": "34ef38ab-d2fc-4fee-9782-e01851e13c39", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2297.121991] env[63279]: DEBUG oslo_vmware.api [None req-da49d5ea-c1c3-44c1-94c6-61f1aa74c43d tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Task: {'id': task-2088007, 'name': PowerOffVM_Task, 'duration_secs': 0.191077} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2297.122321] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-da49d5ea-c1c3-44c1-94c6-61f1aa74c43d tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] [instance: 9fd3ea14-3e25-47f4-ae84-1b1fb8b1bbf6] Powered off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2297.122577] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-da49d5ea-c1c3-44c1-94c6-61f1aa74c43d tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] [instance: 9fd3ea14-3e25-47f4-ae84-1b1fb8b1bbf6] Unregistering the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2297.122892] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-82b74847-aa4a-42d6-995e-06039d367a18 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2297.130584] env[63279]: DEBUG oslo_concurrency.lockutils [None req-70fb686c-6512-468a-bd20-7f0c9ad16dab tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Lock "interface-9fd3ea14-3e25-47f4-ae84-1b1fb8b1bbf6-f3d164be-d41c-4e5a-97f5-a50cb539a06a" "released" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: held 13.310s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2297.297049] env[63279]: DEBUG nova.scheduler.client.report [None req-14f74f74-0446-4405-97e3-a1859425c751 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Inventory has not changed for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 
'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2297.311583] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-da49d5ea-c1c3-44c1-94c6-61f1aa74c43d tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] [instance: 9fd3ea14-3e25-47f4-ae84-1b1fb8b1bbf6] Unregistered the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2297.311883] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-da49d5ea-c1c3-44c1-94c6-61f1aa74c43d tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] [instance: 9fd3ea14-3e25-47f4-ae84-1b1fb8b1bbf6] Deleting contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2297.312334] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-da49d5ea-c1c3-44c1-94c6-61f1aa74c43d tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Deleting the datastore file [datastore1] 9fd3ea14-3e25-47f4-ae84-1b1fb8b1bbf6 {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2297.312762] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6ce6cd25-15d4-4cf6-bd3d-920085b7ee54 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2297.319761] env[63279]: DEBUG oslo_vmware.api [None req-da49d5ea-c1c3-44c1-94c6-61f1aa74c43d tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Waiting for the task: (returnval){ [ 2297.319761] env[63279]: value = "task-2088009" [ 2297.319761] env[63279]: _type = "Task" [ 2297.319761] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2297.327892] env[63279]: DEBUG oslo_vmware.api [None req-da49d5ea-c1c3-44c1-94c6-61f1aa74c43d tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Task: {'id': task-2088009, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2297.376390] env[63279]: DEBUG oslo_concurrency.lockutils [None req-7ed1fc96-927f-4b0b-9714-e77ae6e649d6 tempest-ServersAdmin275Test-1209233635 tempest-ServersAdmin275Test-1209233635-project-member] Releasing lock "refresh_cache-69c88844-84c7-4272-a2c4-051f1499df84" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2297.376971] env[63279]: DEBUG nova.compute.manager [None req-7ed1fc96-927f-4b0b-9714-e77ae6e649d6 tempest-ServersAdmin275Test-1209233635 tempest-ServersAdmin275Test-1209233635-project-member] [instance: 69c88844-84c7-4272-a2c4-051f1499df84] Start destroying the instance on the hypervisor. 
{{(pid=63279) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2297.377246] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-7ed1fc96-927f-4b0b-9714-e77ae6e649d6 tempest-ServersAdmin275Test-1209233635 tempest-ServersAdmin275Test-1209233635-project-member] [instance: 69c88844-84c7-4272-a2c4-051f1499df84] Destroying instance {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2297.378362] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16349670-c2eb-4b90-9368-4ecf5e1002ab {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2297.386463] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-7ed1fc96-927f-4b0b-9714-e77ae6e649d6 tempest-ServersAdmin275Test-1209233635 tempest-ServersAdmin275Test-1209233635-project-member] [instance: 69c88844-84c7-4272-a2c4-051f1499df84] Powering off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2297.387071] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9e3cda42-d052-4b4f-a143-f5fb6b62051d {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2297.400511] env[63279]: DEBUG oslo_vmware.api [None req-7ed1fc96-927f-4b0b-9714-e77ae6e649d6 tempest-ServersAdmin275Test-1209233635 tempest-ServersAdmin275Test-1209233635-project-member] Waiting for the task: (returnval){ [ 2297.400511] env[63279]: value = "task-2088010" [ 2297.400511] env[63279]: _type = "Task" [ 2297.400511] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2297.409848] env[63279]: DEBUG oslo_vmware.api [None req-7ed1fc96-927f-4b0b-9714-e77ae6e649d6 tempest-ServersAdmin275Test-1209233635 tempest-ServersAdmin275Test-1209233635-project-member] Task: {'id': task-2088010, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2297.508608] env[63279]: DEBUG oslo_concurrency.lockutils [req-8f0c9705-ad0a-4764-af61-4379effd5107 req-3e5ad985-5c7a-4178-8cb3-27a91af6ec43 service nova] Acquiring lock "9fd3ea14-3e25-47f4-ae84-1b1fb8b1bbf6" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2297.803066] env[63279]: DEBUG oslo_concurrency.lockutils [None req-14f74f74-0446-4405-97e3-a1859425c751 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.852s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2297.805496] env[63279]: DEBUG oslo_concurrency.lockutils [None req-72b15d70-c5f2-4ad1-8e6e-7fb848b8b6ec tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.516s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2297.806291] env[63279]: DEBUG nova.objects.instance [None req-72b15d70-c5f2-4ad1-8e6e-7fb848b8b6ec tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Lazy-loading 'resources' on Instance uuid a15141bc-424d-48ca-a6d5-c859a3639a0b {{(pid=63279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2297.827069] env[63279]: INFO nova.scheduler.client.report [None req-14f74f74-0446-4405-97e3-a1859425c751 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Deleted allocations for instance 6e452711-a79c-4560-b38f-9414c87e6683 [ 2297.833322] env[63279]: DEBUG oslo_vmware.api [None req-da49d5ea-c1c3-44c1-94c6-61f1aa74c43d tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Task: {'id': task-2088009, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.165318} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2297.835904] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-da49d5ea-c1c3-44c1-94c6-61f1aa74c43d tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Deleted the datastore file {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2297.836108] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-da49d5ea-c1c3-44c1-94c6-61f1aa74c43d tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] [instance: 9fd3ea14-3e25-47f4-ae84-1b1fb8b1bbf6] Deleted contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2297.836307] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-da49d5ea-c1c3-44c1-94c6-61f1aa74c43d tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] [instance: 9fd3ea14-3e25-47f4-ae84-1b1fb8b1bbf6] Instance destroyed {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2297.836482] env[63279]: INFO nova.compute.manager [None req-da49d5ea-c1c3-44c1-94c6-61f1aa74c43d tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] [instance: 9fd3ea14-3e25-47f4-ae84-1b1fb8b1bbf6] Took 1.25 seconds to destroy the instance on the hypervisor. [ 2297.836791] env[63279]: DEBUG oslo.service.loopingcall [None req-da49d5ea-c1c3-44c1-94c6-61f1aa74c43d tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2297.837208] env[63279]: DEBUG nova.compute.manager [-] [instance: 9fd3ea14-3e25-47f4-ae84-1b1fb8b1bbf6] Deallocating network for instance {{(pid=63279) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2297.837280] env[63279]: DEBUG nova.network.neutron [-] [instance: 9fd3ea14-3e25-47f4-ae84-1b1fb8b1bbf6] deallocate_for_instance() {{(pid=63279) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2297.889439] env[63279]: DEBUG neutronclient.v2_0.client [-] Error message: {"NeutronError": {"type": "PortNotFound", "message": "Port 34ef38ab-d2fc-4fee-9782-e01851e13c39 could not be found.", "detail": ""}} {{(pid=63279) _handle_fault_response /opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py:262}} [ 2297.889702] env[63279]: DEBUG nova.network.neutron [-] Unable to show port 34ef38ab-d2fc-4fee-9782-e01851e13c39 as it no longer exists. {{(pid=63279) _unbind_ports /opt/stack/nova/nova/network/neutron.py:666}} [ 2297.910911] env[63279]: DEBUG oslo_vmware.api [None req-7ed1fc96-927f-4b0b-9714-e77ae6e649d6 tempest-ServersAdmin275Test-1209233635 tempest-ServersAdmin275Test-1209233635-project-member] Task: {'id': task-2088010, 'name': PowerOffVM_Task, 'duration_secs': 0.140367} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2297.911214] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-7ed1fc96-927f-4b0b-9714-e77ae6e649d6 tempest-ServersAdmin275Test-1209233635 tempest-ServersAdmin275Test-1209233635-project-member] [instance: 69c88844-84c7-4272-a2c4-051f1499df84] Powered off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2297.911434] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-7ed1fc96-927f-4b0b-9714-e77ae6e649d6 tempest-ServersAdmin275Test-1209233635 tempest-ServersAdmin275Test-1209233635-project-member] [instance: 69c88844-84c7-4272-a2c4-051f1499df84] Unregistering the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2297.911703] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-954159b9-0abf-4343-9cb9-666bfdd06e7d {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2297.938318] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-7ed1fc96-927f-4b0b-9714-e77ae6e649d6 tempest-ServersAdmin275Test-1209233635 tempest-ServersAdmin275Test-1209233635-project-member] [instance: 69c88844-84c7-4272-a2c4-051f1499df84] Unregistered the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2297.938591] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-7ed1fc96-927f-4b0b-9714-e77ae6e649d6 tempest-ServersAdmin275Test-1209233635 tempest-ServersAdmin275Test-1209233635-project-member] [instance: 69c88844-84c7-4272-a2c4-051f1499df84] Deleting contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2297.938780] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-7ed1fc96-927f-4b0b-9714-e77ae6e649d6 tempest-ServersAdmin275Test-1209233635 tempest-ServersAdmin275Test-1209233635-project-member] Deleting the datastore file [datastore1] 69c88844-84c7-4272-a2c4-051f1499df84 {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2297.939118] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2f502806-9220-443c-a382-1956a94c8c66 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2297.945623] env[63279]: DEBUG oslo_vmware.api [None req-7ed1fc96-927f-4b0b-9714-e77ae6e649d6 tempest-ServersAdmin275Test-1209233635 tempest-ServersAdmin275Test-1209233635-project-member] Waiting for the task: (returnval){ [ 2297.945623] env[63279]: value = "task-2088012" [ 2297.945623] env[63279]: _type = "Task" [ 2297.945623] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2297.956894] env[63279]: DEBUG oslo_vmware.api [None req-7ed1fc96-927f-4b0b-9714-e77ae6e649d6 tempest-ServersAdmin275Test-1209233635 tempest-ServersAdmin275Test-1209233635-project-member] Task: {'id': task-2088012, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2298.340279] env[63279]: DEBUG oslo_concurrency.lockutils [None req-14f74f74-0446-4405-97e3-a1859425c751 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Lock "6e452711-a79c-4560-b38f-9414c87e6683" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 10.604s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2298.459596] env[63279]: DEBUG oslo_vmware.api [None req-7ed1fc96-927f-4b0b-9714-e77ae6e649d6 tempest-ServersAdmin275Test-1209233635 tempest-ServersAdmin275Test-1209233635-project-member] Task: {'id': task-2088012, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.127814} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2298.459880] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-7ed1fc96-927f-4b0b-9714-e77ae6e649d6 tempest-ServersAdmin275Test-1209233635 tempest-ServersAdmin275Test-1209233635-project-member] Deleted the datastore file {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2298.460086] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-7ed1fc96-927f-4b0b-9714-e77ae6e649d6 tempest-ServersAdmin275Test-1209233635 tempest-ServersAdmin275Test-1209233635-project-member] [instance: 69c88844-84c7-4272-a2c4-051f1499df84] Deleted contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2298.460301] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-7ed1fc96-927f-4b0b-9714-e77ae6e649d6 tempest-ServersAdmin275Test-1209233635 tempest-ServersAdmin275Test-1209233635-project-member] [instance: 69c88844-84c7-4272-a2c4-051f1499df84] Instance destroyed {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2298.460489] env[63279]: INFO nova.compute.manager [None req-7ed1fc96-927f-4b0b-9714-e77ae6e649d6 tempest-ServersAdmin275Test-1209233635 tempest-ServersAdmin275Test-1209233635-project-member] [instance: 69c88844-84c7-4272-a2c4-051f1499df84] Took 1.08 seconds to destroy the instance on the hypervisor. [ 2298.460741] env[63279]: DEBUG oslo.service.loopingcall [None req-7ed1fc96-927f-4b0b-9714-e77ae6e649d6 tempest-ServersAdmin275Test-1209233635 tempest-ServersAdmin275Test-1209233635-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2298.461396] env[63279]: DEBUG nova.compute.manager [-] [instance: 69c88844-84c7-4272-a2c4-051f1499df84] Deallocating network for instance {{(pid=63279) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2298.461396] env[63279]: DEBUG nova.network.neutron [-] [instance: 69c88844-84c7-4272-a2c4-051f1499df84] deallocate_for_instance() {{(pid=63279) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2298.478211] env[63279]: DEBUG nova.network.neutron [-] [instance: 69c88844-84c7-4272-a2c4-051f1499df84] Instance cache missing network info. 
{{(pid=63279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2298.544282] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d1f9fd4-cc29-41c3-8c40-567be6221bb2 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2298.552529] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ced8367-4f94-4c13-82a8-2cade4e80455 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2298.587365] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-505e53ab-8b80-4b57-9c30-a8ca54f55535 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2298.595798] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e176a82c-c205-4071-b892-bbdd76c062bc {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2298.610386] env[63279]: DEBUG nova.compute.provider_tree [None req-72b15d70-c5f2-4ad1-8e6e-7fb848b8b6ec tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Inventory has not changed in ProviderTree for provider: 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2298.704271] env[63279]: DEBUG oslo_concurrency.lockutils [None req-9007d5a8-0d14-4da0-892a-51e06a67eea1 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Acquiring lock "d0b8c9dd-c683-4f3a-b819-d9d57d96636b" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2298.704271] env[63279]: DEBUG oslo_concurrency.lockutils [None req-9007d5a8-0d14-4da0-892a-51e06a67eea1 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Lock "d0b8c9dd-c683-4f3a-b819-d9d57d96636b" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.001s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2298.704271] env[63279]: DEBUG nova.compute.manager [None req-9007d5a8-0d14-4da0-892a-51e06a67eea1 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: d0b8c9dd-c683-4f3a-b819-d9d57d96636b] Going to confirm migration 5 {{(pid=63279) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:5250}} [ 2298.881092] env[63279]: DEBUG nova.network.neutron [-] [instance: 9fd3ea14-3e25-47f4-ae84-1b1fb8b1bbf6] Updating instance_info_cache with network_info: [] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2298.906067] env[63279]: DEBUG nova.compute.manager [req-d42e39e6-938a-404e-ba53-d478d0e3c611 req-d82b6a62-cb05-4133-98a1-18d2ec8bed89 service nova] [instance: 9fd3ea14-3e25-47f4-ae84-1b1fb8b1bbf6] Received event network-vif-deleted-0bd52488-d960-42c9-8077-fc8fe9eda956 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2298.981450] env[63279]: DEBUG nova.network.neutron [-] [instance: 69c88844-84c7-4272-a2c4-051f1499df84] Updating 
instance_info_cache with network_info: [] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2299.114174] env[63279]: DEBUG nova.scheduler.client.report [None req-72b15d70-c5f2-4ad1-8e6e-7fb848b8b6ec tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Inventory has not changed for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2299.242251] env[63279]: DEBUG oslo_concurrency.lockutils [None req-9007d5a8-0d14-4da0-892a-51e06a67eea1 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Acquiring lock "refresh_cache-d0b8c9dd-c683-4f3a-b819-d9d57d96636b" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2299.242542] env[63279]: DEBUG oslo_concurrency.lockutils [None req-9007d5a8-0d14-4da0-892a-51e06a67eea1 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Acquired lock "refresh_cache-d0b8c9dd-c683-4f3a-b819-d9d57d96636b" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2299.242841] env[63279]: DEBUG nova.network.neutron [None req-9007d5a8-0d14-4da0-892a-51e06a67eea1 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: d0b8c9dd-c683-4f3a-b819-d9d57d96636b] Building network info cache for instance {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2299.243350] env[63279]: DEBUG nova.objects.instance [None req-9007d5a8-0d14-4da0-892a-51e06a67eea1 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Lazy-loading 'info_cache' on Instance uuid d0b8c9dd-c683-4f3a-b819-d9d57d96636b {{(pid=63279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2299.383493] env[63279]: INFO nova.compute.manager [-] [instance: 9fd3ea14-3e25-47f4-ae84-1b1fb8b1bbf6] Took 1.55 seconds to deallocate network for instance. 
[ 2299.393894] env[63279]: DEBUG oslo_concurrency.lockutils [None req-4dcd6865-7eeb-43eb-8438-341d6fdde861 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Acquiring lock "df37c4f6-b492-4d6e-9ba1-950dbbb9a885" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2299.394173] env[63279]: DEBUG oslo_concurrency.lockutils [None req-4dcd6865-7eeb-43eb-8438-341d6fdde861 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Lock "df37c4f6-b492-4d6e-9ba1-950dbbb9a885" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2299.484017] env[63279]: INFO nova.compute.manager [-] [instance: 69c88844-84c7-4272-a2c4-051f1499df84] Took 1.02 seconds to deallocate network for instance. [ 2299.619621] env[63279]: DEBUG oslo_concurrency.lockutils [None req-72b15d70-c5f2-4ad1-8e6e-7fb848b8b6ec tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.814s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2299.622197] env[63279]: DEBUG oslo_concurrency.lockutils [None req-6f67b215-8f62-4129-ac71-2dac24e2ee13 tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 3.059s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2299.622482] env[63279]: DEBUG nova.objects.instance [None req-6f67b215-8f62-4129-ac71-2dac24e2ee13 tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] Lazy-loading 'resources' on Instance uuid 3de52a94-c1de-4b37-985c-9101417260e1 {{(pid=63279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2299.641884] env[63279]: INFO nova.scheduler.client.report [None req-72b15d70-c5f2-4ad1-8e6e-7fb848b8b6ec tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Deleted allocations for instance a15141bc-424d-48ca-a6d5-c859a3639a0b [ 2299.897027] env[63279]: DEBUG oslo_concurrency.lockutils [None req-da49d5ea-c1c3-44c1-94c6-61f1aa74c43d tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2299.897027] env[63279]: DEBUG nova.compute.manager [None req-4dcd6865-7eeb-43eb-8438-341d6fdde861 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] [instance: df37c4f6-b492-4d6e-9ba1-950dbbb9a885] Starting instance... 
{{(pid=63279) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 2299.990261] env[63279]: DEBUG oslo_concurrency.lockutils [None req-7ed1fc96-927f-4b0b-9714-e77ae6e649d6 tempest-ServersAdmin275Test-1209233635 tempest-ServersAdmin275Test-1209233635-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2300.150551] env[63279]: DEBUG oslo_concurrency.lockutils [None req-72b15d70-c5f2-4ad1-8e6e-7fb848b8b6ec tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Lock "a15141bc-424d-48ca-a6d5-c859a3639a0b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 11.768s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2300.341538] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48d07e21-825d-48bf-a403-1bbfe0b329f4 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2300.349548] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61e9f2cb-4e81-4f4a-935d-21bc7278881f {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2300.384382] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02f6db5f-0fb9-487d-9c05-2fd7fbf17ab7 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2300.393354] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff67bb76-0338-439c-8350-2c9fb5b84894 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2300.409641] env[63279]: DEBUG nova.compute.provider_tree [None req-6f67b215-8f62-4129-ac71-2dac24e2ee13 tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] Inventory has not changed in ProviderTree for provider: 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2300.425627] env[63279]: DEBUG oslo_concurrency.lockutils [None req-4dcd6865-7eeb-43eb-8438-341d6fdde861 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2300.485810] env[63279]: DEBUG nova.network.neutron [None req-9007d5a8-0d14-4da0-892a-51e06a67eea1 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: d0b8c9dd-c683-4f3a-b819-d9d57d96636b] Updating instance_info_cache with network_info: [{"id": "ee08f9b0-ce61-4fa5-bf67-3a97acafc55e", "address": "fa:16:3e:16:9a:ab", "network": {"id": "e2728625-1c28-407c-946b-97923d57b409", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1735124510-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", 
"version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a5a719a21fe248c49d0d0151d218866b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a9abd00f-2cea-40f8-9804-a56b6431192d", "external-id": "nsx-vlan-transportzone-639", "segmentation_id": 639, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapee08f9b0-ce", "ovs_interfaceid": "ee08f9b0-ce61-4fa5-bf67-3a97acafc55e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2300.916854] env[63279]: DEBUG nova.scheduler.client.report [None req-6f67b215-8f62-4129-ac71-2dac24e2ee13 tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] Inventory has not changed for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2300.988641] env[63279]: DEBUG oslo_concurrency.lockutils [None req-9007d5a8-0d14-4da0-892a-51e06a67eea1 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Releasing lock "refresh_cache-d0b8c9dd-c683-4f3a-b819-d9d57d96636b" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2300.989188] env[63279]: DEBUG nova.objects.instance [None req-9007d5a8-0d14-4da0-892a-51e06a67eea1 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Lazy-loading 'migration_context' on Instance uuid d0b8c9dd-c683-4f3a-b819-d9d57d96636b {{(pid=63279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2301.424052] env[63279]: DEBUG oslo_concurrency.lockutils [None req-6f67b215-8f62-4129-ac71-2dac24e2ee13 tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.802s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2301.427153] env[63279]: DEBUG oslo_concurrency.lockutils [None req-da49d5ea-c1c3-44c1-94c6-61f1aa74c43d tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.531s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2301.427415] env[63279]: DEBUG nova.objects.instance [None req-da49d5ea-c1c3-44c1-94c6-61f1aa74c43d tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Lazy-loading 'resources' on Instance uuid 9fd3ea14-3e25-47f4-ae84-1b1fb8b1bbf6 {{(pid=63279) obj_load_attr 
/opt/stack/nova/nova/objects/instance.py:1141}} [ 2301.447330] env[63279]: INFO nova.scheduler.client.report [None req-6f67b215-8f62-4129-ac71-2dac24e2ee13 tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] Deleted allocations for instance 3de52a94-c1de-4b37-985c-9101417260e1 [ 2301.492607] env[63279]: DEBUG nova.objects.base [None req-9007d5a8-0d14-4da0-892a-51e06a67eea1 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Object Instance lazy-loaded attributes: info_cache,migration_context {{(pid=63279) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 2301.493916] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fdbc7f63-f6ad-4bbc-9358-cf41fdcd0904 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2301.522519] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c9fd8c28-8c78-4458-bb68-2ca96745849d {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2301.531017] env[63279]: DEBUG oslo_vmware.api [None req-9007d5a8-0d14-4da0-892a-51e06a67eea1 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Waiting for the task: (returnval){ [ 2301.531017] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52548080-bf62-3c33-abd1-67f6b26db6e1" [ 2301.531017] env[63279]: _type = "Task" [ 2301.531017] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2301.538745] env[63279]: DEBUG oslo_vmware.api [None req-9007d5a8-0d14-4da0-892a-51e06a67eea1 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52548080-bf62-3c33-abd1-67f6b26db6e1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2301.958685] env[63279]: DEBUG oslo_concurrency.lockutils [None req-6f67b215-8f62-4129-ac71-2dac24e2ee13 tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] Lock "3de52a94-c1de-4b37-985c-9101417260e1" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 8.987s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2302.041525] env[63279]: DEBUG oslo_vmware.api [None req-9007d5a8-0d14-4da0-892a-51e06a67eea1 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52548080-bf62-3c33-abd1-67f6b26db6e1, 'name': SearchDatastore_Task, 'duration_secs': 0.011063} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2302.041840] env[63279]: DEBUG oslo_concurrency.lockutils [None req-9007d5a8-0d14-4da0-892a-51e06a67eea1 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2302.141912] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f2f186a-081e-4247-9249-e0b1b184471e {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2302.151236] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dafc1821-6dbc-4ed9-acc8-d11671556e42 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2302.184110] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11a64bb6-e62a-4631-9fd4-95ac2ce0eb13 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2302.191507] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0700e96a-5be3-4136-9708-eae70d2808c9 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2302.207596] env[63279]: DEBUG nova.compute.provider_tree [None req-da49d5ea-c1c3-44c1-94c6-61f1aa74c43d tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Inventory has not changed in ProviderTree for provider: 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2302.711338] env[63279]: DEBUG nova.scheduler.client.report [None req-da49d5ea-c1c3-44c1-94c6-61f1aa74c43d tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Inventory has not changed for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2302.719434] env[63279]: DEBUG oslo_concurrency.lockutils [None req-345ecece-5783-4496-827f-9af605e0dfd9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Acquiring lock "efddec10-b496-446e-a05a-72c9f2d86ed9" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2302.719711] env[63279]: DEBUG oslo_concurrency.lockutils [None req-345ecece-5783-4496-827f-9af605e0dfd9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Lock "efddec10-b496-446e-a05a-72c9f2d86ed9" acquired by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2303.221022] env[63279]: DEBUG oslo_concurrency.lockutils [None req-da49d5ea-c1c3-44c1-94c6-61f1aa74c43d tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.794s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2303.223633] env[63279]: DEBUG oslo_concurrency.lockutils [None req-7ed1fc96-927f-4b0b-9714-e77ae6e649d6 tempest-ServersAdmin275Test-1209233635 tempest-ServersAdmin275Test-1209233635-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 3.234s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2303.224304] env[63279]: DEBUG nova.objects.instance [None req-7ed1fc96-927f-4b0b-9714-e77ae6e649d6 tempest-ServersAdmin275Test-1209233635 tempest-ServersAdmin275Test-1209233635-project-member] Lazy-loading 'resources' on Instance uuid 69c88844-84c7-4272-a2c4-051f1499df84 {{(pid=63279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2303.225495] env[63279]: DEBUG nova.compute.manager [None req-345ecece-5783-4496-827f-9af605e0dfd9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] [instance: efddec10-b496-446e-a05a-72c9f2d86ed9] Starting instance... {{(pid=63279) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 2303.242792] env[63279]: INFO nova.scheduler.client.report [None req-da49d5ea-c1c3-44c1-94c6-61f1aa74c43d tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Deleted allocations for instance 9fd3ea14-3e25-47f4-ae84-1b1fb8b1bbf6 [ 2303.703598] env[63279]: DEBUG oslo_vmware.rw_handles [None req-a601b1b4-f84a-458b-8eca-889fd94aa5bd tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5253d3c7-08c4-6096-fa73-01a3a2d9adee/disk-0.vmdk. {{(pid=63279) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 2303.704554] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-036fe6ab-2735-4dea-9c04-140e376e3c3c {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2303.711031] env[63279]: DEBUG oslo_vmware.rw_handles [None req-a601b1b4-f84a-458b-8eca-889fd94aa5bd tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5253d3c7-08c4-6096-fa73-01a3a2d9adee/disk-0.vmdk is in state: ready. 
{{(pid=63279) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 2303.711207] env[63279]: ERROR oslo_vmware.rw_handles [None req-a601b1b4-f84a-458b-8eca-889fd94aa5bd tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5253d3c7-08c4-6096-fa73-01a3a2d9adee/disk-0.vmdk due to incomplete transfer. [ 2303.711430] env[63279]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-35c594ac-2ead-4dfa-b32b-3e3f81ceea96 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2303.718152] env[63279]: DEBUG oslo_vmware.rw_handles [None req-a601b1b4-f84a-458b-8eca-889fd94aa5bd tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5253d3c7-08c4-6096-fa73-01a3a2d9adee/disk-0.vmdk. {{(pid=63279) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 2303.718335] env[63279]: DEBUG nova.virt.vmwareapi.images [None req-a601b1b4-f84a-458b-8eca-889fd94aa5bd tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 686a7ce2-2d07-411e-91d6-0471c55c3728] Uploaded image c074a49b-943f-49a5-8e98-ef631d5709ab to the Glance image server {{(pid=63279) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 2303.720525] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-a601b1b4-f84a-458b-8eca-889fd94aa5bd tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 686a7ce2-2d07-411e-91d6-0471c55c3728] Destroying the VM {{(pid=63279) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 2303.720747] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-88cdfae5-db9f-4356-9090-0b49966ba610 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2303.726341] env[63279]: DEBUG oslo_vmware.api [None req-a601b1b4-f84a-458b-8eca-889fd94aa5bd tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Waiting for the task: (returnval){ [ 2303.726341] env[63279]: value = "task-2088014" [ 2303.726341] env[63279]: _type = "Task" [ 2303.726341] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2303.738973] env[63279]: DEBUG oslo_vmware.api [None req-a601b1b4-f84a-458b-8eca-889fd94aa5bd tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Task: {'id': task-2088014, 'name': Destroy_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2303.751934] env[63279]: DEBUG oslo_concurrency.lockutils [None req-345ecece-5783-4496-827f-9af605e0dfd9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2303.755356] env[63279]: DEBUG oslo_concurrency.lockutils [None req-da49d5ea-c1c3-44c1-94c6-61f1aa74c43d tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Lock "9fd3ea14-3e25-47f4-ae84-1b1fb8b1bbf6" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.674s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2303.756576] env[63279]: DEBUG oslo_concurrency.lockutils [req-8f0c9705-ad0a-4764-af61-4379effd5107 req-3e5ad985-5c7a-4178-8cb3-27a91af6ec43 service nova] Acquired lock "9fd3ea14-3e25-47f4-ae84-1b1fb8b1bbf6" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2303.757948] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7d0f554-b781-452f-9cb3-4d364f727300 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2303.765570] env[63279]: WARNING suds.client [-] Web service reported a SOAP processing fault using an unexpected HTTP status code 200. Reporting as an internal server error. [ 2303.765757] env[63279]: DEBUG oslo_vmware.api [-] Fault list: [ManagedObjectNotFound] {{(pid=63279) _invoke_api /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:337}} [ 2303.766134] env[63279]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-176f44b2-7da9-4b4e-9462-8e6e9c289835 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2303.777058] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9a11d55-a1c2-4efb-9c7f-4b9787e00680 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2303.815706] env[63279]: ERROR root [req-8f0c9705-ad0a-4764-af61-4379effd5107 req-3e5ad985-5c7a-4178-8cb3-27a91af6ec43 service nova] Original exception being dropped: ['Traceback (most recent call last):\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py", line 377, in request_handler\n response = request(managed_object, **kwargs)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/suds/client.py", line 586, in __call__\n return client.invoke(args, kwargs)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/suds/client.py", line 728, in invoke\n result = self.send(soapenv, timeout=timeout)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/suds/client.py", line 777, in send\n return self.process_reply(reply.message, None, None)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/suds/client.py", line 840, in process_reply\n raise WebFault(fault, replyroot)\n', "suds.WebFault: Server raised fault: 'The object 'vim.VirtualMachine:vm-427731' has already been deleted or has not been 
completely created'\n", '\nDuring handling of the above exception, another exception occurred:\n\n', 'Traceback (most recent call last):\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 301, in _invoke_api\n return api_method(*args, **kwargs)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/vim_util.py", line 480, in get_object_property\n props = get_object_properties(vim, moref, [property_name],\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/vim_util.py", line 360, in get_object_properties\n retrieve_result = vim.RetrievePropertiesEx(\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py", line 413, in request_handler\n raise exceptions.VimFaultException(fault_list, fault_string,\n', "oslo_vmware.exceptions.VimFaultException: The object 'vim.VirtualMachine:vm-427731' has already been deleted or has not been completely created\nCause: Server raised fault: 'The object 'vim.VirtualMachine:vm-427731' has already been deleted or has not been completely created'\nFaults: [ManagedObjectNotFound]\nDetails: {'obj': 'vm-427731'}\n", '\nDuring handling of the above exception, another exception occurred:\n\n', 'Traceback (most recent call last):\n', ' File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 123, in _call_method\n return self.invoke_api(module, method, self.vim, *args,\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 358, in invoke_api\n return _invoke_api(module, method, *args, **kwargs)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 122, in func\n return evt.wait()\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait\n result = hub.switch()\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch\n return self.greenlet.switch()\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 122, in _inner\n idle = self.f(*self.args, **self.kw)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 96, in _func\n result = f(*args, **kwargs)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 341, in _invoke_api\n raise clazz(str(excep),\n', "oslo_vmware.exceptions.ManagedObjectNotFoundException: The object 'vim.VirtualMachine:vm-427731' has already been deleted or has not been completely created\nCause: Server raised fault: 'The object 'vim.VirtualMachine:vm-427731' has already been deleted or has not been completely created'\nFaults: [ManagedObjectNotFound]\nDetails: {'obj': 'vm-427731'}\n"]: nova.exception.InstanceNotFound: Instance 9fd3ea14-3e25-47f4-ae84-1b1fb8b1bbf6 could not be found. [ 2303.816034] env[63279]: DEBUG oslo_concurrency.lockutils [req-8f0c9705-ad0a-4764-af61-4379effd5107 req-3e5ad985-5c7a-4178-8cb3-27a91af6ec43 service nova] Releasing lock "9fd3ea14-3e25-47f4-ae84-1b1fb8b1bbf6" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2303.816163] env[63279]: DEBUG nova.compute.manager [req-8f0c9705-ad0a-4764-af61-4379effd5107 req-3e5ad985-5c7a-4178-8cb3-27a91af6ec43 service nova] [instance: 9fd3ea14-3e25-47f4-ae84-1b1fb8b1bbf6] Detach interface failed, port_id=f3d164be-d41c-4e5a-97f5-a50cb539a06a, reason: Instance 9fd3ea14-3e25-47f4-ae84-1b1fb8b1bbf6 could not be found. 
{{(pid=63279) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11484}} [ 2303.816357] env[63279]: DEBUG nova.compute.manager [req-8f0c9705-ad0a-4764-af61-4379effd5107 req-3e5ad985-5c7a-4178-8cb3-27a91af6ec43 service nova] [instance: 6e452711-a79c-4560-b38f-9414c87e6683] Received event network-vif-deleted-fffc97fe-3975-45bb-8c80-f12056750eb4 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2303.816618] env[63279]: DEBUG nova.compute.manager [req-8f0c9705-ad0a-4764-af61-4379effd5107 req-3e5ad985-5c7a-4178-8cb3-27a91af6ec43 service nova] [instance: 9fd3ea14-3e25-47f4-ae84-1b1fb8b1bbf6] Received event network-vif-deleted-34ef38ab-d2fc-4fee-9782-e01851e13c39 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2303.816812] env[63279]: INFO nova.compute.manager [req-8f0c9705-ad0a-4764-af61-4379effd5107 req-3e5ad985-5c7a-4178-8cb3-27a91af6ec43 service nova] [instance: 9fd3ea14-3e25-47f4-ae84-1b1fb8b1bbf6] Neutron deleted interface 34ef38ab-d2fc-4fee-9782-e01851e13c39; detaching it from the instance and deleting it from the info cache [ 2303.817087] env[63279]: DEBUG nova.network.neutron [req-8f0c9705-ad0a-4764-af61-4379effd5107 req-3e5ad985-5c7a-4178-8cb3-27a91af6ec43 service nova] [instance: 9fd3ea14-3e25-47f4-ae84-1b1fb8b1bbf6] Updating instance_info_cache with network_info: [{"id": "0bd52488-d960-42c9-8077-fc8fe9eda956", "address": "fa:16:3e:16:45:3c", "network": {"id": "e0e614b7-de4b-485e-8824-582faae8febd", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1102025073-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.241", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b5b21bc5072e4945a19a782dd9561709", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "41278529-8bd2-44a1-97c8-03967faa3ff7", "external-id": "nsx-vlan-transportzone-749", "segmentation_id": 749, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0bd52488-d9", "ovs_interfaceid": "0bd52488-d960-42c9-8077-fc8fe9eda956", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2303.945561] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7d53fb4-27d6-43ed-9639-c179ad6d8016 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2303.954265] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77ab5518-8830-4f8e-bf1e-a53ccf834be3 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2304.007976] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-941684c9-575d-47e0-ad93-956fb2562b24 {{(pid=63279) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2304.016370] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a533c9e6-77c3-424c-8327-6eadc67f9c70 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2304.031377] env[63279]: DEBUG nova.compute.provider_tree [None req-7ed1fc96-927f-4b0b-9714-e77ae6e649d6 tempest-ServersAdmin275Test-1209233635 tempest-ServersAdmin275Test-1209233635-project-member] Inventory has not changed in ProviderTree for provider: 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2304.239742] env[63279]: DEBUG oslo_vmware.api [None req-a601b1b4-f84a-458b-8eca-889fd94aa5bd tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Task: {'id': task-2088014, 'name': Destroy_Task, 'duration_secs': 0.489561} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2304.240135] env[63279]: INFO nova.virt.vmwareapi.vm_util [None req-a601b1b4-f84a-458b-8eca-889fd94aa5bd tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 686a7ce2-2d07-411e-91d6-0471c55c3728] Destroyed the VM [ 2304.240545] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-a601b1b4-f84a-458b-8eca-889fd94aa5bd tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 686a7ce2-2d07-411e-91d6-0471c55c3728] Deleting Snapshot of the VM instance {{(pid=63279) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 2304.240927] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-7622d285-cc02-4f9d-af86-bce947de3130 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2304.248194] env[63279]: DEBUG oslo_vmware.api [None req-a601b1b4-f84a-458b-8eca-889fd94aa5bd tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Waiting for the task: (returnval){ [ 2304.248194] env[63279]: value = "task-2088015" [ 2304.248194] env[63279]: _type = "Task" [ 2304.248194] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2304.259378] env[63279]: DEBUG oslo_vmware.api [None req-a601b1b4-f84a-458b-8eca-889fd94aa5bd tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Task: {'id': task-2088015, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2304.322734] env[63279]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-77b42e80-f34e-4358-b963-6c47cd467d85 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2304.331896] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-299cade3-1b5b-4ec2-9369-28e2941d7312 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2304.364703] env[63279]: DEBUG nova.compute.manager [req-8f0c9705-ad0a-4764-af61-4379effd5107 req-3e5ad985-5c7a-4178-8cb3-27a91af6ec43 service nova] [instance: 9fd3ea14-3e25-47f4-ae84-1b1fb8b1bbf6] Detach interface failed, port_id=34ef38ab-d2fc-4fee-9782-e01851e13c39, reason: Instance 9fd3ea14-3e25-47f4-ae84-1b1fb8b1bbf6 could not be found. {{(pid=63279) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11484}} [ 2304.535205] env[63279]: DEBUG nova.scheduler.client.report [None req-7ed1fc96-927f-4b0b-9714-e77ae6e649d6 tempest-ServersAdmin275Test-1209233635 tempest-ServersAdmin275Test-1209233635-project-member] Inventory has not changed for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2304.759694] env[63279]: DEBUG oslo_vmware.api [None req-a601b1b4-f84a-458b-8eca-889fd94aa5bd tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Task: {'id': task-2088015, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2305.041148] env[63279]: DEBUG oslo_concurrency.lockutils [None req-7ed1fc96-927f-4b0b-9714-e77ae6e649d6 tempest-ServersAdmin275Test-1209233635 tempest-ServersAdmin275Test-1209233635-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.817s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2305.043618] env[63279]: DEBUG oslo_concurrency.lockutils [None req-4dcd6865-7eeb-43eb-8438-341d6fdde861 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 4.618s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2305.045126] env[63279]: INFO nova.compute.claims [None req-4dcd6865-7eeb-43eb-8438-341d6fdde861 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] [instance: df37c4f6-b492-4d6e-9ba1-950dbbb9a885] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2305.059672] env[63279]: INFO nova.scheduler.client.report [None req-7ed1fc96-927f-4b0b-9714-e77ae6e649d6 tempest-ServersAdmin275Test-1209233635 tempest-ServersAdmin275Test-1209233635-project-member] Deleted allocations for instance 69c88844-84c7-4272-a2c4-051f1499df84 [ 2305.259134] env[63279]: DEBUG oslo_vmware.api [None req-a601b1b4-f84a-458b-8eca-889fd94aa5bd tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Task: {'id': task-2088015, 'name': RemoveSnapshot_Task, 'duration_secs': 0.950756} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2305.259592] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-a601b1b4-f84a-458b-8eca-889fd94aa5bd tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 686a7ce2-2d07-411e-91d6-0471c55c3728] Deleted Snapshot of the VM instance {{(pid=63279) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 2305.259688] env[63279]: INFO nova.compute.manager [None req-a601b1b4-f84a-458b-8eca-889fd94aa5bd tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 686a7ce2-2d07-411e-91d6-0471c55c3728] Took 21.85 seconds to snapshot the instance on the hypervisor. 
[ 2305.417610] env[63279]: DEBUG oslo_concurrency.lockutils [None req-021f3c12-e37e-456c-8801-ea10fa46af88 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Acquiring lock "6f839780-be92-4d99-a96d-1fc14c819599" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2305.417843] env[63279]: DEBUG oslo_concurrency.lockutils [None req-021f3c12-e37e-456c-8801-ea10fa46af88 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Lock "6f839780-be92-4d99-a96d-1fc14c819599" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2305.568583] env[63279]: DEBUG oslo_concurrency.lockutils [None req-7ed1fc96-927f-4b0b-9714-e77ae6e649d6 tempest-ServersAdmin275Test-1209233635 tempest-ServersAdmin275Test-1209233635-project-member] Lock "69c88844-84c7-4272-a2c4-051f1499df84" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 9.809s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2305.822496] env[63279]: DEBUG nova.compute.manager [None req-a601b1b4-f84a-458b-8eca-889fd94aa5bd tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 686a7ce2-2d07-411e-91d6-0471c55c3728] Found 3 images (rotation: 2) {{(pid=63279) _rotate_backups /opt/stack/nova/nova/compute/manager.py:5017}} [ 2305.822694] env[63279]: DEBUG nova.compute.manager [None req-a601b1b4-f84a-458b-8eca-889fd94aa5bd tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 686a7ce2-2d07-411e-91d6-0471c55c3728] Rotating out 1 backups {{(pid=63279) _rotate_backups /opt/stack/nova/nova/compute/manager.py:5025}} [ 2305.822861] env[63279]: DEBUG nova.compute.manager [None req-a601b1b4-f84a-458b-8eca-889fd94aa5bd tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 686a7ce2-2d07-411e-91d6-0471c55c3728] Deleting image fbac5115-107e-4a20-ba81-7d5fa6c21bc8 {{(pid=63279) _rotate_backups /opt/stack/nova/nova/compute/manager.py:5030}} [ 2305.920697] env[63279]: DEBUG nova.compute.manager [None req-021f3c12-e37e-456c-8801-ea10fa46af88 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] [instance: 6f839780-be92-4d99-a96d-1fc14c819599] Starting instance... 
{{(pid=63279) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 2306.242738] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db6c3ceb-3589-4699-b0ea-405f6430d9da {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2306.250828] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-884caea2-26c3-4cc7-9185-85b21224386f {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2306.279603] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1aec26eb-cbd2-4677-b636-e4913c7344a6 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2306.286842] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f6489a4-0620-42b0-b44c-01fcca260e70 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2306.299721] env[63279]: DEBUG nova.compute.provider_tree [None req-4dcd6865-7eeb-43eb-8438-341d6fdde861 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Inventory has not changed in ProviderTree for provider: 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2306.320934] env[63279]: DEBUG oslo_concurrency.lockutils [None req-d2fac537-4001-44e0-bd9a-505fdb7a57ca tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] Acquiring lock "9b98a316-71da-45fb-b895-553f179fe7d9" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2306.321199] env[63279]: DEBUG oslo_concurrency.lockutils [None req-d2fac537-4001-44e0-bd9a-505fdb7a57ca tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] Lock "9b98a316-71da-45fb-b895-553f179fe7d9" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2306.321458] env[63279]: DEBUG oslo_concurrency.lockutils [None req-d2fac537-4001-44e0-bd9a-505fdb7a57ca tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] Acquiring lock "9b98a316-71da-45fb-b895-553f179fe7d9-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2306.321674] env[63279]: DEBUG oslo_concurrency.lockutils [None req-d2fac537-4001-44e0-bd9a-505fdb7a57ca tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] Lock "9b98a316-71da-45fb-b895-553f179fe7d9-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2306.321850] env[63279]: DEBUG oslo_concurrency.lockutils [None req-d2fac537-4001-44e0-bd9a-505fdb7a57ca tempest-VolumesAdminNegativeTest-772882930 
tempest-VolumesAdminNegativeTest-772882930-project-member] Lock "9b98a316-71da-45fb-b895-553f179fe7d9-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2306.323751] env[63279]: INFO nova.compute.manager [None req-d2fac537-4001-44e0-bd9a-505fdb7a57ca tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] [instance: 9b98a316-71da-45fb-b895-553f179fe7d9] Terminating instance [ 2306.441407] env[63279]: DEBUG oslo_concurrency.lockutils [None req-021f3c12-e37e-456c-8801-ea10fa46af88 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2306.802435] env[63279]: DEBUG nova.scheduler.client.report [None req-4dcd6865-7eeb-43eb-8438-341d6fdde861 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Inventory has not changed for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2306.827189] env[63279]: DEBUG nova.compute.manager [None req-d2fac537-4001-44e0-bd9a-505fdb7a57ca tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] [instance: 9b98a316-71da-45fb-b895-553f179fe7d9] Start destroying the instance on the hypervisor. 
{{(pid=63279) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2306.827409] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-d2fac537-4001-44e0-bd9a-505fdb7a57ca tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] [instance: 9b98a316-71da-45fb-b895-553f179fe7d9] Destroying instance {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2306.828318] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90fd320b-7ef9-4fce-aa71-85c1f7b1ab6e {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2306.836307] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-d2fac537-4001-44e0-bd9a-505fdb7a57ca tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] [instance: 9b98a316-71da-45fb-b895-553f179fe7d9] Powering off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2306.836549] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2263bd5d-2a0c-42a4-8adf-82d5e7bc9f1e {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2306.843123] env[63279]: DEBUG oslo_vmware.api [None req-d2fac537-4001-44e0-bd9a-505fdb7a57ca tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] Waiting for the task: (returnval){ [ 2306.843123] env[63279]: value = "task-2088016" [ 2306.843123] env[63279]: _type = "Task" [ 2306.843123] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2306.853283] env[63279]: DEBUG oslo_vmware.api [None req-d2fac537-4001-44e0-bd9a-505fdb7a57ca tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] Task: {'id': task-2088016, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2307.307388] env[63279]: DEBUG oslo_concurrency.lockutils [None req-4dcd6865-7eeb-43eb-8438-341d6fdde861 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.264s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2307.307919] env[63279]: DEBUG nova.compute.manager [None req-4dcd6865-7eeb-43eb-8438-341d6fdde861 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] [instance: df37c4f6-b492-4d6e-9ba1-950dbbb9a885] Start building networks asynchronously for instance. 
{{(pid=63279) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 2307.310506] env[63279]: DEBUG oslo_concurrency.lockutils [None req-9007d5a8-0d14-4da0-892a-51e06a67eea1 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 5.269s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2307.353109] env[63279]: DEBUG oslo_vmware.api [None req-d2fac537-4001-44e0-bd9a-505fdb7a57ca tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] Task: {'id': task-2088016, 'name': PowerOffVM_Task, 'duration_secs': 0.231915} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2307.353773] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-d2fac537-4001-44e0-bd9a-505fdb7a57ca tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] [instance: 9b98a316-71da-45fb-b895-553f179fe7d9] Powered off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2307.353773] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-d2fac537-4001-44e0-bd9a-505fdb7a57ca tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] [instance: 9b98a316-71da-45fb-b895-553f179fe7d9] Unregistering the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2307.353773] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-3cec7eea-181b-4747-8732-35a6e8cb0735 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2307.413216] env[63279]: DEBUG oslo_concurrency.lockutils [None req-7f5d90fd-66b2-457c-ad88-480a7d60f2b7 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Acquiring lock "686a7ce2-2d07-411e-91d6-0471c55c3728" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2307.413491] env[63279]: DEBUG oslo_concurrency.lockutils [None req-7f5d90fd-66b2-457c-ad88-480a7d60f2b7 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Lock "686a7ce2-2d07-411e-91d6-0471c55c3728" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2307.413682] env[63279]: DEBUG nova.compute.manager [None req-7f5d90fd-66b2-457c-ad88-480a7d60f2b7 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 686a7ce2-2d07-411e-91d6-0471c55c3728] Checking state {{(pid=63279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2307.415062] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c77e972-d3f9-4466-84d6-721e369a637b {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2307.421649] env[63279]: DEBUG nova.compute.manager [None req-7f5d90fd-66b2-457c-ad88-480a7d60f2b7 
tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 686a7ce2-2d07-411e-91d6-0471c55c3728] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=63279) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 2307.422222] env[63279]: DEBUG nova.objects.instance [None req-7f5d90fd-66b2-457c-ad88-480a7d60f2b7 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Lazy-loading 'flavor' on Instance uuid 686a7ce2-2d07-411e-91d6-0471c55c3728 {{(pid=63279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2307.565909] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-d2fac537-4001-44e0-bd9a-505fdb7a57ca tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] [instance: 9b98a316-71da-45fb-b895-553f179fe7d9] Unregistered the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2307.566167] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-d2fac537-4001-44e0-bd9a-505fdb7a57ca tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] [instance: 9b98a316-71da-45fb-b895-553f179fe7d9] Deleting contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2307.566361] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-d2fac537-4001-44e0-bd9a-505fdb7a57ca tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] Deleting the datastore file [datastore1] 9b98a316-71da-45fb-b895-553f179fe7d9 {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2307.566673] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b473469f-f707-4e57-932e-e25f1d10f374 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2307.573093] env[63279]: DEBUG oslo_vmware.api [None req-d2fac537-4001-44e0-bd9a-505fdb7a57ca tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] Waiting for the task: (returnval){ [ 2307.573093] env[63279]: value = "task-2088018" [ 2307.573093] env[63279]: _type = "Task" [ 2307.573093] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2307.581519] env[63279]: DEBUG oslo_vmware.api [None req-d2fac537-4001-44e0-bd9a-505fdb7a57ca tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] Task: {'id': task-2088018, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2307.814100] env[63279]: DEBUG nova.compute.utils [None req-4dcd6865-7eeb-43eb-8438-341d6fdde861 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Using /dev/sd instead of None {{(pid=63279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2307.815597] env[63279]: DEBUG nova.compute.manager [None req-4dcd6865-7eeb-43eb-8438-341d6fdde861 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] [instance: df37c4f6-b492-4d6e-9ba1-950dbbb9a885] Allocating IP information in the background. 
{{(pid=63279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 2307.815772] env[63279]: DEBUG nova.network.neutron [None req-4dcd6865-7eeb-43eb-8438-341d6fdde861 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] [instance: df37c4f6-b492-4d6e-9ba1-950dbbb9a885] allocate_for_instance() {{(pid=63279) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2307.868188] env[63279]: DEBUG nova.policy [None req-4dcd6865-7eeb-43eb-8438-341d6fdde861 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '55199c771de04067a936194078ef99f6', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4fcef39e334249afb9636455802059c5', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63279) authorize /opt/stack/nova/nova/policy.py:192}} [ 2308.010850] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f58b958-4517-4e73-82d5-e9605db1f64b {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2308.018746] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-241382ab-bc60-4332-9cd3-911e339ec8e0 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2308.052717] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-436731e7-40b7-4d36-bcf9-4ed5ba58667d {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2308.060237] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9286253d-e49e-487b-b4d9-0ab45bb6f86a {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2308.074278] env[63279]: DEBUG nova.compute.provider_tree [None req-9007d5a8-0d14-4da0-892a-51e06a67eea1 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Updating inventory in ProviderTree for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2308.084401] env[63279]: DEBUG oslo_vmware.api [None req-d2fac537-4001-44e0-bd9a-505fdb7a57ca tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] Task: {'id': task-2088018, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.127058} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2308.084543] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-d2fac537-4001-44e0-bd9a-505fdb7a57ca tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] Deleted the datastore file {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2308.084762] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-d2fac537-4001-44e0-bd9a-505fdb7a57ca tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] [instance: 9b98a316-71da-45fb-b895-553f179fe7d9] Deleted contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2308.084963] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-d2fac537-4001-44e0-bd9a-505fdb7a57ca tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] [instance: 9b98a316-71da-45fb-b895-553f179fe7d9] Instance destroyed {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2308.085179] env[63279]: INFO nova.compute.manager [None req-d2fac537-4001-44e0-bd9a-505fdb7a57ca tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] [instance: 9b98a316-71da-45fb-b895-553f179fe7d9] Took 1.26 seconds to destroy the instance on the hypervisor. [ 2308.085440] env[63279]: DEBUG oslo.service.loopingcall [None req-d2fac537-4001-44e0-bd9a-505fdb7a57ca tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2308.086355] env[63279]: DEBUG nova.compute.manager [-] [instance: 9b98a316-71da-45fb-b895-553f179fe7d9] Deallocating network for instance {{(pid=63279) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2308.086463] env[63279]: DEBUG nova.network.neutron [-] [instance: 9b98a316-71da-45fb-b895-553f179fe7d9] deallocate_for_instance() {{(pid=63279) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2308.147719] env[63279]: DEBUG nova.network.neutron [None req-4dcd6865-7eeb-43eb-8438-341d6fdde861 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] [instance: df37c4f6-b492-4d6e-9ba1-950dbbb9a885] Successfully created port: 086b189c-99af-4fd5-a587-66ec372c40fe {{(pid=63279) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2308.321797] env[63279]: DEBUG nova.compute.manager [None req-4dcd6865-7eeb-43eb-8438-341d6fdde861 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] [instance: df37c4f6-b492-4d6e-9ba1-950dbbb9a885] Start building block device mappings for instance. 
{{(pid=63279) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 2308.428660] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-7f5d90fd-66b2-457c-ad88-480a7d60f2b7 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 686a7ce2-2d07-411e-91d6-0471c55c3728] Powering off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2308.428981] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-22ae1f6c-47b6-4ea0-8794-bf1f410da287 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2308.436855] env[63279]: DEBUG oslo_vmware.api [None req-7f5d90fd-66b2-457c-ad88-480a7d60f2b7 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Waiting for the task: (returnval){ [ 2308.436855] env[63279]: value = "task-2088019" [ 2308.436855] env[63279]: _type = "Task" [ 2308.436855] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2308.445415] env[63279]: DEBUG oslo_vmware.api [None req-7f5d90fd-66b2-457c-ad88-480a7d60f2b7 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Task: {'id': task-2088019, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2308.608341] env[63279]: ERROR nova.scheduler.client.report [None req-9007d5a8-0d14-4da0-892a-51e06a67eea1 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [req-b9e2f9d2-6149-4245-9ec2-16a7b23e07c8] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 0ba7c625-a0fc-4d3c-b804-196d00f00137. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-b9e2f9d2-6149-4245-9ec2-16a7b23e07c8"}]} [ 2308.627818] env[63279]: DEBUG nova.scheduler.client.report [None req-9007d5a8-0d14-4da0-892a-51e06a67eea1 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Refreshing inventories for resource provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 2308.645434] env[63279]: DEBUG nova.scheduler.client.report [None req-9007d5a8-0d14-4da0-892a-51e06a67eea1 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Updating ProviderTree inventory for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 2308.645739] env[63279]: DEBUG nova.compute.provider_tree [None req-9007d5a8-0d14-4da0-892a-51e06a67eea1 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Updating inventory in ProviderTree for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2308.661593] env[63279]: DEBUG nova.scheduler.client.report [None req-9007d5a8-0d14-4da0-892a-51e06a67eea1 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Refreshing aggregate associations for resource provider 0ba7c625-a0fc-4d3c-b804-196d00f00137, aggregates: None {{(pid=63279) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 2308.681983] env[63279]: DEBUG nova.scheduler.client.report [None req-9007d5a8-0d14-4da0-892a-51e06a67eea1 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Refreshing trait associations for resource provider 0ba7c625-a0fc-4d3c-b804-196d00f00137, traits: COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK {{(pid=63279) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 2308.910091] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-037d98e9-124e-477c-abb7-71ad324a9e12 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2308.917942] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11e8929a-1728-46a2-9b04-112c6310d880 
{{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2308.961674] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ecc7aac1-1118-47d3-ab07-f52c88bcfebc {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2308.974989] env[63279]: DEBUG oslo_vmware.api [None req-7f5d90fd-66b2-457c-ad88-480a7d60f2b7 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Task: {'id': task-2088019, 'name': PowerOffVM_Task, 'duration_secs': 0.218798} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2308.976751] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a50990d8-e2f8-42b6-a9db-b930ea51d0a0 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2308.980969] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-7f5d90fd-66b2-457c-ad88-480a7d60f2b7 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 686a7ce2-2d07-411e-91d6-0471c55c3728] Powered off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2308.981392] env[63279]: DEBUG nova.compute.manager [None req-7f5d90fd-66b2-457c-ad88-480a7d60f2b7 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 686a7ce2-2d07-411e-91d6-0471c55c3728] Checking state {{(pid=63279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2308.982736] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b560d710-847d-4f3a-9ae9-66ce0c82d625 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2308.995692] env[63279]: DEBUG nova.compute.provider_tree [None req-9007d5a8-0d14-4da0-892a-51e06a67eea1 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Updating inventory in ProviderTree for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2309.002810] env[63279]: DEBUG nova.compute.manager [req-e12fe19d-6d64-4f27-a3da-de43757e7981 req-fe5285e1-bebb-4391-876d-624ad0870387 service nova] [instance: 9b98a316-71da-45fb-b895-553f179fe7d9] Received event network-vif-deleted-162a4844-743a-4cba-b137-f35170a3d072 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2309.002810] env[63279]: INFO nova.compute.manager [req-e12fe19d-6d64-4f27-a3da-de43757e7981 req-fe5285e1-bebb-4391-876d-624ad0870387 service nova] [instance: 9b98a316-71da-45fb-b895-553f179fe7d9] Neutron deleted interface 162a4844-743a-4cba-b137-f35170a3d072; detaching it from the instance and deleting it from the info cache [ 2309.002810] env[63279]: DEBUG nova.network.neutron 
[req-e12fe19d-6d64-4f27-a3da-de43757e7981 req-fe5285e1-bebb-4391-876d-624ad0870387 service nova] [instance: 9b98a316-71da-45fb-b895-553f179fe7d9] Updating instance_info_cache with network_info: [] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2309.333494] env[63279]: DEBUG nova.compute.manager [None req-4dcd6865-7eeb-43eb-8438-341d6fdde861 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] [instance: df37c4f6-b492-4d6e-9ba1-950dbbb9a885] Start spawning the instance on the hypervisor. {{(pid=63279) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 2309.360283] env[63279]: DEBUG nova.virt.hardware [None req-4dcd6865-7eeb-43eb-8438-341d6fdde861 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-13T17:30:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-13T17:30:01Z,direct_url=,disk_format='vmdk',id=30887889-e45b-4f67-8b3c-16216e594a90,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a935e86eee0a4c38adfe0367d2097a61',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-13T17:30:02Z,virtual_size=,visibility=), allow threads: False {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2309.360556] env[63279]: DEBUG nova.virt.hardware [None req-4dcd6865-7eeb-43eb-8438-341d6fdde861 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Flavor limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2309.360746] env[63279]: DEBUG nova.virt.hardware [None req-4dcd6865-7eeb-43eb-8438-341d6fdde861 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Image limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2309.360965] env[63279]: DEBUG nova.virt.hardware [None req-4dcd6865-7eeb-43eb-8438-341d6fdde861 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Flavor pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2309.361159] env[63279]: DEBUG nova.virt.hardware [None req-4dcd6865-7eeb-43eb-8438-341d6fdde861 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Image pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2309.361323] env[63279]: DEBUG nova.virt.hardware [None req-4dcd6865-7eeb-43eb-8438-341d6fdde861 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2309.361575] env[63279]: DEBUG nova.virt.hardware [None req-4dcd6865-7eeb-43eb-8438-341d6fdde861 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum 
VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 2309.361761] env[63279]: DEBUG nova.virt.hardware [None req-4dcd6865-7eeb-43eb-8438-341d6fdde861 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 2309.361971] env[63279]: DEBUG nova.virt.hardware [None req-4dcd6865-7eeb-43eb-8438-341d6fdde861 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Got 1 possible topologies {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2309.362161] env[63279]: DEBUG nova.virt.hardware [None req-4dcd6865-7eeb-43eb-8438-341d6fdde861 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2309.362357] env[63279]: DEBUG nova.virt.hardware [None req-4dcd6865-7eeb-43eb-8438-341d6fdde861 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2309.363302] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-778981f1-4cec-4231-8d11-e78e35629188 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2309.372153] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf6cbb70-2d7c-45aa-97d6-367fb16ea858 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2309.476189] env[63279]: DEBUG nova.network.neutron [-] [instance: 9b98a316-71da-45fb-b895-553f179fe7d9] Updating instance_info_cache with network_info: [] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2309.509075] env[63279]: DEBUG oslo_concurrency.lockutils [None req-7f5d90fd-66b2-457c-ad88-480a7d60f2b7 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Lock "686a7ce2-2d07-411e-91d6-0471c55c3728" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.095s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2309.510756] env[63279]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-0396a24d-174d-4ae6-95be-a3f6215c35a7 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2309.520658] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e41f41ef-dfc5-4feb-9fdb-c575d9e99fe3 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2309.537551] env[63279]: DEBUG nova.scheduler.client.report [None req-9007d5a8-0d14-4da0-892a-51e06a67eea1 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Updated inventory for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 with generation 145 in Placement from 
set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 2309.537818] env[63279]: DEBUG nova.compute.provider_tree [None req-9007d5a8-0d14-4da0-892a-51e06a67eea1 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Updating resource provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 generation from 145 to 146 during operation: update_inventory {{(pid=63279) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 2309.537937] env[63279]: DEBUG nova.compute.provider_tree [None req-9007d5a8-0d14-4da0-892a-51e06a67eea1 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Updating inventory in ProviderTree for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2309.557269] env[63279]: DEBUG nova.compute.manager [req-e12fe19d-6d64-4f27-a3da-de43757e7981 req-fe5285e1-bebb-4391-876d-624ad0870387 service nova] [instance: 9b98a316-71da-45fb-b895-553f179fe7d9] Detach interface failed, port_id=162a4844-743a-4cba-b137-f35170a3d072, reason: Instance 9b98a316-71da-45fb-b895-553f179fe7d9 could not be found. {{(pid=63279) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11484}} [ 2309.981557] env[63279]: INFO nova.compute.manager [-] [instance: 9b98a316-71da-45fb-b895-553f179fe7d9] Took 1.89 seconds to deallocate network for instance. 
[ 2310.084504] env[63279]: DEBUG nova.network.neutron [None req-4dcd6865-7eeb-43eb-8438-341d6fdde861 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] [instance: df37c4f6-b492-4d6e-9ba1-950dbbb9a885] Successfully updated port: 086b189c-99af-4fd5-a587-66ec372c40fe {{(pid=63279) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2310.488261] env[63279]: DEBUG oslo_concurrency.lockutils [None req-d2fac537-4001-44e0-bd9a-505fdb7a57ca tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2310.549176] env[63279]: DEBUG oslo_concurrency.lockutils [None req-9007d5a8-0d14-4da0-892a-51e06a67eea1 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 3.238s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2310.552185] env[63279]: DEBUG oslo_concurrency.lockutils [None req-345ecece-5783-4496-827f-9af605e0dfd9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 6.800s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2310.554012] env[63279]: INFO nova.compute.claims [None req-345ecece-5783-4496-827f-9af605e0dfd9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] [instance: efddec10-b496-446e-a05a-72c9f2d86ed9] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2310.585478] env[63279]: DEBUG nova.compute.manager [None req-12baa868-1215-4292-913a-54b1657dbe5e tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 686a7ce2-2d07-411e-91d6-0471c55c3728] Stashing vm_state: stopped {{(pid=63279) _prep_resize /opt/stack/nova/nova/compute/manager.py:6136}} [ 2310.589198] env[63279]: DEBUG oslo_concurrency.lockutils [None req-4dcd6865-7eeb-43eb-8438-341d6fdde861 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Acquiring lock "refresh_cache-df37c4f6-b492-4d6e-9ba1-950dbbb9a885" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2310.589364] env[63279]: DEBUG oslo_concurrency.lockutils [None req-4dcd6865-7eeb-43eb-8438-341d6fdde861 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Acquired lock "refresh_cache-df37c4f6-b492-4d6e-9ba1-950dbbb9a885" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2310.589498] env[63279]: DEBUG nova.network.neutron [None req-4dcd6865-7eeb-43eb-8438-341d6fdde861 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] [instance: df37c4f6-b492-4d6e-9ba1-950dbbb9a885] Building network info cache for instance {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2311.015741] env[63279]: DEBUG nova.compute.manager [req-05c154ea-37b9-48af-a62a-4c839f49afd0 
req-2ce4c090-c9c7-4510-968c-80daa8a962b1 service nova] [instance: df37c4f6-b492-4d6e-9ba1-950dbbb9a885] Received event network-vif-plugged-086b189c-99af-4fd5-a587-66ec372c40fe {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2311.016014] env[63279]: DEBUG oslo_concurrency.lockutils [req-05c154ea-37b9-48af-a62a-4c839f49afd0 req-2ce4c090-c9c7-4510-968c-80daa8a962b1 service nova] Acquiring lock "df37c4f6-b492-4d6e-9ba1-950dbbb9a885-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2311.016236] env[63279]: DEBUG oslo_concurrency.lockutils [req-05c154ea-37b9-48af-a62a-4c839f49afd0 req-2ce4c090-c9c7-4510-968c-80daa8a962b1 service nova] Lock "df37c4f6-b492-4d6e-9ba1-950dbbb9a885-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2311.016507] env[63279]: DEBUG oslo_concurrency.lockutils [req-05c154ea-37b9-48af-a62a-4c839f49afd0 req-2ce4c090-c9c7-4510-968c-80daa8a962b1 service nova] Lock "df37c4f6-b492-4d6e-9ba1-950dbbb9a885-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2311.016687] env[63279]: DEBUG nova.compute.manager [req-05c154ea-37b9-48af-a62a-4c839f49afd0 req-2ce4c090-c9c7-4510-968c-80daa8a962b1 service nova] [instance: df37c4f6-b492-4d6e-9ba1-950dbbb9a885] No waiting events found dispatching network-vif-plugged-086b189c-99af-4fd5-a587-66ec372c40fe {{(pid=63279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 2311.016859] env[63279]: WARNING nova.compute.manager [req-05c154ea-37b9-48af-a62a-4c839f49afd0 req-2ce4c090-c9c7-4510-968c-80daa8a962b1 service nova] [instance: df37c4f6-b492-4d6e-9ba1-950dbbb9a885] Received unexpected event network-vif-plugged-086b189c-99af-4fd5-a587-66ec372c40fe for instance with vm_state building and task_state spawning. [ 2311.017037] env[63279]: DEBUG nova.compute.manager [req-05c154ea-37b9-48af-a62a-4c839f49afd0 req-2ce4c090-c9c7-4510-968c-80daa8a962b1 service nova] [instance: df37c4f6-b492-4d6e-9ba1-950dbbb9a885] Received event network-changed-086b189c-99af-4fd5-a587-66ec372c40fe {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2311.017202] env[63279]: DEBUG nova.compute.manager [req-05c154ea-37b9-48af-a62a-4c839f49afd0 req-2ce4c090-c9c7-4510-968c-80daa8a962b1 service nova] [instance: df37c4f6-b492-4d6e-9ba1-950dbbb9a885] Refreshing instance network info cache due to event network-changed-086b189c-99af-4fd5-a587-66ec372c40fe. 
{{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11655}} [ 2311.017376] env[63279]: DEBUG oslo_concurrency.lockutils [req-05c154ea-37b9-48af-a62a-4c839f49afd0 req-2ce4c090-c9c7-4510-968c-80daa8a962b1 service nova] Acquiring lock "refresh_cache-df37c4f6-b492-4d6e-9ba1-950dbbb9a885" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2311.105713] env[63279]: DEBUG oslo_concurrency.lockutils [None req-12baa868-1215-4292-913a-54b1657dbe5e tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2311.114167] env[63279]: INFO nova.scheduler.client.report [None req-9007d5a8-0d14-4da0-892a-51e06a67eea1 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Deleted allocation for migration 060bd61a-428e-4028-bc25-f9b4d6b85fbb [ 2311.127848] env[63279]: DEBUG nova.network.neutron [None req-4dcd6865-7eeb-43eb-8438-341d6fdde861 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] [instance: df37c4f6-b492-4d6e-9ba1-950dbbb9a885] Instance cache missing network info. {{(pid=63279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2311.264264] env[63279]: DEBUG nova.network.neutron [None req-4dcd6865-7eeb-43eb-8438-341d6fdde861 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] [instance: df37c4f6-b492-4d6e-9ba1-950dbbb9a885] Updating instance_info_cache with network_info: [{"id": "086b189c-99af-4fd5-a587-66ec372c40fe", "address": "fa:16:3e:80:0a:6e", "network": {"id": "2caeac4f-4d6f-49f6-ad75-055171bad9b0", "bridge": "br-int", "label": "tempest-ServersTestJSON-1264030443-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4fcef39e334249afb9636455802059c5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9fa933df-d66f-485e-8cf9-eda7f1a7f283", "external-id": "nsx-vlan-transportzone-87", "segmentation_id": 87, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap086b189c-99", "ovs_interfaceid": "086b189c-99af-4fd5-a587-66ec372c40fe", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2311.619742] env[63279]: DEBUG oslo_concurrency.lockutils [None req-9007d5a8-0d14-4da0-892a-51e06a67eea1 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Lock "d0b8c9dd-c683-4f3a-b819-d9d57d96636b" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 12.917s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2311.740416] env[63279]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3dc0307a-3db2-4a05-93c8-3c65b9502aa2 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2311.748170] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b9f2e1d-21ce-460e-906a-73b8c758710e {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2311.779355] env[63279]: DEBUG oslo_concurrency.lockutils [None req-4dcd6865-7eeb-43eb-8438-341d6fdde861 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Releasing lock "refresh_cache-df37c4f6-b492-4d6e-9ba1-950dbbb9a885" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2311.779678] env[63279]: DEBUG nova.compute.manager [None req-4dcd6865-7eeb-43eb-8438-341d6fdde861 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] [instance: df37c4f6-b492-4d6e-9ba1-950dbbb9a885] Instance network_info: |[{"id": "086b189c-99af-4fd5-a587-66ec372c40fe", "address": "fa:16:3e:80:0a:6e", "network": {"id": "2caeac4f-4d6f-49f6-ad75-055171bad9b0", "bridge": "br-int", "label": "tempest-ServersTestJSON-1264030443-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4fcef39e334249afb9636455802059c5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9fa933df-d66f-485e-8cf9-eda7f1a7f283", "external-id": "nsx-vlan-transportzone-87", "segmentation_id": 87, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap086b189c-99", "ovs_interfaceid": "086b189c-99af-4fd5-a587-66ec372c40fe", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 2311.780537] env[63279]: DEBUG oslo_concurrency.lockutils [req-05c154ea-37b9-48af-a62a-4c839f49afd0 req-2ce4c090-c9c7-4510-968c-80daa8a962b1 service nova] Acquired lock "refresh_cache-df37c4f6-b492-4d6e-9ba1-950dbbb9a885" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2311.780723] env[63279]: DEBUG nova.network.neutron [req-05c154ea-37b9-48af-a62a-4c839f49afd0 req-2ce4c090-c9c7-4510-968c-80daa8a962b1 service nova] [instance: df37c4f6-b492-4d6e-9ba1-950dbbb9a885] Refreshing network info cache for port 086b189c-99af-4fd5-a587-66ec372c40fe {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2311.781856] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-4dcd6865-7eeb-43eb-8438-341d6fdde861 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] [instance: df37c4f6-b492-4d6e-9ba1-950dbbb9a885] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:80:0a:6e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '9fa933df-d66f-485e-8cf9-eda7f1a7f283', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 
'086b189c-99af-4fd5-a587-66ec372c40fe', 'vif_model': 'vmxnet3'}] {{(pid=63279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2311.788986] env[63279]: DEBUG oslo.service.loopingcall [None req-4dcd6865-7eeb-43eb-8438-341d6fdde861 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2311.789667] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af67b278-09ab-4f3a-bd9d-8ecdbc504618 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2311.794970] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: df37c4f6-b492-4d6e-9ba1-950dbbb9a885] Creating VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2311.795671] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-13f6524e-16bc-486e-babb-12897d4f44d3 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2311.816711] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28809514-860f-451a-93cf-b414bfa05098 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2311.820761] env[63279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2311.820761] env[63279]: value = "task-2088020" [ 2311.820761] env[63279]: _type = "Task" [ 2311.820761] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2311.832138] env[63279]: DEBUG nova.compute.provider_tree [None req-345ecece-5783-4496-827f-9af605e0dfd9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Inventory has not changed in ProviderTree for provider: 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2311.837921] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2088020, 'name': CreateVM_Task} progress is 6%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2312.001302] env[63279]: DEBUG nova.network.neutron [req-05c154ea-37b9-48af-a62a-4c839f49afd0 req-2ce4c090-c9c7-4510-968c-80daa8a962b1 service nova] [instance: df37c4f6-b492-4d6e-9ba1-950dbbb9a885] Updated VIF entry in instance network info cache for port 086b189c-99af-4fd5-a587-66ec372c40fe. 
{{(pid=63279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2312.001668] env[63279]: DEBUG nova.network.neutron [req-05c154ea-37b9-48af-a62a-4c839f49afd0 req-2ce4c090-c9c7-4510-968c-80daa8a962b1 service nova] [instance: df37c4f6-b492-4d6e-9ba1-950dbbb9a885] Updating instance_info_cache with network_info: [{"id": "086b189c-99af-4fd5-a587-66ec372c40fe", "address": "fa:16:3e:80:0a:6e", "network": {"id": "2caeac4f-4d6f-49f6-ad75-055171bad9b0", "bridge": "br-int", "label": "tempest-ServersTestJSON-1264030443-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4fcef39e334249afb9636455802059c5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9fa933df-d66f-485e-8cf9-eda7f1a7f283", "external-id": "nsx-vlan-transportzone-87", "segmentation_id": 87, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap086b189c-99", "ovs_interfaceid": "086b189c-99af-4fd5-a587-66ec372c40fe", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2312.331966] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2088020, 'name': CreateVM_Task, 'duration_secs': 0.438484} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2312.332155] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: df37c4f6-b492-4d6e-9ba1-950dbbb9a885] Created VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2312.332797] env[63279]: DEBUG oslo_concurrency.lockutils [None req-4dcd6865-7eeb-43eb-8438-341d6fdde861 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2312.332966] env[63279]: DEBUG oslo_concurrency.lockutils [None req-4dcd6865-7eeb-43eb-8438-341d6fdde861 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2312.333311] env[63279]: DEBUG oslo_concurrency.lockutils [None req-4dcd6865-7eeb-43eb-8438-341d6fdde861 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2312.333557] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-46824060-4203-4454-8728-22bb10cfd99a {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2312.337811] 
env[63279]: DEBUG oslo_vmware.api [None req-4dcd6865-7eeb-43eb-8438-341d6fdde861 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Waiting for the task: (returnval){ [ 2312.337811] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52953e0d-2b07-63aa-f004-9137a783a5f4" [ 2312.337811] env[63279]: _type = "Task" [ 2312.337811] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2312.341330] env[63279]: DEBUG nova.scheduler.client.report [None req-345ecece-5783-4496-827f-9af605e0dfd9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Inventory has not changed for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2312.348918] env[63279]: DEBUG oslo_vmware.api [None req-4dcd6865-7eeb-43eb-8438-341d6fdde861 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52953e0d-2b07-63aa-f004-9137a783a5f4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2312.504569] env[63279]: DEBUG oslo_concurrency.lockutils [req-05c154ea-37b9-48af-a62a-4c839f49afd0 req-2ce4c090-c9c7-4510-968c-80daa8a962b1 service nova] Releasing lock "refresh_cache-df37c4f6-b492-4d6e-9ba1-950dbbb9a885" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2312.731557] env[63279]: DEBUG oslo_concurrency.lockutils [None req-9007d5a8-0d14-4da0-892a-51e06a67eea1 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Acquiring lock "d0b8c9dd-c683-4f3a-b819-d9d57d96636b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2312.731853] env[63279]: DEBUG oslo_concurrency.lockutils [None req-9007d5a8-0d14-4da0-892a-51e06a67eea1 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Lock "d0b8c9dd-c683-4f3a-b819-d9d57d96636b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2312.732049] env[63279]: DEBUG oslo_concurrency.lockutils [None req-9007d5a8-0d14-4da0-892a-51e06a67eea1 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Acquiring lock "d0b8c9dd-c683-4f3a-b819-d9d57d96636b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2312.732251] env[63279]: DEBUG oslo_concurrency.lockutils [None req-9007d5a8-0d14-4da0-892a-51e06a67eea1 tempest-DeleteServersTestJSON-2126661104 
tempest-DeleteServersTestJSON-2126661104-project-member] Lock "d0b8c9dd-c683-4f3a-b819-d9d57d96636b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2312.732430] env[63279]: DEBUG oslo_concurrency.lockutils [None req-9007d5a8-0d14-4da0-892a-51e06a67eea1 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Lock "d0b8c9dd-c683-4f3a-b819-d9d57d96636b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2312.734468] env[63279]: INFO nova.compute.manager [None req-9007d5a8-0d14-4da0-892a-51e06a67eea1 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: d0b8c9dd-c683-4f3a-b819-d9d57d96636b] Terminating instance [ 2312.849041] env[63279]: DEBUG oslo_concurrency.lockutils [None req-345ecece-5783-4496-827f-9af605e0dfd9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.296s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2312.849041] env[63279]: DEBUG nova.compute.manager [None req-345ecece-5783-4496-827f-9af605e0dfd9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] [instance: efddec10-b496-446e-a05a-72c9f2d86ed9] Start building networks asynchronously for instance. {{(pid=63279) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 2312.851341] env[63279]: DEBUG oslo_vmware.api [None req-4dcd6865-7eeb-43eb-8438-341d6fdde861 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52953e0d-2b07-63aa-f004-9137a783a5f4, 'name': SearchDatastore_Task, 'duration_secs': 0.010936} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2312.851789] env[63279]: DEBUG oslo_concurrency.lockutils [None req-021f3c12-e37e-456c-8801-ea10fa46af88 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 6.411s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2312.853193] env[63279]: INFO nova.compute.claims [None req-021f3c12-e37e-456c-8801-ea10fa46af88 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] [instance: 6f839780-be92-4d99-a96d-1fc14c819599] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2312.855513] env[63279]: DEBUG oslo_concurrency.lockutils [None req-4dcd6865-7eeb-43eb-8438-341d6fdde861 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2312.855733] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-4dcd6865-7eeb-43eb-8438-341d6fdde861 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] [instance: df37c4f6-b492-4d6e-9ba1-950dbbb9a885] Processing image 30887889-e45b-4f67-8b3c-16216e594a90 {{(pid=63279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2312.855966] env[63279]: DEBUG oslo_concurrency.lockutils [None req-4dcd6865-7eeb-43eb-8438-341d6fdde861 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2312.856133] env[63279]: DEBUG oslo_concurrency.lockutils [None req-4dcd6865-7eeb-43eb-8438-341d6fdde861 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2312.856317] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-4dcd6865-7eeb-43eb-8438-341d6fdde861 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2312.856722] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e2ccccb9-9fa0-4f93-9980-679c867edb56 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2312.865108] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-4dcd6865-7eeb-43eb-8438-341d6fdde861 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2312.865292] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-4dcd6865-7eeb-43eb-8438-341d6fdde861 
tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2312.865979] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-40798f51-c109-4db9-b756-d85f26f76ba3 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2312.871056] env[63279]: DEBUG oslo_vmware.api [None req-4dcd6865-7eeb-43eb-8438-341d6fdde861 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Waiting for the task: (returnval){ [ 2312.871056] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]526503af-94d6-3125-eaf0-3e8e1ed00c66" [ 2312.871056] env[63279]: _type = "Task" [ 2312.871056] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2312.878426] env[63279]: DEBUG oslo_vmware.api [None req-4dcd6865-7eeb-43eb-8438-341d6fdde861 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]526503af-94d6-3125-eaf0-3e8e1ed00c66, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2313.237883] env[63279]: DEBUG nova.compute.manager [None req-9007d5a8-0d14-4da0-892a-51e06a67eea1 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: d0b8c9dd-c683-4f3a-b819-d9d57d96636b] Start destroying the instance on the hypervisor. {{(pid=63279) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2313.238140] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-9007d5a8-0d14-4da0-892a-51e06a67eea1 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: d0b8c9dd-c683-4f3a-b819-d9d57d96636b] Destroying instance {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2313.239018] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e35d32b1-d71a-4943-9340-4fa09e40d312 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2313.246926] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-9007d5a8-0d14-4da0-892a-51e06a67eea1 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: d0b8c9dd-c683-4f3a-b819-d9d57d96636b] Powering off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2313.247171] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-18e5104b-88b1-49a4-ad26-00bbcd6adadd {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2313.252777] env[63279]: DEBUG oslo_vmware.api [None req-9007d5a8-0d14-4da0-892a-51e06a67eea1 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Waiting for the task: (returnval){ [ 2313.252777] env[63279]: value = "task-2088021" [ 2313.252777] env[63279]: _type = "Task" [ 2313.252777] env[63279]: } to complete. 
{{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2313.260614] env[63279]: DEBUG oslo_vmware.api [None req-9007d5a8-0d14-4da0-892a-51e06a67eea1 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Task: {'id': task-2088021, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2313.353512] env[63279]: DEBUG nova.compute.utils [None req-345ecece-5783-4496-827f-9af605e0dfd9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Using /dev/sd instead of None {{(pid=63279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2313.354963] env[63279]: DEBUG nova.compute.manager [None req-345ecece-5783-4496-827f-9af605e0dfd9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] [instance: efddec10-b496-446e-a05a-72c9f2d86ed9] Allocating IP information in the background. {{(pid=63279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 2313.355212] env[63279]: DEBUG nova.network.neutron [None req-345ecece-5783-4496-827f-9af605e0dfd9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] [instance: efddec10-b496-446e-a05a-72c9f2d86ed9] allocate_for_instance() {{(pid=63279) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2313.381955] env[63279]: DEBUG oslo_vmware.api [None req-4dcd6865-7eeb-43eb-8438-341d6fdde861 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]526503af-94d6-3125-eaf0-3e8e1ed00c66, 'name': SearchDatastore_Task, 'duration_secs': 0.008748} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2313.382739] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-10ae2b1d-b7e8-42f5-a2e4-b1c9db2784d3 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2313.390902] env[63279]: DEBUG oslo_vmware.api [None req-4dcd6865-7eeb-43eb-8438-341d6fdde861 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Waiting for the task: (returnval){ [ 2313.390902] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52afce77-ad8f-1cdc-b770-e8de85164e65" [ 2313.390902] env[63279]: _type = "Task" [ 2313.390902] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2313.399518] env[63279]: DEBUG oslo_vmware.api [None req-4dcd6865-7eeb-43eb-8438-341d6fdde861 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52afce77-ad8f-1cdc-b770-e8de85164e65, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2313.401049] env[63279]: DEBUG nova.policy [None req-345ecece-5783-4496-827f-9af605e0dfd9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3d273c47f9cc4928a396c459dc0bb006', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '131db3d2daa24712b6e11592cf789b33', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63279) authorize /opt/stack/nova/nova/policy.py:192}} [ 2313.699921] env[63279]: DEBUG nova.network.neutron [None req-345ecece-5783-4496-827f-9af605e0dfd9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] [instance: efddec10-b496-446e-a05a-72c9f2d86ed9] Successfully created port: cfd0d031-3ccd-4ad0-9c99-6436a05c108d {{(pid=63279) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2313.762305] env[63279]: DEBUG oslo_vmware.api [None req-9007d5a8-0d14-4da0-892a-51e06a67eea1 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Task: {'id': task-2088021, 'name': PowerOffVM_Task, 'duration_secs': 0.213513} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2313.762592] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-9007d5a8-0d14-4da0-892a-51e06a67eea1 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: d0b8c9dd-c683-4f3a-b819-d9d57d96636b] Powered off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2313.762751] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-9007d5a8-0d14-4da0-892a-51e06a67eea1 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: d0b8c9dd-c683-4f3a-b819-d9d57d96636b] Unregistering the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2313.762997] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-32262f53-5504-4cec-afb2-a999e9314833 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2313.858599] env[63279]: DEBUG nova.compute.manager [None req-345ecece-5783-4496-827f-9af605e0dfd9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] [instance: efddec10-b496-446e-a05a-72c9f2d86ed9] Start building block device mappings for instance. {{(pid=63279) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 2313.900483] env[63279]: DEBUG oslo_vmware.api [None req-4dcd6865-7eeb-43eb-8438-341d6fdde861 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52afce77-ad8f-1cdc-b770-e8de85164e65, 'name': SearchDatastore_Task, 'duration_secs': 0.012183} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2313.903434] env[63279]: DEBUG oslo_concurrency.lockutils [None req-4dcd6865-7eeb-43eb-8438-341d6fdde861 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2313.903749] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-4dcd6865-7eeb-43eb-8438-341d6fdde861 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] df37c4f6-b492-4d6e-9ba1-950dbbb9a885/df37c4f6-b492-4d6e-9ba1-950dbbb9a885.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2313.904274] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9d95a19f-ac81-4d70-b1b5-0968fd1acfbb {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2313.913178] env[63279]: DEBUG oslo_vmware.api [None req-4dcd6865-7eeb-43eb-8438-341d6fdde861 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Waiting for the task: (returnval){ [ 2313.913178] env[63279]: value = "task-2088023" [ 2313.913178] env[63279]: _type = "Task" [ 2313.913178] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2313.924403] env[63279]: DEBUG oslo_vmware.api [None req-4dcd6865-7eeb-43eb-8438-341d6fdde861 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Task: {'id': task-2088023, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2314.067969] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5be46286-ec06-4e4d-8eae-5c464464069c {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2314.076144] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b9639f5-66c4-494b-9a5d-e6c9ac2c6562 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2314.107239] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b220a10-a9dc-4b1c-9ce9-0c47462dc91b {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2314.115112] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a834b0a6-853e-4283-ab60-f2c63b36ad8d {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2314.129466] env[63279]: DEBUG nova.compute.provider_tree [None req-021f3c12-e37e-456c-8801-ea10fa46af88 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Inventory has not changed in ProviderTree for provider: 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2314.426148] env[63279]: DEBUG oslo_vmware.api [None req-4dcd6865-7eeb-43eb-8438-341d6fdde861 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Task: {'id': task-2088023, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.454914} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2314.426419] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-4dcd6865-7eeb-43eb-8438-341d6fdde861 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] df37c4f6-b492-4d6e-9ba1-950dbbb9a885/df37c4f6-b492-4d6e-9ba1-950dbbb9a885.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2314.426634] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-4dcd6865-7eeb-43eb-8438-341d6fdde861 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] [instance: df37c4f6-b492-4d6e-9ba1-950dbbb9a885] Extending root virtual disk to 1048576 {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2314.426891] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-91c5d035-f8af-4072-970a-753e55796ee5 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2314.433417] env[63279]: DEBUG oslo_vmware.api [None req-4dcd6865-7eeb-43eb-8438-341d6fdde861 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Waiting for the task: (returnval){ [ 2314.433417] env[63279]: value = "task-2088024" [ 2314.433417] env[63279]: _type = "Task" [ 2314.433417] env[63279]: } to complete. 
{{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2314.442125] env[63279]: DEBUG oslo_vmware.api [None req-4dcd6865-7eeb-43eb-8438-341d6fdde861 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Task: {'id': task-2088024, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2314.633116] env[63279]: DEBUG nova.scheduler.client.report [None req-021f3c12-e37e-456c-8801-ea10fa46af88 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Inventory has not changed for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2314.872570] env[63279]: DEBUG nova.compute.manager [None req-345ecece-5783-4496-827f-9af605e0dfd9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] [instance: efddec10-b496-446e-a05a-72c9f2d86ed9] Start spawning the instance on the hypervisor. {{(pid=63279) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 2314.906903] env[63279]: DEBUG nova.virt.hardware [None req-345ecece-5783-4496-827f-9af605e0dfd9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-13T17:30:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-13T17:30:01Z,direct_url=,disk_format='vmdk',id=30887889-e45b-4f67-8b3c-16216e594a90,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a935e86eee0a4c38adfe0367d2097a61',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-13T17:30:02Z,virtual_size=,visibility=), allow threads: False {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2314.907162] env[63279]: DEBUG nova.virt.hardware [None req-345ecece-5783-4496-827f-9af605e0dfd9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Flavor limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2314.907324] env[63279]: DEBUG nova.virt.hardware [None req-345ecece-5783-4496-827f-9af605e0dfd9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Image limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2314.907510] env[63279]: DEBUG nova.virt.hardware [None req-345ecece-5783-4496-827f-9af605e0dfd9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Flavor pref 0:0:0 
{{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2314.907658] env[63279]: DEBUG nova.virt.hardware [None req-345ecece-5783-4496-827f-9af605e0dfd9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Image pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2314.907805] env[63279]: DEBUG nova.virt.hardware [None req-345ecece-5783-4496-827f-9af605e0dfd9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2314.908015] env[63279]: DEBUG nova.virt.hardware [None req-345ecece-5783-4496-827f-9af605e0dfd9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 2314.908197] env[63279]: DEBUG nova.virt.hardware [None req-345ecece-5783-4496-827f-9af605e0dfd9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 2314.908422] env[63279]: DEBUG nova.virt.hardware [None req-345ecece-5783-4496-827f-9af605e0dfd9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Got 1 possible topologies {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2314.908607] env[63279]: DEBUG nova.virt.hardware [None req-345ecece-5783-4496-827f-9af605e0dfd9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2314.908784] env[63279]: DEBUG nova.virt.hardware [None req-345ecece-5783-4496-827f-9af605e0dfd9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2314.909920] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42409ebd-f3e2-4037-b78c-66f6423983bc {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2314.917824] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-122a5ef5-1ead-4720-85d8-943216b4a573 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2314.940465] env[63279]: DEBUG oslo_vmware.api [None req-4dcd6865-7eeb-43eb-8438-341d6fdde861 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Task: {'id': task-2088024, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.0616} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2314.940704] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-4dcd6865-7eeb-43eb-8438-341d6fdde861 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] [instance: df37c4f6-b492-4d6e-9ba1-950dbbb9a885] Extended root virtual disk {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2314.941412] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72389802-3bd3-4899-a692-04a83466fff0 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2314.962897] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-4dcd6865-7eeb-43eb-8438-341d6fdde861 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] [instance: df37c4f6-b492-4d6e-9ba1-950dbbb9a885] Reconfiguring VM instance instance-00000067 to attach disk [datastore1] df37c4f6-b492-4d6e-9ba1-950dbbb9a885/df37c4f6-b492-4d6e-9ba1-950dbbb9a885.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2314.963161] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-90b70657-0a4f-4163-87f4-0ffd4c2c9fd0 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2314.981747] env[63279]: DEBUG oslo_vmware.api [None req-4dcd6865-7eeb-43eb-8438-341d6fdde861 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Waiting for the task: (returnval){ [ 2314.981747] env[63279]: value = "task-2088025" [ 2314.981747] env[63279]: _type = "Task" [ 2314.981747] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2314.989109] env[63279]: DEBUG oslo_vmware.api [None req-4dcd6865-7eeb-43eb-8438-341d6fdde861 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Task: {'id': task-2088025, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2315.138342] env[63279]: DEBUG oslo_concurrency.lockutils [None req-021f3c12-e37e-456c-8801-ea10fa46af88 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.286s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2315.138963] env[63279]: DEBUG nova.compute.manager [None req-021f3c12-e37e-456c-8801-ea10fa46af88 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] [instance: 6f839780-be92-4d99-a96d-1fc14c819599] Start building networks asynchronously for instance. 
{{(pid=63279) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 2315.141884] env[63279]: DEBUG oslo_concurrency.lockutils [None req-d2fac537-4001-44e0-bd9a-505fdb7a57ca tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 4.654s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2315.143564] env[63279]: DEBUG nova.objects.instance [None req-d2fac537-4001-44e0-bd9a-505fdb7a57ca tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] Lazy-loading 'resources' on Instance uuid 9b98a316-71da-45fb-b895-553f179fe7d9 {{(pid=63279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2315.493051] env[63279]: DEBUG oslo_vmware.api [None req-4dcd6865-7eeb-43eb-8438-341d6fdde861 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Task: {'id': task-2088025, 'name': ReconfigVM_Task, 'duration_secs': 0.284418} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2315.493051] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-4dcd6865-7eeb-43eb-8438-341d6fdde861 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] [instance: df37c4f6-b492-4d6e-9ba1-950dbbb9a885] Reconfigured VM instance instance-00000067 to attach disk [datastore1] df37c4f6-b492-4d6e-9ba1-950dbbb9a885/df37c4f6-b492-4d6e-9ba1-950dbbb9a885.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2315.493345] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-1fc4e584-4e67-4526-8666-5ee78ff3d610 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2315.500588] env[63279]: DEBUG oslo_vmware.api [None req-4dcd6865-7eeb-43eb-8438-341d6fdde861 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Waiting for the task: (returnval){ [ 2315.500588] env[63279]: value = "task-2088026" [ 2315.500588] env[63279]: _type = "Task" [ 2315.500588] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2315.511017] env[63279]: DEBUG oslo_vmware.api [None req-4dcd6865-7eeb-43eb-8438-341d6fdde861 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Task: {'id': task-2088026, 'name': Rename_Task} progress is 5%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2315.644814] env[63279]: DEBUG nova.compute.utils [None req-021f3c12-e37e-456c-8801-ea10fa46af88 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Using /dev/sd instead of None {{(pid=63279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2315.646366] env[63279]: DEBUG nova.compute.manager [None req-021f3c12-e37e-456c-8801-ea10fa46af88 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] [instance: 6f839780-be92-4d99-a96d-1fc14c819599] Allocating IP information in the background. 
{{(pid=63279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 2315.646628] env[63279]: DEBUG nova.network.neutron [None req-021f3c12-e37e-456c-8801-ea10fa46af88 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] [instance: 6f839780-be92-4d99-a96d-1fc14c819599] allocate_for_instance() {{(pid=63279) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2315.699041] env[63279]: DEBUG nova.policy [None req-021f3c12-e37e-456c-8801-ea10fa46af88 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6343b207f7294f5fa2a8111940083fb0', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b5b21bc5072e4945a19a782dd9561709', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63279) authorize /opt/stack/nova/nova/policy.py:192}} [ 2315.829022] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad887eec-6c74-4aed-888c-61244a5271d9 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2315.837167] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c5bc5c0-c657-4cc0-8a82-28912d5eea91 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2315.867796] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42c9afe5-ceed-4963-a7bf-8c95788550b1 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2315.875146] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d880efaf-c250-47b7-a477-d523d58a611e {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2315.887820] env[63279]: DEBUG nova.compute.provider_tree [None req-d2fac537-4001-44e0-bd9a-505fdb7a57ca tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] Updating inventory in ProviderTree for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2315.965356] env[63279]: DEBUG nova.network.neutron [None req-021f3c12-e37e-456c-8801-ea10fa46af88 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] [instance: 6f839780-be92-4d99-a96d-1fc14c819599] Successfully created port: d0192def-50a0-40c0-9921-fbdf13e63ffb {{(pid=63279) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2316.011484] env[63279]: DEBUG oslo_vmware.api [None req-4dcd6865-7eeb-43eb-8438-341d6fdde861 
tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Task: {'id': task-2088026, 'name': Rename_Task, 'duration_secs': 0.140436} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2316.011484] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-4dcd6865-7eeb-43eb-8438-341d6fdde861 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] [instance: df37c4f6-b492-4d6e-9ba1-950dbbb9a885] Powering on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2316.011565] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e508660e-4991-4a4e-8922-9d43ec4a0696 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2316.018038] env[63279]: DEBUG oslo_vmware.api [None req-4dcd6865-7eeb-43eb-8438-341d6fdde861 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Waiting for the task: (returnval){ [ 2316.018038] env[63279]: value = "task-2088027" [ 2316.018038] env[63279]: _type = "Task" [ 2316.018038] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2316.025873] env[63279]: DEBUG oslo_vmware.api [None req-4dcd6865-7eeb-43eb-8438-341d6fdde861 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Task: {'id': task-2088027, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2316.154196] env[63279]: DEBUG nova.compute.manager [None req-021f3c12-e37e-456c-8801-ea10fa46af88 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] [instance: 6f839780-be92-4d99-a96d-1fc14c819599] Start building block device mappings for instance. {{(pid=63279) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 2316.407481] env[63279]: ERROR nova.scheduler.client.report [None req-d2fac537-4001-44e0-bd9a-505fdb7a57ca tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] [req-f9372d1b-d38c-4ba6-a9a7-a48c26454264] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 0ba7c625-a0fc-4d3c-b804-196d00f00137. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-f9372d1b-d38c-4ba6-a9a7-a48c26454264"}]} [ 2316.425947] env[63279]: DEBUG nova.scheduler.client.report [None req-d2fac537-4001-44e0-bd9a-505fdb7a57ca tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] Refreshing inventories for resource provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 2316.441423] env[63279]: DEBUG nova.scheduler.client.report [None req-d2fac537-4001-44e0-bd9a-505fdb7a57ca tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] Updating ProviderTree inventory for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 2316.441663] env[63279]: DEBUG nova.compute.provider_tree [None req-d2fac537-4001-44e0-bd9a-505fdb7a57ca tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] Updating inventory in ProviderTree for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2316.453051] env[63279]: DEBUG nova.scheduler.client.report [None req-d2fac537-4001-44e0-bd9a-505fdb7a57ca tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] Refreshing aggregate associations for resource provider 0ba7c625-a0fc-4d3c-b804-196d00f00137, aggregates: None {{(pid=63279) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 2316.471303] env[63279]: DEBUG nova.scheduler.client.report [None req-d2fac537-4001-44e0-bd9a-505fdb7a57ca tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] Refreshing trait associations for resource provider 0ba7c625-a0fc-4d3c-b804-196d00f00137, traits: COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK {{(pid=63279) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 2316.532555] env[63279]: DEBUG oslo_vmware.api [None req-4dcd6865-7eeb-43eb-8438-341d6fdde861 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Task: {'id': task-2088027, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2316.640219] env[63279]: DEBUG oslo_concurrency.lockutils [None req-ba4b1409-29ce-4938-83f7-10d9c1416631 tempest-ServersTestFqdnHostnames-1241255271 tempest-ServersTestFqdnHostnames-1241255271-project-member] Acquiring lock "27e2917d-3cd0-4ad3-ab65-f85f7d97225f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2316.640549] env[63279]: DEBUG oslo_concurrency.lockutils [None req-ba4b1409-29ce-4938-83f7-10d9c1416631 tempest-ServersTestFqdnHostnames-1241255271 tempest-ServersTestFqdnHostnames-1241255271-project-member] Lock "27e2917d-3cd0-4ad3-ab65-f85f7d97225f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2316.640763] env[63279]: DEBUG oslo_concurrency.lockutils [None req-ba4b1409-29ce-4938-83f7-10d9c1416631 tempest-ServersTestFqdnHostnames-1241255271 tempest-ServersTestFqdnHostnames-1241255271-project-member] Acquiring lock "27e2917d-3cd0-4ad3-ab65-f85f7d97225f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2316.640950] env[63279]: DEBUG oslo_concurrency.lockutils [None req-ba4b1409-29ce-4938-83f7-10d9c1416631 tempest-ServersTestFqdnHostnames-1241255271 tempest-ServersTestFqdnHostnames-1241255271-project-member] Lock "27e2917d-3cd0-4ad3-ab65-f85f7d97225f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2316.642035] env[63279]: DEBUG oslo_concurrency.lockutils [None req-ba4b1409-29ce-4938-83f7-10d9c1416631 tempest-ServersTestFqdnHostnames-1241255271 tempest-ServersTestFqdnHostnames-1241255271-project-member] Lock "27e2917d-3cd0-4ad3-ab65-f85f7d97225f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2316.643389] env[63279]: INFO nova.compute.manager [None req-ba4b1409-29ce-4938-83f7-10d9c1416631 tempest-ServersTestFqdnHostnames-1241255271 tempest-ServersTestFqdnHostnames-1241255271-project-member] [instance: 27e2917d-3cd0-4ad3-ab65-f85f7d97225f] Terminating instance [ 2316.768688] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8cd9c94-b016-498d-b2cf-704a77871c93 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2316.776220] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8380120d-a1ca-4c24-b3be-3fdbf32939d7 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2316.804960] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa20f6e5-4a96-4a11-98bb-b0d3b8eb2aa0 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2316.811693] 
env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67d826aa-d2f1-4c5d-9527-1103101be097 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2316.824219] env[63279]: DEBUG nova.compute.provider_tree [None req-d2fac537-4001-44e0-bd9a-505fdb7a57ca tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] Updating inventory in ProviderTree for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2316.934311] env[63279]: INFO nova.compute.manager [None req-b005ee8f-e9e3-4447-976a-ba049f2d4563 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: 1fca4e5c-fe2c-4b61-bed4-52c7770def7c] Rebuilding instance [ 2316.982703] env[63279]: DEBUG nova.compute.manager [None req-b005ee8f-e9e3-4447-976a-ba049f2d4563 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: 1fca4e5c-fe2c-4b61-bed4-52c7770def7c] Checking state {{(pid=63279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2316.983740] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-edc33dfa-d6cf-4d7b-b9ed-e78fcd7dc798 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2317.029762] env[63279]: DEBUG oslo_vmware.api [None req-4dcd6865-7eeb-43eb-8438-341d6fdde861 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Task: {'id': task-2088027, 'name': PowerOnVM_Task, 'duration_secs': 0.995244} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2317.030091] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-4dcd6865-7eeb-43eb-8438-341d6fdde861 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] [instance: df37c4f6-b492-4d6e-9ba1-950dbbb9a885] Powered on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2317.030334] env[63279]: INFO nova.compute.manager [None req-4dcd6865-7eeb-43eb-8438-341d6fdde861 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] [instance: df37c4f6-b492-4d6e-9ba1-950dbbb9a885] Took 7.70 seconds to spawn the instance on the hypervisor. 
[ 2317.030541] env[63279]: DEBUG nova.compute.manager [None req-4dcd6865-7eeb-43eb-8438-341d6fdde861 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] [instance: df37c4f6-b492-4d6e-9ba1-950dbbb9a885] Checking state {{(pid=63279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2317.031396] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a813b5df-8dcf-4850-8605-d0a5695b3fec {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2317.149657] env[63279]: DEBUG nova.compute.manager [None req-ba4b1409-29ce-4938-83f7-10d9c1416631 tempest-ServersTestFqdnHostnames-1241255271 tempest-ServersTestFqdnHostnames-1241255271-project-member] [instance: 27e2917d-3cd0-4ad3-ab65-f85f7d97225f] Start destroying the instance on the hypervisor. {{(pid=63279) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2317.149884] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-ba4b1409-29ce-4938-83f7-10d9c1416631 tempest-ServersTestFqdnHostnames-1241255271 tempest-ServersTestFqdnHostnames-1241255271-project-member] [instance: 27e2917d-3cd0-4ad3-ab65-f85f7d97225f] Destroying instance {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2317.150828] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c552bb6-20fa-4e69-87fa-014479c77650 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2317.158240] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-ba4b1409-29ce-4938-83f7-10d9c1416631 tempest-ServersTestFqdnHostnames-1241255271 tempest-ServersTestFqdnHostnames-1241255271-project-member] [instance: 27e2917d-3cd0-4ad3-ab65-f85f7d97225f] Powering off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2317.158488] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c679714c-7c38-4918-ba5f-52ee31b86013 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2317.160722] env[63279]: DEBUG nova.compute.manager [None req-021f3c12-e37e-456c-8801-ea10fa46af88 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] [instance: 6f839780-be92-4d99-a96d-1fc14c819599] Start spawning the instance on the hypervisor. {{(pid=63279) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 2317.167367] env[63279]: DEBUG oslo_vmware.api [None req-ba4b1409-29ce-4938-83f7-10d9c1416631 tempest-ServersTestFqdnHostnames-1241255271 tempest-ServersTestFqdnHostnames-1241255271-project-member] Waiting for the task: (returnval){ [ 2317.167367] env[63279]: value = "task-2088028" [ 2317.167367] env[63279]: _type = "Task" [ 2317.167367] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2317.174460] env[63279]: DEBUG oslo_vmware.api [None req-ba4b1409-29ce-4938-83f7-10d9c1416631 tempest-ServersTestFqdnHostnames-1241255271 tempest-ServersTestFqdnHostnames-1241255271-project-member] Task: {'id': task-2088028, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2317.182423] env[63279]: DEBUG nova.virt.hardware [None req-021f3c12-e37e-456c-8801-ea10fa46af88 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-13T17:30:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-13T17:30:01Z,direct_url=,disk_format='vmdk',id=30887889-e45b-4f67-8b3c-16216e594a90,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a935e86eee0a4c38adfe0367d2097a61',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-13T17:30:02Z,virtual_size=,visibility=), allow threads: False {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2317.182660] env[63279]: DEBUG nova.virt.hardware [None req-021f3c12-e37e-456c-8801-ea10fa46af88 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Flavor limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2317.182824] env[63279]: DEBUG nova.virt.hardware [None req-021f3c12-e37e-456c-8801-ea10fa46af88 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Image limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2317.183017] env[63279]: DEBUG nova.virt.hardware [None req-021f3c12-e37e-456c-8801-ea10fa46af88 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Flavor pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2317.183174] env[63279]: DEBUG nova.virt.hardware [None req-021f3c12-e37e-456c-8801-ea10fa46af88 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Image pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2317.183321] env[63279]: DEBUG nova.virt.hardware [None req-021f3c12-e37e-456c-8801-ea10fa46af88 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2317.183548] env[63279]: DEBUG nova.virt.hardware [None req-021f3c12-e37e-456c-8801-ea10fa46af88 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 2317.183709] env[63279]: DEBUG nova.virt.hardware [None req-021f3c12-e37e-456c-8801-ea10fa46af88 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 
2317.183872] env[63279]: DEBUG nova.virt.hardware [None req-021f3c12-e37e-456c-8801-ea10fa46af88 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Got 1 possible topologies {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2317.184043] env[63279]: DEBUG nova.virt.hardware [None req-021f3c12-e37e-456c-8801-ea10fa46af88 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2317.184221] env[63279]: DEBUG nova.virt.hardware [None req-021f3c12-e37e-456c-8801-ea10fa46af88 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2317.184950] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40f40449-5554-43d8-b88f-1ea827506b2b {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2317.191594] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92ef2572-8a3c-4773-aebb-76a93eb222b0 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2317.357940] env[63279]: DEBUG nova.scheduler.client.report [None req-d2fac537-4001-44e0-bd9a-505fdb7a57ca tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] Updated inventory for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 with generation 149 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 2317.358225] env[63279]: DEBUG nova.compute.provider_tree [None req-d2fac537-4001-44e0-bd9a-505fdb7a57ca tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] Updating resource provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 generation from 149 to 150 during operation: update_inventory {{(pid=63279) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 2317.358461] env[63279]: DEBUG nova.compute.provider_tree [None req-d2fac537-4001-44e0-bd9a-505fdb7a57ca tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] Updating inventory in ProviderTree for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 
2317.550072] env[63279]: INFO nova.compute.manager [None req-4dcd6865-7eeb-43eb-8438-341d6fdde861 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] [instance: df37c4f6-b492-4d6e-9ba1-950dbbb9a885] Took 17.14 seconds to build instance. [ 2317.677087] env[63279]: DEBUG oslo_vmware.api [None req-ba4b1409-29ce-4938-83f7-10d9c1416631 tempest-ServersTestFqdnHostnames-1241255271 tempest-ServersTestFqdnHostnames-1241255271-project-member] Task: {'id': task-2088028, 'name': PowerOffVM_Task, 'duration_secs': 0.180717} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2317.677388] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-ba4b1409-29ce-4938-83f7-10d9c1416631 tempest-ServersTestFqdnHostnames-1241255271 tempest-ServersTestFqdnHostnames-1241255271-project-member] [instance: 27e2917d-3cd0-4ad3-ab65-f85f7d97225f] Powered off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2317.677566] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-ba4b1409-29ce-4938-83f7-10d9c1416631 tempest-ServersTestFqdnHostnames-1241255271 tempest-ServersTestFqdnHostnames-1241255271-project-member] [instance: 27e2917d-3cd0-4ad3-ab65-f85f7d97225f] Unregistering the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2317.677821] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-845a0c76-f868-425c-acac-db15ffbc8133 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2317.732678] env[63279]: DEBUG oslo_concurrency.lockutils [None req-ea96f37b-0755-4740-9b8a-a16db021caec tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Acquiring lock "19e10ee4-99d1-44b9-9354-4c162d541a1f" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2317.732931] env[63279]: DEBUG oslo_concurrency.lockutils [None req-ea96f37b-0755-4740-9b8a-a16db021caec tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Lock "19e10ee4-99d1-44b9-9354-4c162d541a1f" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2317.864512] env[63279]: DEBUG oslo_concurrency.lockutils [None req-d2fac537-4001-44e0-bd9a-505fdb7a57ca tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.721s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2317.865265] env[63279]: DEBUG oslo_concurrency.lockutils [None req-12baa868-1215-4292-913a-54b1657dbe5e tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 6.760s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2317.886195] env[63279]: INFO nova.scheduler.client.report [None req-d2fac537-4001-44e0-bd9a-505fdb7a57ca 
tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] Deleted allocations for instance 9b98a316-71da-45fb-b895-553f179fe7d9 [ 2317.999905] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-b005ee8f-e9e3-4447-976a-ba049f2d4563 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: 1fca4e5c-fe2c-4b61-bed4-52c7770def7c] Powering off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2318.000529] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1269b00f-efdb-40fd-af87-6147d3284976 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2318.008182] env[63279]: DEBUG oslo_vmware.api [None req-b005ee8f-e9e3-4447-976a-ba049f2d4563 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Waiting for the task: (returnval){ [ 2318.008182] env[63279]: value = "task-2088030" [ 2318.008182] env[63279]: _type = "Task" [ 2318.008182] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2318.016156] env[63279]: DEBUG oslo_vmware.api [None req-b005ee8f-e9e3-4447-976a-ba049f2d4563 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Task: {'id': task-2088030, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2318.053326] env[63279]: DEBUG oslo_concurrency.lockutils [None req-4dcd6865-7eeb-43eb-8438-341d6fdde861 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Lock "df37c4f6-b492-4d6e-9ba1-950dbbb9a885" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 18.659s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2318.236517] env[63279]: DEBUG nova.compute.utils [None req-ea96f37b-0755-4740-9b8a-a16db021caec tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Using /dev/sd instead of None {{(pid=63279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2318.370197] env[63279]: INFO nova.compute.claims [None req-12baa868-1215-4292-913a-54b1657dbe5e tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 686a7ce2-2d07-411e-91d6-0471c55c3728] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2318.395874] env[63279]: DEBUG oslo_concurrency.lockutils [None req-d2fac537-4001-44e0-bd9a-505fdb7a57ca tempest-VolumesAdminNegativeTest-772882930 tempest-VolumesAdminNegativeTest-772882930-project-member] Lock "9b98a316-71da-45fb-b895-553f179fe7d9" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 12.075s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2318.519008] env[63279]: DEBUG oslo_vmware.api [None req-b005ee8f-e9e3-4447-976a-ba049f2d4563 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Task: {'id': task-2088030, 'name': PowerOffVM_Task, 'duration_secs': 0.169602} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2318.519302] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-b005ee8f-e9e3-4447-976a-ba049f2d4563 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: 1fca4e5c-fe2c-4b61-bed4-52c7770def7c] Powered off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2318.519568] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-b005ee8f-e9e3-4447-976a-ba049f2d4563 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: 1fca4e5c-fe2c-4b61-bed4-52c7770def7c] Destroying instance {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2318.520313] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07677ed8-c4b4-4d4b-a3df-8553b09f1060 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2318.527167] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-b005ee8f-e9e3-4447-976a-ba049f2d4563 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: 1fca4e5c-fe2c-4b61-bed4-52c7770def7c] Unregistering the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2318.527258] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9871dea1-93d0-431e-95d2-cc84730b894c {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2318.739957] env[63279]: DEBUG oslo_concurrency.lockutils [None req-ea96f37b-0755-4740-9b8a-a16db021caec tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Lock "19e10ee4-99d1-44b9-9354-4c162d541a1f" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.007s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2318.811689] env[63279]: DEBUG oslo_concurrency.lockutils [None req-8f3e2186-2a44-4b2b-b8bb-3a857a77b580 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Acquiring lock "b7123d9a-bcaa-43c4-ac9f-982b3b146eb0" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2318.811932] env[63279]: DEBUG oslo_concurrency.lockutils [None req-8f3e2186-2a44-4b2b-b8bb-3a857a77b580 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Lock "b7123d9a-bcaa-43c4-ac9f-982b3b146eb0" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2318.876319] env[63279]: INFO nova.compute.resource_tracker [None req-12baa868-1215-4292-913a-54b1657dbe5e tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 686a7ce2-2d07-411e-91d6-0471c55c3728] Updating resource usage from migration 799416f5-6fd0-44c1-b42e-4d6384aeed82 [ 2319.075737] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-c409b66d-c323-4ba5-8860-f25375790ed3 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2319.087242] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89d6d1ea-7fb6-48d1-8c66-8b31f14f5909 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2319.118983] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff3b7e80-53c5-4cd7-9b33-7b904ecb0c76 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2319.126794] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd17bb04-5379-4077-af9b-4a2281825e3e {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2319.141366] env[63279]: DEBUG nova.compute.provider_tree [None req-12baa868-1215-4292-913a-54b1657dbe5e tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Inventory has not changed in ProviderTree for provider: 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2319.315051] env[63279]: DEBUG nova.compute.manager [None req-8f3e2186-2a44-4b2b-b8bb-3a857a77b580 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] [instance: b7123d9a-bcaa-43c4-ac9f-982b3b146eb0] Starting instance... {{(pid=63279) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 2319.647126] env[63279]: DEBUG nova.scheduler.client.report [None req-12baa868-1215-4292-913a-54b1657dbe5e tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Inventory has not changed for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2319.800600] env[63279]: DEBUG oslo_concurrency.lockutils [None req-ea96f37b-0755-4740-9b8a-a16db021caec tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Acquiring lock "19e10ee4-99d1-44b9-9354-4c162d541a1f" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2319.801150] env[63279]: DEBUG oslo_concurrency.lockutils [None req-ea96f37b-0755-4740-9b8a-a16db021caec tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Lock "19e10ee4-99d1-44b9-9354-4c162d541a1f" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2319.801388] env[63279]: INFO nova.compute.manager [None req-ea96f37b-0755-4740-9b8a-a16db021caec tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] 
[instance: 19e10ee4-99d1-44b9-9354-4c162d541a1f] Attaching volume 7aed4a6d-1348-48e4-857d-eed399e897d8 to /dev/sdb [ 2319.841020] env[63279]: DEBUG oslo_concurrency.lockutils [None req-8f3e2186-2a44-4b2b-b8bb-3a857a77b580 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2319.842940] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8242ba4f-b39a-42e5-8936-90791da4133d {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2319.850302] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45548895-2a5e-4ed4-995f-31e507551b5b {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2319.865077] env[63279]: DEBUG nova.virt.block_device [None req-ea96f37b-0755-4740-9b8a-a16db021caec tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] [instance: 19e10ee4-99d1-44b9-9354-4c162d541a1f] Updating existing volume attachment record: fcfe0f27-6a02-4bea-a396-a1a5e580e92c {{(pid=63279) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 2319.887032] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-ba4b1409-29ce-4938-83f7-10d9c1416631 tempest-ServersTestFqdnHostnames-1241255271 tempest-ServersTestFqdnHostnames-1241255271-project-member] [instance: 27e2917d-3cd0-4ad3-ab65-f85f7d97225f] Unregistered the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2319.888260] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-ba4b1409-29ce-4938-83f7-10d9c1416631 tempest-ServersTestFqdnHostnames-1241255271 tempest-ServersTestFqdnHostnames-1241255271-project-member] [instance: 27e2917d-3cd0-4ad3-ab65-f85f7d97225f] Deleting contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2319.888533] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-ba4b1409-29ce-4938-83f7-10d9c1416631 tempest-ServersTestFqdnHostnames-1241255271 tempest-ServersTestFqdnHostnames-1241255271-project-member] Deleting the datastore file [datastore1] 27e2917d-3cd0-4ad3-ab65-f85f7d97225f {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2319.890198] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-98b97db8-1dc6-4f2b-a8dc-829b4a262af9 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2319.893156] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-9007d5a8-0d14-4da0-892a-51e06a67eea1 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: d0b8c9dd-c683-4f3a-b819-d9d57d96636b] Unregistered the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2319.893449] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-9007d5a8-0d14-4da0-892a-51e06a67eea1 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: d0b8c9dd-c683-4f3a-b819-d9d57d96636b] Deleting contents of the VM from datastore datastore1 {{(pid=63279) 
_destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2319.893609] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-9007d5a8-0d14-4da0-892a-51e06a67eea1 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Deleting the datastore file [datastore1] d0b8c9dd-c683-4f3a-b819-d9d57d96636b {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2319.894060] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-acab8977-e980-48d1-a3be-d5151407e9d6 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2319.897965] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-b005ee8f-e9e3-4447-976a-ba049f2d4563 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: 1fca4e5c-fe2c-4b61-bed4-52c7770def7c] Unregistered the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2319.898267] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-b005ee8f-e9e3-4447-976a-ba049f2d4563 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: 1fca4e5c-fe2c-4b61-bed4-52c7770def7c] Deleting contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2319.898372] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-b005ee8f-e9e3-4447-976a-ba049f2d4563 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Deleting the datastore file [datastore1] 1fca4e5c-fe2c-4b61-bed4-52c7770def7c {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2319.898887] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-bf5c267b-a259-4ee8-8276-0f53932b8f47 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2319.902560] env[63279]: DEBUG oslo_vmware.api [None req-ba4b1409-29ce-4938-83f7-10d9c1416631 tempest-ServersTestFqdnHostnames-1241255271 tempest-ServersTestFqdnHostnames-1241255271-project-member] Waiting for the task: (returnval){ [ 2319.902560] env[63279]: value = "task-2088032" [ 2319.902560] env[63279]: _type = "Task" [ 2319.902560] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2319.904387] env[63279]: DEBUG oslo_vmware.api [None req-9007d5a8-0d14-4da0-892a-51e06a67eea1 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Waiting for the task: (returnval){ [ 2319.904387] env[63279]: value = "task-2088033" [ 2319.904387] env[63279]: _type = "Task" [ 2319.904387] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2319.912763] env[63279]: DEBUG oslo_vmware.api [None req-b005ee8f-e9e3-4447-976a-ba049f2d4563 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Waiting for the task: (returnval){ [ 2319.912763] env[63279]: value = "task-2088034" [ 2319.912763] env[63279]: _type = "Task" [ 2319.912763] env[63279]: } to complete. 
{{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2319.923583] env[63279]: DEBUG oslo_vmware.api [None req-ba4b1409-29ce-4938-83f7-10d9c1416631 tempest-ServersTestFqdnHostnames-1241255271 tempest-ServersTestFqdnHostnames-1241255271-project-member] Task: {'id': task-2088032, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2319.924355] env[63279]: DEBUG oslo_vmware.api [None req-9007d5a8-0d14-4da0-892a-51e06a67eea1 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Task: {'id': task-2088033, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2319.929588] env[63279]: DEBUG oslo_vmware.api [None req-b005ee8f-e9e3-4447-976a-ba049f2d4563 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Task: {'id': task-2088034, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2320.152451] env[63279]: DEBUG oslo_concurrency.lockutils [None req-12baa868-1215-4292-913a-54b1657dbe5e tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.287s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2320.152775] env[63279]: INFO nova.compute.manager [None req-12baa868-1215-4292-913a-54b1657dbe5e tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 686a7ce2-2d07-411e-91d6-0471c55c3728] Migrating [ 2320.164116] env[63279]: DEBUG oslo_concurrency.lockutils [None req-8f3e2186-2a44-4b2b-b8bb-3a857a77b580 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.323s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2320.165655] env[63279]: INFO nova.compute.claims [None req-8f3e2186-2a44-4b2b-b8bb-3a857a77b580 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] [instance: b7123d9a-bcaa-43c4-ac9f-982b3b146eb0] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2320.416331] env[63279]: DEBUG oslo_vmware.api [None req-9007d5a8-0d14-4da0-892a-51e06a67eea1 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Task: {'id': task-2088033, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.175828} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2320.423688] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-9007d5a8-0d14-4da0-892a-51e06a67eea1 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Deleted the datastore file {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2320.423909] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-9007d5a8-0d14-4da0-892a-51e06a67eea1 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: d0b8c9dd-c683-4f3a-b819-d9d57d96636b] Deleted contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2320.424109] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-9007d5a8-0d14-4da0-892a-51e06a67eea1 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: d0b8c9dd-c683-4f3a-b819-d9d57d96636b] Instance destroyed {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2320.424293] env[63279]: INFO nova.compute.manager [None req-9007d5a8-0d14-4da0-892a-51e06a67eea1 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] [instance: d0b8c9dd-c683-4f3a-b819-d9d57d96636b] Took 7.19 seconds to destroy the instance on the hypervisor. [ 2320.424546] env[63279]: DEBUG oslo.service.loopingcall [None req-9007d5a8-0d14-4da0-892a-51e06a67eea1 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2320.424768] env[63279]: DEBUG oslo_vmware.api [None req-ba4b1409-29ce-4938-83f7-10d9c1416631 tempest-ServersTestFqdnHostnames-1241255271 tempest-ServersTestFqdnHostnames-1241255271-project-member] Task: {'id': task-2088032, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.173473} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2320.424974] env[63279]: DEBUG nova.compute.manager [-] [instance: d0b8c9dd-c683-4f3a-b819-d9d57d96636b] Deallocating network for instance {{(pid=63279) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2320.425086] env[63279]: DEBUG nova.network.neutron [-] [instance: d0b8c9dd-c683-4f3a-b819-d9d57d96636b] deallocate_for_instance() {{(pid=63279) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2320.426878] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-ba4b1409-29ce-4938-83f7-10d9c1416631 tempest-ServersTestFqdnHostnames-1241255271 tempest-ServersTestFqdnHostnames-1241255271-project-member] Deleted the datastore file {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2320.427100] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-ba4b1409-29ce-4938-83f7-10d9c1416631 tempest-ServersTestFqdnHostnames-1241255271 tempest-ServersTestFqdnHostnames-1241255271-project-member] [instance: 27e2917d-3cd0-4ad3-ab65-f85f7d97225f] Deleted contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2320.427282] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-ba4b1409-29ce-4938-83f7-10d9c1416631 tempest-ServersTestFqdnHostnames-1241255271 tempest-ServersTestFqdnHostnames-1241255271-project-member] [instance: 27e2917d-3cd0-4ad3-ab65-f85f7d97225f] Instance destroyed {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2320.427456] env[63279]: INFO nova.compute.manager [None req-ba4b1409-29ce-4938-83f7-10d9c1416631 tempest-ServersTestFqdnHostnames-1241255271 tempest-ServersTestFqdnHostnames-1241255271-project-member] [instance: 27e2917d-3cd0-4ad3-ab65-f85f7d97225f] Took 3.28 seconds to destroy the instance on the hypervisor. [ 2320.427683] env[63279]: DEBUG oslo.service.loopingcall [None req-ba4b1409-29ce-4938-83f7-10d9c1416631 tempest-ServersTestFqdnHostnames-1241255271 tempest-ServersTestFqdnHostnames-1241255271-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2320.428497] env[63279]: DEBUG nova.compute.manager [-] [instance: 27e2917d-3cd0-4ad3-ab65-f85f7d97225f] Deallocating network for instance {{(pid=63279) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2320.428554] env[63279]: DEBUG nova.network.neutron [-] [instance: 27e2917d-3cd0-4ad3-ab65-f85f7d97225f] deallocate_for_instance() {{(pid=63279) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2320.434231] env[63279]: DEBUG oslo_vmware.api [None req-b005ee8f-e9e3-4447-976a-ba049f2d4563 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Task: {'id': task-2088034, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.173352} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2320.434743] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-b005ee8f-e9e3-4447-976a-ba049f2d4563 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Deleted the datastore file {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2320.435011] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-b005ee8f-e9e3-4447-976a-ba049f2d4563 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: 1fca4e5c-fe2c-4b61-bed4-52c7770def7c] Deleted contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2320.435322] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-b005ee8f-e9e3-4447-976a-ba049f2d4563 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: 1fca4e5c-fe2c-4b61-bed4-52c7770def7c] Instance destroyed {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2320.678007] env[63279]: DEBUG oslo_concurrency.lockutils [None req-12baa868-1215-4292-913a-54b1657dbe5e tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Acquiring lock "refresh_cache-686a7ce2-2d07-411e-91d6-0471c55c3728" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2320.682033] env[63279]: DEBUG oslo_concurrency.lockutils [None req-12baa868-1215-4292-913a-54b1657dbe5e tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Acquired lock "refresh_cache-686a7ce2-2d07-411e-91d6-0471c55c3728" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2320.682033] env[63279]: DEBUG nova.network.neutron [None req-12baa868-1215-4292-913a-54b1657dbe5e tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 686a7ce2-2d07-411e-91d6-0471c55c3728] Building network info cache for instance {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2320.683592] env[63279]: DEBUG nova.compute.manager [req-234e7d8c-6d86-44ad-8d48-e050ea1014cb req-fa5921b9-ca56-452b-bf7e-02f904092e50 service nova] [instance: efddec10-b496-446e-a05a-72c9f2d86ed9] Received event network-vif-plugged-cfd0d031-3ccd-4ad0-9c99-6436a05c108d {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2320.683712] env[63279]: DEBUG oslo_concurrency.lockutils [req-234e7d8c-6d86-44ad-8d48-e050ea1014cb req-fa5921b9-ca56-452b-bf7e-02f904092e50 service nova] Acquiring lock "efddec10-b496-446e-a05a-72c9f2d86ed9-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2320.683897] env[63279]: DEBUG oslo_concurrency.lockutils [req-234e7d8c-6d86-44ad-8d48-e050ea1014cb req-fa5921b9-ca56-452b-bf7e-02f904092e50 service nova] Lock "efddec10-b496-446e-a05a-72c9f2d86ed9-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2320.684078] env[63279]: DEBUG oslo_concurrency.lockutils [req-234e7d8c-6d86-44ad-8d48-e050ea1014cb 
req-fa5921b9-ca56-452b-bf7e-02f904092e50 service nova] Lock "efddec10-b496-446e-a05a-72c9f2d86ed9-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2320.684318] env[63279]: DEBUG nova.compute.manager [req-234e7d8c-6d86-44ad-8d48-e050ea1014cb req-fa5921b9-ca56-452b-bf7e-02f904092e50 service nova] [instance: efddec10-b496-446e-a05a-72c9f2d86ed9] No waiting events found dispatching network-vif-plugged-cfd0d031-3ccd-4ad0-9c99-6436a05c108d {{(pid=63279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 2320.684618] env[63279]: WARNING nova.compute.manager [req-234e7d8c-6d86-44ad-8d48-e050ea1014cb req-fa5921b9-ca56-452b-bf7e-02f904092e50 service nova] [instance: efddec10-b496-446e-a05a-72c9f2d86ed9] Received unexpected event network-vif-plugged-cfd0d031-3ccd-4ad0-9c99-6436a05c108d for instance with vm_state building and task_state spawning. [ 2320.825057] env[63279]: DEBUG nova.network.neutron [None req-345ecece-5783-4496-827f-9af605e0dfd9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] [instance: efddec10-b496-446e-a05a-72c9f2d86ed9] Successfully updated port: cfd0d031-3ccd-4ad0-9c99-6436a05c108d {{(pid=63279) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2321.036278] env[63279]: DEBUG nova.compute.manager [req-ca7860ff-1c68-4b5f-8f58-4d4f9ea5fe68 req-55ba2901-a6df-4c4f-bfc3-cbf72e948eea service nova] [instance: d0b8c9dd-c683-4f3a-b819-d9d57d96636b] Received event network-vif-deleted-ee08f9b0-ce61-4fa5-bf67-3a97acafc55e {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2321.036498] env[63279]: INFO nova.compute.manager [req-ca7860ff-1c68-4b5f-8f58-4d4f9ea5fe68 req-55ba2901-a6df-4c4f-bfc3-cbf72e948eea service nova] [instance: d0b8c9dd-c683-4f3a-b819-d9d57d96636b] Neutron deleted interface ee08f9b0-ce61-4fa5-bf67-3a97acafc55e; detaching it from the instance and deleting it from the info cache [ 2321.036679] env[63279]: DEBUG nova.network.neutron [req-ca7860ff-1c68-4b5f-8f58-4d4f9ea5fe68 req-55ba2901-a6df-4c4f-bfc3-cbf72e948eea service nova] [instance: d0b8c9dd-c683-4f3a-b819-d9d57d96636b] Updating instance_info_cache with network_info: [] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2321.102009] env[63279]: DEBUG nova.network.neutron [None req-021f3c12-e37e-456c-8801-ea10fa46af88 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] [instance: 6f839780-be92-4d99-a96d-1fc14c819599] Successfully updated port: d0192def-50a0-40c0-9921-fbdf13e63ffb {{(pid=63279) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2321.329989] env[63279]: DEBUG oslo_concurrency.lockutils [None req-345ecece-5783-4496-827f-9af605e0dfd9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Acquiring lock "refresh_cache-efddec10-b496-446e-a05a-72c9f2d86ed9" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2321.330261] env[63279]: DEBUG oslo_concurrency.lockutils [None req-345ecece-5783-4496-827f-9af605e0dfd9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Acquired lock "refresh_cache-efddec10-b496-446e-a05a-72c9f2d86ed9" {{(pid=63279) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2321.330481] env[63279]: DEBUG nova.network.neutron [None req-345ecece-5783-4496-827f-9af605e0dfd9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] [instance: efddec10-b496-446e-a05a-72c9f2d86ed9] Building network info cache for instance {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2321.408855] env[63279]: DEBUG nova.network.neutron [None req-12baa868-1215-4292-913a-54b1657dbe5e tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 686a7ce2-2d07-411e-91d6-0471c55c3728] Updating instance_info_cache with network_info: [{"id": "55a22eb2-68fd-4cc1-8372-6fed483f16d0", "address": "fa:16:3e:6d:af:2f", "network": {"id": "1237aa69-f92f-4996-893d-4007ba590d1d", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-544783943-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.237", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "996f8d6e14a14ac39f207eced547ef33", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d88b750a-0e7d-4f16-8bd5-8e6d5743b720", "external-id": "nsx-vlan-transportzone-715", "segmentation_id": 715, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap55a22eb2-68", "ovs_interfaceid": "55a22eb2-68fd-4cc1-8372-6fed483f16d0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2321.418140] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-766c6fcd-b601-44e8-b097-360648f1816d {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2321.424444] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f387503-3f7c-4766-9894-ce5f7cc80a3a {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2321.461583] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d46b6821-4159-4b82-91a0-72ed1b4702a3 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2321.469278] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06927a30-1c80-4419-a6d7-f9e4b736b7e0 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2321.484072] env[63279]: DEBUG nova.compute.provider_tree [None req-8f3e2186-2a44-4b2b-b8bb-3a857a77b580 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Updating inventory in ProviderTree for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 
1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2321.498439] env[63279]: DEBUG nova.virt.hardware [None req-b005ee8f-e9e3-4447-976a-ba049f2d4563 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-13T17:30:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-13T17:30:01Z,direct_url=,disk_format='vmdk',id=30887889-e45b-4f67-8b3c-16216e594a90,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a935e86eee0a4c38adfe0367d2097a61',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-13T17:30:02Z,virtual_size=,visibility=), allow threads: False {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2321.498709] env[63279]: DEBUG nova.virt.hardware [None req-b005ee8f-e9e3-4447-976a-ba049f2d4563 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Flavor limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2321.499696] env[63279]: DEBUG nova.virt.hardware [None req-b005ee8f-e9e3-4447-976a-ba049f2d4563 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Image limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2321.499995] env[63279]: DEBUG nova.virt.hardware [None req-b005ee8f-e9e3-4447-976a-ba049f2d4563 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Flavor pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2321.500181] env[63279]: DEBUG nova.virt.hardware [None req-b005ee8f-e9e3-4447-976a-ba049f2d4563 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Image pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2321.500399] env[63279]: DEBUG nova.virt.hardware [None req-b005ee8f-e9e3-4447-976a-ba049f2d4563 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2321.500787] env[63279]: DEBUG nova.virt.hardware [None req-b005ee8f-e9e3-4447-976a-ba049f2d4563 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 2321.501039] env[63279]: DEBUG nova.virt.hardware [None req-b005ee8f-e9e3-4447-976a-ba049f2d4563 
tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 2321.501295] env[63279]: DEBUG nova.virt.hardware [None req-b005ee8f-e9e3-4447-976a-ba049f2d4563 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Got 1 possible topologies {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2321.501545] env[63279]: DEBUG nova.virt.hardware [None req-b005ee8f-e9e3-4447-976a-ba049f2d4563 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2321.501796] env[63279]: DEBUG nova.virt.hardware [None req-b005ee8f-e9e3-4447-976a-ba049f2d4563 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2321.503309] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-addd3011-9dff-473a-8aeb-417aea8e3708 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2321.512662] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07c8ffea-45ce-4f7c-a251-98defb87eaf1 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2321.517059] env[63279]: DEBUG nova.network.neutron [-] [instance: d0b8c9dd-c683-4f3a-b819-d9d57d96636b] Updating instance_info_cache with network_info: [] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2321.529741] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-b005ee8f-e9e3-4447-976a-ba049f2d4563 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: 1fca4e5c-fe2c-4b61-bed4-52c7770def7c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:85:8f:65', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '57d4be17-536f-4a81-bea9-6547bd50f4a3', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '1e0ac67c-3039-4c36-831a-d32977fcab32', 'vif_model': 'vmxnet3'}] {{(pid=63279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2321.538165] env[63279]: DEBUG oslo.service.loopingcall [None req-b005ee8f-e9e3-4447-976a-ba049f2d4563 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2321.538988] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1fca4e5c-fe2c-4b61-bed4-52c7770def7c] Creating VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2321.539620] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-eb03ce26-3ea9-40b3-b640-9369230bea94 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2321.554761] env[63279]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-51d1f029-728b-4940-a7b8-6373166368c7 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2321.565779] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2847109-28e9-4282-8933-9700b7682f3d {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2321.575932] env[63279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2321.575932] env[63279]: value = "task-2088038" [ 2321.575932] env[63279]: _type = "Task" [ 2321.575932] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2321.596583] env[63279]: DEBUG nova.compute.manager [req-ca7860ff-1c68-4b5f-8f58-4d4f9ea5fe68 req-55ba2901-a6df-4c4f-bfc3-cbf72e948eea service nova] [instance: d0b8c9dd-c683-4f3a-b819-d9d57d96636b] Detach interface failed, port_id=ee08f9b0-ce61-4fa5-bf67-3a97acafc55e, reason: Instance d0b8c9dd-c683-4f3a-b819-d9d57d96636b could not be found. {{(pid=63279) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11484}} [ 2321.603442] env[63279]: DEBUG oslo_concurrency.lockutils [None req-021f3c12-e37e-456c-8801-ea10fa46af88 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Acquiring lock "refresh_cache-6f839780-be92-4d99-a96d-1fc14c819599" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2321.603624] env[63279]: DEBUG oslo_concurrency.lockutils [None req-021f3c12-e37e-456c-8801-ea10fa46af88 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Acquired lock "refresh_cache-6f839780-be92-4d99-a96d-1fc14c819599" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2321.603811] env[63279]: DEBUG nova.network.neutron [None req-021f3c12-e37e-456c-8801-ea10fa46af88 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] [instance: 6f839780-be92-4d99-a96d-1fc14c819599] Building network info cache for instance {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2321.865148] env[63279]: DEBUG nova.network.neutron [None req-345ecece-5783-4496-827f-9af605e0dfd9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] [instance: efddec10-b496-446e-a05a-72c9f2d86ed9] Instance cache missing network info. 
{{(pid=63279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2321.919877] env[63279]: DEBUG oslo_concurrency.lockutils [None req-12baa868-1215-4292-913a-54b1657dbe5e tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Releasing lock "refresh_cache-686a7ce2-2d07-411e-91d6-0471c55c3728" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2322.006199] env[63279]: ERROR nova.scheduler.client.report [None req-8f3e2186-2a44-4b2b-b8bb-3a857a77b580 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] [req-132f5602-a55e-4f83-b1ad-f867ee7779e4] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 0ba7c625-a0fc-4d3c-b804-196d00f00137. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-132f5602-a55e-4f83-b1ad-f867ee7779e4"}]} [ 2322.019969] env[63279]: INFO nova.compute.manager [-] [instance: d0b8c9dd-c683-4f3a-b819-d9d57d96636b] Took 1.59 seconds to deallocate network for instance. [ 2322.028274] env[63279]: DEBUG nova.scheduler.client.report [None req-8f3e2186-2a44-4b2b-b8bb-3a857a77b580 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Refreshing inventories for resource provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 2322.038435] env[63279]: DEBUG nova.network.neutron [None req-345ecece-5783-4496-827f-9af605e0dfd9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] [instance: efddec10-b496-446e-a05a-72c9f2d86ed9] Updating instance_info_cache with network_info: [{"id": "cfd0d031-3ccd-4ad0-9c99-6436a05c108d", "address": "fa:16:3e:ff:42:fd", "network": {"id": "4f906777-9da9-42b1-9146-359f04c7c47f", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-923457018-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "131db3d2daa24712b6e11592cf789b33", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "abe48956-848a-4e1f-b1f1-a27baa5390b9", "external-id": "nsx-vlan-transportzone-238", "segmentation_id": 238, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcfd0d031-3c", "ovs_interfaceid": "cfd0d031-3ccd-4ad0-9c99-6436a05c108d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 
2322.047033] env[63279]: DEBUG nova.scheduler.client.report [None req-8f3e2186-2a44-4b2b-b8bb-3a857a77b580 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Updating ProviderTree inventory for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 2322.047033] env[63279]: DEBUG nova.compute.provider_tree [None req-8f3e2186-2a44-4b2b-b8bb-3a857a77b580 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Updating inventory in ProviderTree for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2322.058485] env[63279]: DEBUG nova.scheduler.client.report [None req-8f3e2186-2a44-4b2b-b8bb-3a857a77b580 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Refreshing aggregate associations for resource provider 0ba7c625-a0fc-4d3c-b804-196d00f00137, aggregates: None {{(pid=63279) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 2322.082318] env[63279]: DEBUG nova.scheduler.client.report [None req-8f3e2186-2a44-4b2b-b8bb-3a857a77b580 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Refreshing trait associations for resource provider 0ba7c625-a0fc-4d3c-b804-196d00f00137, traits: COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK {{(pid=63279) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 2322.089593] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2088038, 'name': CreateVM_Task} progress is 25%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2322.127733] env[63279]: DEBUG nova.network.neutron [-] [instance: 27e2917d-3cd0-4ad3-ab65-f85f7d97225f] Updating instance_info_cache with network_info: [] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2322.142627] env[63279]: DEBUG nova.network.neutron [None req-021f3c12-e37e-456c-8801-ea10fa46af88 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] [instance: 6f839780-be92-4d99-a96d-1fc14c819599] Instance cache missing network info. 
{{(pid=63279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2322.324359] env[63279]: DEBUG nova.network.neutron [None req-021f3c12-e37e-456c-8801-ea10fa46af88 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] [instance: 6f839780-be92-4d99-a96d-1fc14c819599] Updating instance_info_cache with network_info: [{"id": "d0192def-50a0-40c0-9921-fbdf13e63ffb", "address": "fa:16:3e:0c:f0:a6", "network": {"id": "e0e614b7-de4b-485e-8824-582faae8febd", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1102025073-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b5b21bc5072e4945a19a782dd9561709", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "41278529-8bd2-44a1-97c8-03967faa3ff7", "external-id": "nsx-vlan-transportzone-749", "segmentation_id": 749, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd0192def-50", "ovs_interfaceid": "d0192def-50a0-40c0-9921-fbdf13e63ffb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2322.339753] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43f17b92-3a6c-40de-9421-feede99e403f {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2322.350885] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3ff2f2b-f205-47ba-91b6-bd6bf91b5306 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2322.382856] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc9b8965-a782-41f5-a6d4-ad93744cc321 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2322.391828] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3221bd15-2688-40ea-b247-b7e822075830 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2322.408507] env[63279]: DEBUG nova.compute.provider_tree [None req-8f3e2186-2a44-4b2b-b8bb-3a857a77b580 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Updating inventory in ProviderTree for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2322.532332] env[63279]: DEBUG oslo_concurrency.lockutils [None 
req-9007d5a8-0d14-4da0-892a-51e06a67eea1 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2322.540597] env[63279]: DEBUG oslo_concurrency.lockutils [None req-345ecece-5783-4496-827f-9af605e0dfd9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Releasing lock "refresh_cache-efddec10-b496-446e-a05a-72c9f2d86ed9" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2322.540906] env[63279]: DEBUG nova.compute.manager [None req-345ecece-5783-4496-827f-9af605e0dfd9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] [instance: efddec10-b496-446e-a05a-72c9f2d86ed9] Instance network_info: |[{"id": "cfd0d031-3ccd-4ad0-9c99-6436a05c108d", "address": "fa:16:3e:ff:42:fd", "network": {"id": "4f906777-9da9-42b1-9146-359f04c7c47f", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-923457018-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "131db3d2daa24712b6e11592cf789b33", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "abe48956-848a-4e1f-b1f1-a27baa5390b9", "external-id": "nsx-vlan-transportzone-238", "segmentation_id": 238, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcfd0d031-3c", "ovs_interfaceid": "cfd0d031-3ccd-4ad0-9c99-6436a05c108d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 2322.541373] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-345ecece-5783-4496-827f-9af605e0dfd9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] [instance: efddec10-b496-446e-a05a-72c9f2d86ed9] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ff:42:fd', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'abe48956-848a-4e1f-b1f1-a27baa5390b9', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'cfd0d031-3ccd-4ad0-9c99-6436a05c108d', 'vif_model': 'vmxnet3'}] {{(pid=63279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2322.550751] env[63279]: DEBUG oslo.service.loopingcall [None req-345ecece-5783-4496-827f-9af605e0dfd9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
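The "Instance VIF info" entry above is derived from the neutron network_info logged for the same port: the NSX logical-switch id becomes an OpaqueNetwork reference, the port id and MAC carry over, and the model is fixed to vmxnet3. A rough sketch of that mapping, grounded in the two structures printed in the log but not Nova's actual code:

```python
# Sketch: deriving the logged VIF info from one neutron network_info entry
# (an NSX "OpaqueNetwork" port). Field names mirror the log output above.
def vif_info_from_network_info(vif):
    details = vif["details"]
    return {
        "network_name": vif["network"]["bridge"],            # e.g. "br-int"
        "mac_address": vif["address"],
        "network_ref": {
            "type": "OpaqueNetwork",
            "network-id": details["nsx-logical-switch-id"],
            "network-type": "nsx.LogicalSwitch",
            "use-external-id": True,
        },
        "iface_id": vif["id"],
        "vif_model": "vmxnet3",
    }
```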
{{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2322.551062] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: efddec10-b496-446e-a05a-72c9f2d86ed9] Creating VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2322.551373] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f3ec9afc-1525-4be0-9421-37ccf7f3fa71 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2322.571904] env[63279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2322.571904] env[63279]: value = "task-2088040" [ 2322.571904] env[63279]: _type = "Task" [ 2322.571904] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2322.579904] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2088040, 'name': CreateVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2322.587494] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2088038, 'name': CreateVM_Task, 'duration_secs': 0.647536} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2322.587662] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1fca4e5c-fe2c-4b61-bed4-52c7770def7c] Created VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2322.588340] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b005ee8f-e9e3-4447-976a-ba049f2d4563 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2322.588545] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b005ee8f-e9e3-4447-976a-ba049f2d4563 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2322.588955] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b005ee8f-e9e3-4447-976a-ba049f2d4563 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2322.589224] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-535ddb43-117b-42bb-bdb5-65d97b7cf39f {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2322.594655] env[63279]: DEBUG oslo_vmware.api [None req-b005ee8f-e9e3-4447-976a-ba049f2d4563 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Waiting for the task: (returnval){ [ 2322.594655] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52ac7a48-9c0b-0358-27bc-f44db87b1117" [ 2322.594655] env[63279]: _type = "Task" [ 2322.594655] env[63279]: } to complete. 
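The "Waiting for the task: (returnval){ … } to complete" block and the recurring "progress is N%" entries are produced by polling the vSphere task until it reaches a terminal state. A simplified version of that loop; get_task_info is a stand-in for the real TaskInfo lookup, not an oslo.vmware call:

```python
# Sketch of the task-polling pattern behind the "Task: {...} progress is N%"
# and "completed successfully ... duration_secs" lines.
import time


def wait_for_task(get_task_info, task_ref, interval=0.5):
    start = time.monotonic()
    while True:
        info = get_task_info(task_ref)          # hypothetical accessor
        if info["state"] == "running":
            print(f"Task {task_ref} progress is {info.get('progress', 0)}%")
            time.sleep(interval)
            continue
        duration = time.monotonic() - start
        if info["state"] == "success":
            print(f"Task {task_ref} completed successfully in {duration:.3f}s")
            return info.get("result")
        raise RuntimeError(f"Task {task_ref} failed: {info.get('error')}")
```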
{{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2322.604028] env[63279]: DEBUG oslo_vmware.api [None req-b005ee8f-e9e3-4447-976a-ba049f2d4563 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52ac7a48-9c0b-0358-27bc-f44db87b1117, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2322.631067] env[63279]: INFO nova.compute.manager [-] [instance: 27e2917d-3cd0-4ad3-ab65-f85f7d97225f] Took 2.20 seconds to deallocate network for instance. [ 2322.711325] env[63279]: DEBUG nova.compute.manager [req-5ddcbe33-2c20-434e-bbab-51b7cb0e72c1 req-00daf220-619c-4e7b-a30f-3c8a69005e86 service nova] [instance: efddec10-b496-446e-a05a-72c9f2d86ed9] Received event network-changed-cfd0d031-3ccd-4ad0-9c99-6436a05c108d {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2322.711542] env[63279]: DEBUG nova.compute.manager [req-5ddcbe33-2c20-434e-bbab-51b7cb0e72c1 req-00daf220-619c-4e7b-a30f-3c8a69005e86 service nova] [instance: efddec10-b496-446e-a05a-72c9f2d86ed9] Refreshing instance network info cache due to event network-changed-cfd0d031-3ccd-4ad0-9c99-6436a05c108d. {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11655}} [ 2322.711760] env[63279]: DEBUG oslo_concurrency.lockutils [req-5ddcbe33-2c20-434e-bbab-51b7cb0e72c1 req-00daf220-619c-4e7b-a30f-3c8a69005e86 service nova] Acquiring lock "refresh_cache-efddec10-b496-446e-a05a-72c9f2d86ed9" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2322.711933] env[63279]: DEBUG oslo_concurrency.lockutils [req-5ddcbe33-2c20-434e-bbab-51b7cb0e72c1 req-00daf220-619c-4e7b-a30f-3c8a69005e86 service nova] Acquired lock "refresh_cache-efddec10-b496-446e-a05a-72c9f2d86ed9" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2322.712251] env[63279]: DEBUG nova.network.neutron [req-5ddcbe33-2c20-434e-bbab-51b7cb0e72c1 req-00daf220-619c-4e7b-a30f-3c8a69005e86 service nova] [instance: efddec10-b496-446e-a05a-72c9f2d86ed9] Refreshing network info cache for port cfd0d031-3ccd-4ad0-9c99-6436a05c108d {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2322.826463] env[63279]: DEBUG oslo_concurrency.lockutils [None req-021f3c12-e37e-456c-8801-ea10fa46af88 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Releasing lock "refresh_cache-6f839780-be92-4d99-a96d-1fc14c819599" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2322.826924] env[63279]: DEBUG nova.compute.manager [None req-021f3c12-e37e-456c-8801-ea10fa46af88 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] [instance: 6f839780-be92-4d99-a96d-1fc14c819599] Instance network_info: |[{"id": "d0192def-50a0-40c0-9921-fbdf13e63ffb", "address": "fa:16:3e:0c:f0:a6", "network": {"id": "e0e614b7-de4b-485e-8824-582faae8febd", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1102025073-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, 
"meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b5b21bc5072e4945a19a782dd9561709", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "41278529-8bd2-44a1-97c8-03967faa3ff7", "external-id": "nsx-vlan-transportzone-749", "segmentation_id": 749, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd0192def-50", "ovs_interfaceid": "d0192def-50a0-40c0-9921-fbdf13e63ffb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 2322.827278] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-021f3c12-e37e-456c-8801-ea10fa46af88 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] [instance: 6f839780-be92-4d99-a96d-1fc14c819599] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:0c:f0:a6', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '41278529-8bd2-44a1-97c8-03967faa3ff7', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd0192def-50a0-40c0-9921-fbdf13e63ffb', 'vif_model': 'vmxnet3'}] {{(pid=63279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2322.835275] env[63279]: DEBUG oslo.service.loopingcall [None req-021f3c12-e37e-456c-8801-ea10fa46af88 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2322.835275] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6f839780-be92-4d99-a96d-1fc14c819599] Creating VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2322.835421] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0e93648b-0d76-4c41-91da-b39eb3931a1f {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2322.856396] env[63279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2322.856396] env[63279]: value = "task-2088041" [ 2322.856396] env[63279]: _type = "Task" [ 2322.856396] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2322.864779] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2088041, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2322.945625] env[63279]: DEBUG nova.scheduler.client.report [None req-8f3e2186-2a44-4b2b-b8bb-3a857a77b580 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Updated inventory for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 with generation 151 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 2322.946144] env[63279]: DEBUG nova.compute.provider_tree [None req-8f3e2186-2a44-4b2b-b8bb-3a857a77b580 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Updating resource provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 generation from 151 to 152 during operation: update_inventory {{(pid=63279) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 2322.946517] env[63279]: DEBUG nova.compute.provider_tree [None req-8f3e2186-2a44-4b2b-b8bb-3a857a77b580 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Updating inventory in ProviderTree for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2323.061778] env[63279]: DEBUG nova.compute.manager [req-117c09d9-ee27-4128-8583-8fa4b8c417c0 req-c62b7734-a295-4abf-88d1-e84cffb569e5 service nova] [instance: 27e2917d-3cd0-4ad3-ab65-f85f7d97225f] Received event network-vif-deleted-0f4606f9-e916-43fe-8ad4-57247bfb98a2 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2323.081556] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2088040, 'name': CreateVM_Task, 'duration_secs': 0.499447} completed successfully. 
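The set_inventory_for_provider result above carries the new provider generation (151 → 152), which the local ProviderTree cache adopts together with the inventory so that later writes echo the correct generation. A toy version of that bookkeeping, not Nova's actual class:

```python
# Minimal stand-in for the cached provider record updated in the log:
# a successful Placement write installs the new inventory and generation.
class ProviderRecord:
    def __init__(self, uuid, generation, inventory=None):
        self.uuid = uuid
        self.generation = generation
        self.inventory = inventory or {}

    def update_inventory(self, inventory, new_generation):
        if new_generation < self.generation:
            raise ValueError("stale generation from Placement")
        self.inventory = inventory
        self.generation = new_generation


rp = ProviderRecord("0ba7c625-a0fc-4d3c-b804-196d00f00137", generation=151)
rp.update_inventory({"VCPU": {"total": 48}}, new_generation=152)
```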
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2323.081765] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: efddec10-b496-446e-a05a-72c9f2d86ed9] Created VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2323.082546] env[63279]: DEBUG oslo_concurrency.lockutils [None req-345ecece-5783-4496-827f-9af605e0dfd9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2323.104660] env[63279]: DEBUG oslo_vmware.api [None req-b005ee8f-e9e3-4447-976a-ba049f2d4563 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52ac7a48-9c0b-0358-27bc-f44db87b1117, 'name': SearchDatastore_Task, 'duration_secs': 0.012606} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2323.104982] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b005ee8f-e9e3-4447-976a-ba049f2d4563 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2323.105248] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-b005ee8f-e9e3-4447-976a-ba049f2d4563 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: 1fca4e5c-fe2c-4b61-bed4-52c7770def7c] Processing image 30887889-e45b-4f67-8b3c-16216e594a90 {{(pid=63279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2323.105507] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b005ee8f-e9e3-4447-976a-ba049f2d4563 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2323.105685] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b005ee8f-e9e3-4447-976a-ba049f2d4563 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2323.105885] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-b005ee8f-e9e3-4447-976a-ba049f2d4563 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2323.106192] env[63279]: DEBUG oslo_concurrency.lockutils [None req-345ecece-5783-4496-827f-9af605e0dfd9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2323.106500] env[63279]: DEBUG oslo_concurrency.lockutils [None req-345ecece-5783-4496-827f-9af605e0dfd9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2323.106747] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f4a7823e-3d4e-4cbc-9de6-c06693a9be36 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2323.108565] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3b050f36-8577-4003-9ec6-7051f33cb85e {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2323.113456] env[63279]: DEBUG oslo_vmware.api [None req-345ecece-5783-4496-827f-9af605e0dfd9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Waiting for the task: (returnval){ [ 2323.113456] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]521b6d8f-35cc-3a28-954a-d79744b029b7" [ 2323.113456] env[63279]: _type = "Task" [ 2323.113456] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2323.117572] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-b005ee8f-e9e3-4447-976a-ba049f2d4563 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2323.117766] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-b005ee8f-e9e3-4447-976a-ba049f2d4563 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2323.118797] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d7c8ec8d-9dd5-4c05-950d-8bdbac344101 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2323.123837] env[63279]: DEBUG oslo_vmware.api [None req-345ecece-5783-4496-827f-9af605e0dfd9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]521b6d8f-35cc-3a28-954a-d79744b029b7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2323.127102] env[63279]: DEBUG oslo_vmware.api [None req-b005ee8f-e9e3-4447-976a-ba049f2d4563 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Waiting for the task: (returnval){ [ 2323.127102] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]5255f79b-eefe-5a31-0641-4a9320dec51a" [ 2323.127102] env[63279]: _type = "Task" [ 2323.127102] env[63279]: } to complete. 
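The Acquiring/Acquired/Releasing lock lines around "[datastore1] devstack-image-cache_base/…" come from oslo.concurrency's named locks, which serialise the concurrent builds that share one cached image. A rough sketch of the pattern, assuming oslo.concurrency is installed; the lock name is taken from the log, the body is illustrative:

```python
# Named-lock sketch for the image-cache path. The in-process lock shown here
# corresponds to the plain "Acquired lock" lines; the "external semaphore"
# lines map to external=True, which also takes a file lock and requires
# oslo.concurrency's lock_path to be configured.
from oslo_concurrency import lockutils

CACHE_LOCK = ("[datastore1] devstack-image-cache_base/"
              "30887889-e45b-4f67-8b3c-16216e594a90")


def with_image_cache_lock(work):
    with lockutils.lock(CACHE_LOCK):
        return work()
```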
{{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2323.139869] env[63279]: DEBUG oslo_vmware.api [None req-b005ee8f-e9e3-4447-976a-ba049f2d4563 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]5255f79b-eefe-5a31-0641-4a9320dec51a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2323.142211] env[63279]: DEBUG oslo_concurrency.lockutils [None req-ba4b1409-29ce-4938-83f7-10d9c1416631 tempest-ServersTestFqdnHostnames-1241255271 tempest-ServersTestFqdnHostnames-1241255271-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2323.366410] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2088041, 'name': CreateVM_Task, 'duration_secs': 0.418712} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2323.366661] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6f839780-be92-4d99-a96d-1fc14c819599] Created VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2323.367321] env[63279]: DEBUG oslo_concurrency.lockutils [None req-021f3c12-e37e-456c-8801-ea10fa46af88 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2323.434571] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d365dd5-9e58-4149-bdda-86ba6872a99f {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2323.456893] env[63279]: DEBUG oslo_concurrency.lockutils [None req-8f3e2186-2a44-4b2b-b8bb-3a857a77b580 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.293s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2323.457598] env[63279]: DEBUG nova.compute.manager [None req-8f3e2186-2a44-4b2b-b8bb-3a857a77b580 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] [instance: b7123d9a-bcaa-43c4-ac9f-982b3b146eb0] Start building networks asynchronously for instance. 
{{(pid=63279) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 2323.460485] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-12baa868-1215-4292-913a-54b1657dbe5e tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 686a7ce2-2d07-411e-91d6-0471c55c3728] Updating instance '686a7ce2-2d07-411e-91d6-0471c55c3728' progress to 0 {{(pid=63279) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2323.463926] env[63279]: DEBUG oslo_concurrency.lockutils [None req-9007d5a8-0d14-4da0-892a-51e06a67eea1 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.932s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2323.464115] env[63279]: DEBUG oslo_concurrency.lockutils [None req-9007d5a8-0d14-4da0-892a-51e06a67eea1 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2323.466323] env[63279]: DEBUG oslo_concurrency.lockutils [None req-ba4b1409-29ce-4938-83f7-10d9c1416631 tempest-ServersTestFqdnHostnames-1241255271 tempest-ServersTestFqdnHostnames-1241255271-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.324s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2323.466323] env[63279]: DEBUG nova.objects.instance [None req-ba4b1409-29ce-4938-83f7-10d9c1416631 tempest-ServersTestFqdnHostnames-1241255271 tempest-ServersTestFqdnHostnames-1241255271-project-member] Lazy-loading 'resources' on Instance uuid 27e2917d-3cd0-4ad3-ab65-f85f7d97225f {{(pid=63279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2323.500644] env[63279]: INFO nova.scheduler.client.report [None req-9007d5a8-0d14-4da0-892a-51e06a67eea1 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Deleted allocations for instance d0b8c9dd-c683-4f3a-b819-d9d57d96636b [ 2323.515210] env[63279]: DEBUG nova.network.neutron [req-5ddcbe33-2c20-434e-bbab-51b7cb0e72c1 req-00daf220-619c-4e7b-a30f-3c8a69005e86 service nova] [instance: efddec10-b496-446e-a05a-72c9f2d86ed9] Updated VIF entry in instance network info cache for port cfd0d031-3ccd-4ad0-9c99-6436a05c108d. 
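The network-changed event received earlier (req-5ddcbe33-…) ends here with the cached VIF entry for port cfd0d031-… being rewritten under the per-instance refresh_cache lock. A simplified sketch of that event-driven refresh; the cache, lock registry and Neutron lookup below are illustrative stand-ins:

```python
# Event-driven cache refresh: a network-changed event for a port re-reads that
# port's info and rewrites the matching cache entry under a per-instance lock.
import threading
from collections import defaultdict

_cache = {}                                   # instance_uuid -> list of VIF dicts
_locks = defaultdict(threading.Lock)          # "refresh_cache-<uuid>" -> lock


def handle_network_changed(instance_uuid, port_id, lookup_port_info):
    with _locks[f"refresh_cache-{instance_uuid}"]:
        fresh = lookup_port_info(port_id)     # hypothetical Neutron lookup
        vifs = _cache.setdefault(instance_uuid, [])
        for i, vif in enumerate(vifs):
            if vif["id"] == port_id:
                vifs[i] = fresh               # the "Updated VIF entry" case
                break
        else:
            vifs.append(fresh)                # port not cached yet
```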
{{(pid=63279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2323.516398] env[63279]: DEBUG nova.network.neutron [req-5ddcbe33-2c20-434e-bbab-51b7cb0e72c1 req-00daf220-619c-4e7b-a30f-3c8a69005e86 service nova] [instance: efddec10-b496-446e-a05a-72c9f2d86ed9] Updating instance_info_cache with network_info: [{"id": "cfd0d031-3ccd-4ad0-9c99-6436a05c108d", "address": "fa:16:3e:ff:42:fd", "network": {"id": "4f906777-9da9-42b1-9146-359f04c7c47f", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-923457018-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "131db3d2daa24712b6e11592cf789b33", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "abe48956-848a-4e1f-b1f1-a27baa5390b9", "external-id": "nsx-vlan-transportzone-238", "segmentation_id": 238, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcfd0d031-3c", "ovs_interfaceid": "cfd0d031-3ccd-4ad0-9c99-6436a05c108d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2323.623763] env[63279]: DEBUG oslo_vmware.api [None req-345ecece-5783-4496-827f-9af605e0dfd9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]521b6d8f-35cc-3a28-954a-d79744b029b7, 'name': SearchDatastore_Task, 'duration_secs': 0.011527} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2323.624047] env[63279]: DEBUG oslo_concurrency.lockutils [None req-345ecece-5783-4496-827f-9af605e0dfd9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2323.624295] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-345ecece-5783-4496-827f-9af605e0dfd9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] [instance: efddec10-b496-446e-a05a-72c9f2d86ed9] Processing image 30887889-e45b-4f67-8b3c-16216e594a90 {{(pid=63279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2323.624515] env[63279]: DEBUG oslo_concurrency.lockutils [None req-345ecece-5783-4496-827f-9af605e0dfd9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2323.624733] env[63279]: DEBUG oslo_concurrency.lockutils [None req-021f3c12-e37e-456c-8801-ea10fa46af88 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2323.625052] env[63279]: DEBUG oslo_concurrency.lockutils [None req-021f3c12-e37e-456c-8801-ea10fa46af88 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2323.625301] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4331c16b-6890-4370-830e-12285365f400 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2323.632162] env[63279]: DEBUG oslo_vmware.api [None req-021f3c12-e37e-456c-8801-ea10fa46af88 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Waiting for the task: (returnval){ [ 2323.632162] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52d446d5-5a02-ac12-8870-d87d8fd01d6d" [ 2323.632162] env[63279]: _type = "Task" [ 2323.632162] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2323.638310] env[63279]: DEBUG oslo_vmware.api [None req-b005ee8f-e9e3-4447-976a-ba049f2d4563 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]5255f79b-eefe-5a31-0641-4a9320dec51a, 'name': SearchDatastore_Task, 'duration_secs': 0.009683} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2323.639328] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8c8924b0-25c2-474e-ab0a-b1e7083500e0 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2323.644591] env[63279]: DEBUG oslo_vmware.api [None req-021f3c12-e37e-456c-8801-ea10fa46af88 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52d446d5-5a02-ac12-8870-d87d8fd01d6d, 'name': SearchDatastore_Task, 'duration_secs': 0.009423} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2323.645139] env[63279]: DEBUG oslo_concurrency.lockutils [None req-021f3c12-e37e-456c-8801-ea10fa46af88 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2323.645408] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-021f3c12-e37e-456c-8801-ea10fa46af88 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] [instance: 6f839780-be92-4d99-a96d-1fc14c819599] Processing image 30887889-e45b-4f67-8b3c-16216e594a90 {{(pid=63279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2323.645664] env[63279]: DEBUG oslo_concurrency.lockutils [None req-021f3c12-e37e-456c-8801-ea10fa46af88 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2323.647707] env[63279]: DEBUG oslo_vmware.api [None req-b005ee8f-e9e3-4447-976a-ba049f2d4563 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Waiting for the task: (returnval){ [ 2323.647707] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52ca8ee4-835a-5f43-bd1e-0e8aec765ab9" [ 2323.647707] env[63279]: _type = "Task" [ 2323.647707] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2323.654931] env[63279]: DEBUG oslo_vmware.api [None req-b005ee8f-e9e3-4447-976a-ba049f2d4563 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52ca8ee4-835a-5f43-bd1e-0e8aec765ab9, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2323.965749] env[63279]: DEBUG nova.compute.utils [None req-8f3e2186-2a44-4b2b-b8bb-3a857a77b580 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Using /dev/sd instead of None {{(pid=63279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2323.967165] env[63279]: DEBUG nova.compute.manager [None req-8f3e2186-2a44-4b2b-b8bb-3a857a77b580 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] [instance: b7123d9a-bcaa-43c4-ac9f-982b3b146eb0] Allocating IP information in the background. {{(pid=63279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 2323.967342] env[63279]: DEBUG nova.network.neutron [None req-8f3e2186-2a44-4b2b-b8bb-3a857a77b580 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] [instance: b7123d9a-bcaa-43c4-ac9f-982b3b146eb0] allocate_for_instance() {{(pid=63279) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2323.970885] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-12baa868-1215-4292-913a-54b1657dbe5e tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 686a7ce2-2d07-411e-91d6-0471c55c3728] Powering off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2323.973970] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-52bec953-0c71-426b-bb90-8330b1e2dc9b {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2323.981913] env[63279]: DEBUG oslo_vmware.api [None req-12baa868-1215-4292-913a-54b1657dbe5e tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Waiting for the task: (returnval){ [ 2323.981913] env[63279]: value = "task-2088042" [ 2323.981913] env[63279]: _type = "Task" [ 2323.981913] env[63279]: } to complete. 
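The PowerOffVM_Task submitted above (task-2088042) is handled tolerantly: as the next entries show, a VM that is already off is logged as "VM already powered off" and the operation continues instead of failing. A small sketch of that behaviour; the session object and exception class are illustrative placeholders, not oslo.vmware names:

```python
# Tolerant power-off: an "already off" failure is treated as success.
class InvalidPowerState(Exception):
    """Hypothetical error for a VM not in the expected power state."""


def power_off_instance(session, vm_ref):
    try:
        task = session.power_off_vm(vm_ref)   # hypothetical PowerOffVM_Task wrapper
        session.wait_for_task(task)
    except InvalidPowerState:
        # Desired end state already holds, so carry on (the log then bumps
        # the instance progress to 17%).
        print("VM already powered off")
```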
{{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2323.991090] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-12baa868-1215-4292-913a-54b1657dbe5e tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 686a7ce2-2d07-411e-91d6-0471c55c3728] VM already powered off {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 2323.991556] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-12baa868-1215-4292-913a-54b1657dbe5e tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 686a7ce2-2d07-411e-91d6-0471c55c3728] Updating instance '686a7ce2-2d07-411e-91d6-0471c55c3728' progress to 17 {{(pid=63279) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2324.009794] env[63279]: DEBUG oslo_concurrency.lockutils [None req-9007d5a8-0d14-4da0-892a-51e06a67eea1 tempest-DeleteServersTestJSON-2126661104 tempest-DeleteServersTestJSON-2126661104-project-member] Lock "d0b8c9dd-c683-4f3a-b819-d9d57d96636b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 11.278s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2324.019643] env[63279]: DEBUG oslo_concurrency.lockutils [req-5ddcbe33-2c20-434e-bbab-51b7cb0e72c1 req-00daf220-619c-4e7b-a30f-3c8a69005e86 service nova] Releasing lock "refresh_cache-efddec10-b496-446e-a05a-72c9f2d86ed9" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2324.019994] env[63279]: DEBUG nova.compute.manager [req-5ddcbe33-2c20-434e-bbab-51b7cb0e72c1 req-00daf220-619c-4e7b-a30f-3c8a69005e86 service nova] [instance: 6f839780-be92-4d99-a96d-1fc14c819599] Received event network-vif-plugged-d0192def-50a0-40c0-9921-fbdf13e63ffb {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2324.020235] env[63279]: DEBUG oslo_concurrency.lockutils [req-5ddcbe33-2c20-434e-bbab-51b7cb0e72c1 req-00daf220-619c-4e7b-a30f-3c8a69005e86 service nova] Acquiring lock "6f839780-be92-4d99-a96d-1fc14c819599-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2324.020494] env[63279]: DEBUG oslo_concurrency.lockutils [req-5ddcbe33-2c20-434e-bbab-51b7cb0e72c1 req-00daf220-619c-4e7b-a30f-3c8a69005e86 service nova] Lock "6f839780-be92-4d99-a96d-1fc14c819599-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2324.020735] env[63279]: DEBUG oslo_concurrency.lockutils [req-5ddcbe33-2c20-434e-bbab-51b7cb0e72c1 req-00daf220-619c-4e7b-a30f-3c8a69005e86 service nova] Lock "6f839780-be92-4d99-a96d-1fc14c819599-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2324.020959] env[63279]: DEBUG nova.compute.manager [req-5ddcbe33-2c20-434e-bbab-51b7cb0e72c1 req-00daf220-619c-4e7b-a30f-3c8a69005e86 service nova] [instance: 6f839780-be92-4d99-a96d-1fc14c819599] No waiting events found dispatching network-vif-plugged-d0192def-50a0-40c0-9921-fbdf13e63ffb 
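The "6f839780-…-events" lock and the "No waiting events found dispatching network-vif-plugged-…" line come from the compute manager's external-event registry: events an instance is explicitly waiting for are popped and signalled, anything else is logged as unexpected (the WARNING that follows). A small sketch of that dispatch with illustrative structures:

```python
# External-event dispatch: pop the matching waiter and signal it, or report
# the event as unexpected. The registry and locking here are stand-ins.
import threading

_waiters = {}                 # (instance_uuid, event_name) -> threading.Event
_waiters_lock = threading.Lock()


def prepare_for_event(instance_uuid, event_name):
    ev = threading.Event()
    with _waiters_lock:
        _waiters[(instance_uuid, event_name)] = ev
    return ev


def dispatch_external_event(instance_uuid, event_name):
    with _waiters_lock:       # plays the role of the "<uuid>-events" lock
        ev = _waiters.pop((instance_uuid, event_name), None)
    if ev is None:
        print(f"Received unexpected event {event_name} for {instance_uuid}")
        return
    ev.set()                  # wakes whoever is waiting during spawn
```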
{{(pid=63279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 2324.021161] env[63279]: WARNING nova.compute.manager [req-5ddcbe33-2c20-434e-bbab-51b7cb0e72c1 req-00daf220-619c-4e7b-a30f-3c8a69005e86 service nova] [instance: 6f839780-be92-4d99-a96d-1fc14c819599] Received unexpected event network-vif-plugged-d0192def-50a0-40c0-9921-fbdf13e63ffb for instance with vm_state building and task_state spawning. [ 2324.021335] env[63279]: DEBUG nova.compute.manager [req-5ddcbe33-2c20-434e-bbab-51b7cb0e72c1 req-00daf220-619c-4e7b-a30f-3c8a69005e86 service nova] [instance: 6f839780-be92-4d99-a96d-1fc14c819599] Received event network-changed-d0192def-50a0-40c0-9921-fbdf13e63ffb {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2324.021499] env[63279]: DEBUG nova.compute.manager [req-5ddcbe33-2c20-434e-bbab-51b7cb0e72c1 req-00daf220-619c-4e7b-a30f-3c8a69005e86 service nova] [instance: 6f839780-be92-4d99-a96d-1fc14c819599] Refreshing instance network info cache due to event network-changed-d0192def-50a0-40c0-9921-fbdf13e63ffb. {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11655}} [ 2324.021700] env[63279]: DEBUG oslo_concurrency.lockutils [req-5ddcbe33-2c20-434e-bbab-51b7cb0e72c1 req-00daf220-619c-4e7b-a30f-3c8a69005e86 service nova] Acquiring lock "refresh_cache-6f839780-be92-4d99-a96d-1fc14c819599" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2324.021846] env[63279]: DEBUG oslo_concurrency.lockutils [req-5ddcbe33-2c20-434e-bbab-51b7cb0e72c1 req-00daf220-619c-4e7b-a30f-3c8a69005e86 service nova] Acquired lock "refresh_cache-6f839780-be92-4d99-a96d-1fc14c819599" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2324.022015] env[63279]: DEBUG nova.network.neutron [req-5ddcbe33-2c20-434e-bbab-51b7cb0e72c1 req-00daf220-619c-4e7b-a30f-3c8a69005e86 service nova] [instance: 6f839780-be92-4d99-a96d-1fc14c819599] Refreshing network info cache for port d0192def-50a0-40c0-9921-fbdf13e63ffb {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2324.068571] env[63279]: DEBUG nova.policy [None req-8f3e2186-2a44-4b2b-b8bb-3a857a77b580 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '55199c771de04067a936194078ef99f6', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4fcef39e334249afb9636455802059c5', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63279) authorize /opt/stack/nova/nova/policy.py:192}} [ 2324.157512] env[63279]: DEBUG oslo_vmware.api [None req-b005ee8f-e9e3-4447-976a-ba049f2d4563 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52ca8ee4-835a-5f43-bd1e-0e8aec765ab9, 'name': SearchDatastore_Task, 'duration_secs': 0.008626} completed successfully. 
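The "Policy check for network:attach_external_network failed" line above is an oslo.policy decision made against the request's credentials; a member/reader token does not satisfy an admin-style rule. A self-contained sketch of such a check, assuming oslo.policy and oslo.config are installed; the rule string and credential dict are illustrative, not this cloud's actual policy:

```python
# Policy-check sketch: evaluate a registered rule against request credentials.
from oslo_config import cfg
from oslo_policy import policy

enforcer = policy.Enforcer(cfg.CONF)
enforcer.register_default(
    policy.RuleDefault("network:attach_external_network", "role:admin"))

creds = {"roles": ["member", "reader"],
         "project_id": "4fcef39e334249afb9636455802059c5"}
allowed = enforcer.enforce("network:attach_external_network", {}, creds)
print(allowed)   # False for a member/reader token, as in the log above
```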
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2324.157780] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b005ee8f-e9e3-4447-976a-ba049f2d4563 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2324.158059] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-b005ee8f-e9e3-4447-976a-ba049f2d4563 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] 1fca4e5c-fe2c-4b61-bed4-52c7770def7c/1fca4e5c-fe2c-4b61-bed4-52c7770def7c.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2324.158354] env[63279]: DEBUG oslo_concurrency.lockutils [None req-345ecece-5783-4496-827f-9af605e0dfd9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2324.158602] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-345ecece-5783-4496-827f-9af605e0dfd9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2324.158833] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e1d4533c-64bb-4add-bb67-3acd67b1f32d {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2324.163437] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1ef74162-ed12-4d43-81cc-d15e5e0a4d54 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2324.171114] env[63279]: DEBUG oslo_vmware.api [None req-b005ee8f-e9e3-4447-976a-ba049f2d4563 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Waiting for the task: (returnval){ [ 2324.171114] env[63279]: value = "task-2088043" [ 2324.171114] env[63279]: _type = "Task" [ 2324.171114] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2324.174629] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-345ecece-5783-4496-827f-9af605e0dfd9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2324.174813] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-345ecece-5783-4496-827f-9af605e0dfd9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2324.176562] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-51d6e158-b98f-435d-8adc-0af8d0e6deda {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2324.181266] env[63279]: DEBUG oslo_vmware.api [None req-b005ee8f-e9e3-4447-976a-ba049f2d4563 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Task: {'id': task-2088043, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2324.186583] env[63279]: DEBUG oslo_vmware.api [None req-345ecece-5783-4496-827f-9af605e0dfd9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Waiting for the task: (returnval){ [ 2324.186583] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52ecbd8e-26fc-7721-bf63-59e1fa8ff2c6" [ 2324.186583] env[63279]: _type = "Task" [ 2324.186583] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2324.194377] env[63279]: DEBUG oslo_vmware.api [None req-345ecece-5783-4496-827f-9af605e0dfd9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52ecbd8e-26fc-7721-bf63-59e1fa8ff2c6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2324.208926] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37aa6585-e51e-4238-9b03-b63687a1ee07 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2324.216411] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6269e6bc-7d04-4aac-8ee3-b590d3ad238e {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2324.248970] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a0dee42-7fd7-44c9-8aae-a8fdd3a67ba3 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2324.256659] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf13128e-8284-4a4f-9ee3-7b0d195d0520 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2324.271048] env[63279]: DEBUG nova.compute.provider_tree [None req-ba4b1409-29ce-4938-83f7-10d9c1416631 tempest-ServersTestFqdnHostnames-1241255271 tempest-ServersTestFqdnHostnames-1241255271-project-member] Inventory has not changed in ProviderTree for provider: 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2324.418226] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-ea96f37b-0755-4740-9b8a-a16db021caec tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] [instance: 19e10ee4-99d1-44b9-9354-4c162d541a1f] Volume attach. 
Driver type: vmdk {{(pid=63279) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 2324.418596] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-ea96f37b-0755-4740-9b8a-a16db021caec tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] [instance: 19e10ee4-99d1-44b9-9354-4c162d541a1f] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-427776', 'volume_id': '7aed4a6d-1348-48e4-857d-eed399e897d8', 'name': 'volume-7aed4a6d-1348-48e4-857d-eed399e897d8', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '19e10ee4-99d1-44b9-9354-4c162d541a1f', 'attached_at': '', 'detached_at': '', 'volume_id': '7aed4a6d-1348-48e4-857d-eed399e897d8', 'serial': '7aed4a6d-1348-48e4-857d-eed399e897d8'} {{(pid=63279) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 2324.419741] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94210272-cda6-4c30-a77a-990ce7331d00 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2324.441194] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bbe2783f-9d79-434b-8b55-7af5dbac2527 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2324.474389] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-ea96f37b-0755-4740-9b8a-a16db021caec tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] [instance: 19e10ee4-99d1-44b9-9354-4c162d541a1f] Reconfiguring VM instance instance-00000062 to attach disk [datastore1] volume-7aed4a6d-1348-48e4-857d-eed399e897d8/volume-7aed4a6d-1348-48e4-857d-eed399e897d8.vmdk or device None with type thin {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2324.475160] env[63279]: DEBUG nova.compute.manager [None req-8f3e2186-2a44-4b2b-b8bb-3a857a77b580 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] [instance: b7123d9a-bcaa-43c4-ac9f-982b3b146eb0] Start building block device mappings for instance. 
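The volume attach above boils down to a ReconfigVM_Task whose device-change spec adds a virtual disk backed by the volume's vmdk (thin-provisioned, on an existing controller). A conceptual sketch using plain dicts and a hypothetical session wrapper; the real driver builds vSphere SOAP objects instead:

```python
# Conceptual attach-volume spec: add a disk device pointing at the volume vmdk.
def build_attach_disk_spec(vmdk_path, controller_key, unit_number, disk_type="thin"):
    return {
        "deviceChange": [{
            "operation": "add",
            "device": {
                "backing": {
                    # e.g. "[datastore1] volume-.../volume-....vmdk"
                    "fileName": vmdk_path,
                    "diskMode": "persistent",
                    "thinProvisioned": disk_type == "thin",
                },
                "controllerKey": controller_key,
                "unitNumber": unit_number,
            },
        }]
    }


def attach_volume(session, vm_ref, vmdk_path):
    spec = build_attach_disk_spec(vmdk_path, controller_key=1000, unit_number=1)
    task = session.reconfigure_vm(vm_ref, spec)   # hypothetical ReconfigVM wrapper
    session.wait_for_task(task)
```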
{{(pid=63279) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 2324.478907] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3baa640d-3464-4aa4-b810-f0c3c1afed05 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2324.501253] env[63279]: DEBUG nova.virt.hardware [None req-12baa868-1215-4292-913a-54b1657dbe5e tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-13T17:30:21Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=30887889-e45b-4f67-8b3c-16216e594a90,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2324.501253] env[63279]: DEBUG nova.virt.hardware [None req-12baa868-1215-4292-913a-54b1657dbe5e tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Flavor limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2324.501253] env[63279]: DEBUG nova.virt.hardware [None req-12baa868-1215-4292-913a-54b1657dbe5e tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Image limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2324.501467] env[63279]: DEBUG nova.virt.hardware [None req-12baa868-1215-4292-913a-54b1657dbe5e tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Flavor pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2324.501604] env[63279]: DEBUG nova.virt.hardware [None req-12baa868-1215-4292-913a-54b1657dbe5e tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Image pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2324.501864] env[63279]: DEBUG nova.virt.hardware [None req-12baa868-1215-4292-913a-54b1657dbe5e tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2324.502026] env[63279]: DEBUG nova.virt.hardware [None req-12baa868-1215-4292-913a-54b1657dbe5e tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 2324.502191] env[63279]: DEBUG nova.virt.hardware [None req-12baa868-1215-4292-913a-54b1657dbe5e tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63279) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:475}} [ 2324.502357] env[63279]: DEBUG nova.virt.hardware [None req-12baa868-1215-4292-913a-54b1657dbe5e tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Got 1 possible topologies {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2324.502519] env[63279]: DEBUG nova.virt.hardware [None req-12baa868-1215-4292-913a-54b1657dbe5e tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2324.502829] env[63279]: DEBUG nova.virt.hardware [None req-12baa868-1215-4292-913a-54b1657dbe5e tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2324.510162] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-900461f4-4a32-4a4b-8e81-42aca83a69c0 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2324.523906] env[63279]: DEBUG oslo_vmware.api [None req-ea96f37b-0755-4740-9b8a-a16db021caec tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Waiting for the task: (returnval){ [ 2324.523906] env[63279]: value = "task-2088044" [ 2324.523906] env[63279]: _type = "Task" [ 2324.523906] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2324.538675] env[63279]: DEBUG oslo_vmware.api [None req-12baa868-1215-4292-913a-54b1657dbe5e tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Waiting for the task: (returnval){ [ 2324.538675] env[63279]: value = "task-2088045" [ 2324.538675] env[63279]: _type = "Task" [ 2324.538675] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2324.548293] env[63279]: DEBUG oslo_vmware.api [None req-ea96f37b-0755-4740-9b8a-a16db021caec tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Task: {'id': task-2088044, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2324.555240] env[63279]: DEBUG oslo_vmware.api [None req-12baa868-1215-4292-913a-54b1657dbe5e tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Task: {'id': task-2088045, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2324.621301] env[63279]: DEBUG nova.network.neutron [None req-8f3e2186-2a44-4b2b-b8bb-3a857a77b580 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] [instance: b7123d9a-bcaa-43c4-ac9f-982b3b146eb0] Successfully created port: 57c5b936-4022-4267-bafa-19945e91592b {{(pid=63279) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2324.683793] env[63279]: DEBUG oslo_vmware.api [None req-b005ee8f-e9e3-4447-976a-ba049f2d4563 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Task: {'id': task-2088043, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.468411} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2324.684102] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-b005ee8f-e9e3-4447-976a-ba049f2d4563 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] 1fca4e5c-fe2c-4b61-bed4-52c7770def7c/1fca4e5c-fe2c-4b61-bed4-52c7770def7c.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2324.684331] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-b005ee8f-e9e3-4447-976a-ba049f2d4563 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: 1fca4e5c-fe2c-4b61-bed4-52c7770def7c] Extending root virtual disk to 1048576 {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2324.684606] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-67240732-cda9-4fd1-9da5-7a5fea35dad6 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2324.692933] env[63279]: DEBUG oslo_vmware.api [None req-b005ee8f-e9e3-4447-976a-ba049f2d4563 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Waiting for the task: (returnval){ [ 2324.692933] env[63279]: value = "task-2088046" [ 2324.692933] env[63279]: _type = "Task" [ 2324.692933] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2324.698094] env[63279]: DEBUG oslo_vmware.api [None req-345ecece-5783-4496-827f-9af605e0dfd9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52ecbd8e-26fc-7721-bf63-59e1fa8ff2c6, 'name': SearchDatastore_Task, 'duration_secs': 0.008222} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2324.702492] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-208770f5-4ca8-4276-9f7e-15356ea4b095 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2324.711463] env[63279]: DEBUG oslo_vmware.api [None req-345ecece-5783-4496-827f-9af605e0dfd9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Waiting for the task: (returnval){ [ 2324.711463] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]5270deaf-bb07-83a2-5b23-d96d785bec82" [ 2324.711463] env[63279]: _type = "Task" [ 2324.711463] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2324.714740] env[63279]: DEBUG oslo_vmware.api [None req-b005ee8f-e9e3-4447-976a-ba049f2d4563 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Task: {'id': task-2088046, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2324.724282] env[63279]: DEBUG oslo_vmware.api [None req-345ecece-5783-4496-827f-9af605e0dfd9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]5270deaf-bb07-83a2-5b23-d96d785bec82, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2324.778241] env[63279]: DEBUG nova.scheduler.client.report [None req-ba4b1409-29ce-4938-83f7-10d9c1416631 tempest-ServersTestFqdnHostnames-1241255271 tempest-ServersTestFqdnHostnames-1241255271-project-member] Inventory has not changed for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2324.819258] env[63279]: DEBUG nova.network.neutron [req-5ddcbe33-2c20-434e-bbab-51b7cb0e72c1 req-00daf220-619c-4e7b-a30f-3c8a69005e86 service nova] [instance: 6f839780-be92-4d99-a96d-1fc14c819599] Updated VIF entry in instance network info cache for port d0192def-50a0-40c0-9921-fbdf13e63ffb. 
{{(pid=63279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2324.819532] env[63279]: DEBUG nova.network.neutron [req-5ddcbe33-2c20-434e-bbab-51b7cb0e72c1 req-00daf220-619c-4e7b-a30f-3c8a69005e86 service nova] [instance: 6f839780-be92-4d99-a96d-1fc14c819599] Updating instance_info_cache with network_info: [{"id": "d0192def-50a0-40c0-9921-fbdf13e63ffb", "address": "fa:16:3e:0c:f0:a6", "network": {"id": "e0e614b7-de4b-485e-8824-582faae8febd", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1102025073-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b5b21bc5072e4945a19a782dd9561709", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "41278529-8bd2-44a1-97c8-03967faa3ff7", "external-id": "nsx-vlan-transportzone-749", "segmentation_id": 749, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd0192def-50", "ovs_interfaceid": "d0192def-50a0-40c0-9921-fbdf13e63ffb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2325.036626] env[63279]: DEBUG oslo_vmware.api [None req-ea96f37b-0755-4740-9b8a-a16db021caec tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Task: {'id': task-2088044, 'name': ReconfigVM_Task, 'duration_secs': 0.45002} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2325.036934] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-ea96f37b-0755-4740-9b8a-a16db021caec tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] [instance: 19e10ee4-99d1-44b9-9354-4c162d541a1f] Reconfigured VM instance instance-00000062 to attach disk [datastore1] volume-7aed4a6d-1348-48e4-857d-eed399e897d8/volume-7aed4a6d-1348-48e4-857d-eed399e897d8.vmdk or device None with type thin {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2325.044023] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-cbcdcbe7-b1d0-41e3-ac9b-3d7102c4efdd {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2325.068357] env[63279]: DEBUG oslo_vmware.api [None req-12baa868-1215-4292-913a-54b1657dbe5e tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Task: {'id': task-2088045, 'name': ReconfigVM_Task, 'duration_secs': 0.176807} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2325.069815] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-12baa868-1215-4292-913a-54b1657dbe5e tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 686a7ce2-2d07-411e-91d6-0471c55c3728] Updating instance '686a7ce2-2d07-411e-91d6-0471c55c3728' progress to 33 {{(pid=63279) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2325.074174] env[63279]: DEBUG oslo_vmware.api [None req-ea96f37b-0755-4740-9b8a-a16db021caec tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Waiting for the task: (returnval){ [ 2325.074174] env[63279]: value = "task-2088048" [ 2325.074174] env[63279]: _type = "Task" [ 2325.074174] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2325.086074] env[63279]: DEBUG oslo_vmware.api [None req-ea96f37b-0755-4740-9b8a-a16db021caec tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Task: {'id': task-2088048, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2325.209041] env[63279]: DEBUG oslo_vmware.api [None req-b005ee8f-e9e3-4447-976a-ba049f2d4563 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Task: {'id': task-2088046, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.064767} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2325.209329] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-b005ee8f-e9e3-4447-976a-ba049f2d4563 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: 1fca4e5c-fe2c-4b61-bed4-52c7770def7c] Extended root virtual disk {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2325.210124] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00af7da9-db32-46aa-9ff8-4678ece74125 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2325.233218] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-b005ee8f-e9e3-4447-976a-ba049f2d4563 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: 1fca4e5c-fe2c-4b61-bed4-52c7770def7c] Reconfiguring VM instance instance-00000061 to attach disk [datastore1] 1fca4e5c-fe2c-4b61-bed4-52c7770def7c/1fca4e5c-fe2c-4b61-bed4-52c7770def7c.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2325.236586] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9169e063-e16c-4c71-9b93-4bb3593f8347 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2325.257137] env[63279]: DEBUG oslo_vmware.api [None req-345ecece-5783-4496-827f-9af605e0dfd9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]5270deaf-bb07-83a2-5b23-d96d785bec82, 'name': SearchDatastore_Task, 'duration_secs': 0.00986} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2325.258418] env[63279]: DEBUG oslo_concurrency.lockutils [None req-345ecece-5783-4496-827f-9af605e0dfd9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2325.258708] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-345ecece-5783-4496-827f-9af605e0dfd9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] efddec10-b496-446e-a05a-72c9f2d86ed9/efddec10-b496-446e-a05a-72c9f2d86ed9.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2325.259046] env[63279]: DEBUG oslo_vmware.api [None req-b005ee8f-e9e3-4447-976a-ba049f2d4563 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Waiting for the task: (returnval){ [ 2325.259046] env[63279]: value = "task-2088049" [ 2325.259046] env[63279]: _type = "Task" [ 2325.259046] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2325.259263] env[63279]: DEBUG oslo_concurrency.lockutils [None req-021f3c12-e37e-456c-8801-ea10fa46af88 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2325.259451] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-021f3c12-e37e-456c-8801-ea10fa46af88 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2325.259666] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d91ac3ae-85d5-4672-9a3f-4614dd225c91 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2325.261797] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2ce1c64b-528f-4515-88bb-5999c9bb0e0c {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2325.271382] env[63279]: DEBUG oslo_vmware.api [None req-b005ee8f-e9e3-4447-976a-ba049f2d4563 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Task: {'id': task-2088049, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2325.272552] env[63279]: DEBUG oslo_vmware.api [None req-345ecece-5783-4496-827f-9af605e0dfd9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Waiting for the task: (returnval){ [ 2325.272552] env[63279]: value = "task-2088050" [ 2325.272552] env[63279]: _type = "Task" [ 2325.272552] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2325.276269] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-021f3c12-e37e-456c-8801-ea10fa46af88 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2325.276440] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-021f3c12-e37e-456c-8801-ea10fa46af88 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2325.277483] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a690a217-75a7-4f07-9ae3-c5107dcbaf1c {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2325.282805] env[63279]: DEBUG oslo_vmware.api [None req-345ecece-5783-4496-827f-9af605e0dfd9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Task: {'id': task-2088050, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2325.286132] env[63279]: DEBUG oslo_concurrency.lockutils [None req-ba4b1409-29ce-4938-83f7-10d9c1416631 tempest-ServersTestFqdnHostnames-1241255271 tempest-ServersTestFqdnHostnames-1241255271-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.820s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2325.288439] env[63279]: DEBUG oslo_vmware.api [None req-021f3c12-e37e-456c-8801-ea10fa46af88 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Waiting for the task: (returnval){ [ 2325.288439] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]521816b6-dbae-7258-d16c-0a91c122e945" [ 2325.288439] env[63279]: _type = "Task" [ 2325.288439] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2325.297687] env[63279]: DEBUG oslo_vmware.api [None req-021f3c12-e37e-456c-8801-ea10fa46af88 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]521816b6-dbae-7258-d16c-0a91c122e945, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2325.310218] env[63279]: INFO nova.scheduler.client.report [None req-ba4b1409-29ce-4938-83f7-10d9c1416631 tempest-ServersTestFqdnHostnames-1241255271 tempest-ServersTestFqdnHostnames-1241255271-project-member] Deleted allocations for instance 27e2917d-3cd0-4ad3-ab65-f85f7d97225f [ 2325.322387] env[63279]: DEBUG oslo_concurrency.lockutils [req-5ddcbe33-2c20-434e-bbab-51b7cb0e72c1 req-00daf220-619c-4e7b-a30f-3c8a69005e86 service nova] Releasing lock "refresh_cache-6f839780-be92-4d99-a96d-1fc14c819599" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2325.486134] env[63279]: DEBUG nova.compute.manager [None req-8f3e2186-2a44-4b2b-b8bb-3a857a77b580 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] [instance: b7123d9a-bcaa-43c4-ac9f-982b3b146eb0] Start spawning the instance on the hypervisor. {{(pid=63279) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 2325.514788] env[63279]: DEBUG nova.virt.hardware [None req-8f3e2186-2a44-4b2b-b8bb-3a857a77b580 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-13T17:30:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-13T17:30:01Z,direct_url=,disk_format='vmdk',id=30887889-e45b-4f67-8b3c-16216e594a90,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a935e86eee0a4c38adfe0367d2097a61',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-13T17:30:02Z,virtual_size=,visibility=), allow threads: False {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2325.515091] env[63279]: DEBUG nova.virt.hardware [None req-8f3e2186-2a44-4b2b-b8bb-3a857a77b580 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Flavor limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2325.515258] env[63279]: DEBUG nova.virt.hardware [None req-8f3e2186-2a44-4b2b-b8bb-3a857a77b580 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Image limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2325.515446] env[63279]: DEBUG nova.virt.hardware [None req-8f3e2186-2a44-4b2b-b8bb-3a857a77b580 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Flavor pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2325.515599] env[63279]: DEBUG nova.virt.hardware [None req-8f3e2186-2a44-4b2b-b8bb-3a857a77b580 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Image pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2325.516050] env[63279]: DEBUG nova.virt.hardware [None req-8f3e2186-2a44-4b2b-b8bb-3a857a77b580 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Chose sockets=0, cores=0, threads=0; limits were 
sockets=65536, cores=65536, threads=65536 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2325.516384] env[63279]: DEBUG nova.virt.hardware [None req-8f3e2186-2a44-4b2b-b8bb-3a857a77b580 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 2325.516517] env[63279]: DEBUG nova.virt.hardware [None req-8f3e2186-2a44-4b2b-b8bb-3a857a77b580 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 2325.516754] env[63279]: DEBUG nova.virt.hardware [None req-8f3e2186-2a44-4b2b-b8bb-3a857a77b580 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Got 1 possible topologies {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2325.517077] env[63279]: DEBUG nova.virt.hardware [None req-8f3e2186-2a44-4b2b-b8bb-3a857a77b580 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2325.517144] env[63279]: DEBUG nova.virt.hardware [None req-8f3e2186-2a44-4b2b-b8bb-3a857a77b580 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2325.518113] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee4cdfe3-db5b-4a91-be9b-502833c2d5bd {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2325.527958] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9c5e5fb-6162-4984-a32f-57097329dddb {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2325.578610] env[63279]: DEBUG nova.virt.hardware [None req-12baa868-1215-4292-913a-54b1657dbe5e tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-13T17:30:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=30887889-e45b-4f67-8b3c-16216e594a90,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2325.579256] env[63279]: DEBUG nova.virt.hardware [None req-12baa868-1215-4292-913a-54b1657dbe5e tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Flavor limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:352}} [ 2325.579465] env[63279]: DEBUG nova.virt.hardware [None req-12baa868-1215-4292-913a-54b1657dbe5e tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Image limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2325.580020] env[63279]: DEBUG nova.virt.hardware [None req-12baa868-1215-4292-913a-54b1657dbe5e tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Flavor pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2325.580116] env[63279]: DEBUG nova.virt.hardware [None req-12baa868-1215-4292-913a-54b1657dbe5e tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Image pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2325.580538] env[63279]: DEBUG nova.virt.hardware [None req-12baa868-1215-4292-913a-54b1657dbe5e tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2325.580817] env[63279]: DEBUG nova.virt.hardware [None req-12baa868-1215-4292-913a-54b1657dbe5e tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 2325.581011] env[63279]: DEBUG nova.virt.hardware [None req-12baa868-1215-4292-913a-54b1657dbe5e tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 2325.581201] env[63279]: DEBUG nova.virt.hardware [None req-12baa868-1215-4292-913a-54b1657dbe5e tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Got 1 possible topologies {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2325.582652] env[63279]: DEBUG nova.virt.hardware [None req-12baa868-1215-4292-913a-54b1657dbe5e tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2325.582652] env[63279]: DEBUG nova.virt.hardware [None req-12baa868-1215-4292-913a-54b1657dbe5e tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2325.588477] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-12baa868-1215-4292-913a-54b1657dbe5e tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 686a7ce2-2d07-411e-91d6-0471c55c3728] Reconfiguring VM instance instance-00000051 to detach disk 2000 {{(pid=63279) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 2325.588525] env[63279]: DEBUG 
oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2a4d9259-e1ed-4ddb-b644-aa0d568ee44b {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2325.613759] env[63279]: DEBUG oslo_vmware.api [None req-ea96f37b-0755-4740-9b8a-a16db021caec tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Task: {'id': task-2088048, 'name': ReconfigVM_Task, 'duration_secs': 0.144437} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2325.615400] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-ea96f37b-0755-4740-9b8a-a16db021caec tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] [instance: 19e10ee4-99d1-44b9-9354-4c162d541a1f] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-427776', 'volume_id': '7aed4a6d-1348-48e4-857d-eed399e897d8', 'name': 'volume-7aed4a6d-1348-48e4-857d-eed399e897d8', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '19e10ee4-99d1-44b9-9354-4c162d541a1f', 'attached_at': '', 'detached_at': '', 'volume_id': '7aed4a6d-1348-48e4-857d-eed399e897d8', 'serial': '7aed4a6d-1348-48e4-857d-eed399e897d8'} {{(pid=63279) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 2325.616950] env[63279]: DEBUG oslo_vmware.api [None req-12baa868-1215-4292-913a-54b1657dbe5e tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Waiting for the task: (returnval){ [ 2325.616950] env[63279]: value = "task-2088051" [ 2325.616950] env[63279]: _type = "Task" [ 2325.616950] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2325.625690] env[63279]: DEBUG oslo_vmware.api [None req-12baa868-1215-4292-913a-54b1657dbe5e tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Task: {'id': task-2088051, 'name': ReconfigVM_Task} progress is 10%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2325.771188] env[63279]: DEBUG oslo_vmware.api [None req-b005ee8f-e9e3-4447-976a-ba049f2d4563 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Task: {'id': task-2088049, 'name': ReconfigVM_Task, 'duration_secs': 0.503978} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2325.771500] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-b005ee8f-e9e3-4447-976a-ba049f2d4563 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: 1fca4e5c-fe2c-4b61-bed4-52c7770def7c] Reconfigured VM instance instance-00000061 to attach disk [datastore1] 1fca4e5c-fe2c-4b61-bed4-52c7770def7c/1fca4e5c-fe2c-4b61-bed4-52c7770def7c.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2325.772129] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-65cc6f9c-edaa-4fa9-b069-a8189015f78f {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2325.782615] env[63279]: DEBUG oslo_vmware.api [None req-345ecece-5783-4496-827f-9af605e0dfd9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Task: {'id': task-2088050, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.445907} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2325.784626] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-345ecece-5783-4496-827f-9af605e0dfd9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] efddec10-b496-446e-a05a-72c9f2d86ed9/efddec10-b496-446e-a05a-72c9f2d86ed9.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2325.784926] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-345ecece-5783-4496-827f-9af605e0dfd9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] [instance: efddec10-b496-446e-a05a-72c9f2d86ed9] Extending root virtual disk to 1048576 {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2325.785241] env[63279]: DEBUG oslo_vmware.api [None req-b005ee8f-e9e3-4447-976a-ba049f2d4563 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Waiting for the task: (returnval){ [ 2325.785241] env[63279]: value = "task-2088052" [ 2325.785241] env[63279]: _type = "Task" [ 2325.785241] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2325.785581] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-def28ceb-3f6f-4441-976a-951e68c7a794 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2325.794903] env[63279]: DEBUG oslo_vmware.api [None req-345ecece-5783-4496-827f-9af605e0dfd9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Waiting for the task: (returnval){ [ 2325.794903] env[63279]: value = "task-2088053" [ 2325.794903] env[63279]: _type = "Task" [ 2325.794903] env[63279]: } to complete. 
{{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2325.798798] env[63279]: DEBUG oslo_vmware.api [None req-b005ee8f-e9e3-4447-976a-ba049f2d4563 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Task: {'id': task-2088052, 'name': Rename_Task} progress is 10%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2325.810480] env[63279]: DEBUG oslo_vmware.api [None req-021f3c12-e37e-456c-8801-ea10fa46af88 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]521816b6-dbae-7258-d16c-0a91c122e945, 'name': SearchDatastore_Task, 'duration_secs': 0.013071} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2325.813054] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-61005dcc-3ba1-4ff7-b950-f3c52c499a4b {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2325.825023] env[63279]: DEBUG oslo_vmware.api [None req-345ecece-5783-4496-827f-9af605e0dfd9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Task: {'id': task-2088053, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2325.825023] env[63279]: DEBUG oslo_concurrency.lockutils [None req-ba4b1409-29ce-4938-83f7-10d9c1416631 tempest-ServersTestFqdnHostnames-1241255271 tempest-ServersTestFqdnHostnames-1241255271-project-member] Lock "27e2917d-3cd0-4ad3-ab65-f85f7d97225f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 9.180s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2325.825023] env[63279]: DEBUG oslo_vmware.api [None req-021f3c12-e37e-456c-8801-ea10fa46af88 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Waiting for the task: (returnval){ [ 2325.825023] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52afa14b-8f06-d217-914b-25fc69713f2f" [ 2325.825023] env[63279]: _type = "Task" [ 2325.825023] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2325.835072] env[63279]: DEBUG oslo_vmware.api [None req-021f3c12-e37e-456c-8801-ea10fa46af88 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52afa14b-8f06-d217-914b-25fc69713f2f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2326.129148] env[63279]: DEBUG oslo_vmware.api [None req-12baa868-1215-4292-913a-54b1657dbe5e tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Task: {'id': task-2088051, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2326.296752] env[63279]: DEBUG oslo_vmware.api [None req-b005ee8f-e9e3-4447-976a-ba049f2d4563 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Task: {'id': task-2088052, 'name': Rename_Task, 'duration_secs': 0.14155} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2326.297979] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-b005ee8f-e9e3-4447-976a-ba049f2d4563 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: 1fca4e5c-fe2c-4b61-bed4-52c7770def7c] Powering on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2326.298263] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-7ae834eb-a322-42e4-9ab8-1e43d91d26a9 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2326.308211] env[63279]: DEBUG oslo_vmware.api [None req-345ecece-5783-4496-827f-9af605e0dfd9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Task: {'id': task-2088053, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.069689} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2326.309034] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-345ecece-5783-4496-827f-9af605e0dfd9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] [instance: efddec10-b496-446e-a05a-72c9f2d86ed9] Extended root virtual disk {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2326.309316] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f47437a-7550-4a17-84e8-364d986d5f6d {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2326.314227] env[63279]: DEBUG oslo_vmware.api [None req-b005ee8f-e9e3-4447-976a-ba049f2d4563 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Waiting for the task: (returnval){ [ 2326.314227] env[63279]: value = "task-2088054" [ 2326.314227] env[63279]: _type = "Task" [ 2326.314227] env[63279]: } to complete. 
{{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2326.341027] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-345ecece-5783-4496-827f-9af605e0dfd9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] [instance: efddec10-b496-446e-a05a-72c9f2d86ed9] Reconfiguring VM instance instance-00000068 to attach disk [datastore1] efddec10-b496-446e-a05a-72c9f2d86ed9/efddec10-b496-446e-a05a-72c9f2d86ed9.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2326.344535] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7f5bbc53-e11f-4a82-9a35-7557bc0cd981 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2326.363686] env[63279]: DEBUG oslo_vmware.api [None req-b005ee8f-e9e3-4447-976a-ba049f2d4563 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Task: {'id': task-2088054, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2326.370875] env[63279]: DEBUG oslo_vmware.api [None req-021f3c12-e37e-456c-8801-ea10fa46af88 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52afa14b-8f06-d217-914b-25fc69713f2f, 'name': SearchDatastore_Task, 'duration_secs': 0.009406} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2326.371692] env[63279]: DEBUG oslo_concurrency.lockutils [None req-021f3c12-e37e-456c-8801-ea10fa46af88 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2326.372554] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-021f3c12-e37e-456c-8801-ea10fa46af88 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] 6f839780-be92-4d99-a96d-1fc14c819599/6f839780-be92-4d99-a96d-1fc14c819599.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2326.372554] env[63279]: DEBUG oslo_vmware.api [None req-345ecece-5783-4496-827f-9af605e0dfd9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Waiting for the task: (returnval){ [ 2326.372554] env[63279]: value = "task-2088055" [ 2326.372554] env[63279]: _type = "Task" [ 2326.372554] env[63279]: } to complete. 
{{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2326.372554] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7d28c5e9-b292-4082-a6a0-7793a1f179b6 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2326.383080] env[63279]: DEBUG oslo_vmware.api [None req-345ecece-5783-4496-827f-9af605e0dfd9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Task: {'id': task-2088055, 'name': ReconfigVM_Task} progress is 10%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2326.384372] env[63279]: DEBUG oslo_vmware.api [None req-021f3c12-e37e-456c-8801-ea10fa46af88 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Waiting for the task: (returnval){ [ 2326.384372] env[63279]: value = "task-2088056" [ 2326.384372] env[63279]: _type = "Task" [ 2326.384372] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2326.392179] env[63279]: DEBUG oslo_vmware.api [None req-021f3c12-e37e-456c-8801-ea10fa46af88 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Task: {'id': task-2088056, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2326.426923] env[63279]: DEBUG nova.compute.manager [req-89cd5e0c-3b36-457d-8134-9b1e0ddef7be req-829f7b36-e17c-42d1-8ad1-d82f96624aac service nova] [instance: b7123d9a-bcaa-43c4-ac9f-982b3b146eb0] Received event network-vif-plugged-57c5b936-4022-4267-bafa-19945e91592b {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2326.427176] env[63279]: DEBUG oslo_concurrency.lockutils [req-89cd5e0c-3b36-457d-8134-9b1e0ddef7be req-829f7b36-e17c-42d1-8ad1-d82f96624aac service nova] Acquiring lock "b7123d9a-bcaa-43c4-ac9f-982b3b146eb0-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2326.427392] env[63279]: DEBUG oslo_concurrency.lockutils [req-89cd5e0c-3b36-457d-8134-9b1e0ddef7be req-829f7b36-e17c-42d1-8ad1-d82f96624aac service nova] Lock "b7123d9a-bcaa-43c4-ac9f-982b3b146eb0-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2326.427568] env[63279]: DEBUG oslo_concurrency.lockutils [req-89cd5e0c-3b36-457d-8134-9b1e0ddef7be req-829f7b36-e17c-42d1-8ad1-d82f96624aac service nova] Lock "b7123d9a-bcaa-43c4-ac9f-982b3b146eb0-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2326.427739] env[63279]: DEBUG nova.compute.manager [req-89cd5e0c-3b36-457d-8134-9b1e0ddef7be req-829f7b36-e17c-42d1-8ad1-d82f96624aac service nova] [instance: b7123d9a-bcaa-43c4-ac9f-982b3b146eb0] No waiting events found dispatching network-vif-plugged-57c5b936-4022-4267-bafa-19945e91592b {{(pid=63279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 
2326.427923] env[63279]: WARNING nova.compute.manager [req-89cd5e0c-3b36-457d-8134-9b1e0ddef7be req-829f7b36-e17c-42d1-8ad1-d82f96624aac service nova] [instance: b7123d9a-bcaa-43c4-ac9f-982b3b146eb0] Received unexpected event network-vif-plugged-57c5b936-4022-4267-bafa-19945e91592b for instance with vm_state building and task_state spawning. [ 2326.632086] env[63279]: DEBUG oslo_vmware.api [None req-12baa868-1215-4292-913a-54b1657dbe5e tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Task: {'id': task-2088051, 'name': ReconfigVM_Task} progress is 99%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2326.656506] env[63279]: DEBUG nova.network.neutron [None req-8f3e2186-2a44-4b2b-b8bb-3a857a77b580 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] [instance: b7123d9a-bcaa-43c4-ac9f-982b3b146eb0] Successfully updated port: 57c5b936-4022-4267-bafa-19945e91592b {{(pid=63279) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2326.673785] env[63279]: DEBUG nova.objects.instance [None req-ea96f37b-0755-4740-9b8a-a16db021caec tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Lazy-loading 'flavor' on Instance uuid 19e10ee4-99d1-44b9-9354-4c162d541a1f {{(pid=63279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2326.826185] env[63279]: DEBUG oslo_vmware.api [None req-b005ee8f-e9e3-4447-976a-ba049f2d4563 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Task: {'id': task-2088054, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2326.888951] env[63279]: DEBUG oslo_vmware.api [None req-345ecece-5783-4496-827f-9af605e0dfd9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Task: {'id': task-2088055, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2326.897022] env[63279]: DEBUG oslo_vmware.api [None req-021f3c12-e37e-456c-8801-ea10fa46af88 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Task: {'id': task-2088056, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2327.133468] env[63279]: DEBUG oslo_vmware.api [None req-12baa868-1215-4292-913a-54b1657dbe5e tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Task: {'id': task-2088051, 'name': ReconfigVM_Task, 'duration_secs': 1.184226} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2327.136025] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-12baa868-1215-4292-913a-54b1657dbe5e tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 686a7ce2-2d07-411e-91d6-0471c55c3728] Reconfigured VM instance instance-00000051 to detach disk 2000 {{(pid=63279) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 2327.136025] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55817b8b-e52b-4358-8842-010d3c3eb23b {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2327.167376] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-12baa868-1215-4292-913a-54b1657dbe5e tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 686a7ce2-2d07-411e-91d6-0471c55c3728] Reconfiguring VM instance instance-00000051 to attach disk [datastore1] 686a7ce2-2d07-411e-91d6-0471c55c3728/686a7ce2-2d07-411e-91d6-0471c55c3728.vmdk or device None with type thin {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2327.167376] env[63279]: DEBUG oslo_concurrency.lockutils [None req-8f3e2186-2a44-4b2b-b8bb-3a857a77b580 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Acquiring lock "refresh_cache-b7123d9a-bcaa-43c4-ac9f-982b3b146eb0" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2327.167376] env[63279]: DEBUG oslo_concurrency.lockutils [None req-8f3e2186-2a44-4b2b-b8bb-3a857a77b580 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Acquired lock "refresh_cache-b7123d9a-bcaa-43c4-ac9f-982b3b146eb0" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2327.167376] env[63279]: DEBUG nova.network.neutron [None req-8f3e2186-2a44-4b2b-b8bb-3a857a77b580 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] [instance: b7123d9a-bcaa-43c4-ac9f-982b3b146eb0] Building network info cache for instance {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2327.167702] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0f42d51f-131d-4d37-9a1d-49578229e9be {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2327.189633] env[63279]: DEBUG oslo_concurrency.lockutils [None req-ea96f37b-0755-4740-9b8a-a16db021caec tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Lock "19e10ee4-99d1-44b9-9354-4c162d541a1f" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.388s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2327.196657] env[63279]: DEBUG oslo_vmware.api [None req-12baa868-1215-4292-913a-54b1657dbe5e tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Waiting for the task: (returnval){ [ 2327.196657] env[63279]: value = "task-2088057" [ 2327.196657] env[63279]: _type = "Task" [ 2327.196657] env[63279]: } to complete. 
{{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2327.207633] env[63279]: DEBUG oslo_vmware.api [None req-12baa868-1215-4292-913a-54b1657dbe5e tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Task: {'id': task-2088057, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2327.248471] env[63279]: DEBUG oslo_concurrency.lockutils [None req-25f6e9c5-5a91-435a-ad09-aba142621329 tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Acquiring lock "19e10ee4-99d1-44b9-9354-4c162d541a1f" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2327.248700] env[63279]: DEBUG oslo_concurrency.lockutils [None req-25f6e9c5-5a91-435a-ad09-aba142621329 tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Lock "19e10ee4-99d1-44b9-9354-4c162d541a1f" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.001s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2327.248902] env[63279]: DEBUG nova.compute.manager [None req-25f6e9c5-5a91-435a-ad09-aba142621329 tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] [instance: 19e10ee4-99d1-44b9-9354-4c162d541a1f] Checking state {{(pid=63279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2327.253026] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cdcbacfb-ecd2-4cfd-9613-b7cd0a890d8c {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2327.260496] env[63279]: DEBUG nova.compute.manager [None req-25f6e9c5-5a91-435a-ad09-aba142621329 tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] [instance: 19e10ee4-99d1-44b9-9354-4c162d541a1f] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=63279) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 2327.261195] env[63279]: DEBUG nova.objects.instance [None req-25f6e9c5-5a91-435a-ad09-aba142621329 tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Lazy-loading 'flavor' on Instance uuid 19e10ee4-99d1-44b9-9354-4c162d541a1f {{(pid=63279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2327.283520] env[63279]: DEBUG nova.network.neutron [None req-8f3e2186-2a44-4b2b-b8bb-3a857a77b580 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] [instance: b7123d9a-bcaa-43c4-ac9f-982b3b146eb0] Instance cache missing network info. {{(pid=63279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2327.329481] env[63279]: DEBUG oslo_vmware.api [None req-b005ee8f-e9e3-4447-976a-ba049f2d4563 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Task: {'id': task-2088054, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2327.388664] env[63279]: DEBUG oslo_vmware.api [None req-345ecece-5783-4496-827f-9af605e0dfd9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Task: {'id': task-2088055, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2327.402845] env[63279]: DEBUG oslo_vmware.api [None req-021f3c12-e37e-456c-8801-ea10fa46af88 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Task: {'id': task-2088056, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2327.709884] env[63279]: DEBUG oslo_vmware.api [None req-12baa868-1215-4292-913a-54b1657dbe5e tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Task: {'id': task-2088057, 'name': ReconfigVM_Task, 'duration_secs': 0.326493} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2327.711139] env[63279]: DEBUG nova.network.neutron [None req-8f3e2186-2a44-4b2b-b8bb-3a857a77b580 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] [instance: b7123d9a-bcaa-43c4-ac9f-982b3b146eb0] Updating instance_info_cache with network_info: [{"id": "57c5b936-4022-4267-bafa-19945e91592b", "address": "fa:16:3e:75:ce:6a", "network": {"id": "2caeac4f-4d6f-49f6-ad75-055171bad9b0", "bridge": "br-int", "label": "tempest-ServersTestJSON-1264030443-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4fcef39e334249afb9636455802059c5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9fa933df-d66f-485e-8cf9-eda7f1a7f283", "external-id": "nsx-vlan-transportzone-87", "segmentation_id": 87, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap57c5b936-40", "ovs_interfaceid": "57c5b936-4022-4267-bafa-19945e91592b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2327.716050] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-12baa868-1215-4292-913a-54b1657dbe5e tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 686a7ce2-2d07-411e-91d6-0471c55c3728] Reconfigured VM instance instance-00000051 to attach disk [datastore1] 686a7ce2-2d07-411e-91d6-0471c55c3728/686a7ce2-2d07-411e-91d6-0471c55c3728.vmdk or device None with type thin {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2327.716050] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-12baa868-1215-4292-913a-54b1657dbe5e tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 
686a7ce2-2d07-411e-91d6-0471c55c3728] Updating instance '686a7ce2-2d07-411e-91d6-0471c55c3728' progress to 50 {{(pid=63279) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2327.825529] env[63279]: DEBUG oslo_vmware.api [None req-b005ee8f-e9e3-4447-976a-ba049f2d4563 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Task: {'id': task-2088054, 'name': PowerOnVM_Task, 'duration_secs': 1.362655} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2327.825805] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-b005ee8f-e9e3-4447-976a-ba049f2d4563 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: 1fca4e5c-fe2c-4b61-bed4-52c7770def7c] Powered on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2327.826024] env[63279]: DEBUG nova.compute.manager [None req-b005ee8f-e9e3-4447-976a-ba049f2d4563 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: 1fca4e5c-fe2c-4b61-bed4-52c7770def7c] Checking state {{(pid=63279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2327.826844] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a60e97d-525c-434f-bf90-deed37cf6a03 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2327.888250] env[63279]: DEBUG oslo_vmware.api [None req-345ecece-5783-4496-827f-9af605e0dfd9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Task: {'id': task-2088055, 'name': ReconfigVM_Task, 'duration_secs': 1.203172} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2327.888616] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-345ecece-5783-4496-827f-9af605e0dfd9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] [instance: efddec10-b496-446e-a05a-72c9f2d86ed9] Reconfigured VM instance instance-00000068 to attach disk [datastore1] efddec10-b496-446e-a05a-72c9f2d86ed9/efddec10-b496-446e-a05a-72c9f2d86ed9.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2327.891978] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ff3decba-a590-4093-8b1c-eb5bf2400df2 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2327.899121] env[63279]: DEBUG oslo_vmware.api [None req-021f3c12-e37e-456c-8801-ea10fa46af88 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Task: {'id': task-2088056, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.057021} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2327.900541] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-021f3c12-e37e-456c-8801-ea10fa46af88 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] 6f839780-be92-4d99-a96d-1fc14c819599/6f839780-be92-4d99-a96d-1fc14c819599.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2327.900541] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-021f3c12-e37e-456c-8801-ea10fa46af88 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] [instance: 6f839780-be92-4d99-a96d-1fc14c819599] Extending root virtual disk to 1048576 {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2327.900804] env[63279]: DEBUG oslo_vmware.api [None req-345ecece-5783-4496-827f-9af605e0dfd9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Waiting for the task: (returnval){ [ 2327.900804] env[63279]: value = "task-2088058" [ 2327.900804] env[63279]: _type = "Task" [ 2327.900804] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2327.900988] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a0f92803-c579-4604-8642-b0fd2293162b {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2327.911174] env[63279]: DEBUG oslo_vmware.api [None req-345ecece-5783-4496-827f-9af605e0dfd9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Task: {'id': task-2088058, 'name': Rename_Task} progress is 6%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2327.912543] env[63279]: DEBUG oslo_vmware.api [None req-021f3c12-e37e-456c-8801-ea10fa46af88 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Waiting for the task: (returnval){ [ 2327.912543] env[63279]: value = "task-2088059" [ 2327.912543] env[63279]: _type = "Task" [ 2327.912543] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2327.922669] env[63279]: DEBUG oslo_vmware.api [None req-021f3c12-e37e-456c-8801-ea10fa46af88 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Task: {'id': task-2088059, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2328.219960] env[63279]: DEBUG oslo_concurrency.lockutils [None req-8f3e2186-2a44-4b2b-b8bb-3a857a77b580 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Releasing lock "refresh_cache-b7123d9a-bcaa-43c4-ac9f-982b3b146eb0" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2328.220333] env[63279]: DEBUG nova.compute.manager [None req-8f3e2186-2a44-4b2b-b8bb-3a857a77b580 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] [instance: b7123d9a-bcaa-43c4-ac9f-982b3b146eb0] Instance network_info: |[{"id": "57c5b936-4022-4267-bafa-19945e91592b", "address": "fa:16:3e:75:ce:6a", "network": {"id": "2caeac4f-4d6f-49f6-ad75-055171bad9b0", "bridge": "br-int", "label": "tempest-ServersTestJSON-1264030443-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4fcef39e334249afb9636455802059c5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9fa933df-d66f-485e-8cf9-eda7f1a7f283", "external-id": "nsx-vlan-transportzone-87", "segmentation_id": 87, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap57c5b936-40", "ovs_interfaceid": "57c5b936-4022-4267-bafa-19945e91592b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 2328.226337] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-8f3e2186-2a44-4b2b-b8bb-3a857a77b580 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] [instance: b7123d9a-bcaa-43c4-ac9f-982b3b146eb0] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:75:ce:6a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '9fa933df-d66f-485e-8cf9-eda7f1a7f283', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '57c5b936-4022-4267-bafa-19945e91592b', 'vif_model': 'vmxnet3'}] {{(pid=63279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2328.233999] env[63279]: DEBUG oslo.service.loopingcall [None req-8f3e2186-2a44-4b2b-b8bb-3a857a77b580 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2328.234822] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49b833f5-eded-40c3-9823-95fd8e0aca61 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2328.238059] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b7123d9a-bcaa-43c4-ac9f-982b3b146eb0] Creating VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2328.238323] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-88626a3a-44d1-4bbf-a347-f14aab19993f {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2328.270815] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-25f6e9c5-5a91-435a-ad09-aba142621329 tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] [instance: 19e10ee4-99d1-44b9-9354-4c162d541a1f] Powering off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2328.270815] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a6e0d7d9-a740-4ca8-8f93-dd50406bec01 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2328.272172] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e293038-1f93-42be-a6b0-b2314d2faf00 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2328.276016] env[63279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2328.276016] env[63279]: value = "task-2088060" [ 2328.276016] env[63279]: _type = "Task" [ 2328.276016] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2328.300280] env[63279]: DEBUG oslo_vmware.api [None req-25f6e9c5-5a91-435a-ad09-aba142621329 tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Waiting for the task: (returnval){ [ 2328.300280] env[63279]: value = "task-2088061" [ 2328.300280] env[63279]: _type = "Task" [ 2328.300280] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2328.300705] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-12baa868-1215-4292-913a-54b1657dbe5e tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 686a7ce2-2d07-411e-91d6-0471c55c3728] Updating instance '686a7ce2-2d07-411e-91d6-0471c55c3728' progress to 67 {{(pid=63279) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2328.312195] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2088060, 'name': CreateVM_Task} progress is 25%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2328.317761] env[63279]: DEBUG oslo_vmware.api [None req-25f6e9c5-5a91-435a-ad09-aba142621329 tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Task: {'id': task-2088061, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2328.345414] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b005ee8f-e9e3-4447-976a-ba049f2d4563 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2328.345414] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b005ee8f-e9e3-4447-976a-ba049f2d4563 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2328.345414] env[63279]: DEBUG nova.objects.instance [None req-b005ee8f-e9e3-4447-976a-ba049f2d4563 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: 1fca4e5c-fe2c-4b61-bed4-52c7770def7c] Trying to apply a migration context that does not seem to be set for this instance {{(pid=63279) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 2328.415439] env[63279]: DEBUG oslo_vmware.api [None req-345ecece-5783-4496-827f-9af605e0dfd9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Task: {'id': task-2088058, 'name': Rename_Task} progress is 99%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2328.423343] env[63279]: DEBUG oslo_vmware.api [None req-021f3c12-e37e-456c-8801-ea10fa46af88 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Task: {'id': task-2088059, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.383078} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2328.423607] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-021f3c12-e37e-456c-8801-ea10fa46af88 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] [instance: 6f839780-be92-4d99-a96d-1fc14c819599] Extended root virtual disk {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2328.424454] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6952991c-eb0a-481b-bd6d-b7efda2de393 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2328.448465] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-021f3c12-e37e-456c-8801-ea10fa46af88 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] [instance: 6f839780-be92-4d99-a96d-1fc14c819599] Reconfiguring VM instance instance-00000069 to attach disk [datastore1] 6f839780-be92-4d99-a96d-1fc14c819599/6f839780-be92-4d99-a96d-1fc14c819599.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2328.449410] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-89585e10-18b3-4ff2-829e-e9796557a0b2 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2328.471193] env[63279]: DEBUG oslo_vmware.api [None req-021f3c12-e37e-456c-8801-ea10fa46af88 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Waiting for the task: (returnval){ [ 2328.471193] env[63279]: value = "task-2088062" [ 2328.471193] env[63279]: _type = "Task" [ 2328.471193] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2328.478798] env[63279]: DEBUG oslo_vmware.api [None req-021f3c12-e37e-456c-8801-ea10fa46af88 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Task: {'id': task-2088062, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2328.586429] env[63279]: DEBUG nova.compute.manager [req-6983df09-a98f-473f-8dc7-09481f43265f req-ce3b9e26-5f9a-42d9-97e6-79fee6b55939 service nova] [instance: b7123d9a-bcaa-43c4-ac9f-982b3b146eb0] Received event network-changed-57c5b936-4022-4267-bafa-19945e91592b {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2328.586429] env[63279]: DEBUG nova.compute.manager [req-6983df09-a98f-473f-8dc7-09481f43265f req-ce3b9e26-5f9a-42d9-97e6-79fee6b55939 service nova] [instance: b7123d9a-bcaa-43c4-ac9f-982b3b146eb0] Refreshing instance network info cache due to event network-changed-57c5b936-4022-4267-bafa-19945e91592b. 
{{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11655}} [ 2328.586429] env[63279]: DEBUG oslo_concurrency.lockutils [req-6983df09-a98f-473f-8dc7-09481f43265f req-ce3b9e26-5f9a-42d9-97e6-79fee6b55939 service nova] Acquiring lock "refresh_cache-b7123d9a-bcaa-43c4-ac9f-982b3b146eb0" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2328.586429] env[63279]: DEBUG oslo_concurrency.lockutils [req-6983df09-a98f-473f-8dc7-09481f43265f req-ce3b9e26-5f9a-42d9-97e6-79fee6b55939 service nova] Acquired lock "refresh_cache-b7123d9a-bcaa-43c4-ac9f-982b3b146eb0" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2328.586429] env[63279]: DEBUG nova.network.neutron [req-6983df09-a98f-473f-8dc7-09481f43265f req-ce3b9e26-5f9a-42d9-97e6-79fee6b55939 service nova] [instance: b7123d9a-bcaa-43c4-ac9f-982b3b146eb0] Refreshing network info cache for port 57c5b936-4022-4267-bafa-19945e91592b {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2328.787410] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2088060, 'name': CreateVM_Task} progress is 99%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2328.818199] env[63279]: DEBUG oslo_vmware.api [None req-25f6e9c5-5a91-435a-ad09-aba142621329 tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Task: {'id': task-2088061, 'name': PowerOffVM_Task, 'duration_secs': 0.403457} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2328.818538] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-25f6e9c5-5a91-435a-ad09-aba142621329 tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] [instance: 19e10ee4-99d1-44b9-9354-4c162d541a1f] Powered off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2328.818750] env[63279]: DEBUG nova.compute.manager [None req-25f6e9c5-5a91-435a-ad09-aba142621329 tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] [instance: 19e10ee4-99d1-44b9-9354-4c162d541a1f] Checking state {{(pid=63279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2328.819654] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4338d269-8361-4d3e-a6e0-31499bb0ba28 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2328.878841] env[63279]: DEBUG nova.network.neutron [None req-12baa868-1215-4292-913a-54b1657dbe5e tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 686a7ce2-2d07-411e-91d6-0471c55c3728] Port 55a22eb2-68fd-4cc1-8372-6fed483f16d0 binding to destination host cpu-1 is already ACTIVE {{(pid=63279) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 2328.916597] env[63279]: DEBUG oslo_vmware.api [None req-345ecece-5783-4496-827f-9af605e0dfd9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Task: {'id': task-2088058, 'name': Rename_Task, 'duration_secs': 0.531837} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2328.917227] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-345ecece-5783-4496-827f-9af605e0dfd9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] [instance: efddec10-b496-446e-a05a-72c9f2d86ed9] Powering on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2328.917525] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e72d2c50-9178-4d2b-81ee-92ca644f41da {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2328.923986] env[63279]: DEBUG oslo_vmware.api [None req-345ecece-5783-4496-827f-9af605e0dfd9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Waiting for the task: (returnval){ [ 2328.923986] env[63279]: value = "task-2088063" [ 2328.923986] env[63279]: _type = "Task" [ 2328.923986] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2328.937159] env[63279]: DEBUG oslo_vmware.api [None req-345ecece-5783-4496-827f-9af605e0dfd9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Task: {'id': task-2088063, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2328.979729] env[63279]: DEBUG oslo_vmware.api [None req-021f3c12-e37e-456c-8801-ea10fa46af88 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Task: {'id': task-2088062, 'name': ReconfigVM_Task, 'duration_secs': 0.357945} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2328.981184] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-021f3c12-e37e-456c-8801-ea10fa46af88 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] [instance: 6f839780-be92-4d99-a96d-1fc14c819599] Reconfigured VM instance instance-00000069 to attach disk [datastore1] 6f839780-be92-4d99-a96d-1fc14c819599/6f839780-be92-4d99-a96d-1fc14c819599.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2328.981184] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-8e94d3e5-1788-4e0c-a9f4-7a711bf177bd {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2328.986688] env[63279]: DEBUG oslo_vmware.api [None req-021f3c12-e37e-456c-8801-ea10fa46af88 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Waiting for the task: (returnval){ [ 2328.986688] env[63279]: value = "task-2088064" [ 2328.986688] env[63279]: _type = "Task" [ 2328.986688] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2328.995411] env[63279]: DEBUG oslo_vmware.api [None req-021f3c12-e37e-456c-8801-ea10fa46af88 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Task: {'id': task-2088064, 'name': Rename_Task} progress is 5%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2329.293698] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2088060, 'name': CreateVM_Task, 'duration_secs': 0.530417} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2329.294062] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b7123d9a-bcaa-43c4-ac9f-982b3b146eb0] Created VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2329.295315] env[63279]: DEBUG oslo_concurrency.lockutils [None req-8f3e2186-2a44-4b2b-b8bb-3a857a77b580 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2329.297026] env[63279]: DEBUG oslo_concurrency.lockutils [None req-8f3e2186-2a44-4b2b-b8bb-3a857a77b580 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2329.297026] env[63279]: DEBUG oslo_concurrency.lockutils [None req-8f3e2186-2a44-4b2b-b8bb-3a857a77b580 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2329.297026] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5f92ac5f-df57-4bf8-b090-0d57b402bb2f {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2329.304015] env[63279]: DEBUG oslo_vmware.api [None req-8f3e2186-2a44-4b2b-b8bb-3a857a77b580 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Waiting for the task: (returnval){ [ 2329.304015] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52622eac-6ab2-7115-6e1d-127abdeb844b" [ 2329.304015] env[63279]: _type = "Task" [ 2329.304015] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2329.312603] env[63279]: DEBUG oslo_vmware.api [None req-8f3e2186-2a44-4b2b-b8bb-3a857a77b580 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52622eac-6ab2-7115-6e1d-127abdeb844b, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2329.334885] env[63279]: DEBUG oslo_concurrency.lockutils [None req-25f6e9c5-5a91-435a-ad09-aba142621329 tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Lock "19e10ee4-99d1-44b9-9354-4c162d541a1f" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.086s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2329.352568] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b005ee8f-e9e3-4447-976a-ba049f2d4563 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.008s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2329.437053] env[63279]: DEBUG oslo_vmware.api [None req-345ecece-5783-4496-827f-9af605e0dfd9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Task: {'id': task-2088063, 'name': PowerOnVM_Task} progress is 100%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2329.488921] env[63279]: DEBUG nova.network.neutron [req-6983df09-a98f-473f-8dc7-09481f43265f req-ce3b9e26-5f9a-42d9-97e6-79fee6b55939 service nova] [instance: b7123d9a-bcaa-43c4-ac9f-982b3b146eb0] Updated VIF entry in instance network info cache for port 57c5b936-4022-4267-bafa-19945e91592b. {{(pid=63279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2329.489293] env[63279]: DEBUG nova.network.neutron [req-6983df09-a98f-473f-8dc7-09481f43265f req-ce3b9e26-5f9a-42d9-97e6-79fee6b55939 service nova] [instance: b7123d9a-bcaa-43c4-ac9f-982b3b146eb0] Updating instance_info_cache with network_info: [{"id": "57c5b936-4022-4267-bafa-19945e91592b", "address": "fa:16:3e:75:ce:6a", "network": {"id": "2caeac4f-4d6f-49f6-ad75-055171bad9b0", "bridge": "br-int", "label": "tempest-ServersTestJSON-1264030443-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4fcef39e334249afb9636455802059c5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9fa933df-d66f-485e-8cf9-eda7f1a7f283", "external-id": "nsx-vlan-transportzone-87", "segmentation_id": 87, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap57c5b936-40", "ovs_interfaceid": "57c5b936-4022-4267-bafa-19945e91592b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2329.505095] env[63279]: DEBUG oslo_vmware.api [None req-021f3c12-e37e-456c-8801-ea10fa46af88 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Task: {'id': task-2088064, 'name': Rename_Task, 'duration_secs': 0.143313} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2329.505418] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-021f3c12-e37e-456c-8801-ea10fa46af88 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] [instance: 6f839780-be92-4d99-a96d-1fc14c819599] Powering on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2329.505750] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-09ed4e2b-37da-446b-98ce-17d7708af230 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2329.513410] env[63279]: DEBUG oslo_vmware.api [None req-021f3c12-e37e-456c-8801-ea10fa46af88 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Waiting for the task: (returnval){ [ 2329.513410] env[63279]: value = "task-2088065" [ 2329.513410] env[63279]: _type = "Task" [ 2329.513410] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2329.528317] env[63279]: DEBUG oslo_vmware.api [None req-021f3c12-e37e-456c-8801-ea10fa46af88 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Task: {'id': task-2088065, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2329.814598] env[63279]: DEBUG oslo_vmware.api [None req-8f3e2186-2a44-4b2b-b8bb-3a857a77b580 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52622eac-6ab2-7115-6e1d-127abdeb844b, 'name': SearchDatastore_Task, 'duration_secs': 0.012267} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2329.815619] env[63279]: DEBUG oslo_concurrency.lockutils [None req-8f3e2186-2a44-4b2b-b8bb-3a857a77b580 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2329.816047] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-8f3e2186-2a44-4b2b-b8bb-3a857a77b580 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] [instance: b7123d9a-bcaa-43c4-ac9f-982b3b146eb0] Processing image 30887889-e45b-4f67-8b3c-16216e594a90 {{(pid=63279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2329.816514] env[63279]: DEBUG oslo_concurrency.lockutils [None req-8f3e2186-2a44-4b2b-b8bb-3a857a77b580 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2329.817144] env[63279]: DEBUG oslo_concurrency.lockutils [None req-8f3e2186-2a44-4b2b-b8bb-3a857a77b580 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2329.820017] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-8f3e2186-2a44-4b2b-b8bb-3a857a77b580 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2329.820017] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-232af0a2-54b3-4a22-bd62-8b6beb48c313 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2329.833118] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-8f3e2186-2a44-4b2b-b8bb-3a857a77b580 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2329.833118] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-8f3e2186-2a44-4b2b-b8bb-3a857a77b580 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2329.833118] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a74d6c63-3dc8-43c8-ac28-348b11bf62ae {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2329.838778] env[63279]: DEBUG oslo_vmware.api [None req-8f3e2186-2a44-4b2b-b8bb-3a857a77b580 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Waiting for the task: (returnval){ [ 2329.838778] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]5241f0e0-4aaa-452a-593c-79c385e11297" [ 2329.838778] env[63279]: _type = "Task" [ 2329.838778] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2329.849890] env[63279]: DEBUG oslo_vmware.api [None req-8f3e2186-2a44-4b2b-b8bb-3a857a77b580 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]5241f0e0-4aaa-452a-593c-79c385e11297, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2329.905782] env[63279]: DEBUG oslo_concurrency.lockutils [None req-12baa868-1215-4292-913a-54b1657dbe5e tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Acquiring lock "686a7ce2-2d07-411e-91d6-0471c55c3728-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2329.906222] env[63279]: DEBUG oslo_concurrency.lockutils [None req-12baa868-1215-4292-913a-54b1657dbe5e tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Lock "686a7ce2-2d07-411e-91d6-0471c55c3728-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2329.906548] env[63279]: DEBUG oslo_concurrency.lockutils [None req-12baa868-1215-4292-913a-54b1657dbe5e tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Lock "686a7ce2-2d07-411e-91d6-0471c55c3728-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2329.934306] env[63279]: DEBUG oslo_vmware.api [None req-345ecece-5783-4496-827f-9af605e0dfd9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Task: {'id': task-2088063, 'name': PowerOnVM_Task, 'duration_secs': 0.527952} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2329.934488] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-345ecece-5783-4496-827f-9af605e0dfd9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] [instance: efddec10-b496-446e-a05a-72c9f2d86ed9] Powered on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2329.934736] env[63279]: INFO nova.compute.manager [None req-345ecece-5783-4496-827f-9af605e0dfd9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] [instance: efddec10-b496-446e-a05a-72c9f2d86ed9] Took 15.06 seconds to spawn the instance on the hypervisor. [ 2329.934866] env[63279]: DEBUG nova.compute.manager [None req-345ecece-5783-4496-827f-9af605e0dfd9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] [instance: efddec10-b496-446e-a05a-72c9f2d86ed9] Checking state {{(pid=63279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2329.935798] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f1135a0-f0d3-4cc4-92d9-ce8c379f969a {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2329.995371] env[63279]: DEBUG oslo_concurrency.lockutils [req-6983df09-a98f-473f-8dc7-09481f43265f req-ce3b9e26-5f9a-42d9-97e6-79fee6b55939 service nova] Releasing lock "refresh_cache-b7123d9a-bcaa-43c4-ac9f-982b3b146eb0" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2330.024270] env[63279]: DEBUG oslo_vmware.api [None req-021f3c12-e37e-456c-8801-ea10fa46af88 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Task: {'id': task-2088065, 'name': PowerOnVM_Task} progress is 81%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2330.174029] env[63279]: DEBUG oslo_concurrency.lockutils [None req-9a68ca5f-01df-419f-920f-a339f9c06164 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Acquiring lock "2fba60ba-7ef0-4e61-9f7d-147cad8ab7ed" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2330.174331] env[63279]: DEBUG oslo_concurrency.lockutils [None req-9a68ca5f-01df-419f-920f-a339f9c06164 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Lock "2fba60ba-7ef0-4e61-9f7d-147cad8ab7ed" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2330.350871] env[63279]: DEBUG oslo_vmware.api [None req-8f3e2186-2a44-4b2b-b8bb-3a857a77b580 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]5241f0e0-4aaa-452a-593c-79c385e11297, 'name': SearchDatastore_Task, 'duration_secs': 0.017759} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2330.351814] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3f86417c-744f-4419-80ae-964b18c2e84b {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2330.358153] env[63279]: DEBUG oslo_vmware.api [None req-8f3e2186-2a44-4b2b-b8bb-3a857a77b580 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Waiting for the task: (returnval){ [ 2330.358153] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]5234531b-abd2-52a4-82b6-ebb177d8a715" [ 2330.358153] env[63279]: _type = "Task" [ 2330.358153] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2330.368140] env[63279]: DEBUG nova.objects.instance [None req-9d2191dd-fe1c-48cc-9f2b-ebd588c1ee49 tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Lazy-loading 'flavor' on Instance uuid 19e10ee4-99d1-44b9-9354-4c162d541a1f {{(pid=63279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2330.369692] env[63279]: DEBUG oslo_vmware.api [None req-8f3e2186-2a44-4b2b-b8bb-3a857a77b580 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]5234531b-abd2-52a4-82b6-ebb177d8a715, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2330.457646] env[63279]: INFO nova.compute.manager [None req-345ecece-5783-4496-827f-9af605e0dfd9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] [instance: efddec10-b496-446e-a05a-72c9f2d86ed9] Took 26.73 seconds to build instance. [ 2330.524579] env[63279]: DEBUG oslo_vmware.api [None req-021f3c12-e37e-456c-8801-ea10fa46af88 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Task: {'id': task-2088065, 'name': PowerOnVM_Task, 'duration_secs': 0.817988} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2330.524861] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-021f3c12-e37e-456c-8801-ea10fa46af88 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] [instance: 6f839780-be92-4d99-a96d-1fc14c819599] Powered on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2330.525486] env[63279]: INFO nova.compute.manager [None req-021f3c12-e37e-456c-8801-ea10fa46af88 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] [instance: 6f839780-be92-4d99-a96d-1fc14c819599] Took 13.36 seconds to spawn the instance on the hypervisor. 
[ 2330.525734] env[63279]: DEBUG nova.compute.manager [None req-021f3c12-e37e-456c-8801-ea10fa46af88 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] [instance: 6f839780-be92-4d99-a96d-1fc14c819599] Checking state {{(pid=63279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2330.526652] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4efef633-8123-4812-a9f4-c13ab7d60faf {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2330.682387] env[63279]: INFO nova.compute.manager [None req-9a68ca5f-01df-419f-920f-a339f9c06164 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] [instance: 2fba60ba-7ef0-4e61-9f7d-147cad8ab7ed] Detaching volume 34b70c8e-1ea7-4e6f-a6c7-bdb6b24134a9 [ 2330.718663] env[63279]: INFO nova.virt.block_device [None req-9a68ca5f-01df-419f-920f-a339f9c06164 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] [instance: 2fba60ba-7ef0-4e61-9f7d-147cad8ab7ed] Attempting to driver detach volume 34b70c8e-1ea7-4e6f-a6c7-bdb6b24134a9 from mountpoint /dev/sdb [ 2330.718900] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-9a68ca5f-01df-419f-920f-a339f9c06164 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] [instance: 2fba60ba-7ef0-4e61-9f7d-147cad8ab7ed] Volume detach. Driver type: vmdk {{(pid=63279) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 2330.719110] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-9a68ca5f-01df-419f-920f-a339f9c06164 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] [instance: 2fba60ba-7ef0-4e61-9f7d-147cad8ab7ed] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-427763', 'volume_id': '34b70c8e-1ea7-4e6f-a6c7-bdb6b24134a9', 'name': 'volume-34b70c8e-1ea7-4e6f-a6c7-bdb6b24134a9', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '2fba60ba-7ef0-4e61-9f7d-147cad8ab7ed', 'attached_at': '', 'detached_at': '', 'volume_id': '34b70c8e-1ea7-4e6f-a6c7-bdb6b24134a9', 'serial': '34b70c8e-1ea7-4e6f-a6c7-bdb6b24134a9'} {{(pid=63279) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 2330.720000] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3747075-08ce-4a1b-b81f-188270183cd0 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2330.744084] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2f27665-0d68-47a0-bcf7-8741ad620c8c {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2330.752053] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c17652d4-f328-4f13-901e-f0ae4a5aa4d1 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2330.774378] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0cb86cb3-7652-4903-a404-02139dcf16f3 
{{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2330.790025] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-9a68ca5f-01df-419f-920f-a339f9c06164 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] The volume has not been displaced from its original location: [datastore1] volume-34b70c8e-1ea7-4e6f-a6c7-bdb6b24134a9/volume-34b70c8e-1ea7-4e6f-a6c7-bdb6b24134a9.vmdk. No consolidation needed. {{(pid=63279) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 2330.795109] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-9a68ca5f-01df-419f-920f-a339f9c06164 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] [instance: 2fba60ba-7ef0-4e61-9f7d-147cad8ab7ed] Reconfiguring VM instance instance-00000053 to detach disk 2001 {{(pid=63279) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 2330.795482] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8ff06541-ec74-4c31-91aa-3977e7791b45 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2330.828380] env[63279]: DEBUG oslo_vmware.api [None req-9a68ca5f-01df-419f-920f-a339f9c06164 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Waiting for the task: (returnval){ [ 2330.828380] env[63279]: value = "task-2088066" [ 2330.828380] env[63279]: _type = "Task" [ 2330.828380] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2330.828380] env[63279]: DEBUG oslo_vmware.api [None req-9a68ca5f-01df-419f-920f-a339f9c06164 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Task: {'id': task-2088066, 'name': ReconfigVM_Task} progress is 10%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2330.869029] env[63279]: DEBUG oslo_vmware.api [None req-8f3e2186-2a44-4b2b-b8bb-3a857a77b580 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]5234531b-abd2-52a4-82b6-ebb177d8a715, 'name': SearchDatastore_Task, 'duration_secs': 0.011884} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2330.869357] env[63279]: DEBUG oslo_concurrency.lockutils [None req-8f3e2186-2a44-4b2b-b8bb-3a857a77b580 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2330.869638] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-8f3e2186-2a44-4b2b-b8bb-3a857a77b580 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] b7123d9a-bcaa-43c4-ac9f-982b3b146eb0/b7123d9a-bcaa-43c4-ac9f-982b3b146eb0.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2330.869914] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-13c1bdd9-a432-4879-bd5c-3b22b0122372 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2330.874493] env[63279]: DEBUG oslo_concurrency.lockutils [None req-9d2191dd-fe1c-48cc-9f2b-ebd588c1ee49 tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Acquiring lock "refresh_cache-19e10ee4-99d1-44b9-9354-4c162d541a1f" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2330.874664] env[63279]: DEBUG oslo_concurrency.lockutils [None req-9d2191dd-fe1c-48cc-9f2b-ebd588c1ee49 tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Acquired lock "refresh_cache-19e10ee4-99d1-44b9-9354-4c162d541a1f" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2330.874816] env[63279]: DEBUG nova.network.neutron [None req-9d2191dd-fe1c-48cc-9f2b-ebd588c1ee49 tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] [instance: 19e10ee4-99d1-44b9-9354-4c162d541a1f] Building network info cache for instance {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2330.874993] env[63279]: DEBUG nova.objects.instance [None req-9d2191dd-fe1c-48cc-9f2b-ebd588c1ee49 tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Lazy-loading 'info_cache' on Instance uuid 19e10ee4-99d1-44b9-9354-4c162d541a1f {{(pid=63279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2330.880605] env[63279]: DEBUG oslo_vmware.api [None req-8f3e2186-2a44-4b2b-b8bb-3a857a77b580 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Waiting for the task: (returnval){ [ 2330.880605] env[63279]: value = "task-2088067" [ 2330.880605] env[63279]: _type = "Task" [ 2330.880605] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2330.889456] env[63279]: DEBUG oslo_vmware.api [None req-8f3e2186-2a44-4b2b-b8bb-3a857a77b580 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Task: {'id': task-2088067, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2330.960325] env[63279]: DEBUG oslo_concurrency.lockutils [None req-12baa868-1215-4292-913a-54b1657dbe5e tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Acquiring lock "refresh_cache-686a7ce2-2d07-411e-91d6-0471c55c3728" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2330.960581] env[63279]: DEBUG oslo_concurrency.lockutils [None req-12baa868-1215-4292-913a-54b1657dbe5e tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Acquired lock "refresh_cache-686a7ce2-2d07-411e-91d6-0471c55c3728" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2330.960894] env[63279]: DEBUG nova.network.neutron [None req-12baa868-1215-4292-913a-54b1657dbe5e tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 686a7ce2-2d07-411e-91d6-0471c55c3728] Building network info cache for instance {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2330.962715] env[63279]: DEBUG oslo_concurrency.lockutils [None req-345ecece-5783-4496-827f-9af605e0dfd9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Lock "efddec10-b496-446e-a05a-72c9f2d86ed9" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 28.243s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2331.027802] env[63279]: DEBUG oslo_concurrency.lockutils [None req-0f932193-d209-4b05-9fd3-35d4edffa040 tempest-ServersAaction247Test-961247548 tempest-ServersAaction247Test-961247548-project-member] Acquiring lock "1332faad-20be-4a81-b57e-171a49d5c427" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2331.027802] env[63279]: DEBUG oslo_concurrency.lockutils [None req-0f932193-d209-4b05-9fd3-35d4edffa040 tempest-ServersAaction247Test-961247548 tempest-ServersAaction247Test-961247548-project-member] Lock "1332faad-20be-4a81-b57e-171a49d5c427" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2331.049070] env[63279]: INFO nova.compute.manager [None req-021f3c12-e37e-456c-8801-ea10fa46af88 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] [instance: 6f839780-be92-4d99-a96d-1fc14c819599] Took 24.62 seconds to build instance. [ 2331.324083] env[63279]: DEBUG oslo_vmware.api [None req-9a68ca5f-01df-419f-920f-a339f9c06164 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Task: {'id': task-2088066, 'name': ReconfigVM_Task, 'duration_secs': 0.219472} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2331.324481] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-9a68ca5f-01df-419f-920f-a339f9c06164 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] [instance: 2fba60ba-7ef0-4e61-9f7d-147cad8ab7ed] Reconfigured VM instance instance-00000053 to detach disk 2001 {{(pid=63279) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 2331.329626] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8d4e8457-a389-4a1b-b27f-a036d808a7ad {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2331.348372] env[63279]: DEBUG oslo_vmware.api [None req-9a68ca5f-01df-419f-920f-a339f9c06164 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Waiting for the task: (returnval){ [ 2331.348372] env[63279]: value = "task-2088068" [ 2331.348372] env[63279]: _type = "Task" [ 2331.348372] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2331.356956] env[63279]: DEBUG oslo_vmware.api [None req-9a68ca5f-01df-419f-920f-a339f9c06164 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Task: {'id': task-2088068, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2331.378917] env[63279]: DEBUG nova.objects.base [None req-9d2191dd-fe1c-48cc-9f2b-ebd588c1ee49 tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Object Instance<19e10ee4-99d1-44b9-9354-4c162d541a1f> lazy-loaded attributes: flavor,info_cache {{(pid=63279) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 2331.397863] env[63279]: DEBUG oslo_vmware.api [None req-8f3e2186-2a44-4b2b-b8bb-3a857a77b580 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Task: {'id': task-2088067, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2331.529899] env[63279]: DEBUG nova.compute.manager [None req-0f932193-d209-4b05-9fd3-35d4edffa040 tempest-ServersAaction247Test-961247548 tempest-ServersAaction247Test-961247548-project-member] [instance: 1332faad-20be-4a81-b57e-171a49d5c427] Starting instance... 
{{(pid=63279) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 2331.551183] env[63279]: DEBUG oslo_concurrency.lockutils [None req-021f3c12-e37e-456c-8801-ea10fa46af88 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Lock "6f839780-be92-4d99-a96d-1fc14c819599" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 26.133s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2331.737908] env[63279]: DEBUG nova.network.neutron [None req-12baa868-1215-4292-913a-54b1657dbe5e tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 686a7ce2-2d07-411e-91d6-0471c55c3728] Updating instance_info_cache with network_info: [{"id": "55a22eb2-68fd-4cc1-8372-6fed483f16d0", "address": "fa:16:3e:6d:af:2f", "network": {"id": "1237aa69-f92f-4996-893d-4007ba590d1d", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-544783943-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.237", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "996f8d6e14a14ac39f207eced547ef33", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d88b750a-0e7d-4f16-8bd5-8e6d5743b720", "external-id": "nsx-vlan-transportzone-715", "segmentation_id": 715, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap55a22eb2-68", "ovs_interfaceid": "55a22eb2-68fd-4cc1-8372-6fed483f16d0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2331.820840] env[63279]: DEBUG nova.compute.manager [req-ec07050e-7282-4526-bdd7-bfa5470db8e0 req-f223724d-f3d0-4bbf-8936-38827c65b992 service nova] [instance: efddec10-b496-446e-a05a-72c9f2d86ed9] Received event network-changed-cfd0d031-3ccd-4ad0-9c99-6436a05c108d {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2331.821092] env[63279]: DEBUG nova.compute.manager [req-ec07050e-7282-4526-bdd7-bfa5470db8e0 req-f223724d-f3d0-4bbf-8936-38827c65b992 service nova] [instance: efddec10-b496-446e-a05a-72c9f2d86ed9] Refreshing instance network info cache due to event network-changed-cfd0d031-3ccd-4ad0-9c99-6436a05c108d. 
{{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11655}} [ 2331.821270] env[63279]: DEBUG oslo_concurrency.lockutils [req-ec07050e-7282-4526-bdd7-bfa5470db8e0 req-f223724d-f3d0-4bbf-8936-38827c65b992 service nova] Acquiring lock "refresh_cache-efddec10-b496-446e-a05a-72c9f2d86ed9" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2331.821418] env[63279]: DEBUG oslo_concurrency.lockutils [req-ec07050e-7282-4526-bdd7-bfa5470db8e0 req-f223724d-f3d0-4bbf-8936-38827c65b992 service nova] Acquired lock "refresh_cache-efddec10-b496-446e-a05a-72c9f2d86ed9" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2331.821626] env[63279]: DEBUG nova.network.neutron [req-ec07050e-7282-4526-bdd7-bfa5470db8e0 req-f223724d-f3d0-4bbf-8936-38827c65b992 service nova] [instance: efddec10-b496-446e-a05a-72c9f2d86ed9] Refreshing network info cache for port cfd0d031-3ccd-4ad0-9c99-6436a05c108d {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2331.856984] env[63279]: DEBUG oslo_vmware.api [None req-9a68ca5f-01df-419f-920f-a339f9c06164 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Task: {'id': task-2088068, 'name': ReconfigVM_Task, 'duration_secs': 0.155334} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2331.857346] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-9a68ca5f-01df-419f-920f-a339f9c06164 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] [instance: 2fba60ba-7ef0-4e61-9f7d-147cad8ab7ed] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-427763', 'volume_id': '34b70c8e-1ea7-4e6f-a6c7-bdb6b24134a9', 'name': 'volume-34b70c8e-1ea7-4e6f-a6c7-bdb6b24134a9', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '2fba60ba-7ef0-4e61-9f7d-147cad8ab7ed', 'attached_at': '', 'detached_at': '', 'volume_id': '34b70c8e-1ea7-4e6f-a6c7-bdb6b24134a9', 'serial': '34b70c8e-1ea7-4e6f-a6c7-bdb6b24134a9'} {{(pid=63279) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 2331.893350] env[63279]: DEBUG oslo_vmware.api [None req-8f3e2186-2a44-4b2b-b8bb-3a857a77b580 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Task: {'id': task-2088067, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.776735} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2331.893655] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-8f3e2186-2a44-4b2b-b8bb-3a857a77b580 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] b7123d9a-bcaa-43c4-ac9f-982b3b146eb0/b7123d9a-bcaa-43c4-ac9f-982b3b146eb0.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2331.893906] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-8f3e2186-2a44-4b2b-b8bb-3a857a77b580 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] [instance: b7123d9a-bcaa-43c4-ac9f-982b3b146eb0] Extending root virtual disk to 1048576 {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2331.894242] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a876d335-6d37-49a7-8fdf-ebbe809c2301 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2331.901644] env[63279]: DEBUG oslo_vmware.api [None req-8f3e2186-2a44-4b2b-b8bb-3a857a77b580 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Waiting for the task: (returnval){ [ 2331.901644] env[63279]: value = "task-2088069" [ 2331.901644] env[63279]: _type = "Task" [ 2331.901644] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2331.909528] env[63279]: DEBUG oslo_vmware.api [None req-8f3e2186-2a44-4b2b-b8bb-3a857a77b580 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Task: {'id': task-2088069, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2332.053851] env[63279]: DEBUG oslo_concurrency.lockutils [None req-0f932193-d209-4b05-9fd3-35d4edffa040 tempest-ServersAaction247Test-961247548 tempest-ServersAaction247Test-961247548-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2332.054250] env[63279]: DEBUG oslo_concurrency.lockutils [None req-0f932193-d209-4b05-9fd3-35d4edffa040 tempest-ServersAaction247Test-961247548 tempest-ServersAaction247Test-961247548-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2332.055670] env[63279]: INFO nova.compute.claims [None req-0f932193-d209-4b05-9fd3-35d4edffa040 tempest-ServersAaction247Test-961247548 tempest-ServersAaction247Test-961247548-project-member] [instance: 1332faad-20be-4a81-b57e-171a49d5c427] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2332.181019] env[63279]: DEBUG nova.network.neutron [None req-9d2191dd-fe1c-48cc-9f2b-ebd588c1ee49 tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] [instance: 19e10ee4-99d1-44b9-9354-4c162d541a1f] Updating instance_info_cache with network_info: [{"id": "15cdfe62-d983-4e01-beb9-1947d51443e0", "address": "fa:16:3e:a4:65:02", "network": {"id": "f7000655-b20b-461d-9d08-f4cb8a85522e", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-686033866-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.230", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7674483744fe490b8cbe75532dfad77c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cbe1725d-6711-4e92-9a4e-d4802651e7d0", "external-id": "nsx-vlan-transportzone-679", "segmentation_id": 679, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap15cdfe62-d9", "ovs_interfaceid": "15cdfe62-d983-4e01-beb9-1947d51443e0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2332.241231] env[63279]: DEBUG oslo_concurrency.lockutils [None req-12baa868-1215-4292-913a-54b1657dbe5e tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Releasing lock "refresh_cache-686a7ce2-2d07-411e-91d6-0471c55c3728" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2332.418089] env[63279]: DEBUG oslo_vmware.api [None req-8f3e2186-2a44-4b2b-b8bb-3a857a77b580 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Task: {'id': task-2088069, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2332.422179] env[63279]: DEBUG nova.objects.instance [None req-9a68ca5f-01df-419f-920f-a339f9c06164 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Lazy-loading 'flavor' on Instance uuid 2fba60ba-7ef0-4e61-9f7d-147cad8ab7ed {{(pid=63279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2332.680551] env[63279]: DEBUG oslo_concurrency.lockutils [None req-9d2191dd-fe1c-48cc-9f2b-ebd588c1ee49 tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Releasing lock "refresh_cache-19e10ee4-99d1-44b9-9354-4c162d541a1f" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2332.770504] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29edcfa0-0751-4610-9d5b-49d646b09b6c {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2332.792250] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1acc9148-d588-439d-aa05-013f5cef5da9 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2332.799747] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-12baa868-1215-4292-913a-54b1657dbe5e tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 686a7ce2-2d07-411e-91d6-0471c55c3728] Updating instance '686a7ce2-2d07-411e-91d6-0471c55c3728' progress to 83 {{(pid=63279) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2332.912568] env[63279]: DEBUG oslo_vmware.api [None req-8f3e2186-2a44-4b2b-b8bb-3a857a77b580 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Task: {'id': task-2088069, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.840714} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2332.912883] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-8f3e2186-2a44-4b2b-b8bb-3a857a77b580 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] [instance: b7123d9a-bcaa-43c4-ac9f-982b3b146eb0] Extended root virtual disk {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2332.913903] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c0fd984-54c1-4928-b4c7-e3fa08cb6509 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2332.941424] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-8f3e2186-2a44-4b2b-b8bb-3a857a77b580 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] [instance: b7123d9a-bcaa-43c4-ac9f-982b3b146eb0] Reconfiguring VM instance instance-0000006a to attach disk [datastore1] b7123d9a-bcaa-43c4-ac9f-982b3b146eb0/b7123d9a-bcaa-43c4-ac9f-982b3b146eb0.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2332.942540] env[63279]: DEBUG nova.network.neutron [req-ec07050e-7282-4526-bdd7-bfa5470db8e0 req-f223724d-f3d0-4bbf-8936-38827c65b992 service nova] [instance: efddec10-b496-446e-a05a-72c9f2d86ed9] Updated VIF entry in instance network info cache for port cfd0d031-3ccd-4ad0-9c99-6436a05c108d. {{(pid=63279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2332.942904] env[63279]: DEBUG nova.network.neutron [req-ec07050e-7282-4526-bdd7-bfa5470db8e0 req-f223724d-f3d0-4bbf-8936-38827c65b992 service nova] [instance: efddec10-b496-446e-a05a-72c9f2d86ed9] Updating instance_info_cache with network_info: [{"id": "cfd0d031-3ccd-4ad0-9c99-6436a05c108d", "address": "fa:16:3e:ff:42:fd", "network": {"id": "4f906777-9da9-42b1-9146-359f04c7c47f", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-923457018-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.158", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "131db3d2daa24712b6e11592cf789b33", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "abe48956-848a-4e1f-b1f1-a27baa5390b9", "external-id": "nsx-vlan-transportzone-238", "segmentation_id": 238, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcfd0d031-3c", "ovs_interfaceid": "cfd0d031-3ccd-4ad0-9c99-6436a05c108d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2332.947791] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1ec13d9f-22f7-4b76-89e6-d3e1083a6b38 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2332.967171] env[63279]: DEBUG oslo_vmware.api [None 
req-8f3e2186-2a44-4b2b-b8bb-3a857a77b580 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Waiting for the task: (returnval){ [ 2332.967171] env[63279]: value = "task-2088070" [ 2332.967171] env[63279]: _type = "Task" [ 2332.967171] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2332.975676] env[63279]: DEBUG oslo_vmware.api [None req-8f3e2186-2a44-4b2b-b8bb-3a857a77b580 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Task: {'id': task-2088070, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2333.310810] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-12baa868-1215-4292-913a-54b1657dbe5e tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 686a7ce2-2d07-411e-91d6-0471c55c3728] Updating instance '686a7ce2-2d07-411e-91d6-0471c55c3728' progress to 100 {{(pid=63279) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2333.319267] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70f52c61-f405-4e54-85ba-79ccfc526666 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2333.327849] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1be8c559-a106-461d-9ac6-e8993d2713be {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2333.371082] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c54cfb55-edee-425e-b916-2ae60978cd47 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2333.381742] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7327956b-ddb9-44e0-80d4-807a5360ed0a {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2333.399554] env[63279]: DEBUG nova.compute.provider_tree [None req-0f932193-d209-4b05-9fd3-35d4edffa040 tempest-ServersAaction247Test-961247548 tempest-ServersAaction247Test-961247548-project-member] Updating inventory in ProviderTree for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2333.462126] env[63279]: DEBUG oslo_concurrency.lockutils [req-ec07050e-7282-4526-bdd7-bfa5470db8e0 req-f223724d-f3d0-4bbf-8936-38827c65b992 service nova] Releasing lock "refresh_cache-efddec10-b496-446e-a05a-72c9f2d86ed9" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2333.463177] env[63279]: DEBUG oslo_concurrency.lockutils [None req-9a68ca5f-01df-419f-920f-a339f9c06164 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Lock 
"2fba60ba-7ef0-4e61-9f7d-147cad8ab7ed" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.289s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2333.480774] env[63279]: DEBUG oslo_vmware.api [None req-8f3e2186-2a44-4b2b-b8bb-3a857a77b580 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Task: {'id': task-2088070, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2333.613896] env[63279]: DEBUG oslo_concurrency.lockutils [None req-54b6b0b4-1418-4b3d-beea-ba6d37b5d83a tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Acquiring lock "36354325-dee0-406e-8eb6-bc3cf347a403" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2333.614151] env[63279]: DEBUG oslo_concurrency.lockutils [None req-54b6b0b4-1418-4b3d-beea-ba6d37b5d83a tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Lock "36354325-dee0-406e-8eb6-bc3cf347a403" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2333.689021] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-9d2191dd-fe1c-48cc-9f2b-ebd588c1ee49 tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] [instance: 19e10ee4-99d1-44b9-9354-4c162d541a1f] Powering on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2333.689355] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8b0cf07f-7c13-42ff-adfe-cb4a29e458f5 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2333.696522] env[63279]: DEBUG oslo_vmware.api [None req-9d2191dd-fe1c-48cc-9f2b-ebd588c1ee49 tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Waiting for the task: (returnval){ [ 2333.696522] env[63279]: value = "task-2088071" [ 2333.696522] env[63279]: _type = "Task" [ 2333.696522] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2333.708154] env[63279]: DEBUG oslo_vmware.api [None req-9d2191dd-fe1c-48cc-9f2b-ebd588c1ee49 tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Task: {'id': task-2088071, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2333.750665] env[63279]: DEBUG oslo_concurrency.lockutils [None req-4699cda3-9d4a-4758-a036-5b3787fbfef0 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Acquiring lock "2fba60ba-7ef0-4e61-9f7d-147cad8ab7ed" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2333.750941] env[63279]: DEBUG oslo_concurrency.lockutils [None req-4699cda3-9d4a-4758-a036-5b3787fbfef0 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Lock "2fba60ba-7ef0-4e61-9f7d-147cad8ab7ed" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2333.751181] env[63279]: DEBUG oslo_concurrency.lockutils [None req-4699cda3-9d4a-4758-a036-5b3787fbfef0 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Acquiring lock "2fba60ba-7ef0-4e61-9f7d-147cad8ab7ed-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2333.751376] env[63279]: DEBUG oslo_concurrency.lockutils [None req-4699cda3-9d4a-4758-a036-5b3787fbfef0 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Lock "2fba60ba-7ef0-4e61-9f7d-147cad8ab7ed-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2333.751567] env[63279]: DEBUG oslo_concurrency.lockutils [None req-4699cda3-9d4a-4758-a036-5b3787fbfef0 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Lock "2fba60ba-7ef0-4e61-9f7d-147cad8ab7ed-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2333.753831] env[63279]: INFO nova.compute.manager [None req-4699cda3-9d4a-4758-a036-5b3787fbfef0 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] [instance: 2fba60ba-7ef0-4e61-9f7d-147cad8ab7ed] Terminating instance [ 2333.870135] env[63279]: DEBUG nova.compute.manager [req-4f684242-0ebd-484f-9441-faf91b17b132 req-0cc95677-b18c-42e5-9ac3-dffeb6acd065 service nova] [instance: 6f839780-be92-4d99-a96d-1fc14c819599] Received event network-changed-d0192def-50a0-40c0-9921-fbdf13e63ffb {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2333.870356] env[63279]: DEBUG nova.compute.manager [req-4f684242-0ebd-484f-9441-faf91b17b132 req-0cc95677-b18c-42e5-9ac3-dffeb6acd065 service nova] [instance: 6f839780-be92-4d99-a96d-1fc14c819599] Refreshing instance network info cache due to event network-changed-d0192def-50a0-40c0-9921-fbdf13e63ffb. 
{{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11655}} [ 2333.870607] env[63279]: DEBUG oslo_concurrency.lockutils [req-4f684242-0ebd-484f-9441-faf91b17b132 req-0cc95677-b18c-42e5-9ac3-dffeb6acd065 service nova] Acquiring lock "refresh_cache-6f839780-be92-4d99-a96d-1fc14c819599" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2333.870760] env[63279]: DEBUG oslo_concurrency.lockutils [req-4f684242-0ebd-484f-9441-faf91b17b132 req-0cc95677-b18c-42e5-9ac3-dffeb6acd065 service nova] Acquired lock "refresh_cache-6f839780-be92-4d99-a96d-1fc14c819599" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2333.870919] env[63279]: DEBUG nova.network.neutron [req-4f684242-0ebd-484f-9441-faf91b17b132 req-0cc95677-b18c-42e5-9ac3-dffeb6acd065 service nova] [instance: 6f839780-be92-4d99-a96d-1fc14c819599] Refreshing network info cache for port d0192def-50a0-40c0-9921-fbdf13e63ffb {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2333.925547] env[63279]: ERROR nova.scheduler.client.report [None req-0f932193-d209-4b05-9fd3-35d4edffa040 tempest-ServersAaction247Test-961247548 tempest-ServersAaction247Test-961247548-project-member] [req-e8cbe2fd-bdcd-422c-aea3-682eeff83366] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 0ba7c625-a0fc-4d3c-b804-196d00f00137. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-e8cbe2fd-bdcd-422c-aea3-682eeff83366"}]} [ 2333.952638] env[63279]: DEBUG nova.scheduler.client.report [None req-0f932193-d209-4b05-9fd3-35d4edffa040 tempest-ServersAaction247Test-961247548 tempest-ServersAaction247Test-961247548-project-member] Refreshing inventories for resource provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 2333.967883] env[63279]: DEBUG nova.scheduler.client.report [None req-0f932193-d209-4b05-9fd3-35d4edffa040 tempest-ServersAaction247Test-961247548 tempest-ServersAaction247Test-961247548-project-member] Updating ProviderTree inventory for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 2333.967883] env[63279]: DEBUG nova.compute.provider_tree [None req-0f932193-d209-4b05-9fd3-35d4edffa040 tempest-ServersAaction247Test-961247548 tempest-ServersAaction247Test-961247548-project-member] Updating inventory in ProviderTree for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2333.978215] env[63279]: DEBUG oslo_vmware.api [None req-8f3e2186-2a44-4b2b-b8bb-3a857a77b580 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Task: {'id': task-2088070, 'name': ReconfigVM_Task, 'duration_secs': 0.667544} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2333.979109] env[63279]: DEBUG nova.scheduler.client.report [None req-0f932193-d209-4b05-9fd3-35d4edffa040 tempest-ServersAaction247Test-961247548 tempest-ServersAaction247Test-961247548-project-member] Refreshing aggregate associations for resource provider 0ba7c625-a0fc-4d3c-b804-196d00f00137, aggregates: None {{(pid=63279) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 2333.981083] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-8f3e2186-2a44-4b2b-b8bb-3a857a77b580 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] [instance: b7123d9a-bcaa-43c4-ac9f-982b3b146eb0] Reconfigured VM instance instance-0000006a to attach disk [datastore1] b7123d9a-bcaa-43c4-ac9f-982b3b146eb0/b7123d9a-bcaa-43c4-ac9f-982b3b146eb0.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2333.981953] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-cf7dffde-c547-4c1d-889e-6115b7d2e174 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2333.988836] env[63279]: DEBUG oslo_vmware.api [None req-8f3e2186-2a44-4b2b-b8bb-3a857a77b580 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Waiting for the task: (returnval){ [ 2333.988836] env[63279]: value = "task-2088072" [ 2333.988836] env[63279]: _type = "Task" [ 2333.988836] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2333.997105] env[63279]: DEBUG oslo_vmware.api [None req-8f3e2186-2a44-4b2b-b8bb-3a857a77b580 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Task: {'id': task-2088072, 'name': Rename_Task} progress is 5%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2333.997971] env[63279]: DEBUG nova.scheduler.client.report [None req-0f932193-d209-4b05-9fd3-35d4edffa040 tempest-ServersAaction247Test-961247548 tempest-ServersAaction247Test-961247548-project-member] Refreshing trait associations for resource provider 0ba7c625-a0fc-4d3c-b804-196d00f00137, traits: COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK {{(pid=63279) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 2334.117220] env[63279]: DEBUG nova.compute.manager [None req-54b6b0b4-1418-4b3d-beea-ba6d37b5d83a tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] [instance: 36354325-dee0-406e-8eb6-bc3cf347a403] Starting instance... {{(pid=63279) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 2334.210141] env[63279]: DEBUG oslo_vmware.api [None req-9d2191dd-fe1c-48cc-9f2b-ebd588c1ee49 tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Task: {'id': task-2088071, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2334.227914] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1cda5b6a-3bd2-424c-9b76-e7201b39c844 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2334.235419] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2b85821-f6ba-4957-83f3-92f16b577962 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2334.266401] env[63279]: DEBUG nova.compute.manager [None req-4699cda3-9d4a-4758-a036-5b3787fbfef0 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] [instance: 2fba60ba-7ef0-4e61-9f7d-147cad8ab7ed] Start destroying the instance on the hypervisor. {{(pid=63279) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2334.266578] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-4699cda3-9d4a-4758-a036-5b3787fbfef0 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] [instance: 2fba60ba-7ef0-4e61-9f7d-147cad8ab7ed] Destroying instance {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2334.267490] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b35edbae-ee1a-43bb-91be-62497986138d {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2334.270424] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33121365-2d7a-44ba-9b59-14d7a8c838f2 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2334.279252] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d52ae6f0-7f20-4f56-a33c-7b57f4fc82e3 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2334.282810] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-4699cda3-9d4a-4758-a036-5b3787fbfef0 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] [instance: 2fba60ba-7ef0-4e61-9f7d-147cad8ab7ed] Powering off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2334.283037] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-87f3965e-22dd-418f-9579-080ffd5f56b7 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2334.294486] env[63279]: DEBUG nova.compute.provider_tree [None req-0f932193-d209-4b05-9fd3-35d4edffa040 tempest-ServersAaction247Test-961247548 tempest-ServersAaction247Test-961247548-project-member] Updating inventory in ProviderTree for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) update_inventory 
/opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2334.296976] env[63279]: DEBUG oslo_vmware.api [None req-4699cda3-9d4a-4758-a036-5b3787fbfef0 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Waiting for the task: (returnval){ [ 2334.296976] env[63279]: value = "task-2088073" [ 2334.296976] env[63279]: _type = "Task" [ 2334.296976] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2334.305231] env[63279]: DEBUG oslo_vmware.api [None req-4699cda3-9d4a-4758-a036-5b3787fbfef0 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Task: {'id': task-2088073, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2334.500044] env[63279]: DEBUG oslo_vmware.api [None req-8f3e2186-2a44-4b2b-b8bb-3a857a77b580 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Task: {'id': task-2088072, 'name': Rename_Task, 'duration_secs': 0.145175} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2334.500044] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-8f3e2186-2a44-4b2b-b8bb-3a857a77b580 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] [instance: b7123d9a-bcaa-43c4-ac9f-982b3b146eb0] Powering on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2334.500044] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e35c93ec-87e1-4877-af91-da5ea5dfea0e {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2334.505722] env[63279]: DEBUG oslo_vmware.api [None req-8f3e2186-2a44-4b2b-b8bb-3a857a77b580 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Waiting for the task: (returnval){ [ 2334.505722] env[63279]: value = "task-2088074" [ 2334.505722] env[63279]: _type = "Task" [ 2334.505722] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2334.514748] env[63279]: DEBUG oslo_vmware.api [None req-8f3e2186-2a44-4b2b-b8bb-3a857a77b580 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Task: {'id': task-2088074, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2334.653056] env[63279]: DEBUG oslo_concurrency.lockutils [None req-54b6b0b4-1418-4b3d-beea-ba6d37b5d83a tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2334.715203] env[63279]: DEBUG oslo_vmware.api [None req-9d2191dd-fe1c-48cc-9f2b-ebd588c1ee49 tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Task: {'id': task-2088071, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2334.814531] env[63279]: DEBUG oslo_vmware.api [None req-4699cda3-9d4a-4758-a036-5b3787fbfef0 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Task: {'id': task-2088073, 'name': PowerOffVM_Task, 'duration_secs': 0.175885} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2334.815561] env[63279]: DEBUG nova.network.neutron [req-4f684242-0ebd-484f-9441-faf91b17b132 req-0cc95677-b18c-42e5-9ac3-dffeb6acd065 service nova] [instance: 6f839780-be92-4d99-a96d-1fc14c819599] Updated VIF entry in instance network info cache for port d0192def-50a0-40c0-9921-fbdf13e63ffb. {{(pid=63279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2334.815902] env[63279]: DEBUG nova.network.neutron [req-4f684242-0ebd-484f-9441-faf91b17b132 req-0cc95677-b18c-42e5-9ac3-dffeb6acd065 service nova] [instance: 6f839780-be92-4d99-a96d-1fc14c819599] Updating instance_info_cache with network_info: [{"id": "d0192def-50a0-40c0-9921-fbdf13e63ffb", "address": "fa:16:3e:0c:f0:a6", "network": {"id": "e0e614b7-de4b-485e-8824-582faae8febd", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1102025073-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.183", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b5b21bc5072e4945a19a782dd9561709", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "41278529-8bd2-44a1-97c8-03967faa3ff7", "external-id": "nsx-vlan-transportzone-749", "segmentation_id": 749, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd0192def-50", "ovs_interfaceid": "d0192def-50a0-40c0-9921-fbdf13e63ffb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2334.817148] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-4699cda3-9d4a-4758-a036-5b3787fbfef0 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] [instance: 2fba60ba-7ef0-4e61-9f7d-147cad8ab7ed] Powered off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2334.817333] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-4699cda3-9d4a-4758-a036-5b3787fbfef0 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] [instance: 2fba60ba-7ef0-4e61-9f7d-147cad8ab7ed] Unregistering the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2334.817596] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-296611b8-4ef4-40b4-a6ba-e8a442100fbf {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2334.849766] env[63279]: DEBUG 
nova.scheduler.client.report [None req-0f932193-d209-4b05-9fd3-35d4edffa040 tempest-ServersAaction247Test-961247548 tempest-ServersAaction247Test-961247548-project-member] Updated inventory for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 with generation 154 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 2334.850058] env[63279]: DEBUG nova.compute.provider_tree [None req-0f932193-d209-4b05-9fd3-35d4edffa040 tempest-ServersAaction247Test-961247548 tempest-ServersAaction247Test-961247548-project-member] Updating resource provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 generation from 154 to 155 during operation: update_inventory {{(pid=63279) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 2334.850248] env[63279]: DEBUG nova.compute.provider_tree [None req-0f932193-d209-4b05-9fd3-35d4edffa040 tempest-ServersAaction247Test-961247548 tempest-ServersAaction247Test-961247548-project-member] Updating inventory in ProviderTree for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2335.016493] env[63279]: DEBUG oslo_vmware.api [None req-8f3e2186-2a44-4b2b-b8bb-3a857a77b580 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Task: {'id': task-2088074, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2335.037202] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-4699cda3-9d4a-4758-a036-5b3787fbfef0 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] [instance: 2fba60ba-7ef0-4e61-9f7d-147cad8ab7ed] Unregistered the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2335.037590] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-4699cda3-9d4a-4758-a036-5b3787fbfef0 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] [instance: 2fba60ba-7ef0-4e61-9f7d-147cad8ab7ed] Deleting contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2335.037899] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-4699cda3-9d4a-4758-a036-5b3787fbfef0 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Deleting the datastore file [datastore1] 2fba60ba-7ef0-4e61-9f7d-147cad8ab7ed {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2335.038316] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f7501f10-1352-46a7-9491-2f70c0da78eb {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2335.046587] env[63279]: DEBUG oslo_vmware.api [None req-4699cda3-9d4a-4758-a036-5b3787fbfef0 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Waiting for the task: (returnval){ [ 2335.046587] env[63279]: value = "task-2088076" [ 2335.046587] env[63279]: _type = "Task" [ 2335.046587] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2335.055914] env[63279]: DEBUG oslo_vmware.api [None req-4699cda3-9d4a-4758-a036-5b3787fbfef0 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Task: {'id': task-2088076, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2335.212171] env[63279]: DEBUG oslo_vmware.api [None req-9d2191dd-fe1c-48cc-9f2b-ebd588c1ee49 tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Task: {'id': task-2088071, 'name': PowerOnVM_Task, 'duration_secs': 1.231149} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2335.212582] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-9d2191dd-fe1c-48cc-9f2b-ebd588c1ee49 tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] [instance: 19e10ee4-99d1-44b9-9354-4c162d541a1f] Powered on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2335.212829] env[63279]: DEBUG nova.compute.manager [None req-9d2191dd-fe1c-48cc-9f2b-ebd588c1ee49 tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] [instance: 19e10ee4-99d1-44b9-9354-4c162d541a1f] Checking state {{(pid=63279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2335.217246] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c453cc23-531f-4a77-a73c-3bb8c333bef3 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2335.321160] env[63279]: DEBUG oslo_concurrency.lockutils [req-4f684242-0ebd-484f-9441-faf91b17b132 req-0cc95677-b18c-42e5-9ac3-dffeb6acd065 service nova] Releasing lock "refresh_cache-6f839780-be92-4d99-a96d-1fc14c819599" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2335.359289] env[63279]: DEBUG oslo_concurrency.lockutils [None req-0f932193-d209-4b05-9fd3-35d4edffa040 tempest-ServersAaction247Test-961247548 tempest-ServersAaction247Test-961247548-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.305s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2335.359917] env[63279]: DEBUG nova.compute.manager [None req-0f932193-d209-4b05-9fd3-35d4edffa040 tempest-ServersAaction247Test-961247548 tempest-ServersAaction247Test-961247548-project-member] [instance: 1332faad-20be-4a81-b57e-171a49d5c427] Start building networks asynchronously for instance. {{(pid=63279) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 2335.364240] env[63279]: DEBUG oslo_concurrency.lockutils [None req-54b6b0b4-1418-4b3d-beea-ba6d37b5d83a tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.712s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2335.366054] env[63279]: INFO nova.compute.claims [None req-54b6b0b4-1418-4b3d-beea-ba6d37b5d83a tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] [instance: 36354325-dee0-406e-8eb6-bc3cf347a403] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2335.516946] env[63279]: DEBUG oslo_vmware.api [None req-8f3e2186-2a44-4b2b-b8bb-3a857a77b580 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Task: {'id': task-2088074, 'name': PowerOnVM_Task, 'duration_secs': 0.614813} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2335.516946] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-8f3e2186-2a44-4b2b-b8bb-3a857a77b580 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] [instance: b7123d9a-bcaa-43c4-ac9f-982b3b146eb0] Powered on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2335.516946] env[63279]: INFO nova.compute.manager [None req-8f3e2186-2a44-4b2b-b8bb-3a857a77b580 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] [instance: b7123d9a-bcaa-43c4-ac9f-982b3b146eb0] Took 10.03 seconds to spawn the instance on the hypervisor. [ 2335.517232] env[63279]: DEBUG nova.compute.manager [None req-8f3e2186-2a44-4b2b-b8bb-3a857a77b580 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] [instance: b7123d9a-bcaa-43c4-ac9f-982b3b146eb0] Checking state {{(pid=63279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2335.517973] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95269e62-94ed-4c34-8fe4-85770056df1d {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2335.562184] env[63279]: DEBUG oslo_vmware.api [None req-4699cda3-9d4a-4758-a036-5b3787fbfef0 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Task: {'id': task-2088076, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.341844} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2335.562524] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-4699cda3-9d4a-4758-a036-5b3787fbfef0 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Deleted the datastore file {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2335.562766] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-4699cda3-9d4a-4758-a036-5b3787fbfef0 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] [instance: 2fba60ba-7ef0-4e61-9f7d-147cad8ab7ed] Deleted contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2335.563032] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-4699cda3-9d4a-4758-a036-5b3787fbfef0 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] [instance: 2fba60ba-7ef0-4e61-9f7d-147cad8ab7ed] Instance destroyed {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2335.563268] env[63279]: INFO nova.compute.manager [None req-4699cda3-9d4a-4758-a036-5b3787fbfef0 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] [instance: 2fba60ba-7ef0-4e61-9f7d-147cad8ab7ed] Took 1.30 seconds to destroy the instance on the hypervisor. [ 2335.563538] env[63279]: DEBUG oslo.service.loopingcall [None req-4699cda3-9d4a-4758-a036-5b3787fbfef0 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2335.563756] env[63279]: DEBUG nova.compute.manager [-] [instance: 2fba60ba-7ef0-4e61-9f7d-147cad8ab7ed] Deallocating network for instance {{(pid=63279) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2335.563880] env[63279]: DEBUG nova.network.neutron [-] [instance: 2fba60ba-7ef0-4e61-9f7d-147cad8ab7ed] deallocate_for_instance() {{(pid=63279) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2335.577795] env[63279]: DEBUG oslo_concurrency.lockutils [None req-8fb58680-e1a0-40f5-a223-7004b1de5c5a tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Acquiring lock "686a7ce2-2d07-411e-91d6-0471c55c3728" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2335.578130] env[63279]: DEBUG oslo_concurrency.lockutils [None req-8fb58680-e1a0-40f5-a223-7004b1de5c5a tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Lock "686a7ce2-2d07-411e-91d6-0471c55c3728" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2335.578331] env[63279]: DEBUG nova.compute.manager [None req-8fb58680-e1a0-40f5-a223-7004b1de5c5a tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 686a7ce2-2d07-411e-91d6-0471c55c3728] Going to confirm migration 6 {{(pid=63279) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:5250}} [ 2335.873308] env[63279]: DEBUG nova.compute.utils [None req-0f932193-d209-4b05-9fd3-35d4edffa040 tempest-ServersAaction247Test-961247548 tempest-ServersAaction247Test-961247548-project-member] Using /dev/sd instead of None {{(pid=63279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2335.876460] env[63279]: DEBUG nova.compute.manager [None req-0f932193-d209-4b05-9fd3-35d4edffa040 tempest-ServersAaction247Test-961247548 tempest-ServersAaction247Test-961247548-project-member] [instance: 1332faad-20be-4a81-b57e-171a49d5c427] Not allocating networking since 'none' was specified. {{(pid=63279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 2336.046783] env[63279]: INFO nova.compute.manager [None req-8f3e2186-2a44-4b2b-b8bb-3a857a77b580 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] [instance: b7123d9a-bcaa-43c4-ac9f-982b3b146eb0] Took 16.23 seconds to build instance. 
[ 2336.077273] env[63279]: DEBUG nova.compute.manager [req-c090d9a2-4f19-416c-92d8-185fea11d2f5 req-e12a3a89-eb0b-4827-8cb8-2975a7425371 service nova] [instance: 2fba60ba-7ef0-4e61-9f7d-147cad8ab7ed] Received event network-vif-deleted-8f12bb0d-eec1-4c21-b319-372b37e319ca {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2336.077273] env[63279]: INFO nova.compute.manager [req-c090d9a2-4f19-416c-92d8-185fea11d2f5 req-e12a3a89-eb0b-4827-8cb8-2975a7425371 service nova] [instance: 2fba60ba-7ef0-4e61-9f7d-147cad8ab7ed] Neutron deleted interface 8f12bb0d-eec1-4c21-b319-372b37e319ca; detaching it from the instance and deleting it from the info cache [ 2336.077631] env[63279]: DEBUG nova.network.neutron [req-c090d9a2-4f19-416c-92d8-185fea11d2f5 req-e12a3a89-eb0b-4827-8cb8-2975a7425371 service nova] [instance: 2fba60ba-7ef0-4e61-9f7d-147cad8ab7ed] Updating instance_info_cache with network_info: [] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2336.102319] env[63279]: DEBUG oslo_concurrency.lockutils [None req-c970398a-27d0-4c60-a6af-2e10ca329454 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Acquiring lock "befc985f-68e2-4a04-8de0-9ca9bb3fa504" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2336.102319] env[63279]: DEBUG oslo_concurrency.lockutils [None req-c970398a-27d0-4c60-a6af-2e10ca329454 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Lock "befc985f-68e2-4a04-8de0-9ca9bb3fa504" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2336.150703] env[63279]: DEBUG oslo_concurrency.lockutils [None req-8fb58680-e1a0-40f5-a223-7004b1de5c5a tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Acquiring lock "refresh_cache-686a7ce2-2d07-411e-91d6-0471c55c3728" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2336.150703] env[63279]: DEBUG oslo_concurrency.lockutils [None req-8fb58680-e1a0-40f5-a223-7004b1de5c5a tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Acquired lock "refresh_cache-686a7ce2-2d07-411e-91d6-0471c55c3728" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2336.150703] env[63279]: DEBUG nova.network.neutron [None req-8fb58680-e1a0-40f5-a223-7004b1de5c5a tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 686a7ce2-2d07-411e-91d6-0471c55c3728] Building network info cache for instance {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2336.150703] env[63279]: DEBUG nova.objects.instance [None req-8fb58680-e1a0-40f5-a223-7004b1de5c5a tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Lazy-loading 'info_cache' on Instance uuid 686a7ce2-2d07-411e-91d6-0471c55c3728 {{(pid=63279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2336.375953] env[63279]: DEBUG nova.compute.manager [None 
req-0f932193-d209-4b05-9fd3-35d4edffa040 tempest-ServersAaction247Test-961247548 tempest-ServersAaction247Test-961247548-project-member] [instance: 1332faad-20be-4a81-b57e-171a49d5c427] Start building block device mappings for instance. {{(pid=63279) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 2336.549755] env[63279]: DEBUG oslo_concurrency.lockutils [None req-8f3e2186-2a44-4b2b-b8bb-3a857a77b580 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Lock "b7123d9a-bcaa-43c4-ac9f-982b3b146eb0" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 17.737s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2336.553026] env[63279]: DEBUG nova.network.neutron [-] [instance: 2fba60ba-7ef0-4e61-9f7d-147cad8ab7ed] Updating instance_info_cache with network_info: [] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2336.572122] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9034669-a814-44f4-964e-16f2e19f8523 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2336.581604] env[63279]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-1c69accd-2618-4885-b01d-77855c7e76b6 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2336.584176] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e5b27a7-7919-4bcc-841d-542d765bc13b {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2336.613947] env[63279]: DEBUG nova.compute.manager [None req-c970398a-27d0-4c60-a6af-2e10ca329454 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: befc985f-68e2-4a04-8de0-9ca9bb3fa504] Starting instance... {{(pid=63279) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 2336.618367] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-269bfdc4-ca07-421b-9cd5-baf7e59fb5e4 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2336.623723] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c78963c-314b-4fba-a5f0-da09a8c1af61 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2336.639646] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e33358c-6649-44ee-9952-199cfd341fc5 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2336.657888] env[63279]: DEBUG nova.compute.manager [req-c090d9a2-4f19-416c-92d8-185fea11d2f5 req-e12a3a89-eb0b-4827-8cb8-2975a7425371 service nova] [instance: 2fba60ba-7ef0-4e61-9f7d-147cad8ab7ed] Detach interface failed, port_id=8f12bb0d-eec1-4c21-b319-372b37e319ca, reason: Instance 2fba60ba-7ef0-4e61-9f7d-147cad8ab7ed could not be found. 
{{(pid=63279) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11484}} [ 2336.669419] env[63279]: DEBUG nova.compute.provider_tree [None req-54b6b0b4-1418-4b3d-beea-ba6d37b5d83a tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Inventory has not changed in ProviderTree for provider: 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2337.055943] env[63279]: INFO nova.compute.manager [-] [instance: 2fba60ba-7ef0-4e61-9f7d-147cad8ab7ed] Took 1.49 seconds to deallocate network for instance. [ 2337.142344] env[63279]: DEBUG oslo_concurrency.lockutils [None req-c970398a-27d0-4c60-a6af-2e10ca329454 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2337.173135] env[63279]: DEBUG nova.scheduler.client.report [None req-54b6b0b4-1418-4b3d-beea-ba6d37b5d83a tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Inventory has not changed for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2337.390847] env[63279]: DEBUG nova.compute.manager [None req-0f932193-d209-4b05-9fd3-35d4edffa040 tempest-ServersAaction247Test-961247548 tempest-ServersAaction247Test-961247548-project-member] [instance: 1332faad-20be-4a81-b57e-171a49d5c427] Start spawning the instance on the hypervisor. 
{{(pid=63279) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 2337.418489] env[63279]: DEBUG nova.network.neutron [None req-8fb58680-e1a0-40f5-a223-7004b1de5c5a tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 686a7ce2-2d07-411e-91d6-0471c55c3728] Updating instance_info_cache with network_info: [{"id": "55a22eb2-68fd-4cc1-8372-6fed483f16d0", "address": "fa:16:3e:6d:af:2f", "network": {"id": "1237aa69-f92f-4996-893d-4007ba590d1d", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-544783943-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.237", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "996f8d6e14a14ac39f207eced547ef33", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d88b750a-0e7d-4f16-8bd5-8e6d5743b720", "external-id": "nsx-vlan-transportzone-715", "segmentation_id": 715, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap55a22eb2-68", "ovs_interfaceid": "55a22eb2-68fd-4cc1-8372-6fed483f16d0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2337.420973] env[63279]: DEBUG nova.virt.hardware [None req-0f932193-d209-4b05-9fd3-35d4edffa040 tempest-ServersAaction247Test-961247548 tempest-ServersAaction247Test-961247548-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-13T17:30:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-13T17:30:01Z,direct_url=,disk_format='vmdk',id=30887889-e45b-4f67-8b3c-16216e594a90,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a935e86eee0a4c38adfe0367d2097a61',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-13T17:30:02Z,virtual_size=,visibility=), allow threads: False {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2337.421231] env[63279]: DEBUG nova.virt.hardware [None req-0f932193-d209-4b05-9fd3-35d4edffa040 tempest-ServersAaction247Test-961247548 tempest-ServersAaction247Test-961247548-project-member] Flavor limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2337.421390] env[63279]: DEBUG nova.virt.hardware [None req-0f932193-d209-4b05-9fd3-35d4edffa040 tempest-ServersAaction247Test-961247548 tempest-ServersAaction247Test-961247548-project-member] Image limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2337.421620] env[63279]: DEBUG nova.virt.hardware [None req-0f932193-d209-4b05-9fd3-35d4edffa040 
tempest-ServersAaction247Test-961247548 tempest-ServersAaction247Test-961247548-project-member] Flavor pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2337.421785] env[63279]: DEBUG nova.virt.hardware [None req-0f932193-d209-4b05-9fd3-35d4edffa040 tempest-ServersAaction247Test-961247548 tempest-ServersAaction247Test-961247548-project-member] Image pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2337.421938] env[63279]: DEBUG nova.virt.hardware [None req-0f932193-d209-4b05-9fd3-35d4edffa040 tempest-ServersAaction247Test-961247548 tempest-ServersAaction247Test-961247548-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2337.422169] env[63279]: DEBUG nova.virt.hardware [None req-0f932193-d209-4b05-9fd3-35d4edffa040 tempest-ServersAaction247Test-961247548 tempest-ServersAaction247Test-961247548-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 2337.422336] env[63279]: DEBUG nova.virt.hardware [None req-0f932193-d209-4b05-9fd3-35d4edffa040 tempest-ServersAaction247Test-961247548 tempest-ServersAaction247Test-961247548-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 2337.422506] env[63279]: DEBUG nova.virt.hardware [None req-0f932193-d209-4b05-9fd3-35d4edffa040 tempest-ServersAaction247Test-961247548 tempest-ServersAaction247Test-961247548-project-member] Got 1 possible topologies {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2337.422672] env[63279]: DEBUG nova.virt.hardware [None req-0f932193-d209-4b05-9fd3-35d4edffa040 tempest-ServersAaction247Test-961247548 tempest-ServersAaction247Test-961247548-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2337.422848] env[63279]: DEBUG nova.virt.hardware [None req-0f932193-d209-4b05-9fd3-35d4edffa040 tempest-ServersAaction247Test-961247548 tempest-ServersAaction247Test-961247548-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2337.425880] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-accb3406-aa05-4aac-8f1c-bff6a8acbfc3 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2337.435287] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6168030-0295-4982-a4a7-bd6d94e1567f {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2337.449912] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-0f932193-d209-4b05-9fd3-35d4edffa040 tempest-ServersAaction247Test-961247548 tempest-ServersAaction247Test-961247548-project-member] [instance: 1332faad-20be-4a81-b57e-171a49d5c427] Instance VIF info [] {{(pid=63279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2337.455535] env[63279]: 
DEBUG nova.virt.vmwareapi.vm_util [None req-0f932193-d209-4b05-9fd3-35d4edffa040 tempest-ServersAaction247Test-961247548 tempest-ServersAaction247Test-961247548-project-member] Creating folder: Project (3ddca42f29b14f379f0bbba5c80847da). Parent ref: group-v427491. {{(pid=63279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 2337.455873] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ece1f360-08c6-480c-a145-f0abdff5bae9 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2337.466153] env[63279]: INFO nova.virt.vmwareapi.vm_util [None req-0f932193-d209-4b05-9fd3-35d4edffa040 tempest-ServersAaction247Test-961247548 tempest-ServersAaction247Test-961247548-project-member] Created folder: Project (3ddca42f29b14f379f0bbba5c80847da) in parent group-v427491. [ 2337.466368] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-0f932193-d209-4b05-9fd3-35d4edffa040 tempest-ServersAaction247Test-961247548 tempest-ServersAaction247Test-961247548-project-member] Creating folder: Instances. Parent ref: group-v427781. {{(pid=63279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 2337.466612] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4b092a50-c237-4794-94d4-ff6236e6f31a {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2337.475416] env[63279]: INFO nova.virt.vmwareapi.vm_util [None req-0f932193-d209-4b05-9fd3-35d4edffa040 tempest-ServersAaction247Test-961247548 tempest-ServersAaction247Test-961247548-project-member] Created folder: Instances in parent group-v427781. [ 2337.475657] env[63279]: DEBUG oslo.service.loopingcall [None req-0f932193-d209-4b05-9fd3-35d4edffa040 tempest-ServersAaction247Test-961247548 tempest-ServersAaction247Test-961247548-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2337.475855] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1332faad-20be-4a81-b57e-171a49d5c427] Creating VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2337.476072] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-54227543-a371-402a-8539-5dca794890cc {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2337.492615] env[63279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2337.492615] env[63279]: value = "task-2088079" [ 2337.492615] env[63279]: _type = "Task" [ 2337.492615] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2337.499894] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2088079, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2337.544131] env[63279]: DEBUG oslo_concurrency.lockutils [None req-878fde1b-ff89-4652-9f8c-ce1fd71ff4b4 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Acquiring lock "b7123d9a-bcaa-43c4-ac9f-982b3b146eb0" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2337.544462] env[63279]: DEBUG oslo_concurrency.lockutils [None req-878fde1b-ff89-4652-9f8c-ce1fd71ff4b4 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Lock "b7123d9a-bcaa-43c4-ac9f-982b3b146eb0" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2337.544735] env[63279]: DEBUG oslo_concurrency.lockutils [None req-878fde1b-ff89-4652-9f8c-ce1fd71ff4b4 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Acquiring lock "b7123d9a-bcaa-43c4-ac9f-982b3b146eb0-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2337.544942] env[63279]: DEBUG oslo_concurrency.lockutils [None req-878fde1b-ff89-4652-9f8c-ce1fd71ff4b4 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Lock "b7123d9a-bcaa-43c4-ac9f-982b3b146eb0-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2337.545160] env[63279]: DEBUG oslo_concurrency.lockutils [None req-878fde1b-ff89-4652-9f8c-ce1fd71ff4b4 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Lock "b7123d9a-bcaa-43c4-ac9f-982b3b146eb0-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2337.547548] env[63279]: INFO nova.compute.manager [None req-878fde1b-ff89-4652-9f8c-ce1fd71ff4b4 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] [instance: b7123d9a-bcaa-43c4-ac9f-982b3b146eb0] Terminating instance [ 2337.564107] env[63279]: DEBUG oslo_concurrency.lockutils [None req-4699cda3-9d4a-4758-a036-5b3787fbfef0 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2337.680591] env[63279]: DEBUG oslo_concurrency.lockutils [None req-54b6b0b4-1418-4b3d-beea-ba6d37b5d83a tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.316s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2337.681245] env[63279]: DEBUG nova.compute.manager [None 
req-54b6b0b4-1418-4b3d-beea-ba6d37b5d83a tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] [instance: 36354325-dee0-406e-8eb6-bc3cf347a403] Start building networks asynchronously for instance. {{(pid=63279) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 2337.684212] env[63279]: DEBUG oslo_concurrency.lockutils [None req-c970398a-27d0-4c60-a6af-2e10ca329454 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.542s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2337.685801] env[63279]: INFO nova.compute.claims [None req-c970398a-27d0-4c60-a6af-2e10ca329454 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: befc985f-68e2-4a04-8de0-9ca9bb3fa504] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2337.920664] env[63279]: DEBUG oslo_concurrency.lockutils [None req-8fb58680-e1a0-40f5-a223-7004b1de5c5a tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Releasing lock "refresh_cache-686a7ce2-2d07-411e-91d6-0471c55c3728" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2337.921043] env[63279]: DEBUG nova.objects.instance [None req-8fb58680-e1a0-40f5-a223-7004b1de5c5a tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Lazy-loading 'migration_context' on Instance uuid 686a7ce2-2d07-411e-91d6-0471c55c3728 {{(pid=63279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2338.002828] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2088079, 'name': CreateVM_Task, 'duration_secs': 0.298219} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2338.004070] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1332faad-20be-4a81-b57e-171a49d5c427] Created VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2338.004070] env[63279]: DEBUG oslo_concurrency.lockutils [None req-0f932193-d209-4b05-9fd3-35d4edffa040 tempest-ServersAaction247Test-961247548 tempest-ServersAaction247Test-961247548-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2338.004070] env[63279]: DEBUG oslo_concurrency.lockutils [None req-0f932193-d209-4b05-9fd3-35d4edffa040 tempest-ServersAaction247Test-961247548 tempest-ServersAaction247Test-961247548-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2338.004268] env[63279]: DEBUG oslo_concurrency.lockutils [None req-0f932193-d209-4b05-9fd3-35d4edffa040 tempest-ServersAaction247Test-961247548 tempest-ServersAaction247Test-961247548-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2338.004435] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d835451f-1d48-4a24-8bb7-4ab4a05e43c1 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2338.009821] env[63279]: DEBUG oslo_vmware.api [None req-0f932193-d209-4b05-9fd3-35d4edffa040 tempest-ServersAaction247Test-961247548 tempest-ServersAaction247Test-961247548-project-member] Waiting for the task: (returnval){ [ 2338.009821] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]5240150f-a18e-84ad-d718-8263ea3c1b3c" [ 2338.009821] env[63279]: _type = "Task" [ 2338.009821] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2338.018032] env[63279]: DEBUG oslo_vmware.api [None req-0f932193-d209-4b05-9fd3-35d4edffa040 tempest-ServersAaction247Test-961247548 tempest-ServersAaction247Test-961247548-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]5240150f-a18e-84ad-d718-8263ea3c1b3c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2338.052054] env[63279]: DEBUG nova.compute.manager [None req-878fde1b-ff89-4652-9f8c-ce1fd71ff4b4 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] [instance: b7123d9a-bcaa-43c4-ac9f-982b3b146eb0] Start destroying the instance on the hypervisor. 
{{(pid=63279) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2338.052324] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-878fde1b-ff89-4652-9f8c-ce1fd71ff4b4 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] [instance: b7123d9a-bcaa-43c4-ac9f-982b3b146eb0] Destroying instance {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2338.053243] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2942f14-b727-448a-b2d5-8fea8aa33fc2 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2338.060712] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-878fde1b-ff89-4652-9f8c-ce1fd71ff4b4 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] [instance: b7123d9a-bcaa-43c4-ac9f-982b3b146eb0] Powering off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2338.060947] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9ab238d2-5396-46f9-81a7-c938b80bdea1 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2338.069313] env[63279]: DEBUG oslo_vmware.api [None req-878fde1b-ff89-4652-9f8c-ce1fd71ff4b4 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Waiting for the task: (returnval){ [ 2338.069313] env[63279]: value = "task-2088080" [ 2338.069313] env[63279]: _type = "Task" [ 2338.069313] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2338.078321] env[63279]: DEBUG oslo_vmware.api [None req-878fde1b-ff89-4652-9f8c-ce1fd71ff4b4 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Task: {'id': task-2088080, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2338.191643] env[63279]: DEBUG nova.compute.utils [None req-54b6b0b4-1418-4b3d-beea-ba6d37b5d83a tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Using /dev/sd instead of None {{(pid=63279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2338.195859] env[63279]: DEBUG nova.compute.manager [None req-54b6b0b4-1418-4b3d-beea-ba6d37b5d83a tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] [instance: 36354325-dee0-406e-8eb6-bc3cf347a403] Allocating IP information in the background. 
{{(pid=63279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 2338.195859] env[63279]: DEBUG nova.network.neutron [None req-54b6b0b4-1418-4b3d-beea-ba6d37b5d83a tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] [instance: 36354325-dee0-406e-8eb6-bc3cf347a403] allocate_for_instance() {{(pid=63279) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2338.274068] env[63279]: DEBUG nova.policy [None req-54b6b0b4-1418-4b3d-beea-ba6d37b5d83a tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6343b207f7294f5fa2a8111940083fb0', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b5b21bc5072e4945a19a782dd9561709', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63279) authorize /opt/stack/nova/nova/policy.py:192}} [ 2338.426633] env[63279]: DEBUG nova.objects.base [None req-8fb58680-e1a0-40f5-a223-7004b1de5c5a tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Object Instance<686a7ce2-2d07-411e-91d6-0471c55c3728> lazy-loaded attributes: info_cache,migration_context {{(pid=63279) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 2338.427876] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b785fa59-17ac-4139-a997-f6777c7aea94 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2338.448657] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bce25e20-310a-4955-b1bb-3ff4dda65534 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2338.454159] env[63279]: DEBUG oslo_vmware.api [None req-8fb58680-e1a0-40f5-a223-7004b1de5c5a tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Waiting for the task: (returnval){ [ 2338.454159] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52175705-56b2-6aa9-8b73-139430e564e0" [ 2338.454159] env[63279]: _type = "Task" [ 2338.454159] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2338.461859] env[63279]: DEBUG oslo_vmware.api [None req-8fb58680-e1a0-40f5-a223-7004b1de5c5a tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52175705-56b2-6aa9-8b73-139430e564e0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2338.523174] env[63279]: DEBUG oslo_vmware.api [None req-0f932193-d209-4b05-9fd3-35d4edffa040 tempest-ServersAaction247Test-961247548 tempest-ServersAaction247Test-961247548-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]5240150f-a18e-84ad-d718-8263ea3c1b3c, 'name': SearchDatastore_Task, 'duration_secs': 0.012083} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2338.523174] env[63279]: DEBUG oslo_concurrency.lockutils [None req-0f932193-d209-4b05-9fd3-35d4edffa040 tempest-ServersAaction247Test-961247548 tempest-ServersAaction247Test-961247548-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2338.523174] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-0f932193-d209-4b05-9fd3-35d4edffa040 tempest-ServersAaction247Test-961247548 tempest-ServersAaction247Test-961247548-project-member] [instance: 1332faad-20be-4a81-b57e-171a49d5c427] Processing image 30887889-e45b-4f67-8b3c-16216e594a90 {{(pid=63279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2338.523174] env[63279]: DEBUG oslo_concurrency.lockutils [None req-0f932193-d209-4b05-9fd3-35d4edffa040 tempest-ServersAaction247Test-961247548 tempest-ServersAaction247Test-961247548-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2338.523174] env[63279]: DEBUG oslo_concurrency.lockutils [None req-0f932193-d209-4b05-9fd3-35d4edffa040 tempest-ServersAaction247Test-961247548 tempest-ServersAaction247Test-961247548-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2338.523174] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-0f932193-d209-4b05-9fd3-35d4edffa040 tempest-ServersAaction247Test-961247548 tempest-ServersAaction247Test-961247548-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2338.523174] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-63574ea9-5386-4a86-87d7-07e42cd1a66d {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2338.531182] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-0f932193-d209-4b05-9fd3-35d4edffa040 tempest-ServersAaction247Test-961247548 tempest-ServersAaction247Test-961247548-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2338.532205] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-0f932193-d209-4b05-9fd3-35d4edffa040 tempest-ServersAaction247Test-961247548 tempest-ServersAaction247Test-961247548-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2338.532331] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-637b2ea3-1876-45a6-b08a-3c72fa5f616a {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2338.538073] env[63279]: DEBUG oslo_vmware.api [None req-0f932193-d209-4b05-9fd3-35d4edffa040 tempest-ServersAaction247Test-961247548 tempest-ServersAaction247Test-961247548-project-member] Waiting for the task: (returnval){ [ 2338.538073] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]5273a93c-1938-15f6-72e6-e7ff970b847d" [ 2338.538073] env[63279]: _type = "Task" [ 2338.538073] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2338.546217] env[63279]: DEBUG oslo_vmware.api [None req-0f932193-d209-4b05-9fd3-35d4edffa040 tempest-ServersAaction247Test-961247548 tempest-ServersAaction247Test-961247548-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]5273a93c-1938-15f6-72e6-e7ff970b847d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2338.571155] env[63279]: DEBUG nova.network.neutron [None req-54b6b0b4-1418-4b3d-beea-ba6d37b5d83a tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] [instance: 36354325-dee0-406e-8eb6-bc3cf347a403] Successfully created port: a05a3f68-d80e-46c9-9065-1f803e7c04ff {{(pid=63279) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2338.581629] env[63279]: DEBUG oslo_vmware.api [None req-878fde1b-ff89-4652-9f8c-ce1fd71ff4b4 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Task: {'id': task-2088080, 'name': PowerOffVM_Task, 'duration_secs': 0.18621} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2338.581629] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-878fde1b-ff89-4652-9f8c-ce1fd71ff4b4 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] [instance: b7123d9a-bcaa-43c4-ac9f-982b3b146eb0] Powered off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2338.581629] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-878fde1b-ff89-4652-9f8c-ce1fd71ff4b4 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] [instance: b7123d9a-bcaa-43c4-ac9f-982b3b146eb0] Unregistering the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2338.581891] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-38c7638a-8b9c-4479-8418-68c4f8deba2e {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2338.696579] env[63279]: DEBUG nova.compute.manager [None req-54b6b0b4-1418-4b3d-beea-ba6d37b5d83a tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] [instance: 36354325-dee0-406e-8eb6-bc3cf347a403] Start building block device mappings for instance. 
{{(pid=63279) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 2338.898922] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4807edc-aae5-4d2c-9584-0567d798c8cb {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2338.908035] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e7879dd-ba58-481f-8712-65445f77f6d4 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2338.937339] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1938fd6-9d63-4b26-b008-fa5e278f55db {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2338.945426] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5330e31-11de-4276-a201-1fa2a93c6481 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2338.960717] env[63279]: DEBUG nova.compute.provider_tree [None req-c970398a-27d0-4c60-a6af-2e10ca329454 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Inventory has not changed in ProviderTree for provider: 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2338.970214] env[63279]: DEBUG oslo_vmware.api [None req-8fb58680-e1a0-40f5-a223-7004b1de5c5a tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52175705-56b2-6aa9-8b73-139430e564e0, 'name': SearchDatastore_Task, 'duration_secs': 0.007415} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2338.971127] env[63279]: DEBUG oslo_concurrency.lockutils [None req-8fb58680-e1a0-40f5-a223-7004b1de5c5a tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2339.047855] env[63279]: DEBUG oslo_vmware.api [None req-0f932193-d209-4b05-9fd3-35d4edffa040 tempest-ServersAaction247Test-961247548 tempest-ServersAaction247Test-961247548-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]5273a93c-1938-15f6-72e6-e7ff970b847d, 'name': SearchDatastore_Task, 'duration_secs': 0.009822} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2339.048674] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7a64d8ba-b38f-4168-86be-6a1194f16c96 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2339.054009] env[63279]: DEBUG oslo_vmware.api [None req-0f932193-d209-4b05-9fd3-35d4edffa040 tempest-ServersAaction247Test-961247548 tempest-ServersAaction247Test-961247548-project-member] Waiting for the task: (returnval){ [ 2339.054009] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52ec32d9-d9f2-acd4-15db-b456a22901be" [ 2339.054009] env[63279]: _type = "Task" [ 2339.054009] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2339.061065] env[63279]: DEBUG oslo_vmware.api [None req-0f932193-d209-4b05-9fd3-35d4edffa040 tempest-ServersAaction247Test-961247548 tempest-ServersAaction247Test-961247548-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52ec32d9-d9f2-acd4-15db-b456a22901be, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2339.466171] env[63279]: DEBUG nova.scheduler.client.report [None req-c970398a-27d0-4c60-a6af-2e10ca329454 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Inventory has not changed for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2339.564149] env[63279]: DEBUG oslo_vmware.api [None req-0f932193-d209-4b05-9fd3-35d4edffa040 tempest-ServersAaction247Test-961247548 tempest-ServersAaction247Test-961247548-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52ec32d9-d9f2-acd4-15db-b456a22901be, 'name': SearchDatastore_Task, 'duration_secs': 0.058631} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2339.564427] env[63279]: DEBUG oslo_concurrency.lockutils [None req-0f932193-d209-4b05-9fd3-35d4edffa040 tempest-ServersAaction247Test-961247548 tempest-ServersAaction247Test-961247548-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2339.564686] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-0f932193-d209-4b05-9fd3-35d4edffa040 tempest-ServersAaction247Test-961247548 tempest-ServersAaction247Test-961247548-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] 1332faad-20be-4a81-b57e-171a49d5c427/1332faad-20be-4a81-b57e-171a49d5c427.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2339.564977] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5e109960-8c5d-426a-8186-b22a6c093acd {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2339.571552] env[63279]: DEBUG oslo_vmware.api [None req-0f932193-d209-4b05-9fd3-35d4edffa040 tempest-ServersAaction247Test-961247548 tempest-ServersAaction247Test-961247548-project-member] Waiting for the task: (returnval){ [ 2339.571552] env[63279]: value = "task-2088082" [ 2339.571552] env[63279]: _type = "Task" [ 2339.571552] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2339.581614] env[63279]: DEBUG oslo_vmware.api [None req-0f932193-d209-4b05-9fd3-35d4edffa040 tempest-ServersAaction247Test-961247548 tempest-ServersAaction247Test-961247548-project-member] Task: {'id': task-2088082, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2339.710268] env[63279]: DEBUG nova.compute.manager [None req-54b6b0b4-1418-4b3d-beea-ba6d37b5d83a tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] [instance: 36354325-dee0-406e-8eb6-bc3cf347a403] Start spawning the instance on the hypervisor. 
{{(pid=63279) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 2339.738192] env[63279]: DEBUG nova.virt.hardware [None req-54b6b0b4-1418-4b3d-beea-ba6d37b5d83a tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-13T17:30:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-13T17:30:01Z,direct_url=,disk_format='vmdk',id=30887889-e45b-4f67-8b3c-16216e594a90,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a935e86eee0a4c38adfe0367d2097a61',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-13T17:30:02Z,virtual_size=,visibility=), allow threads: False {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2339.738720] env[63279]: DEBUG nova.virt.hardware [None req-54b6b0b4-1418-4b3d-beea-ba6d37b5d83a tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Flavor limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2339.738994] env[63279]: DEBUG nova.virt.hardware [None req-54b6b0b4-1418-4b3d-beea-ba6d37b5d83a tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Image limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2339.739226] env[63279]: DEBUG nova.virt.hardware [None req-54b6b0b4-1418-4b3d-beea-ba6d37b5d83a tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Flavor pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2339.739385] env[63279]: DEBUG nova.virt.hardware [None req-54b6b0b4-1418-4b3d-beea-ba6d37b5d83a tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Image pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2339.739543] env[63279]: DEBUG nova.virt.hardware [None req-54b6b0b4-1418-4b3d-beea-ba6d37b5d83a tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2339.739762] env[63279]: DEBUG nova.virt.hardware [None req-54b6b0b4-1418-4b3d-beea-ba6d37b5d83a tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 2339.739925] env[63279]: DEBUG nova.virt.hardware [None req-54b6b0b4-1418-4b3d-beea-ba6d37b5d83a tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 2339.740118] 
env[63279]: DEBUG nova.virt.hardware [None req-54b6b0b4-1418-4b3d-beea-ba6d37b5d83a tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Got 1 possible topologies {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2339.740288] env[63279]: DEBUG nova.virt.hardware [None req-54b6b0b4-1418-4b3d-beea-ba6d37b5d83a tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2339.740462] env[63279]: DEBUG nova.virt.hardware [None req-54b6b0b4-1418-4b3d-beea-ba6d37b5d83a tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2339.741730] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8081c5c-bffe-40e6-9d3c-c3ea13ffd3ce {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2339.750314] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dfce055a-be29-4ad1-a17f-32dc524c8985 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2339.970978] env[63279]: DEBUG oslo_concurrency.lockutils [None req-c970398a-27d0-4c60-a6af-2e10ca329454 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.287s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2339.971516] env[63279]: DEBUG nova.compute.manager [None req-c970398a-27d0-4c60-a6af-2e10ca329454 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: befc985f-68e2-4a04-8de0-9ca9bb3fa504] Start building networks asynchronously for instance. {{(pid=63279) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 2339.974401] env[63279]: DEBUG oslo_concurrency.lockutils [None req-4699cda3-9d4a-4758-a036-5b3787fbfef0 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 2.411s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2339.975307] env[63279]: DEBUG nova.objects.instance [None req-4699cda3-9d4a-4758-a036-5b3787fbfef0 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Lazy-loading 'resources' on Instance uuid 2fba60ba-7ef0-4e61-9f7d-147cad8ab7ed {{(pid=63279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2340.081898] env[63279]: DEBUG oslo_vmware.api [None req-0f932193-d209-4b05-9fd3-35d4edffa040 tempest-ServersAaction247Test-961247548 tempest-ServersAaction247Test-961247548-project-member] Task: {'id': task-2088082, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.437545} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2340.082232] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-0f932193-d209-4b05-9fd3-35d4edffa040 tempest-ServersAaction247Test-961247548 tempest-ServersAaction247Test-961247548-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] 1332faad-20be-4a81-b57e-171a49d5c427/1332faad-20be-4a81-b57e-171a49d5c427.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2340.082379] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-0f932193-d209-4b05-9fd3-35d4edffa040 tempest-ServersAaction247Test-961247548 tempest-ServersAaction247Test-961247548-project-member] [instance: 1332faad-20be-4a81-b57e-171a49d5c427] Extending root virtual disk to 1048576 {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2340.082632] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e9ab82c5-e507-4d03-8aad-548efc99d26d {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2340.089060] env[63279]: DEBUG oslo_vmware.api [None req-0f932193-d209-4b05-9fd3-35d4edffa040 tempest-ServersAaction247Test-961247548 tempest-ServersAaction247Test-961247548-project-member] Waiting for the task: (returnval){ [ 2340.089060] env[63279]: value = "task-2088083" [ 2340.089060] env[63279]: _type = "Task" [ 2340.089060] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2340.096199] env[63279]: DEBUG oslo_vmware.api [None req-0f932193-d209-4b05-9fd3-35d4edffa040 tempest-ServersAaction247Test-961247548 tempest-ServersAaction247Test-961247548-project-member] Task: {'id': task-2088083, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2340.477722] env[63279]: DEBUG nova.compute.utils [None req-c970398a-27d0-4c60-a6af-2e10ca329454 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Using /dev/sd instead of None {{(pid=63279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2340.481453] env[63279]: DEBUG nova.compute.manager [None req-c970398a-27d0-4c60-a6af-2e10ca329454 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: befc985f-68e2-4a04-8de0-9ca9bb3fa504] Allocating IP information in the background. 
{{(pid=63279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 2340.481649] env[63279]: DEBUG nova.network.neutron [None req-c970398a-27d0-4c60-a6af-2e10ca329454 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: befc985f-68e2-4a04-8de0-9ca9bb3fa504] allocate_for_instance() {{(pid=63279) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2340.558848] env[63279]: DEBUG nova.policy [None req-c970398a-27d0-4c60-a6af-2e10ca329454 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c7cd64afda9f4eb488bdf76a36f2fee6', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f9427c264e8e41998f579af352cb48cf', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63279) authorize /opt/stack/nova/nova/policy.py:192}} [ 2340.598676] env[63279]: DEBUG oslo_vmware.api [None req-0f932193-d209-4b05-9fd3-35d4edffa040 tempest-ServersAaction247Test-961247548 tempest-ServersAaction247Test-961247548-project-member] Task: {'id': task-2088083, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.09872} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2340.600939] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-0f932193-d209-4b05-9fd3-35d4edffa040 tempest-ServersAaction247Test-961247548 tempest-ServersAaction247Test-961247548-project-member] [instance: 1332faad-20be-4a81-b57e-171a49d5c427] Extended root virtual disk {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2340.601869] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c75ba46-c03b-468f-8677-c287a645f529 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2340.620973] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-0f932193-d209-4b05-9fd3-35d4edffa040 tempest-ServersAaction247Test-961247548 tempest-ServersAaction247Test-961247548-project-member] [instance: 1332faad-20be-4a81-b57e-171a49d5c427] Reconfiguring VM instance instance-0000006b to attach disk [datastore1] 1332faad-20be-4a81-b57e-171a49d5c427/1332faad-20be-4a81-b57e-171a49d5c427.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2340.623736] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-965ea0c5-4756-4052-b225-6584f1940379 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2340.643836] env[63279]: DEBUG oslo_vmware.api [None req-0f932193-d209-4b05-9fd3-35d4edffa040 tempest-ServersAaction247Test-961247548 tempest-ServersAaction247Test-961247548-project-member] Waiting for the task: (returnval){ [ 2340.643836] env[63279]: value = "task-2088084" [ 2340.643836] env[63279]: _type = "Task" [ 2340.643836] env[63279]: } to complete. 
{{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2340.653827] env[63279]: DEBUG oslo_vmware.api [None req-0f932193-d209-4b05-9fd3-35d4edffa040 tempest-ServersAaction247Test-961247548 tempest-ServersAaction247Test-961247548-project-member] Task: {'id': task-2088084, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2340.703668] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-067443ed-33d3-468b-8de5-07c9319fe6bb {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2340.711148] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c74e2f62-dcdd-4c53-b54f-58cdf1e6ace3 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2340.743718] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-955e0466-5d14-4118-b42f-76926ed1fc6e {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2340.751024] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ddd7fee9-0570-4fd4-a12c-129e25a3f6bd {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2340.764692] env[63279]: DEBUG nova.compute.provider_tree [None req-4699cda3-9d4a-4758-a036-5b3787fbfef0 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Inventory has not changed in ProviderTree for provider: 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2340.877015] env[63279]: DEBUG nova.network.neutron [None req-c970398a-27d0-4c60-a6af-2e10ca329454 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: befc985f-68e2-4a04-8de0-9ca9bb3fa504] Successfully created port: 48a709b0-3a67-4115-a178-0872536d2417 {{(pid=63279) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2340.985052] env[63279]: DEBUG nova.compute.manager [None req-c970398a-27d0-4c60-a6af-2e10ca329454 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: befc985f-68e2-4a04-8de0-9ca9bb3fa504] Start building block device mappings for instance. {{(pid=63279) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 2341.153858] env[63279]: DEBUG oslo_vmware.api [None req-0f932193-d209-4b05-9fd3-35d4edffa040 tempest-ServersAaction247Test-961247548 tempest-ServersAaction247Test-961247548-project-member] Task: {'id': task-2088084, 'name': ReconfigVM_Task, 'duration_secs': 0.256575} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2341.154162] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-0f932193-d209-4b05-9fd3-35d4edffa040 tempest-ServersAaction247Test-961247548 tempest-ServersAaction247Test-961247548-project-member] [instance: 1332faad-20be-4a81-b57e-171a49d5c427] Reconfigured VM instance instance-0000006b to attach disk [datastore1] 1332faad-20be-4a81-b57e-171a49d5c427/1332faad-20be-4a81-b57e-171a49d5c427.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2341.154971] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-fc18d8ef-3c66-4f74-950b-c79c8049f116 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2341.161204] env[63279]: DEBUG oslo_vmware.api [None req-0f932193-d209-4b05-9fd3-35d4edffa040 tempest-ServersAaction247Test-961247548 tempest-ServersAaction247Test-961247548-project-member] Waiting for the task: (returnval){ [ 2341.161204] env[63279]: value = "task-2088085" [ 2341.161204] env[63279]: _type = "Task" [ 2341.161204] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2341.168225] env[63279]: DEBUG oslo_vmware.api [None req-0f932193-d209-4b05-9fd3-35d4edffa040 tempest-ServersAaction247Test-961247548 tempest-ServersAaction247Test-961247548-project-member] Task: {'id': task-2088085, 'name': Rename_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2341.267742] env[63279]: DEBUG nova.scheduler.client.report [None req-4699cda3-9d4a-4758-a036-5b3787fbfef0 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Inventory has not changed for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2341.671125] env[63279]: DEBUG oslo_vmware.api [None req-0f932193-d209-4b05-9fd3-35d4edffa040 tempest-ServersAaction247Test-961247548 tempest-ServersAaction247Test-961247548-project-member] Task: {'id': task-2088085, 'name': Rename_Task, 'duration_secs': 0.136131} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2341.671407] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-0f932193-d209-4b05-9fd3-35d4edffa040 tempest-ServersAaction247Test-961247548 tempest-ServersAaction247Test-961247548-project-member] [instance: 1332faad-20be-4a81-b57e-171a49d5c427] Powering on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2341.671684] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f351fd66-c87c-4dcc-b10f-7ed786062fda {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2341.677277] env[63279]: DEBUG oslo_vmware.api [None req-0f932193-d209-4b05-9fd3-35d4edffa040 tempest-ServersAaction247Test-961247548 tempest-ServersAaction247Test-961247548-project-member] Waiting for the task: (returnval){ [ 2341.677277] env[63279]: value = "task-2088086" [ 2341.677277] env[63279]: _type = "Task" [ 2341.677277] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2341.684360] env[63279]: DEBUG oslo_vmware.api [None req-0f932193-d209-4b05-9fd3-35d4edffa040 tempest-ServersAaction247Test-961247548 tempest-ServersAaction247Test-961247548-project-member] Task: {'id': task-2088086, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2341.772943] env[63279]: DEBUG oslo_concurrency.lockutils [None req-4699cda3-9d4a-4758-a036-5b3787fbfef0 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.798s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2341.775347] env[63279]: DEBUG oslo_concurrency.lockutils [None req-8fb58680-e1a0-40f5-a223-7004b1de5c5a tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 2.804s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2341.795691] env[63279]: INFO nova.scheduler.client.report [None req-4699cda3-9d4a-4758-a036-5b3787fbfef0 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Deleted allocations for instance 2fba60ba-7ef0-4e61-9f7d-147cad8ab7ed [ 2341.997021] env[63279]: DEBUG nova.compute.manager [None req-c970398a-27d0-4c60-a6af-2e10ca329454 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: befc985f-68e2-4a04-8de0-9ca9bb3fa504] Start spawning the instance on the hypervisor. 
{{(pid=63279) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 2342.025679] env[63279]: DEBUG nova.virt.hardware [None req-c970398a-27d0-4c60-a6af-2e10ca329454 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-13T17:30:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-13T17:30:01Z,direct_url=,disk_format='vmdk',id=30887889-e45b-4f67-8b3c-16216e594a90,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a935e86eee0a4c38adfe0367d2097a61',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-13T17:30:02Z,virtual_size=,visibility=), allow threads: False {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2342.025942] env[63279]: DEBUG nova.virt.hardware [None req-c970398a-27d0-4c60-a6af-2e10ca329454 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Flavor limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2342.026213] env[63279]: DEBUG nova.virt.hardware [None req-c970398a-27d0-4c60-a6af-2e10ca329454 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Image limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2342.026328] env[63279]: DEBUG nova.virt.hardware [None req-c970398a-27d0-4c60-a6af-2e10ca329454 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Flavor pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2342.026524] env[63279]: DEBUG nova.virt.hardware [None req-c970398a-27d0-4c60-a6af-2e10ca329454 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Image pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2342.026692] env[63279]: DEBUG nova.virt.hardware [None req-c970398a-27d0-4c60-a6af-2e10ca329454 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2342.026909] env[63279]: DEBUG nova.virt.hardware [None req-c970398a-27d0-4c60-a6af-2e10ca329454 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 2342.027087] env[63279]: DEBUG nova.virt.hardware [None req-c970398a-27d0-4c60-a6af-2e10ca329454 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 2342.027267] env[63279]: DEBUG 
nova.virt.hardware [None req-c970398a-27d0-4c60-a6af-2e10ca329454 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Got 1 possible topologies {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2342.027434] env[63279]: DEBUG nova.virt.hardware [None req-c970398a-27d0-4c60-a6af-2e10ca329454 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2342.027618] env[63279]: DEBUG nova.virt.hardware [None req-c970398a-27d0-4c60-a6af-2e10ca329454 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2342.028643] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-921ae2e5-2b7e-4433-a0a9-092ba4018a3e {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2342.039518] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a876f0b-f7a6-4383-8219-2fbb832b8d29 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2342.186838] env[63279]: DEBUG oslo_vmware.api [None req-0f932193-d209-4b05-9fd3-35d4edffa040 tempest-ServersAaction247Test-961247548 tempest-ServersAaction247Test-961247548-project-member] Task: {'id': task-2088086, 'name': PowerOnVM_Task, 'duration_secs': 0.397941} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2342.187184] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-0f932193-d209-4b05-9fd3-35d4edffa040 tempest-ServersAaction247Test-961247548 tempest-ServersAaction247Test-961247548-project-member] [instance: 1332faad-20be-4a81-b57e-171a49d5c427] Powered on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2342.187316] env[63279]: INFO nova.compute.manager [None req-0f932193-d209-4b05-9fd3-35d4edffa040 tempest-ServersAaction247Test-961247548 tempest-ServersAaction247Test-961247548-project-member] [instance: 1332faad-20be-4a81-b57e-171a49d5c427] Took 4.80 seconds to spawn the instance on the hypervisor. 
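The PowerOnVM_Task sequence that closes the spawn above (invoke, poll at 0%, then "Powered on the VM") follows oslo.vmware's generic asynchronous-task pattern. Below is a minimal sketch of that pattern, not Nova's actual code: `session` is assumed to be an already established oslo_vmware.api.VMwareAPISession and `vm_ref` a VirtualMachine managed-object reference obtained elsewhere; both names are placeholders for this illustration.

# Illustrative sketch only -- not part of the log above.
def power_on(session, vm_ref):
    # invoke_api() issues the SOAP call (the "Invoking
    # VirtualMachine.PowerOnVM_Task" entry) and returns a Task object.
    task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
    # wait_for_task() polls the task (the "progress is 0%" entries) and
    # returns its result once it reaches the 'success' state, raising if
    # the task ends in 'error'.
    return session.wait_for_task(task)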
[ 2342.187503] env[63279]: DEBUG nova.compute.manager [None req-0f932193-d209-4b05-9fd3-35d4edffa040 tempest-ServersAaction247Test-961247548 tempest-ServersAaction247Test-961247548-project-member] [instance: 1332faad-20be-4a81-b57e-171a49d5c427] Checking state {{(pid=63279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2342.188298] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e96d8f3-1e85-4c9d-a212-e63db63aa892 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2342.303205] env[63279]: DEBUG oslo_concurrency.lockutils [None req-4699cda3-9d4a-4758-a036-5b3787fbfef0 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Lock "2fba60ba-7ef0-4e61-9f7d-147cad8ab7ed" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 8.552s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2342.475693] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7f76412-b03a-4e4b-a81f-a16ef2c98d98 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2342.483391] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d76a7c5-b049-4fe8-8e1a-b5a36a4d6752 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2342.514761] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c858685-771b-4646-bdc3-9747efe6ef8d {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2342.523406] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df2bcba1-6c9b-480c-9ce7-049c185bbb1b {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2342.540962] env[63279]: DEBUG nova.compute.provider_tree [None req-8fb58680-e1a0-40f5-a223-7004b1de5c5a tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Inventory has not changed in ProviderTree for provider: 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2342.705862] env[63279]: INFO nova.compute.manager [None req-0f932193-d209-4b05-9fd3-35d4edffa040 tempest-ServersAaction247Test-961247548 tempest-ServersAaction247Test-961247548-project-member] [instance: 1332faad-20be-4a81-b57e-171a49d5c427] Took 10.67 seconds to build instance. 
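The recurring "Acquiring lock ... / acquired ... waited Ns / released ... held Ns" triplets in the surrounding entries, such as the "compute_resources" and instance-UUID locks, are emitted by oslo.concurrency's synchronized decorator. A self-contained sketch of that pattern follows; the lock name reuses "compute_resources" for familiarity, but the function and its body are invented for illustration.

# Illustrative sketch only -- not part of the log above.
from oslo_concurrency import lockutils

@lockutils.synchronized('compute_resources')
def update_usage():
    # Only one thread in this process holds the named lock at a time;
    # acquiring and releasing it is what produces the waited/held
    # DEBUG lines recorded by lockutils' inner wrapper.
    return 'updated'

if __name__ == '__main__':
    print(update_usage())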
[ 2343.044677] env[63279]: DEBUG nova.scheduler.client.report [None req-8fb58680-e1a0-40f5-a223-7004b1de5c5a tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Inventory has not changed for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2343.208702] env[63279]: DEBUG oslo_concurrency.lockutils [None req-0f932193-d209-4b05-9fd3-35d4edffa040 tempest-ServersAaction247Test-961247548 tempest-ServersAaction247Test-961247548-project-member] Lock "1332faad-20be-4a81-b57e-171a49d5c427" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 12.181s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2343.296493] env[63279]: DEBUG nova.compute.manager [None req-5e5ea9a6-e2db-43e9-acf8-9ebfcd2ece70 tempest-ServersAaction247Test-961247548 tempest-ServersAaction247Test-961247548-project-member] [instance: 1332faad-20be-4a81-b57e-171a49d5c427] Checking state {{(pid=63279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2343.297451] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53d3fa3a-ae98-4c7a-b9ef-5a692aa1b795 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2343.355325] env[63279]: DEBUG oslo_concurrency.lockutils [None req-c2b6f3c7-43c6-43e8-96b4-bd11cfadffb0 tempest-ServersAaction247Test-961247548 tempest-ServersAaction247Test-961247548-project-member] Acquiring lock "1332faad-20be-4a81-b57e-171a49d5c427" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2343.355584] env[63279]: DEBUG oslo_concurrency.lockutils [None req-c2b6f3c7-43c6-43e8-96b4-bd11cfadffb0 tempest-ServersAaction247Test-961247548 tempest-ServersAaction247Test-961247548-project-member] Lock "1332faad-20be-4a81-b57e-171a49d5c427" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2343.355800] env[63279]: DEBUG oslo_concurrency.lockutils [None req-c2b6f3c7-43c6-43e8-96b4-bd11cfadffb0 tempest-ServersAaction247Test-961247548 tempest-ServersAaction247Test-961247548-project-member] Acquiring lock "1332faad-20be-4a81-b57e-171a49d5c427-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2343.355987] env[63279]: DEBUG oslo_concurrency.lockutils [None req-c2b6f3c7-43c6-43e8-96b4-bd11cfadffb0 tempest-ServersAaction247Test-961247548 tempest-ServersAaction247Test-961247548-project-member] Lock "1332faad-20be-4a81-b57e-171a49d5c427-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: 
waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2343.356181] env[63279]: DEBUG oslo_concurrency.lockutils [None req-c2b6f3c7-43c6-43e8-96b4-bd11cfadffb0 tempest-ServersAaction247Test-961247548 tempest-ServersAaction247Test-961247548-project-member] Lock "1332faad-20be-4a81-b57e-171a49d5c427-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2343.358329] env[63279]: INFO nova.compute.manager [None req-c2b6f3c7-43c6-43e8-96b4-bd11cfadffb0 tempest-ServersAaction247Test-961247548 tempest-ServersAaction247Test-961247548-project-member] [instance: 1332faad-20be-4a81-b57e-171a49d5c427] Terminating instance [ 2343.807344] env[63279]: INFO nova.compute.manager [None req-5e5ea9a6-e2db-43e9-acf8-9ebfcd2ece70 tempest-ServersAaction247Test-961247548 tempest-ServersAaction247Test-961247548-project-member] [instance: 1332faad-20be-4a81-b57e-171a49d5c427] instance snapshotting [ 2343.807960] env[63279]: DEBUG nova.objects.instance [None req-5e5ea9a6-e2db-43e9-acf8-9ebfcd2ece70 tempest-ServersAaction247Test-961247548 tempest-ServersAaction247Test-961247548-project-member] Lazy-loading 'flavor' on Instance uuid 1332faad-20be-4a81-b57e-171a49d5c427 {{(pid=63279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2343.862471] env[63279]: DEBUG oslo_concurrency.lockutils [None req-c2b6f3c7-43c6-43e8-96b4-bd11cfadffb0 tempest-ServersAaction247Test-961247548 tempest-ServersAaction247Test-961247548-project-member] Acquiring lock "refresh_cache-1332faad-20be-4a81-b57e-171a49d5c427" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2343.862808] env[63279]: DEBUG oslo_concurrency.lockutils [None req-c2b6f3c7-43c6-43e8-96b4-bd11cfadffb0 tempest-ServersAaction247Test-961247548 tempest-ServersAaction247Test-961247548-project-member] Acquired lock "refresh_cache-1332faad-20be-4a81-b57e-171a49d5c427" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2343.863061] env[63279]: DEBUG nova.network.neutron [None req-c2b6f3c7-43c6-43e8-96b4-bd11cfadffb0 tempest-ServersAaction247Test-961247548 tempest-ServersAaction247Test-961247548-project-member] [instance: 1332faad-20be-4a81-b57e-171a49d5c427] Building network info cache for instance {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2344.057054] env[63279]: DEBUG oslo_concurrency.lockutils [None req-8fb58680-e1a0-40f5-a223-7004b1de5c5a tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.281s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2344.057401] env[63279]: DEBUG nova.compute.manager [None req-8fb58680-e1a0-40f5-a223-7004b1de5c5a tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 686a7ce2-2d07-411e-91d6-0471c55c3728] Resized/migrated instance is powered off. Setting vm_state to 'stopped'. 
{{(pid=63279) _confirm_resize /opt/stack/nova/nova/compute/manager.py:5376}} [ 2344.313249] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c856448c-3e94-4290-b215-9ebee5c6c56b {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2344.330788] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95b3e332-a0c6-47a9-8cc8-9c0aabad0ea3 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2344.381493] env[63279]: DEBUG nova.network.neutron [None req-c2b6f3c7-43c6-43e8-96b4-bd11cfadffb0 tempest-ServersAaction247Test-961247548 tempest-ServersAaction247Test-961247548-project-member] [instance: 1332faad-20be-4a81-b57e-171a49d5c427] Instance cache missing network info. {{(pid=63279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2344.429252] env[63279]: DEBUG nova.network.neutron [None req-c2b6f3c7-43c6-43e8-96b4-bd11cfadffb0 tempest-ServersAaction247Test-961247548 tempest-ServersAaction247Test-961247548-project-member] [instance: 1332faad-20be-4a81-b57e-171a49d5c427] Updating instance_info_cache with network_info: [] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2344.618172] env[63279]: INFO nova.scheduler.client.report [None req-8fb58680-e1a0-40f5-a223-7004b1de5c5a tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Deleted allocation for migration 799416f5-6fd0-44c1-b42e-4d6384aeed82 [ 2344.840229] env[63279]: DEBUG nova.compute.manager [None req-5e5ea9a6-e2db-43e9-acf8-9ebfcd2ece70 tempest-ServersAaction247Test-961247548 tempest-ServersAaction247Test-961247548-project-member] [instance: 1332faad-20be-4a81-b57e-171a49d5c427] Instance disappeared during snapshot {{(pid=63279) _snapshot_instance /opt/stack/nova/nova/compute/manager.py:4595}} [ 2344.932382] env[63279]: DEBUG oslo_concurrency.lockutils [None req-c2b6f3c7-43c6-43e8-96b4-bd11cfadffb0 tempest-ServersAaction247Test-961247548 tempest-ServersAaction247Test-961247548-project-member] Releasing lock "refresh_cache-1332faad-20be-4a81-b57e-171a49d5c427" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2344.932902] env[63279]: DEBUG nova.compute.manager [None req-c2b6f3c7-43c6-43e8-96b4-bd11cfadffb0 tempest-ServersAaction247Test-961247548 tempest-ServersAaction247Test-961247548-project-member] [instance: 1332faad-20be-4a81-b57e-171a49d5c427] Start destroying the instance on the hypervisor. 
{{(pid=63279) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2344.933267] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-c2b6f3c7-43c6-43e8-96b4-bd11cfadffb0 tempest-ServersAaction247Test-961247548 tempest-ServersAaction247Test-961247548-project-member] [instance: 1332faad-20be-4a81-b57e-171a49d5c427] Destroying instance {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2344.934462] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f46f97b-82be-49e5-8fb3-11950706886f {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2344.945097] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-c2b6f3c7-43c6-43e8-96b4-bd11cfadffb0 tempest-ServersAaction247Test-961247548 tempest-ServersAaction247Test-961247548-project-member] [instance: 1332faad-20be-4a81-b57e-171a49d5c427] Powering off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2344.945354] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7b95cc05-1e67-4928-9977-184dcb91d688 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2344.952444] env[63279]: DEBUG oslo_vmware.api [None req-c2b6f3c7-43c6-43e8-96b4-bd11cfadffb0 tempest-ServersAaction247Test-961247548 tempest-ServersAaction247Test-961247548-project-member] Waiting for the task: (returnval){ [ 2344.952444] env[63279]: value = "task-2088088" [ 2344.952444] env[63279]: _type = "Task" [ 2344.952444] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2344.963435] env[63279]: DEBUG oslo_vmware.api [None req-c2b6f3c7-43c6-43e8-96b4-bd11cfadffb0 tempest-ServersAaction247Test-961247548 tempest-ServersAaction247Test-961247548-project-member] Task: {'id': task-2088088, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2344.964458] env[63279]: DEBUG nova.compute.manager [None req-5e5ea9a6-e2db-43e9-acf8-9ebfcd2ece70 tempest-ServersAaction247Test-961247548 tempest-ServersAaction247Test-961247548-project-member] [instance: 1332faad-20be-4a81-b57e-171a49d5c427] Found 0 images (rotation: 2) {{(pid=63279) _rotate_backups /opt/stack/nova/nova/compute/manager.py:5017}} [ 2345.124348] env[63279]: DEBUG oslo_concurrency.lockutils [None req-8fb58680-e1a0-40f5-a223-7004b1de5c5a tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Lock "686a7ce2-2d07-411e-91d6-0471c55c3728" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 9.546s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2345.464398] env[63279]: DEBUG oslo_vmware.api [None req-c2b6f3c7-43c6-43e8-96b4-bd11cfadffb0 tempest-ServersAaction247Test-961247548 tempest-ServersAaction247Test-961247548-project-member] Task: {'id': task-2088088, 'name': PowerOffVM_Task, 'duration_secs': 0.206394} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2345.464398] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-c2b6f3c7-43c6-43e8-96b4-bd11cfadffb0 tempest-ServersAaction247Test-961247548 tempest-ServersAaction247Test-961247548-project-member] [instance: 1332faad-20be-4a81-b57e-171a49d5c427] Powered off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2345.464398] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-c2b6f3c7-43c6-43e8-96b4-bd11cfadffb0 tempest-ServersAaction247Test-961247548 tempest-ServersAaction247Test-961247548-project-member] [instance: 1332faad-20be-4a81-b57e-171a49d5c427] Unregistering the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2345.464398] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6789ac12-1854-456b-9100-f53b15a09e73 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2345.491845] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-c2b6f3c7-43c6-43e8-96b4-bd11cfadffb0 tempest-ServersAaction247Test-961247548 tempest-ServersAaction247Test-961247548-project-member] [instance: 1332faad-20be-4a81-b57e-171a49d5c427] Unregistered the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2345.493684] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-c2b6f3c7-43c6-43e8-96b4-bd11cfadffb0 tempest-ServersAaction247Test-961247548 tempest-ServersAaction247Test-961247548-project-member] [instance: 1332faad-20be-4a81-b57e-171a49d5c427] Deleting contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2345.493684] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-c2b6f3c7-43c6-43e8-96b4-bd11cfadffb0 tempest-ServersAaction247Test-961247548 tempest-ServersAaction247Test-961247548-project-member] Deleting the datastore file [datastore1] 1332faad-20be-4a81-b57e-171a49d5c427 {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2345.493684] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-47cc4733-acbd-4db1-8fe5-6446f69bfa08 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2345.499768] env[63279]: DEBUG oslo_vmware.api [None req-c2b6f3c7-43c6-43e8-96b4-bd11cfadffb0 tempest-ServersAaction247Test-961247548 tempest-ServersAaction247Test-961247548-project-member] Waiting for the task: (returnval){ [ 2345.499768] env[63279]: value = "task-2088090" [ 2345.499768] env[63279]: _type = "Task" [ 2345.499768] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2345.508682] env[63279]: DEBUG oslo_vmware.api [None req-c2b6f3c7-43c6-43e8-96b4-bd11cfadffb0 tempest-ServersAaction247Test-961247548 tempest-ServersAaction247Test-961247548-project-member] Task: {'id': task-2088090, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2345.933154] env[63279]: DEBUG nova.objects.instance [None req-6d9a0f5d-543c-488f-84e6-adf9e74de6c1 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Lazy-loading 'flavor' on Instance uuid 686a7ce2-2d07-411e-91d6-0471c55c3728 {{(pid=63279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2346.010608] env[63279]: DEBUG oslo_vmware.api [None req-c2b6f3c7-43c6-43e8-96b4-bd11cfadffb0 tempest-ServersAaction247Test-961247548 tempest-ServersAaction247Test-961247548-project-member] Task: {'id': task-2088090, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.09416} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2346.010896] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-c2b6f3c7-43c6-43e8-96b4-bd11cfadffb0 tempest-ServersAaction247Test-961247548 tempest-ServersAaction247Test-961247548-project-member] Deleted the datastore file {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2346.011097] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-c2b6f3c7-43c6-43e8-96b4-bd11cfadffb0 tempest-ServersAaction247Test-961247548 tempest-ServersAaction247Test-961247548-project-member] [instance: 1332faad-20be-4a81-b57e-171a49d5c427] Deleted contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2346.011282] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-c2b6f3c7-43c6-43e8-96b4-bd11cfadffb0 tempest-ServersAaction247Test-961247548 tempest-ServersAaction247Test-961247548-project-member] [instance: 1332faad-20be-4a81-b57e-171a49d5c427] Instance destroyed {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2346.011630] env[63279]: INFO nova.compute.manager [None req-c2b6f3c7-43c6-43e8-96b4-bd11cfadffb0 tempest-ServersAaction247Test-961247548 tempest-ServersAaction247Test-961247548-project-member] [instance: 1332faad-20be-4a81-b57e-171a49d5c427] Took 1.08 seconds to destroy the instance on the hypervisor. [ 2346.011765] env[63279]: DEBUG oslo.service.loopingcall [None req-c2b6f3c7-43c6-43e8-96b4-bd11cfadffb0 tempest-ServersAaction247Test-961247548 tempest-ServersAaction247Test-961247548-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2346.011956] env[63279]: DEBUG nova.compute.manager [-] [instance: 1332faad-20be-4a81-b57e-171a49d5c427] Deallocating network for instance {{(pid=63279) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2346.012457] env[63279]: DEBUG nova.network.neutron [-] [instance: 1332faad-20be-4a81-b57e-171a49d5c427] deallocate_for_instance() {{(pid=63279) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2346.029354] env[63279]: DEBUG nova.network.neutron [-] [instance: 1332faad-20be-4a81-b57e-171a49d5c427] Instance cache missing network info. 
{{(pid=63279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2346.439473] env[63279]: DEBUG oslo_concurrency.lockutils [None req-6d9a0f5d-543c-488f-84e6-adf9e74de6c1 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Acquiring lock "refresh_cache-686a7ce2-2d07-411e-91d6-0471c55c3728" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2346.439473] env[63279]: DEBUG oslo_concurrency.lockutils [None req-6d9a0f5d-543c-488f-84e6-adf9e74de6c1 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Acquired lock "refresh_cache-686a7ce2-2d07-411e-91d6-0471c55c3728" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2346.439473] env[63279]: DEBUG nova.network.neutron [None req-6d9a0f5d-543c-488f-84e6-adf9e74de6c1 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 686a7ce2-2d07-411e-91d6-0471c55c3728] Building network info cache for instance {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2346.439473] env[63279]: DEBUG nova.objects.instance [None req-6d9a0f5d-543c-488f-84e6-adf9e74de6c1 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Lazy-loading 'info_cache' on Instance uuid 686a7ce2-2d07-411e-91d6-0471c55c3728 {{(pid=63279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2346.531908] env[63279]: DEBUG nova.network.neutron [-] [instance: 1332faad-20be-4a81-b57e-171a49d5c427] Updating instance_info_cache with network_info: [] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2346.785234] env[63279]: DEBUG nova.compute.manager [req-54b83b15-30e3-42b7-9e48-564d745c659f req-85f0147f-ab20-4204-a5ed-80f8eee451d8 service nova] [instance: befc985f-68e2-4a04-8de0-9ca9bb3fa504] Received event network-vif-plugged-48a709b0-3a67-4115-a178-0872536d2417 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2346.785234] env[63279]: DEBUG oslo_concurrency.lockutils [req-54b83b15-30e3-42b7-9e48-564d745c659f req-85f0147f-ab20-4204-a5ed-80f8eee451d8 service nova] Acquiring lock "befc985f-68e2-4a04-8de0-9ca9bb3fa504-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2346.785234] env[63279]: DEBUG oslo_concurrency.lockutils [req-54b83b15-30e3-42b7-9e48-564d745c659f req-85f0147f-ab20-4204-a5ed-80f8eee451d8 service nova] Lock "befc985f-68e2-4a04-8de0-9ca9bb3fa504-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2346.785234] env[63279]: DEBUG oslo_concurrency.lockutils [req-54b83b15-30e3-42b7-9e48-564d745c659f req-85f0147f-ab20-4204-a5ed-80f8eee451d8 service nova] Lock "befc985f-68e2-4a04-8de0-9ca9bb3fa504-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2346.785234] env[63279]: DEBUG nova.compute.manager [req-54b83b15-30e3-42b7-9e48-564d745c659f req-85f0147f-ab20-4204-a5ed-80f8eee451d8 service 
nova] [instance: befc985f-68e2-4a04-8de0-9ca9bb3fa504] No waiting events found dispatching network-vif-plugged-48a709b0-3a67-4115-a178-0872536d2417 {{(pid=63279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 2346.785234] env[63279]: WARNING nova.compute.manager [req-54b83b15-30e3-42b7-9e48-564d745c659f req-85f0147f-ab20-4204-a5ed-80f8eee451d8 service nova] [instance: befc985f-68e2-4a04-8de0-9ca9bb3fa504] Received unexpected event network-vif-plugged-48a709b0-3a67-4115-a178-0872536d2417 for instance with vm_state building and task_state spawning. [ 2346.888026] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-878fde1b-ff89-4652-9f8c-ce1fd71ff4b4 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] [instance: b7123d9a-bcaa-43c4-ac9f-982b3b146eb0] Unregistered the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2346.888026] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-878fde1b-ff89-4652-9f8c-ce1fd71ff4b4 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] [instance: b7123d9a-bcaa-43c4-ac9f-982b3b146eb0] Deleting contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2346.888026] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-878fde1b-ff89-4652-9f8c-ce1fd71ff4b4 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Deleting the datastore file [datastore1] b7123d9a-bcaa-43c4-ac9f-982b3b146eb0 {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2346.888026] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2d2f3757-0a4a-4d12-a326-6cb7c64f2910 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2346.893864] env[63279]: DEBUG oslo_vmware.api [None req-878fde1b-ff89-4652-9f8c-ce1fd71ff4b4 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Waiting for the task: (returnval){ [ 2346.893864] env[63279]: value = "task-2088091" [ 2346.893864] env[63279]: _type = "Task" [ 2346.893864] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2346.902426] env[63279]: DEBUG oslo_vmware.api [None req-878fde1b-ff89-4652-9f8c-ce1fd71ff4b4 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Task: {'id': task-2088091, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2346.935335] env[63279]: DEBUG nova.network.neutron [None req-c970398a-27d0-4c60-a6af-2e10ca329454 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: befc985f-68e2-4a04-8de0-9ca9bb3fa504] Successfully updated port: 48a709b0-3a67-4115-a178-0872536d2417 {{(pid=63279) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2346.943361] env[63279]: DEBUG nova.objects.base [None req-6d9a0f5d-543c-488f-84e6-adf9e74de6c1 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Object Instance<686a7ce2-2d07-411e-91d6-0471c55c3728> lazy-loaded attributes: flavor,info_cache {{(pid=63279) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 2347.009392] env[63279]: DEBUG nova.compute.manager [req-37f695c0-a64d-4cc0-b979-91197f03e8ad req-912ba349-6ce3-478f-b66d-aed440c41a7b service nova] [instance: 36354325-dee0-406e-8eb6-bc3cf347a403] Received event network-vif-plugged-a05a3f68-d80e-46c9-9065-1f803e7c04ff {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2347.009392] env[63279]: DEBUG oslo_concurrency.lockutils [req-37f695c0-a64d-4cc0-b979-91197f03e8ad req-912ba349-6ce3-478f-b66d-aed440c41a7b service nova] Acquiring lock "36354325-dee0-406e-8eb6-bc3cf347a403-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2347.009392] env[63279]: DEBUG oslo_concurrency.lockutils [req-37f695c0-a64d-4cc0-b979-91197f03e8ad req-912ba349-6ce3-478f-b66d-aed440c41a7b service nova] Lock "36354325-dee0-406e-8eb6-bc3cf347a403-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2347.009392] env[63279]: DEBUG oslo_concurrency.lockutils [req-37f695c0-a64d-4cc0-b979-91197f03e8ad req-912ba349-6ce3-478f-b66d-aed440c41a7b service nova] Lock "36354325-dee0-406e-8eb6-bc3cf347a403-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2347.009392] env[63279]: DEBUG nova.compute.manager [req-37f695c0-a64d-4cc0-b979-91197f03e8ad req-912ba349-6ce3-478f-b66d-aed440c41a7b service nova] [instance: 36354325-dee0-406e-8eb6-bc3cf347a403] No waiting events found dispatching network-vif-plugged-a05a3f68-d80e-46c9-9065-1f803e7c04ff {{(pid=63279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 2347.009392] env[63279]: WARNING nova.compute.manager [req-37f695c0-a64d-4cc0-b979-91197f03e8ad req-912ba349-6ce3-478f-b66d-aed440c41a7b service nova] [instance: 36354325-dee0-406e-8eb6-bc3cf347a403] Received unexpected event network-vif-plugged-a05a3f68-d80e-46c9-9065-1f803e7c04ff for instance with vm_state building and task_state spawning. [ 2347.034263] env[63279]: INFO nova.compute.manager [-] [instance: 1332faad-20be-4a81-b57e-171a49d5c427] Took 1.02 seconds to deallocate network for instance. 
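The "Acquiring lock … by …", "Lock … acquired … :: waited" and "Lock … released … :: held" triplets above (logged from lockutils.py:402/407/421) come from oslo.concurrency's named-lock wrapper, which records how long each caller waited for and held locks such as "refresh_cache-<uuid>" or "<uuid>-events". A minimal sketch of that pattern follows, assuming illustrative function and lock names rather than Nova's actual code:

# Sketch only, not Nova source: the lockutils wrapper emits the
# acquire/acquired/released log lines around blocks like these.
from oslo_concurrency import lockutils

def refresh_instance_cache(instance_uuid, rebuild_cache):
    # Named in-process lock, matching the "refresh_cache-<uuid>" lines above.
    with lockutils.lock("refresh_cache-%s" % instance_uuid):
        rebuild_cache(instance_uuid)  # e.g. rebuild the network info cache

@lockutils.synchronized("00000000-0000-0000-0000-000000000000-events")
def _pop_event(name, tag):
    # Per-instance event lock, like the "<uuid>-events" lines above;
    # the body here is a hypothetical stand-in.
    return None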
[ 2347.098743] env[63279]: DEBUG nova.network.neutron [None req-54b6b0b4-1418-4b3d-beea-ba6d37b5d83a tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] [instance: 36354325-dee0-406e-8eb6-bc3cf347a403] Successfully updated port: a05a3f68-d80e-46c9-9065-1f803e7c04ff {{(pid=63279) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2347.255107] env[63279]: DEBUG oslo_concurrency.lockutils [None req-bbf0a56c-0766-4a55-b924-bacb35121eeb tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Acquiring lock "a6758131-030e-4b33-a2c9-8864055a5bec" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2347.255386] env[63279]: DEBUG oslo_concurrency.lockutils [None req-bbf0a56c-0766-4a55-b924-bacb35121eeb tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Lock "a6758131-030e-4b33-a2c9-8864055a5bec" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2347.255602] env[63279]: DEBUG oslo_concurrency.lockutils [None req-bbf0a56c-0766-4a55-b924-bacb35121eeb tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Acquiring lock "a6758131-030e-4b33-a2c9-8864055a5bec-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2347.255791] env[63279]: DEBUG oslo_concurrency.lockutils [None req-bbf0a56c-0766-4a55-b924-bacb35121eeb tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Lock "a6758131-030e-4b33-a2c9-8864055a5bec-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2347.255966] env[63279]: DEBUG oslo_concurrency.lockutils [None req-bbf0a56c-0766-4a55-b924-bacb35121eeb tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Lock "a6758131-030e-4b33-a2c9-8864055a5bec-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2347.257979] env[63279]: INFO nova.compute.manager [None req-bbf0a56c-0766-4a55-b924-bacb35121eeb tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] [instance: a6758131-030e-4b33-a2c9-8864055a5bec] Terminating instance [ 2347.404836] env[63279]: DEBUG oslo_vmware.api [None req-878fde1b-ff89-4652-9f8c-ce1fd71ff4b4 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Task: {'id': task-2088091, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.150708} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2347.405121] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-878fde1b-ff89-4652-9f8c-ce1fd71ff4b4 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Deleted the datastore file {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2347.405317] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-878fde1b-ff89-4652-9f8c-ce1fd71ff4b4 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] [instance: b7123d9a-bcaa-43c4-ac9f-982b3b146eb0] Deleted contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2347.405500] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-878fde1b-ff89-4652-9f8c-ce1fd71ff4b4 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] [instance: b7123d9a-bcaa-43c4-ac9f-982b3b146eb0] Instance destroyed {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2347.405675] env[63279]: INFO nova.compute.manager [None req-878fde1b-ff89-4652-9f8c-ce1fd71ff4b4 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] [instance: b7123d9a-bcaa-43c4-ac9f-982b3b146eb0] Took 9.35 seconds to destroy the instance on the hypervisor. [ 2347.405912] env[63279]: DEBUG oslo.service.loopingcall [None req-878fde1b-ff89-4652-9f8c-ce1fd71ff4b4 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2347.406119] env[63279]: DEBUG nova.compute.manager [-] [instance: b7123d9a-bcaa-43c4-ac9f-982b3b146eb0] Deallocating network for instance {{(pid=63279) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2347.406214] env[63279]: DEBUG nova.network.neutron [-] [instance: b7123d9a-bcaa-43c4-ac9f-982b3b146eb0] deallocate_for_instance() {{(pid=63279) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2347.437995] env[63279]: DEBUG oslo_concurrency.lockutils [None req-c970398a-27d0-4c60-a6af-2e10ca329454 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Acquiring lock "refresh_cache-befc985f-68e2-4a04-8de0-9ca9bb3fa504" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2347.438158] env[63279]: DEBUG oslo_concurrency.lockutils [None req-c970398a-27d0-4c60-a6af-2e10ca329454 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Acquired lock "refresh_cache-befc985f-68e2-4a04-8de0-9ca9bb3fa504" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2347.438306] env[63279]: DEBUG nova.network.neutron [None req-c970398a-27d0-4c60-a6af-2e10ca329454 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: befc985f-68e2-4a04-8de0-9ca9bb3fa504] Building network info cache for instance {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2347.541647] env[63279]: DEBUG oslo_concurrency.lockutils [None req-c2b6f3c7-43c6-43e8-96b4-bd11cfadffb0 tempest-ServersAaction247Test-961247548 
tempest-ServersAaction247Test-961247548-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2347.542030] env[63279]: DEBUG oslo_concurrency.lockutils [None req-c2b6f3c7-43c6-43e8-96b4-bd11cfadffb0 tempest-ServersAaction247Test-961247548 tempest-ServersAaction247Test-961247548-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2347.542187] env[63279]: DEBUG nova.objects.instance [None req-c2b6f3c7-43c6-43e8-96b4-bd11cfadffb0 tempest-ServersAaction247Test-961247548 tempest-ServersAaction247Test-961247548-project-member] Lazy-loading 'resources' on Instance uuid 1332faad-20be-4a81-b57e-171a49d5c427 {{(pid=63279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2347.601337] env[63279]: DEBUG oslo_concurrency.lockutils [None req-54b6b0b4-1418-4b3d-beea-ba6d37b5d83a tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Acquiring lock "refresh_cache-36354325-dee0-406e-8eb6-bc3cf347a403" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2347.601544] env[63279]: DEBUG oslo_concurrency.lockutils [None req-54b6b0b4-1418-4b3d-beea-ba6d37b5d83a tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Acquired lock "refresh_cache-36354325-dee0-406e-8eb6-bc3cf347a403" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2347.601640] env[63279]: DEBUG nova.network.neutron [None req-54b6b0b4-1418-4b3d-beea-ba6d37b5d83a tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] [instance: 36354325-dee0-406e-8eb6-bc3cf347a403] Building network info cache for instance {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2347.706928] env[63279]: DEBUG nova.network.neutron [None req-6d9a0f5d-543c-488f-84e6-adf9e74de6c1 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 686a7ce2-2d07-411e-91d6-0471c55c3728] Updating instance_info_cache with network_info: [{"id": "55a22eb2-68fd-4cc1-8372-6fed483f16d0", "address": "fa:16:3e:6d:af:2f", "network": {"id": "1237aa69-f92f-4996-893d-4007ba590d1d", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-544783943-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.237", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "996f8d6e14a14ac39f207eced547ef33", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d88b750a-0e7d-4f16-8bd5-8e6d5743b720", "external-id": "nsx-vlan-transportzone-715", "segmentation_id": 715, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap55a22eb2-68", "ovs_interfaceid": 
"55a22eb2-68fd-4cc1-8372-6fed483f16d0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2347.761988] env[63279]: DEBUG nova.compute.manager [None req-bbf0a56c-0766-4a55-b924-bacb35121eeb tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] [instance: a6758131-030e-4b33-a2c9-8864055a5bec] Start destroying the instance on the hypervisor. {{(pid=63279) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2347.761988] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-bbf0a56c-0766-4a55-b924-bacb35121eeb tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] [instance: a6758131-030e-4b33-a2c9-8864055a5bec] Destroying instance {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2347.762936] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-292532a2-c6ec-4f9a-9e0e-116b930f2945 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2347.771868] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-bbf0a56c-0766-4a55-b924-bacb35121eeb tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] [instance: a6758131-030e-4b33-a2c9-8864055a5bec] Powering off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2347.772162] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a41b897c-5259-4f1a-a138-38d9bc4ae4bd {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2347.778140] env[63279]: DEBUG oslo_vmware.api [None req-bbf0a56c-0766-4a55-b924-bacb35121eeb tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Waiting for the task: (returnval){ [ 2347.778140] env[63279]: value = "task-2088092" [ 2347.778140] env[63279]: _type = "Task" [ 2347.778140] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2347.786182] env[63279]: DEBUG oslo_vmware.api [None req-bbf0a56c-0766-4a55-b924-bacb35121eeb tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Task: {'id': task-2088092, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2347.974222] env[63279]: DEBUG nova.network.neutron [None req-c970398a-27d0-4c60-a6af-2e10ca329454 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: befc985f-68e2-4a04-8de0-9ca9bb3fa504] Instance cache missing network info. 
{{(pid=63279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2348.119170] env[63279]: DEBUG nova.network.neutron [-] [instance: b7123d9a-bcaa-43c4-ac9f-982b3b146eb0] Updating instance_info_cache with network_info: [] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2348.138535] env[63279]: DEBUG nova.network.neutron [None req-54b6b0b4-1418-4b3d-beea-ba6d37b5d83a tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] [instance: 36354325-dee0-406e-8eb6-bc3cf347a403] Instance cache missing network info. {{(pid=63279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2348.146590] env[63279]: DEBUG nova.network.neutron [None req-c970398a-27d0-4c60-a6af-2e10ca329454 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: befc985f-68e2-4a04-8de0-9ca9bb3fa504] Updating instance_info_cache with network_info: [{"id": "48a709b0-3a67-4115-a178-0872536d2417", "address": "fa:16:3e:75:52:17", "network": {"id": "1e35690a-f100-41b7-a461-7334966ef784", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1516696428-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f9427c264e8e41998f579af352cb48cf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "15922696-dc08-44ef-97be-0b09a9dfeae8", "external-id": "nsx-vlan-transportzone-791", "segmentation_id": 791, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap48a709b0-3a", "ovs_interfaceid": "48a709b0-3a67-4115-a178-0872536d2417", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2348.209906] env[63279]: DEBUG oslo_concurrency.lockutils [None req-6d9a0f5d-543c-488f-84e6-adf9e74de6c1 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Releasing lock "refresh_cache-686a7ce2-2d07-411e-91d6-0471c55c3728" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2348.238971] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6574c46b-af02-4b1f-b5cd-c83a29a04382 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2348.246649] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8fda66e1-a4f2-4b0e-a8f8-be12c8e67583 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2348.288225] env[63279]: DEBUG nova.network.neutron [None req-54b6b0b4-1418-4b3d-beea-ba6d37b5d83a tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] [instance: 36354325-dee0-406e-8eb6-bc3cf347a403] Updating instance_info_cache with network_info: [{"id": 
"a05a3f68-d80e-46c9-9065-1f803e7c04ff", "address": "fa:16:3e:8e:d1:f4", "network": {"id": "e0e614b7-de4b-485e-8824-582faae8febd", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1102025073-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b5b21bc5072e4945a19a782dd9561709", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "41278529-8bd2-44a1-97c8-03967faa3ff7", "external-id": "nsx-vlan-transportzone-749", "segmentation_id": 749, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa05a3f68-d8", "ovs_interfaceid": "a05a3f68-d80e-46c9-9065-1f803e7c04ff", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2348.293178] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39ffaa8f-5855-46a8-a55b-89b40211ea8e {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2348.301055] env[63279]: DEBUG oslo_vmware.api [None req-bbf0a56c-0766-4a55-b924-bacb35121eeb tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Task: {'id': task-2088092, 'name': PowerOffVM_Task, 'duration_secs': 0.20205} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2348.303041] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-bbf0a56c-0766-4a55-b924-bacb35121eeb tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] [instance: a6758131-030e-4b33-a2c9-8864055a5bec] Powered off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2348.303231] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-bbf0a56c-0766-4a55-b924-bacb35121eeb tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] [instance: a6758131-030e-4b33-a2c9-8864055a5bec] Unregistering the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2348.303507] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0cad2f23-2346-4f7d-a477-2bf6a9bd4037 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2348.305736] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a141fc47-8818-41e9-b1f8-56af9fe16d75 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2348.320160] env[63279]: DEBUG nova.compute.provider_tree [None req-c2b6f3c7-43c6-43e8-96b4-bd11cfadffb0 tempest-ServersAaction247Test-961247548 tempest-ServersAaction247Test-961247548-project-member] Updating inventory in ProviderTree for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2348.416619] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-bbf0a56c-0766-4a55-b924-bacb35121eeb tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] [instance: a6758131-030e-4b33-a2c9-8864055a5bec] Unregistered the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2348.416848] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-bbf0a56c-0766-4a55-b924-bacb35121eeb tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] [instance: a6758131-030e-4b33-a2c9-8864055a5bec] Deleting contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2348.417123] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-bbf0a56c-0766-4a55-b924-bacb35121eeb tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Deleting the datastore file [datastore1] a6758131-030e-4b33-a2c9-8864055a5bec {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2348.417419] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-855fe83a-4106-475a-a78d-ea53692a723c {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2348.425345] 
env[63279]: DEBUG oslo_vmware.api [None req-bbf0a56c-0766-4a55-b924-bacb35121eeb tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Waiting for the task: (returnval){ [ 2348.425345] env[63279]: value = "task-2088094" [ 2348.425345] env[63279]: _type = "Task" [ 2348.425345] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2348.432596] env[63279]: DEBUG oslo_vmware.api [None req-bbf0a56c-0766-4a55-b924-bacb35121eeb tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Task: {'id': task-2088094, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2348.623475] env[63279]: INFO nova.compute.manager [-] [instance: b7123d9a-bcaa-43c4-ac9f-982b3b146eb0] Took 1.22 seconds to deallocate network for instance. [ 2348.649203] env[63279]: DEBUG oslo_concurrency.lockutils [None req-c970398a-27d0-4c60-a6af-2e10ca329454 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Releasing lock "refresh_cache-befc985f-68e2-4a04-8de0-9ca9bb3fa504" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2348.649561] env[63279]: DEBUG nova.compute.manager [None req-c970398a-27d0-4c60-a6af-2e10ca329454 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: befc985f-68e2-4a04-8de0-9ca9bb3fa504] Instance network_info: |[{"id": "48a709b0-3a67-4115-a178-0872536d2417", "address": "fa:16:3e:75:52:17", "network": {"id": "1e35690a-f100-41b7-a461-7334966ef784", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1516696428-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f9427c264e8e41998f579af352cb48cf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "15922696-dc08-44ef-97be-0b09a9dfeae8", "external-id": "nsx-vlan-transportzone-791", "segmentation_id": 791, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap48a709b0-3a", "ovs_interfaceid": "48a709b0-3a67-4115-a178-0872536d2417", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 2348.650018] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-c970398a-27d0-4c60-a6af-2e10ca329454 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: befc985f-68e2-4a04-8de0-9ca9bb3fa504] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:75:52:17', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '15922696-dc08-44ef-97be-0b09a9dfeae8', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '48a709b0-3a67-4115-a178-0872536d2417', 'vif_model': 'vmxnet3'}] {{(pid=63279) build_virtual_machine 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2348.657242] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-c970398a-27d0-4c60-a6af-2e10ca329454 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Creating folder: Project (f9427c264e8e41998f579af352cb48cf). Parent ref: group-v427491. {{(pid=63279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 2348.658183] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-dbe49f23-55b2-4afc-98dd-4093c729bb6f {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2348.669031] env[63279]: INFO nova.virt.vmwareapi.vm_util [None req-c970398a-27d0-4c60-a6af-2e10ca329454 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Created folder: Project (f9427c264e8e41998f579af352cb48cf) in parent group-v427491. [ 2348.669298] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-c970398a-27d0-4c60-a6af-2e10ca329454 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Creating folder: Instances. Parent ref: group-v427784. {{(pid=63279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 2348.669540] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-48fe5d08-638d-4a57-8f66-f3d6b8a92a0b {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2348.678856] env[63279]: INFO nova.virt.vmwareapi.vm_util [None req-c970398a-27d0-4c60-a6af-2e10ca329454 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Created folder: Instances in parent group-v427784. [ 2348.679111] env[63279]: DEBUG oslo.service.loopingcall [None req-c970398a-27d0-4c60-a6af-2e10ca329454 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2348.679399] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: befc985f-68e2-4a04-8de0-9ca9bb3fa504] Creating VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2348.679612] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2093834e-a095-4c41-9e2a-d06be84bb5b0 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2348.698182] env[63279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2348.698182] env[63279]: value = "task-2088097" [ 2348.698182] env[63279]: _type = "Task" [ 2348.698182] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2348.705776] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2088097, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2348.796822] env[63279]: DEBUG oslo_concurrency.lockutils [None req-54b6b0b4-1418-4b3d-beea-ba6d37b5d83a tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Releasing lock "refresh_cache-36354325-dee0-406e-8eb6-bc3cf347a403" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2348.797158] env[63279]: DEBUG nova.compute.manager [None req-54b6b0b4-1418-4b3d-beea-ba6d37b5d83a tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] [instance: 36354325-dee0-406e-8eb6-bc3cf347a403] Instance network_info: |[{"id": "a05a3f68-d80e-46c9-9065-1f803e7c04ff", "address": "fa:16:3e:8e:d1:f4", "network": {"id": "e0e614b7-de4b-485e-8824-582faae8febd", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1102025073-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b5b21bc5072e4945a19a782dd9561709", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "41278529-8bd2-44a1-97c8-03967faa3ff7", "external-id": "nsx-vlan-transportzone-749", "segmentation_id": 749, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa05a3f68-d8", "ovs_interfaceid": "a05a3f68-d80e-46c9-9065-1f803e7c04ff", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 2348.798020] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-54b6b0b4-1418-4b3d-beea-ba6d37b5d83a tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] [instance: 36354325-dee0-406e-8eb6-bc3cf347a403] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:8e:d1:f4', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '41278529-8bd2-44a1-97c8-03967faa3ff7', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a05a3f68-d80e-46c9-9065-1f803e7c04ff', 'vif_model': 'vmxnet3'}] {{(pid=63279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2348.805149] env[63279]: DEBUG oslo.service.loopingcall [None req-54b6b0b4-1418-4b3d-beea-ba6d37b5d83a tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2348.805405] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 36354325-dee0-406e-8eb6-bc3cf347a403] Creating VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2348.805658] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b185c304-7c1f-4e82-8c3e-df7a48da0e98 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2348.822222] env[63279]: DEBUG nova.compute.manager [req-d776270f-e159-47f6-87e9-e7ed01bfc0e6 req-5711ef56-ebe4-4d04-8fa1-53a27b54851d service nova] [instance: befc985f-68e2-4a04-8de0-9ca9bb3fa504] Received event network-changed-48a709b0-3a67-4115-a178-0872536d2417 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2348.822422] env[63279]: DEBUG nova.compute.manager [req-d776270f-e159-47f6-87e9-e7ed01bfc0e6 req-5711ef56-ebe4-4d04-8fa1-53a27b54851d service nova] [instance: befc985f-68e2-4a04-8de0-9ca9bb3fa504] Refreshing instance network info cache due to event network-changed-48a709b0-3a67-4115-a178-0872536d2417. {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11655}} [ 2348.822641] env[63279]: DEBUG oslo_concurrency.lockutils [req-d776270f-e159-47f6-87e9-e7ed01bfc0e6 req-5711ef56-ebe4-4d04-8fa1-53a27b54851d service nova] Acquiring lock "refresh_cache-befc985f-68e2-4a04-8de0-9ca9bb3fa504" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2348.822788] env[63279]: DEBUG oslo_concurrency.lockutils [req-d776270f-e159-47f6-87e9-e7ed01bfc0e6 req-5711ef56-ebe4-4d04-8fa1-53a27b54851d service nova] Acquired lock "refresh_cache-befc985f-68e2-4a04-8de0-9ca9bb3fa504" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2348.822949] env[63279]: DEBUG nova.network.neutron [req-d776270f-e159-47f6-87e9-e7ed01bfc0e6 req-5711ef56-ebe4-4d04-8fa1-53a27b54851d service nova] [instance: befc985f-68e2-4a04-8de0-9ca9bb3fa504] Refreshing network info cache for port 48a709b0-3a67-4115-a178-0872536d2417 {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2348.829608] env[63279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2348.829608] env[63279]: value = "task-2088098" [ 2348.829608] env[63279]: _type = "Task" [ 2348.829608] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2348.840231] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2088098, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2348.842514] env[63279]: ERROR nova.scheduler.client.report [None req-c2b6f3c7-43c6-43e8-96b4-bd11cfadffb0 tempest-ServersAaction247Test-961247548 tempest-ServersAaction247Test-961247548-project-member] [req-83a15994-bc1b-4d7d-bc34-54ed4dc8ccd6] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 0ba7c625-a0fc-4d3c-b804-196d00f00137. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-83a15994-bc1b-4d7d-bc34-54ed4dc8ccd6"}]} [ 2348.860745] env[63279]: DEBUG nova.scheduler.client.report [None req-c2b6f3c7-43c6-43e8-96b4-bd11cfadffb0 tempest-ServersAaction247Test-961247548 tempest-ServersAaction247Test-961247548-project-member] Refreshing inventories for resource provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 2348.876607] env[63279]: DEBUG nova.scheduler.client.report [None req-c2b6f3c7-43c6-43e8-96b4-bd11cfadffb0 tempest-ServersAaction247Test-961247548 tempest-ServersAaction247Test-961247548-project-member] Updating ProviderTree inventory for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 2348.876880] env[63279]: DEBUG nova.compute.provider_tree [None req-c2b6f3c7-43c6-43e8-96b4-bd11cfadffb0 tempest-ServersAaction247Test-961247548 tempest-ServersAaction247Test-961247548-project-member] Updating inventory in ProviderTree for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2348.890162] env[63279]: DEBUG nova.scheduler.client.report [None req-c2b6f3c7-43c6-43e8-96b4-bd11cfadffb0 tempest-ServersAaction247Test-961247548 tempest-ServersAaction247Test-961247548-project-member] Refreshing aggregate associations for resource provider 0ba7c625-a0fc-4d3c-b804-196d00f00137, aggregates: None {{(pid=63279) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 2348.909928] env[63279]: DEBUG nova.scheduler.client.report [None req-c2b6f3c7-43c6-43e8-96b4-bd11cfadffb0 tempest-ServersAaction247Test-961247548 
tempest-ServersAaction247Test-961247548-project-member] Refreshing trait associations for resource provider 0ba7c625-a0fc-4d3c-b804-196d00f00137, traits: COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK {{(pid=63279) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 2348.936468] env[63279]: DEBUG oslo_vmware.api [None req-bbf0a56c-0766-4a55-b924-bacb35121eeb tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Task: {'id': task-2088094, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.154922} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2348.939266] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-bbf0a56c-0766-4a55-b924-bacb35121eeb tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Deleted the datastore file {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2348.939518] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-bbf0a56c-0766-4a55-b924-bacb35121eeb tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] [instance: a6758131-030e-4b33-a2c9-8864055a5bec] Deleted contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2348.939749] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-bbf0a56c-0766-4a55-b924-bacb35121eeb tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] [instance: a6758131-030e-4b33-a2c9-8864055a5bec] Instance destroyed {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2348.939960] env[63279]: INFO nova.compute.manager [None req-bbf0a56c-0766-4a55-b924-bacb35121eeb tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] [instance: a6758131-030e-4b33-a2c9-8864055a5bec] Took 1.18 seconds to destroy the instance on the hypervisor. [ 2348.940257] env[63279]: DEBUG oslo.service.loopingcall [None req-bbf0a56c-0766-4a55-b924-bacb35121eeb tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2348.940734] env[63279]: DEBUG nova.compute.manager [-] [instance: a6758131-030e-4b33-a2c9-8864055a5bec] Deallocating network for instance {{(pid=63279) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2348.940837] env[63279]: DEBUG nova.network.neutron [-] [instance: a6758131-030e-4b33-a2c9-8864055a5bec] deallocate_for_instance() {{(pid=63279) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2349.048625] env[63279]: DEBUG nova.compute.manager [req-19646275-139a-46fe-a450-5d5d25b2aa68 req-9fc14969-f4e3-4c6e-9113-bfd1c8931ba7 service nova] [instance: 36354325-dee0-406e-8eb6-bc3cf347a403] Received event network-changed-a05a3f68-d80e-46c9-9065-1f803e7c04ff {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2349.048818] env[63279]: DEBUG nova.compute.manager [req-19646275-139a-46fe-a450-5d5d25b2aa68 req-9fc14969-f4e3-4c6e-9113-bfd1c8931ba7 service nova] [instance: 36354325-dee0-406e-8eb6-bc3cf347a403] Refreshing instance network info cache due to event network-changed-a05a3f68-d80e-46c9-9065-1f803e7c04ff. {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11655}} [ 2349.049066] env[63279]: DEBUG oslo_concurrency.lockutils [req-19646275-139a-46fe-a450-5d5d25b2aa68 req-9fc14969-f4e3-4c6e-9113-bfd1c8931ba7 service nova] Acquiring lock "refresh_cache-36354325-dee0-406e-8eb6-bc3cf347a403" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2349.049219] env[63279]: DEBUG oslo_concurrency.lockutils [req-19646275-139a-46fe-a450-5d5d25b2aa68 req-9fc14969-f4e3-4c6e-9113-bfd1c8931ba7 service nova] Acquired lock "refresh_cache-36354325-dee0-406e-8eb6-bc3cf347a403" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2349.049383] env[63279]: DEBUG nova.network.neutron [req-19646275-139a-46fe-a450-5d5d25b2aa68 req-9fc14969-f4e3-4c6e-9113-bfd1c8931ba7 service nova] [instance: 36354325-dee0-406e-8eb6-bc3cf347a403] Refreshing network info cache for port a05a3f68-d80e-46c9-9065-1f803e7c04ff {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2349.110593] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e50116ae-5336-4262-98c5-402fbe38a088 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2349.118346] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f85a630c-3820-473b-9a2d-2d692e31e869 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2349.150516] env[63279]: DEBUG oslo_concurrency.lockutils [None req-878fde1b-ff89-4652-9f8c-ce1fd71ff4b4 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2349.151925] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18c9dc40-beb5-4e38-bc73-879d323b779e {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2349.159546] env[63279]: 
DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8832ae5-2114-4c1f-be8d-be154336fad4 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2349.175210] env[63279]: DEBUG nova.compute.provider_tree [None req-c2b6f3c7-43c6-43e8-96b4-bd11cfadffb0 tempest-ServersAaction247Test-961247548 tempest-ServersAaction247Test-961247548-project-member] Updating inventory in ProviderTree for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2349.209722] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2088097, 'name': CreateVM_Task, 'duration_secs': 0.378152} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2349.209951] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: befc985f-68e2-4a04-8de0-9ca9bb3fa504] Created VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2349.211573] env[63279]: DEBUG oslo_concurrency.lockutils [None req-c970398a-27d0-4c60-a6af-2e10ca329454 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2349.211573] env[63279]: DEBUG oslo_concurrency.lockutils [None req-c970398a-27d0-4c60-a6af-2e10ca329454 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2349.211573] env[63279]: DEBUG oslo_concurrency.lockutils [None req-c970398a-27d0-4c60-a6af-2e10ca329454 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2349.213337] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9aba85ab-7af2-4e3d-86f7-44d21662df1f {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2349.214171] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-6d9a0f5d-543c-488f-84e6-adf9e74de6c1 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 686a7ce2-2d07-411e-91d6-0471c55c3728] Powering on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2349.216299] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-724eb5da-4fec-4a1d-8c86-ba3898be216c {{(pid=63279) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2349.222401] env[63279]: DEBUG oslo_vmware.api [None req-c970398a-27d0-4c60-a6af-2e10ca329454 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Waiting for the task: (returnval){ [ 2349.222401] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52a78049-e412-c5c0-f6aa-379a37b20a71" [ 2349.222401] env[63279]: _type = "Task" [ 2349.222401] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2349.226022] env[63279]: DEBUG oslo_vmware.api [None req-6d9a0f5d-543c-488f-84e6-adf9e74de6c1 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Waiting for the task: (returnval){ [ 2349.226022] env[63279]: value = "task-2088099" [ 2349.226022] env[63279]: _type = "Task" [ 2349.226022] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2349.236939] env[63279]: DEBUG oslo_vmware.api [None req-c970398a-27d0-4c60-a6af-2e10ca329454 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52a78049-e412-c5c0-f6aa-379a37b20a71, 'name': SearchDatastore_Task, 'duration_secs': 0.009364} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2349.239915] env[63279]: DEBUG oslo_concurrency.lockutils [None req-c970398a-27d0-4c60-a6af-2e10ca329454 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2349.240248] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-c970398a-27d0-4c60-a6af-2e10ca329454 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: befc985f-68e2-4a04-8de0-9ca9bb3fa504] Processing image 30887889-e45b-4f67-8b3c-16216e594a90 {{(pid=63279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2349.240499] env[63279]: DEBUG oslo_concurrency.lockutils [None req-c970398a-27d0-4c60-a6af-2e10ca329454 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2349.240702] env[63279]: DEBUG oslo_concurrency.lockutils [None req-c970398a-27d0-4c60-a6af-2e10ca329454 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2349.240924] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-c970398a-27d0-4c60-a6af-2e10ca329454 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2349.241226] env[63279]: DEBUG oslo_vmware.api [None req-6d9a0f5d-543c-488f-84e6-adf9e74de6c1 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Task: {'id': task-2088099, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2349.241696] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7a441664-1ade-42e9-ba6b-fd2075dbf33b {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2349.249529] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-c970398a-27d0-4c60-a6af-2e10ca329454 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2349.249764] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-c970398a-27d0-4c60-a6af-2e10ca329454 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2349.251046] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c1a9300f-b8bc-4986-ae47-c7a529ebb463 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2349.256081] env[63279]: DEBUG oslo_vmware.api [None req-c970398a-27d0-4c60-a6af-2e10ca329454 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Waiting for the task: (returnval){ [ 2349.256081] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]520c361f-5f3d-ca4e-523f-96c054b24c8a" [ 2349.256081] env[63279]: _type = "Task" [ 2349.256081] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2349.263960] env[63279]: DEBUG oslo_vmware.api [None req-c970398a-27d0-4c60-a6af-2e10ca329454 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]520c361f-5f3d-ca4e-523f-96c054b24c8a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2349.339474] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2088098, 'name': CreateVM_Task, 'duration_secs': 0.327889} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2349.339604] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 36354325-dee0-406e-8eb6-bc3cf347a403] Created VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2349.340194] env[63279]: DEBUG oslo_concurrency.lockutils [None req-54b6b0b4-1418-4b3d-beea-ba6d37b5d83a tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2349.340358] env[63279]: DEBUG oslo_concurrency.lockutils [None req-54b6b0b4-1418-4b3d-beea-ba6d37b5d83a tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2349.340679] env[63279]: DEBUG oslo_concurrency.lockutils [None req-54b6b0b4-1418-4b3d-beea-ba6d37b5d83a tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2349.341565] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4ffea6a7-34ea-4212-8bcf-01ef5b24b1c3 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2349.345824] env[63279]: DEBUG oslo_vmware.api [None req-54b6b0b4-1418-4b3d-beea-ba6d37b5d83a tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Waiting for the task: (returnval){ [ 2349.345824] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52a309e2-3c10-5fa6-4301-ea605f96097f" [ 2349.345824] env[63279]: _type = "Task" [ 2349.345824] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2349.353658] env[63279]: DEBUG oslo_vmware.api [None req-54b6b0b4-1418-4b3d-beea-ba6d37b5d83a tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52a309e2-3c10-5fa6-4301-ea605f96097f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2349.609625] env[63279]: DEBUG nova.network.neutron [req-d776270f-e159-47f6-87e9-e7ed01bfc0e6 req-5711ef56-ebe4-4d04-8fa1-53a27b54851d service nova] [instance: befc985f-68e2-4a04-8de0-9ca9bb3fa504] Updated VIF entry in instance network info cache for port 48a709b0-3a67-4115-a178-0872536d2417. 
{{(pid=63279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2349.610092] env[63279]: DEBUG nova.network.neutron [req-d776270f-e159-47f6-87e9-e7ed01bfc0e6 req-5711ef56-ebe4-4d04-8fa1-53a27b54851d service nova] [instance: befc985f-68e2-4a04-8de0-9ca9bb3fa504] Updating instance_info_cache with network_info: [{"id": "48a709b0-3a67-4115-a178-0872536d2417", "address": "fa:16:3e:75:52:17", "network": {"id": "1e35690a-f100-41b7-a461-7334966ef784", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1516696428-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f9427c264e8e41998f579af352cb48cf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "15922696-dc08-44ef-97be-0b09a9dfeae8", "external-id": "nsx-vlan-transportzone-791", "segmentation_id": 791, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap48a709b0-3a", "ovs_interfaceid": "48a709b0-3a67-4115-a178-0872536d2417", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2349.719568] env[63279]: DEBUG nova.network.neutron [-] [instance: a6758131-030e-4b33-a2c9-8864055a5bec] Updating instance_info_cache with network_info: [] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2349.738143] env[63279]: DEBUG oslo_vmware.api [None req-6d9a0f5d-543c-488f-84e6-adf9e74de6c1 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Task: {'id': task-2088099, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2349.738710] env[63279]: DEBUG nova.scheduler.client.report [None req-c2b6f3c7-43c6-43e8-96b4-bd11cfadffb0 tempest-ServersAaction247Test-961247548 tempest-ServersAaction247Test-961247548-project-member] Updated inventory for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 with generation 157 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 2349.738957] env[63279]: DEBUG nova.compute.provider_tree [None req-c2b6f3c7-43c6-43e8-96b4-bd11cfadffb0 tempest-ServersAaction247Test-961247548 tempest-ServersAaction247Test-961247548-project-member] Updating resource provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 generation from 157 to 158 during operation: update_inventory {{(pid=63279) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 2349.739158] env[63279]: DEBUG nova.compute.provider_tree [None req-c2b6f3c7-43c6-43e8-96b4-bd11cfadffb0 tempest-ServersAaction247Test-961247548 tempest-ServersAaction247Test-961247548-project-member] Updating inventory in ProviderTree for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2349.767074] env[63279]: DEBUG oslo_vmware.api [None req-c970398a-27d0-4c60-a6af-2e10ca329454 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]520c361f-5f3d-ca4e-523f-96c054b24c8a, 'name': SearchDatastore_Task, 'duration_secs': 0.008378} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2349.767443] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e9494d97-72a6-449f-b5ba-6caa60f11552 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2349.772966] env[63279]: DEBUG oslo_vmware.api [None req-c970398a-27d0-4c60-a6af-2e10ca329454 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Waiting for the task: (returnval){ [ 2349.772966] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52a060b8-7ea3-4705-e0c8-d79dd637dca9" [ 2349.772966] env[63279]: _type = "Task" [ 2349.772966] env[63279]: } to complete. 
{{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2349.781141] env[63279]: DEBUG oslo_vmware.api [None req-c970398a-27d0-4c60-a6af-2e10ca329454 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52a060b8-7ea3-4705-e0c8-d79dd637dca9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2349.802638] env[63279]: DEBUG nova.network.neutron [req-19646275-139a-46fe-a450-5d5d25b2aa68 req-9fc14969-f4e3-4c6e-9113-bfd1c8931ba7 service nova] [instance: 36354325-dee0-406e-8eb6-bc3cf347a403] Updated VIF entry in instance network info cache for port a05a3f68-d80e-46c9-9065-1f803e7c04ff. {{(pid=63279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2349.802995] env[63279]: DEBUG nova.network.neutron [req-19646275-139a-46fe-a450-5d5d25b2aa68 req-9fc14969-f4e3-4c6e-9113-bfd1c8931ba7 service nova] [instance: 36354325-dee0-406e-8eb6-bc3cf347a403] Updating instance_info_cache with network_info: [{"id": "a05a3f68-d80e-46c9-9065-1f803e7c04ff", "address": "fa:16:3e:8e:d1:f4", "network": {"id": "e0e614b7-de4b-485e-8824-582faae8febd", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1102025073-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b5b21bc5072e4945a19a782dd9561709", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "41278529-8bd2-44a1-97c8-03967faa3ff7", "external-id": "nsx-vlan-transportzone-749", "segmentation_id": 749, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa05a3f68-d8", "ovs_interfaceid": "a05a3f68-d80e-46c9-9065-1f803e7c04ff", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2349.856176] env[63279]: DEBUG oslo_vmware.api [None req-54b6b0b4-1418-4b3d-beea-ba6d37b5d83a tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52a309e2-3c10-5fa6-4301-ea605f96097f, 'name': SearchDatastore_Task, 'duration_secs': 0.009227} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2349.856561] env[63279]: DEBUG oslo_concurrency.lockutils [None req-54b6b0b4-1418-4b3d-beea-ba6d37b5d83a tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2349.856825] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-54b6b0b4-1418-4b3d-beea-ba6d37b5d83a tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] [instance: 36354325-dee0-406e-8eb6-bc3cf347a403] Processing image 30887889-e45b-4f67-8b3c-16216e594a90 {{(pid=63279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2349.857032] env[63279]: DEBUG oslo_concurrency.lockutils [None req-54b6b0b4-1418-4b3d-beea-ba6d37b5d83a tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2350.113023] env[63279]: DEBUG oslo_concurrency.lockutils [req-d776270f-e159-47f6-87e9-e7ed01bfc0e6 req-5711ef56-ebe4-4d04-8fa1-53a27b54851d service nova] Releasing lock "refresh_cache-befc985f-68e2-4a04-8de0-9ca9bb3fa504" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2350.222886] env[63279]: INFO nova.compute.manager [-] [instance: a6758131-030e-4b33-a2c9-8864055a5bec] Took 1.28 seconds to deallocate network for instance. [ 2350.237041] env[63279]: DEBUG oslo_vmware.api [None req-6d9a0f5d-543c-488f-84e6-adf9e74de6c1 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Task: {'id': task-2088099, 'name': PowerOnVM_Task, 'duration_secs': 0.518968} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2350.237332] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-6d9a0f5d-543c-488f-84e6-adf9e74de6c1 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 686a7ce2-2d07-411e-91d6-0471c55c3728] Powered on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2350.237528] env[63279]: DEBUG nova.compute.manager [None req-6d9a0f5d-543c-488f-84e6-adf9e74de6c1 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 686a7ce2-2d07-411e-91d6-0471c55c3728] Checking state {{(pid=63279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2350.238314] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c3600f5-060a-4739-a9ca-5c5ad21cb156 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2350.246877] env[63279]: DEBUG oslo_concurrency.lockutils [None req-c2b6f3c7-43c6-43e8-96b4-bd11cfadffb0 tempest-ServersAaction247Test-961247548 tempest-ServersAaction247Test-961247548-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.705s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2350.249505] env[63279]: DEBUG oslo_concurrency.lockutils [None req-878fde1b-ff89-4652-9f8c-ce1fd71ff4b4 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.099s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2350.249817] env[63279]: DEBUG nova.objects.instance [None req-878fde1b-ff89-4652-9f8c-ce1fd71ff4b4 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Lazy-loading 'resources' on Instance uuid b7123d9a-bcaa-43c4-ac9f-982b3b146eb0 {{(pid=63279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2350.268143] env[63279]: INFO nova.scheduler.client.report [None req-c2b6f3c7-43c6-43e8-96b4-bd11cfadffb0 tempest-ServersAaction247Test-961247548 tempest-ServersAaction247Test-961247548-project-member] Deleted allocations for instance 1332faad-20be-4a81-b57e-171a49d5c427 [ 2350.283890] env[63279]: DEBUG oslo_vmware.api [None req-c970398a-27d0-4c60-a6af-2e10ca329454 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52a060b8-7ea3-4705-e0c8-d79dd637dca9, 'name': SearchDatastore_Task, 'duration_secs': 0.039284} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2350.284195] env[63279]: DEBUG oslo_concurrency.lockutils [None req-c970398a-27d0-4c60-a6af-2e10ca329454 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2350.284453] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-c970398a-27d0-4c60-a6af-2e10ca329454 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] befc985f-68e2-4a04-8de0-9ca9bb3fa504/befc985f-68e2-4a04-8de0-9ca9bb3fa504.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2350.284736] env[63279]: DEBUG oslo_concurrency.lockutils [None req-54b6b0b4-1418-4b3d-beea-ba6d37b5d83a tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2350.284928] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-54b6b0b4-1418-4b3d-beea-ba6d37b5d83a tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2350.285162] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-51c17463-ef56-46fb-9c27-0d06282aeb95 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2350.287118] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5753ea65-e112-4b23-82eb-3dd396ac0b14 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2350.295076] env[63279]: DEBUG oslo_vmware.api [None req-c970398a-27d0-4c60-a6af-2e10ca329454 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Waiting for the task: (returnval){ [ 2350.295076] env[63279]: value = "task-2088100" [ 2350.295076] env[63279]: _type = "Task" [ 2350.295076] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2350.299681] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-54b6b0b4-1418-4b3d-beea-ba6d37b5d83a tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2350.299871] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-54b6b0b4-1418-4b3d-beea-ba6d37b5d83a tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2350.301089] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-497f6314-6d9d-4444-aea7-4e767d43cea9 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2350.307507] env[63279]: DEBUG oslo_concurrency.lockutils [req-19646275-139a-46fe-a450-5d5d25b2aa68 req-9fc14969-f4e3-4c6e-9113-bfd1c8931ba7 service nova] Releasing lock "refresh_cache-36354325-dee0-406e-8eb6-bc3cf347a403" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2350.307760] env[63279]: DEBUG nova.compute.manager [req-19646275-139a-46fe-a450-5d5d25b2aa68 req-9fc14969-f4e3-4c6e-9113-bfd1c8931ba7 service nova] [instance: b7123d9a-bcaa-43c4-ac9f-982b3b146eb0] Received event network-vif-deleted-57c5b936-4022-4267-bafa-19945e91592b {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2350.308127] env[63279]: DEBUG oslo_vmware.api [None req-c970398a-27d0-4c60-a6af-2e10ca329454 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Task: {'id': task-2088100, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2350.311386] env[63279]: DEBUG oslo_vmware.api [None req-54b6b0b4-1418-4b3d-beea-ba6d37b5d83a tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Waiting for the task: (returnval){ [ 2350.311386] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]527acf2e-f2ec-26c4-34d1-5abe3c94d8fa" [ 2350.311386] env[63279]: _type = "Task" [ 2350.311386] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2350.318929] env[63279]: DEBUG oslo_vmware.api [None req-54b6b0b4-1418-4b3d-beea-ba6d37b5d83a tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]527acf2e-f2ec-26c4-34d1-5abe3c94d8fa, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2350.729280] env[63279]: DEBUG oslo_concurrency.lockutils [None req-bbf0a56c-0766-4a55-b924-bacb35121eeb tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2350.779541] env[63279]: DEBUG oslo_concurrency.lockutils [None req-c2b6f3c7-43c6-43e8-96b4-bd11cfadffb0 tempest-ServersAaction247Test-961247548 tempest-ServersAaction247Test-961247548-project-member] Lock "1332faad-20be-4a81-b57e-171a49d5c427" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.424s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2350.808379] env[63279]: DEBUG oslo_vmware.api [None req-c970398a-27d0-4c60-a6af-2e10ca329454 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Task: {'id': task-2088100, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.485189} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2350.808556] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-c970398a-27d0-4c60-a6af-2e10ca329454 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] befc985f-68e2-4a04-8de0-9ca9bb3fa504/befc985f-68e2-4a04-8de0-9ca9bb3fa504.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2350.808700] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-c970398a-27d0-4c60-a6af-2e10ca329454 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: befc985f-68e2-4a04-8de0-9ca9bb3fa504] Extending root virtual disk to 1048576 {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2350.808954] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c4cd352e-86d5-4e37-84a4-0d5bf0922265 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2350.816570] env[63279]: DEBUG oslo_vmware.api [None req-c970398a-27d0-4c60-a6af-2e10ca329454 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Waiting for the task: (returnval){ [ 2350.816570] env[63279]: value = "task-2088101" [ 2350.816570] env[63279]: _type = "Task" [ 2350.816570] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2350.825818] env[63279]: DEBUG oslo_vmware.api [None req-54b6b0b4-1418-4b3d-beea-ba6d37b5d83a tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]527acf2e-f2ec-26c4-34d1-5abe3c94d8fa, 'name': SearchDatastore_Task, 'duration_secs': 0.00824} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2350.827513] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9ad0debf-3b60-4fc8-966c-7741e7a743f3 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2350.832541] env[63279]: DEBUG oslo_vmware.api [None req-c970398a-27d0-4c60-a6af-2e10ca329454 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Task: {'id': task-2088101, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2350.835843] env[63279]: DEBUG oslo_vmware.api [None req-54b6b0b4-1418-4b3d-beea-ba6d37b5d83a tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Waiting for the task: (returnval){ [ 2350.835843] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52b7797e-ac7b-3e35-0795-dae9aa4268e9" [ 2350.835843] env[63279]: _type = "Task" [ 2350.835843] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2350.845192] env[63279]: DEBUG oslo_vmware.api [None req-54b6b0b4-1418-4b3d-beea-ba6d37b5d83a tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52b7797e-ac7b-3e35-0795-dae9aa4268e9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2350.955924] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e51c3c83-f2f6-430a-8c56-63d62cd9bcfc {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2350.964284] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb2080b1-85b6-4ca1-8c4d-8ab55fe5e769 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2350.994969] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97460572-7935-4de5-bccd-cfa8bf8fd644 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2351.002926] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e66f5a44-607e-4fd5-b43d-0e4fea3890fd {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2351.020216] env[63279]: DEBUG nova.compute.provider_tree [None req-878fde1b-ff89-4652-9f8c-ce1fd71ff4b4 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Inventory has not changed in ProviderTree for provider: 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2351.078490] env[63279]: DEBUG nova.compute.manager [req-115dd71c-99fd-4d9e-978d-476848541c80 req-28153b64-02cf-4b42-9d0a-a82b22694abc service nova] [instance: a6758131-030e-4b33-a2c9-8864055a5bec] Received event network-vif-deleted-376a7f06-34a9-4c6e-934f-5470b0a04549 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 
2351.327014] env[63279]: DEBUG oslo_vmware.api [None req-c970398a-27d0-4c60-a6af-2e10ca329454 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Task: {'id': task-2088101, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.077863} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2351.327333] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-c970398a-27d0-4c60-a6af-2e10ca329454 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: befc985f-68e2-4a04-8de0-9ca9bb3fa504] Extended root virtual disk {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2351.328125] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-075d4e34-0e63-465d-9499-71dab3fac651 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2351.352319] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-c970398a-27d0-4c60-a6af-2e10ca329454 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: befc985f-68e2-4a04-8de0-9ca9bb3fa504] Reconfiguring VM instance instance-0000006d to attach disk [datastore1] befc985f-68e2-4a04-8de0-9ca9bb3fa504/befc985f-68e2-4a04-8de0-9ca9bb3fa504.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2351.355936] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-276a4488-add3-4d6f-9727-34dcf01012a9 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2351.377457] env[63279]: DEBUG oslo_vmware.api [None req-54b6b0b4-1418-4b3d-beea-ba6d37b5d83a tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52b7797e-ac7b-3e35-0795-dae9aa4268e9, 'name': SearchDatastore_Task, 'duration_secs': 0.008659} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2351.377698] env[63279]: DEBUG oslo_concurrency.lockutils [None req-54b6b0b4-1418-4b3d-beea-ba6d37b5d83a tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2351.377839] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-54b6b0b4-1418-4b3d-beea-ba6d37b5d83a tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] 36354325-dee0-406e-8eb6-bc3cf347a403/36354325-dee0-406e-8eb6-bc3cf347a403.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2351.378189] env[63279]: DEBUG oslo_vmware.api [None req-c970398a-27d0-4c60-a6af-2e10ca329454 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Waiting for the task: (returnval){ [ 2351.378189] env[63279]: value = "task-2088102" [ 2351.378189] env[63279]: _type = "Task" [ 2351.378189] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2351.378384] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9232573f-9023-41ce-b4db-2c46850b971f {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2351.388276] env[63279]: DEBUG oslo_vmware.api [None req-c970398a-27d0-4c60-a6af-2e10ca329454 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Task: {'id': task-2088102, 'name': ReconfigVM_Task} progress is 10%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2351.390431] env[63279]: DEBUG oslo_vmware.api [None req-54b6b0b4-1418-4b3d-beea-ba6d37b5d83a tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Waiting for the task: (returnval){ [ 2351.390431] env[63279]: value = "task-2088103" [ 2351.390431] env[63279]: _type = "Task" [ 2351.390431] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2351.398415] env[63279]: DEBUG oslo_vmware.api [None req-54b6b0b4-1418-4b3d-beea-ba6d37b5d83a tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Task: {'id': task-2088103, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2351.524127] env[63279]: DEBUG nova.scheduler.client.report [None req-878fde1b-ff89-4652-9f8c-ce1fd71ff4b4 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Inventory has not changed for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2351.766966] env[63279]: DEBUG oslo_concurrency.lockutils [None req-e7f52804-8c70-49ab-b511-982e52b419bf tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Acquiring lock "686a7ce2-2d07-411e-91d6-0471c55c3728" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2351.767410] env[63279]: DEBUG oslo_concurrency.lockutils [None req-e7f52804-8c70-49ab-b511-982e52b419bf tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Lock "686a7ce2-2d07-411e-91d6-0471c55c3728" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2351.767566] env[63279]: DEBUG oslo_concurrency.lockutils [None req-e7f52804-8c70-49ab-b511-982e52b419bf tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Acquiring lock "686a7ce2-2d07-411e-91d6-0471c55c3728-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2351.767760] env[63279]: DEBUG oslo_concurrency.lockutils [None req-e7f52804-8c70-49ab-b511-982e52b419bf tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Lock "686a7ce2-2d07-411e-91d6-0471c55c3728-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2351.767977] env[63279]: DEBUG oslo_concurrency.lockutils [None req-e7f52804-8c70-49ab-b511-982e52b419bf tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Lock "686a7ce2-2d07-411e-91d6-0471c55c3728-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2351.770846] env[63279]: INFO nova.compute.manager [None req-e7f52804-8c70-49ab-b511-982e52b419bf tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 686a7ce2-2d07-411e-91d6-0471c55c3728] Terminating instance [ 2351.891770] env[63279]: DEBUG oslo_vmware.api [None req-c970398a-27d0-4c60-a6af-2e10ca329454 
tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Task: {'id': task-2088102, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2351.899995] env[63279]: DEBUG oslo_vmware.api [None req-54b6b0b4-1418-4b3d-beea-ba6d37b5d83a tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Task: {'id': task-2088103, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2352.029916] env[63279]: DEBUG oslo_concurrency.lockutils [None req-878fde1b-ff89-4652-9f8c-ce1fd71ff4b4 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.780s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2352.032515] env[63279]: DEBUG oslo_concurrency.lockutils [None req-bbf0a56c-0766-4a55-b924-bacb35121eeb tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.303s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2352.033301] env[63279]: DEBUG nova.objects.instance [None req-bbf0a56c-0766-4a55-b924-bacb35121eeb tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Lazy-loading 'resources' on Instance uuid a6758131-030e-4b33-a2c9-8864055a5bec {{(pid=63279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2352.056567] env[63279]: INFO nova.scheduler.client.report [None req-878fde1b-ff89-4652-9f8c-ce1fd71ff4b4 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Deleted allocations for instance b7123d9a-bcaa-43c4-ac9f-982b3b146eb0 [ 2352.277025] env[63279]: DEBUG nova.compute.manager [None req-e7f52804-8c70-49ab-b511-982e52b419bf tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 686a7ce2-2d07-411e-91d6-0471c55c3728] Start destroying the instance on the hypervisor. 
{{(pid=63279) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2352.277357] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-e7f52804-8c70-49ab-b511-982e52b419bf tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 686a7ce2-2d07-411e-91d6-0471c55c3728] Destroying instance {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2352.278243] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ffc6388-2f62-4ea4-95b5-d784f414ede9 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2352.285918] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-e7f52804-8c70-49ab-b511-982e52b419bf tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 686a7ce2-2d07-411e-91d6-0471c55c3728] Powering off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2352.286148] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-26d502d4-4f9d-4422-8a7e-8aa6427e97d9 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2352.293056] env[63279]: DEBUG oslo_vmware.api [None req-e7f52804-8c70-49ab-b511-982e52b419bf tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Waiting for the task: (returnval){ [ 2352.293056] env[63279]: value = "task-2088104" [ 2352.293056] env[63279]: _type = "Task" [ 2352.293056] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2352.299596] env[63279]: DEBUG oslo_vmware.api [None req-e7f52804-8c70-49ab-b511-982e52b419bf tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Task: {'id': task-2088104, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2352.393286] env[63279]: DEBUG oslo_vmware.api [None req-c970398a-27d0-4c60-a6af-2e10ca329454 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Task: {'id': task-2088102, 'name': ReconfigVM_Task, 'duration_secs': 0.64009} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2352.396485] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-c970398a-27d0-4c60-a6af-2e10ca329454 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: befc985f-68e2-4a04-8de0-9ca9bb3fa504] Reconfigured VM instance instance-0000006d to attach disk [datastore1] befc985f-68e2-4a04-8de0-9ca9bb3fa504/befc985f-68e2-4a04-8de0-9ca9bb3fa504.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2352.397759] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-01a73a54-e4ca-4e68-9828-13649fbc3aab {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2352.404640] env[63279]: DEBUG oslo_vmware.api [None req-54b6b0b4-1418-4b3d-beea-ba6d37b5d83a tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Task: {'id': task-2088103, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.682593} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2352.406106] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-54b6b0b4-1418-4b3d-beea-ba6d37b5d83a tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] 36354325-dee0-406e-8eb6-bc3cf347a403/36354325-dee0-406e-8eb6-bc3cf347a403.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2352.406341] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-54b6b0b4-1418-4b3d-beea-ba6d37b5d83a tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] [instance: 36354325-dee0-406e-8eb6-bc3cf347a403] Extending root virtual disk to 1048576 {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2352.406653] env[63279]: DEBUG oslo_vmware.api [None req-c970398a-27d0-4c60-a6af-2e10ca329454 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Waiting for the task: (returnval){ [ 2352.406653] env[63279]: value = "task-2088105" [ 2352.406653] env[63279]: _type = "Task" [ 2352.406653] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2352.406846] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-4ce23f2c-e18a-405e-a9f6-6dac596ba087 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2352.416882] env[63279]: DEBUG oslo_vmware.api [None req-c970398a-27d0-4c60-a6af-2e10ca329454 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Task: {'id': task-2088105, 'name': Rename_Task} progress is 6%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2352.418045] env[63279]: DEBUG oslo_vmware.api [None req-54b6b0b4-1418-4b3d-beea-ba6d37b5d83a tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Waiting for the task: (returnval){ [ 2352.418045] env[63279]: value = "task-2088106" [ 2352.418045] env[63279]: _type = "Task" [ 2352.418045] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2352.425843] env[63279]: DEBUG oslo_vmware.api [None req-54b6b0b4-1418-4b3d-beea-ba6d37b5d83a tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Task: {'id': task-2088106, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2352.567829] env[63279]: DEBUG oslo_concurrency.lockutils [None req-878fde1b-ff89-4652-9f8c-ce1fd71ff4b4 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Lock "b7123d9a-bcaa-43c4-ac9f-982b3b146eb0" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 15.023s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2352.701412] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90789359-92d8-4ba7-80e0-0efdf6881a8d {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2352.709690] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-402f4a18-6948-4a14-8796-9d1f2c4feecc {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2352.715934] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2352.739185] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2352.739352] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Starting heal instance info cache {{(pid=63279) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10447}} [ 2352.741676] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b2c034c-a152-4415-b357-5baf077297c7 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2352.749360] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bbc70882-a031-4d60-94db-80dadbd9085c {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2352.762890] env[63279]: DEBUG nova.compute.provider_tree [None req-bbf0a56c-0766-4a55-b924-bacb35121eeb tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Updating 
inventory in ProviderTree for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2352.802693] env[63279]: DEBUG oslo_vmware.api [None req-e7f52804-8c70-49ab-b511-982e52b419bf tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Task: {'id': task-2088104, 'name': PowerOffVM_Task, 'duration_secs': 0.3023} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2352.803017] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-e7f52804-8c70-49ab-b511-982e52b419bf tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 686a7ce2-2d07-411e-91d6-0471c55c3728] Powered off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2352.803092] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-e7f52804-8c70-49ab-b511-982e52b419bf tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 686a7ce2-2d07-411e-91d6-0471c55c3728] Unregistering the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2352.803324] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a6f4ad97-ee4f-4c1d-889a-94d5f9638d61 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2352.918146] env[63279]: DEBUG oslo_vmware.api [None req-c970398a-27d0-4c60-a6af-2e10ca329454 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Task: {'id': task-2088105, 'name': Rename_Task, 'duration_secs': 0.260322} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2352.918509] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-c970398a-27d0-4c60-a6af-2e10ca329454 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: befc985f-68e2-4a04-8de0-9ca9bb3fa504] Powering on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2352.921316] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-75a954fe-3dd1-419a-8a78-44a55a42a6ad {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2352.927252] env[63279]: DEBUG oslo_vmware.api [None req-54b6b0b4-1418-4b3d-beea-ba6d37b5d83a tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Task: {'id': task-2088106, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.065793} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2352.928377] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-54b6b0b4-1418-4b3d-beea-ba6d37b5d83a tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] [instance: 36354325-dee0-406e-8eb6-bc3cf347a403] Extended root virtual disk {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2352.928733] env[63279]: DEBUG oslo_vmware.api [None req-c970398a-27d0-4c60-a6af-2e10ca329454 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Waiting for the task: (returnval){ [ 2352.928733] env[63279]: value = "task-2088108" [ 2352.928733] env[63279]: _type = "Task" [ 2352.928733] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2352.929381] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cfca4e56-ecf2-487a-bac8-cc220ed8e86a {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2352.956549] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-54b6b0b4-1418-4b3d-beea-ba6d37b5d83a tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] [instance: 36354325-dee0-406e-8eb6-bc3cf347a403] Reconfiguring VM instance instance-0000006c to attach disk [datastore1] 36354325-dee0-406e-8eb6-bc3cf347a403/36354325-dee0-406e-8eb6-bc3cf347a403.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2352.956858] env[63279]: DEBUG oslo_vmware.api [None req-c970398a-27d0-4c60-a6af-2e10ca329454 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Task: {'id': task-2088108, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2352.957095] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-caca67e1-6d14-4554-8436-bde34ec3e461 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2352.975786] env[63279]: DEBUG oslo_vmware.api [None req-54b6b0b4-1418-4b3d-beea-ba6d37b5d83a tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Waiting for the task: (returnval){ [ 2352.975786] env[63279]: value = "task-2088109" [ 2352.975786] env[63279]: _type = "Task" [ 2352.975786] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2352.983622] env[63279]: DEBUG oslo_vmware.api [None req-54b6b0b4-1418-4b3d-beea-ba6d37b5d83a tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Task: {'id': task-2088109, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2353.073070] env[63279]: DEBUG oslo_concurrency.lockutils [None req-7112024f-e033-4da7-b2f9-cb0e024d7ba1 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Acquiring lock "df37c4f6-b492-4d6e-9ba1-950dbbb9a885" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2353.073361] env[63279]: DEBUG oslo_concurrency.lockutils [None req-7112024f-e033-4da7-b2f9-cb0e024d7ba1 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Lock "df37c4f6-b492-4d6e-9ba1-950dbbb9a885" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2353.073575] env[63279]: DEBUG oslo_concurrency.lockutils [None req-7112024f-e033-4da7-b2f9-cb0e024d7ba1 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Acquiring lock "df37c4f6-b492-4d6e-9ba1-950dbbb9a885-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2353.073770] env[63279]: DEBUG oslo_concurrency.lockutils [None req-7112024f-e033-4da7-b2f9-cb0e024d7ba1 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Lock "df37c4f6-b492-4d6e-9ba1-950dbbb9a885-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2353.073950] env[63279]: DEBUG oslo_concurrency.lockutils [None req-7112024f-e033-4da7-b2f9-cb0e024d7ba1 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Lock "df37c4f6-b492-4d6e-9ba1-950dbbb9a885-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2353.076291] env[63279]: INFO nova.compute.manager [None req-7112024f-e033-4da7-b2f9-cb0e024d7ba1 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] [instance: df37c4f6-b492-4d6e-9ba1-950dbbb9a885] Terminating instance [ 2353.269627] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Acquiring lock "refresh_cache-fcdd3076-2b53-4850-9730-2f877e2cabfd" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2353.269783] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Acquired lock "refresh_cache-fcdd3076-2b53-4850-9730-2f877e2cabfd" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2353.269930] env[63279]: DEBUG nova.network.neutron [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] [instance: fcdd3076-2b53-4850-9730-2f877e2cabfd] Forcefully refreshing network info cache for instance {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}} [ 2353.293310] env[63279]: DEBUG nova.scheduler.client.report [None 
req-bbf0a56c-0766-4a55-b924-bacb35121eeb tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Updated inventory for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 with generation 158 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 2353.293563] env[63279]: DEBUG nova.compute.provider_tree [None req-bbf0a56c-0766-4a55-b924-bacb35121eeb tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Updating resource provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 generation from 158 to 159 during operation: update_inventory {{(pid=63279) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 2353.293754] env[63279]: DEBUG nova.compute.provider_tree [None req-bbf0a56c-0766-4a55-b924-bacb35121eeb tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Updating inventory in ProviderTree for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2353.441639] env[63279]: DEBUG oslo_vmware.api [None req-c970398a-27d0-4c60-a6af-2e10ca329454 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Task: {'id': task-2088108, 'name': PowerOnVM_Task} progress is 89%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2353.485410] env[63279]: DEBUG oslo_vmware.api [None req-54b6b0b4-1418-4b3d-beea-ba6d37b5d83a tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Task: {'id': task-2088109, 'name': ReconfigVM_Task, 'duration_secs': 0.307994} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2353.485710] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-54b6b0b4-1418-4b3d-beea-ba6d37b5d83a tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] [instance: 36354325-dee0-406e-8eb6-bc3cf347a403] Reconfigured VM instance instance-0000006c to attach disk [datastore1] 36354325-dee0-406e-8eb6-bc3cf347a403/36354325-dee0-406e-8eb6-bc3cf347a403.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2353.486465] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-373f89bf-f9bf-4a94-b177-8ca8475458c9 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2353.493049] env[63279]: DEBUG oslo_vmware.api [None req-54b6b0b4-1418-4b3d-beea-ba6d37b5d83a tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Waiting for the task: (returnval){ [ 2353.493049] env[63279]: value = "task-2088110" [ 2353.493049] env[63279]: _type = "Task" [ 2353.493049] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2353.500043] env[63279]: DEBUG oslo_vmware.api [None req-54b6b0b4-1418-4b3d-beea-ba6d37b5d83a tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Task: {'id': task-2088110, 'name': Rename_Task} progress is 5%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2353.580439] env[63279]: DEBUG nova.compute.manager [None req-7112024f-e033-4da7-b2f9-cb0e024d7ba1 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] [instance: df37c4f6-b492-4d6e-9ba1-950dbbb9a885] Start destroying the instance on the hypervisor. 
{{(pid=63279) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2353.580839] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-7112024f-e033-4da7-b2f9-cb0e024d7ba1 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] [instance: df37c4f6-b492-4d6e-9ba1-950dbbb9a885] Destroying instance {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2353.581725] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f09eb33f-a558-4813-aa73-f36ead9bd0fa {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2353.589108] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-7112024f-e033-4da7-b2f9-cb0e024d7ba1 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] [instance: df37c4f6-b492-4d6e-9ba1-950dbbb9a885] Powering off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2353.589345] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4e417440-d9a9-4c10-af05-86a8191d0f95 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2353.595261] env[63279]: DEBUG oslo_vmware.api [None req-7112024f-e033-4da7-b2f9-cb0e024d7ba1 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Waiting for the task: (returnval){ [ 2353.595261] env[63279]: value = "task-2088111" [ 2353.595261] env[63279]: _type = "Task" [ 2353.595261] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2353.604046] env[63279]: DEBUG oslo_vmware.api [None req-7112024f-e033-4da7-b2f9-cb0e024d7ba1 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Task: {'id': task-2088111, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2353.799011] env[63279]: DEBUG oslo_concurrency.lockutils [None req-bbf0a56c-0766-4a55-b924-bacb35121eeb tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.766s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2353.817495] env[63279]: INFO nova.scheduler.client.report [None req-bbf0a56c-0766-4a55-b924-bacb35121eeb tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Deleted allocations for instance a6758131-030e-4b33-a2c9-8864055a5bec [ 2353.942727] env[63279]: DEBUG oslo_vmware.api [None req-c970398a-27d0-4c60-a6af-2e10ca329454 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Task: {'id': task-2088108, 'name': PowerOnVM_Task, 'duration_secs': 0.554038} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2353.942991] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-c970398a-27d0-4c60-a6af-2e10ca329454 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: befc985f-68e2-4a04-8de0-9ca9bb3fa504] Powered on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2353.943210] env[63279]: INFO nova.compute.manager [None req-c970398a-27d0-4c60-a6af-2e10ca329454 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: befc985f-68e2-4a04-8de0-9ca9bb3fa504] Took 11.95 seconds to spawn the instance on the hypervisor. [ 2353.943394] env[63279]: DEBUG nova.compute.manager [None req-c970398a-27d0-4c60-a6af-2e10ca329454 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: befc985f-68e2-4a04-8de0-9ca9bb3fa504] Checking state {{(pid=63279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2353.944151] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e54771f3-e195-4a03-a78f-706372ea136c {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2354.001899] env[63279]: DEBUG oslo_vmware.api [None req-54b6b0b4-1418-4b3d-beea-ba6d37b5d83a tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Task: {'id': task-2088110, 'name': Rename_Task, 'duration_secs': 0.13997} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2354.002173] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-54b6b0b4-1418-4b3d-beea-ba6d37b5d83a tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] [instance: 36354325-dee0-406e-8eb6-bc3cf347a403] Powering on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2354.002370] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-296cf413-3ad5-4dd2-b85c-eebe62b77218 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2354.007754] env[63279]: DEBUG oslo_vmware.api [None req-54b6b0b4-1418-4b3d-beea-ba6d37b5d83a tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Waiting for the task: (returnval){ [ 2354.007754] env[63279]: value = "task-2088112" [ 2354.007754] env[63279]: _type = "Task" [ 2354.007754] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2354.014894] env[63279]: DEBUG oslo_vmware.api [None req-54b6b0b4-1418-4b3d-beea-ba6d37b5d83a tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Task: {'id': task-2088112, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2354.105266] env[63279]: DEBUG oslo_vmware.api [None req-7112024f-e033-4da7-b2f9-cb0e024d7ba1 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Task: {'id': task-2088111, 'name': PowerOffVM_Task, 'duration_secs': 0.179261} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2354.105560] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-7112024f-e033-4da7-b2f9-cb0e024d7ba1 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] [instance: df37c4f6-b492-4d6e-9ba1-950dbbb9a885] Powered off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2354.105744] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-7112024f-e033-4da7-b2f9-cb0e024d7ba1 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] [instance: df37c4f6-b492-4d6e-9ba1-950dbbb9a885] Unregistering the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2354.106062] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9b9ede69-89f5-4698-977a-8ddf1b1485b5 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2354.324683] env[63279]: DEBUG oslo_concurrency.lockutils [None req-bbf0a56c-0766-4a55-b924-bacb35121eeb tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Lock "a6758131-030e-4b33-a2c9-8864055a5bec" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.069s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2354.460865] env[63279]: INFO nova.compute.manager [None req-c970398a-27d0-4c60-a6af-2e10ca329454 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: befc985f-68e2-4a04-8de0-9ca9bb3fa504] Took 17.34 seconds to build instance. [ 2354.522935] env[63279]: DEBUG oslo_vmware.api [None req-54b6b0b4-1418-4b3d-beea-ba6d37b5d83a tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Task: {'id': task-2088112, 'name': PowerOnVM_Task} progress is 87%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2354.631660] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-e7f52804-8c70-49ab-b511-982e52b419bf tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 686a7ce2-2d07-411e-91d6-0471c55c3728] Unregistered the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2354.631950] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-e7f52804-8c70-49ab-b511-982e52b419bf tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 686a7ce2-2d07-411e-91d6-0471c55c3728] Deleting contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2354.632174] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-e7f52804-8c70-49ab-b511-982e52b419bf tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Deleting the datastore file [datastore1] 686a7ce2-2d07-411e-91d6-0471c55c3728 {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2354.632453] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0f9bea2f-6d81-4465-b4c1-61ff9e14ef97 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2354.639300] env[63279]: DEBUG oslo_vmware.api [None req-e7f52804-8c70-49ab-b511-982e52b419bf tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Waiting for the task: (returnval){ [ 2354.639300] env[63279]: value = "task-2088114" [ 2354.639300] env[63279]: _type = "Task" [ 2354.639300] env[63279]: } to complete. 
{{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2354.645225] env[63279]: DEBUG nova.network.neutron [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] [instance: fcdd3076-2b53-4850-9730-2f877e2cabfd] Updating instance_info_cache with network_info: [{"id": "7baa1106-63c3-49cd-8d80-54c763d1b2b3", "address": "fa:16:3e:cc:16:0a", "network": {"id": "82ca068b-acea-4f60-b0b3-68ea1e08aebe", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-571699353-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.165", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f39174e9ff5649e0ade4391da383dfb2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "57d4be17-536f-4a81-bea9-6547bd50f4a3", "external-id": "nsx-vlan-transportzone-163", "segmentation_id": 163, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7baa1106-63", "ovs_interfaceid": "7baa1106-63c3-49cd-8d80-54c763d1b2b3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2354.652031] env[63279]: DEBUG oslo_vmware.api [None req-e7f52804-8c70-49ab-b511-982e52b419bf tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Task: {'id': task-2088114, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2354.684790] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-7112024f-e033-4da7-b2f9-cb0e024d7ba1 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] [instance: df37c4f6-b492-4d6e-9ba1-950dbbb9a885] Unregistered the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2354.685044] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-7112024f-e033-4da7-b2f9-cb0e024d7ba1 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] [instance: df37c4f6-b492-4d6e-9ba1-950dbbb9a885] Deleting contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2354.685383] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-7112024f-e033-4da7-b2f9-cb0e024d7ba1 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Deleting the datastore file [datastore1] df37c4f6-b492-4d6e-9ba1-950dbbb9a885 {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2354.685686] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-18fd45dd-60a8-4463-8d5f-32ee32b082a6 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2354.691932] env[63279]: DEBUG oslo_vmware.api [None req-7112024f-e033-4da7-b2f9-cb0e024d7ba1 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Waiting for the task: (returnval){ [ 2354.691932] env[63279]: value = "task-2088115" [ 2354.691932] env[63279]: _type = "Task" [ 2354.691932] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2354.701760] env[63279]: DEBUG oslo_vmware.api [None req-7112024f-e033-4da7-b2f9-cb0e024d7ba1 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Task: {'id': task-2088115, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2354.963364] env[63279]: DEBUG oslo_concurrency.lockutils [None req-c970398a-27d0-4c60-a6af-2e10ca329454 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Lock "befc985f-68e2-4a04-8de0-9ca9bb3fa504" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 18.862s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2355.018637] env[63279]: DEBUG oslo_vmware.api [None req-54b6b0b4-1418-4b3d-beea-ba6d37b5d83a tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Task: {'id': task-2088112, 'name': PowerOnVM_Task} progress is 87%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2355.091484] env[63279]: DEBUG oslo_concurrency.lockutils [None req-0092a195-2322-4776-92bb-3cd2e09aef15 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Acquiring lock "4ce17bdb-4bed-4e06-af13-e4097b55e17d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2355.091758] env[63279]: DEBUG oslo_concurrency.lockutils [None req-0092a195-2322-4776-92bb-3cd2e09aef15 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Lock "4ce17bdb-4bed-4e06-af13-e4097b55e17d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2355.091978] env[63279]: DEBUG oslo_concurrency.lockutils [None req-0092a195-2322-4776-92bb-3cd2e09aef15 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Acquiring lock "4ce17bdb-4bed-4e06-af13-e4097b55e17d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2355.092184] env[63279]: DEBUG oslo_concurrency.lockutils [None req-0092a195-2322-4776-92bb-3cd2e09aef15 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Lock "4ce17bdb-4bed-4e06-af13-e4097b55e17d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2355.092363] env[63279]: DEBUG oslo_concurrency.lockutils [None req-0092a195-2322-4776-92bb-3cd2e09aef15 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Lock "4ce17bdb-4bed-4e06-af13-e4097b55e17d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2355.094536] env[63279]: INFO nova.compute.manager [None req-0092a195-2322-4776-92bb-3cd2e09aef15 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] [instance: 4ce17bdb-4bed-4e06-af13-e4097b55e17d] Terminating instance [ 2355.149758] env[63279]: DEBUG oslo_vmware.api [None req-e7f52804-8c70-49ab-b511-982e52b419bf tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Task: {'id': task-2088114, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.156798} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2355.150045] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-e7f52804-8c70-49ab-b511-982e52b419bf tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Deleted the datastore file {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2355.150270] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-e7f52804-8c70-49ab-b511-982e52b419bf tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 686a7ce2-2d07-411e-91d6-0471c55c3728] Deleted contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2355.150423] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-e7f52804-8c70-49ab-b511-982e52b419bf tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 686a7ce2-2d07-411e-91d6-0471c55c3728] Instance destroyed {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2355.150730] env[63279]: INFO nova.compute.manager [None req-e7f52804-8c70-49ab-b511-982e52b419bf tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 686a7ce2-2d07-411e-91d6-0471c55c3728] Took 2.87 seconds to destroy the instance on the hypervisor. [ 2355.151042] env[63279]: DEBUG oslo.service.loopingcall [None req-e7f52804-8c70-49ab-b511-982e52b419bf tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2355.151469] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Releasing lock "refresh_cache-fcdd3076-2b53-4850-9730-2f877e2cabfd" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2355.151766] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] [instance: fcdd3076-2b53-4850-9730-2f877e2cabfd] Updated the network info_cache for instance {{(pid=63279) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10518}} [ 2355.152015] env[63279]: DEBUG nova.compute.manager [-] [instance: 686a7ce2-2d07-411e-91d6-0471c55c3728] Deallocating network for instance {{(pid=63279) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2355.152274] env[63279]: DEBUG nova.network.neutron [-] [instance: 686a7ce2-2d07-411e-91d6-0471c55c3728] deallocate_for_instance() {{(pid=63279) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2355.154332] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2355.154544] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2355.155067] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2355.155261] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2355.155398] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2355.155548] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2355.155677] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=63279) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11066}} [ 2355.155824] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager.update_available_resource {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2355.207217] env[63279]: DEBUG oslo_vmware.api [None req-7112024f-e033-4da7-b2f9-cb0e024d7ba1 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Task: {'id': task-2088115, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.127509} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2355.207503] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-7112024f-e033-4da7-b2f9-cb0e024d7ba1 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Deleted the datastore file {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2355.207698] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-7112024f-e033-4da7-b2f9-cb0e024d7ba1 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] [instance: df37c4f6-b492-4d6e-9ba1-950dbbb9a885] Deleted contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2355.207861] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-7112024f-e033-4da7-b2f9-cb0e024d7ba1 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] [instance: df37c4f6-b492-4d6e-9ba1-950dbbb9a885] Instance destroyed {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2355.208044] env[63279]: INFO nova.compute.manager [None req-7112024f-e033-4da7-b2f9-cb0e024d7ba1 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] [instance: df37c4f6-b492-4d6e-9ba1-950dbbb9a885] Took 1.63 seconds to destroy the instance on the hypervisor. [ 2355.208288] env[63279]: DEBUG oslo.service.loopingcall [None req-7112024f-e033-4da7-b2f9-cb0e024d7ba1 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2355.208471] env[63279]: DEBUG nova.compute.manager [-] [instance: df37c4f6-b492-4d6e-9ba1-950dbbb9a885] Deallocating network for instance {{(pid=63279) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2355.208557] env[63279]: DEBUG nova.network.neutron [-] [instance: df37c4f6-b492-4d6e-9ba1-950dbbb9a885] deallocate_for_instance() {{(pid=63279) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2355.224834] env[63279]: DEBUG nova.compute.manager [req-3d2538a9-7152-4940-8d84-d3ca8ea71c86 req-dc036ddc-d790-4876-be88-ab40d251e20a service nova] [instance: befc985f-68e2-4a04-8de0-9ca9bb3fa504] Received event network-changed-48a709b0-3a67-4115-a178-0872536d2417 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2355.225041] env[63279]: DEBUG nova.compute.manager [req-3d2538a9-7152-4940-8d84-d3ca8ea71c86 req-dc036ddc-d790-4876-be88-ab40d251e20a service nova] [instance: befc985f-68e2-4a04-8de0-9ca9bb3fa504] Refreshing instance network info cache due to event network-changed-48a709b0-3a67-4115-a178-0872536d2417. {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11655}} [ 2355.225253] env[63279]: DEBUG oslo_concurrency.lockutils [req-3d2538a9-7152-4940-8d84-d3ca8ea71c86 req-dc036ddc-d790-4876-be88-ab40d251e20a service nova] Acquiring lock "refresh_cache-befc985f-68e2-4a04-8de0-9ca9bb3fa504" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2355.225395] env[63279]: DEBUG oslo_concurrency.lockutils [req-3d2538a9-7152-4940-8d84-d3ca8ea71c86 req-dc036ddc-d790-4876-be88-ab40d251e20a service nova] Acquired lock "refresh_cache-befc985f-68e2-4a04-8de0-9ca9bb3fa504" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2355.225553] env[63279]: DEBUG nova.network.neutron [req-3d2538a9-7152-4940-8d84-d3ca8ea71c86 req-dc036ddc-d790-4876-be88-ab40d251e20a service nova] [instance: befc985f-68e2-4a04-8de0-9ca9bb3fa504] Refreshing network info cache for port 48a709b0-3a67-4115-a178-0872536d2417 {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2355.510128] env[63279]: DEBUG nova.compute.manager [req-f1a547b0-c7b0-40c0-a764-1ddee7501de5 req-cc071194-d79b-4ea0-b13d-5da11089239b service nova] [instance: df37c4f6-b492-4d6e-9ba1-950dbbb9a885] Received event network-vif-deleted-086b189c-99af-4fd5-a587-66ec372c40fe {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2355.510128] env[63279]: INFO nova.compute.manager [req-f1a547b0-c7b0-40c0-a764-1ddee7501de5 req-cc071194-d79b-4ea0-b13d-5da11089239b service nova] [instance: df37c4f6-b492-4d6e-9ba1-950dbbb9a885] Neutron deleted interface 086b189c-99af-4fd5-a587-66ec372c40fe; detaching it from the instance and deleting it from the info cache [ 2355.510128] env[63279]: DEBUG nova.network.neutron [req-f1a547b0-c7b0-40c0-a764-1ddee7501de5 req-cc071194-d79b-4ea0-b13d-5da11089239b service nova] [instance: df37c4f6-b492-4d6e-9ba1-950dbbb9a885] Updating instance_info_cache with network_info: [] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2355.520852] env[63279]: DEBUG oslo_vmware.api [None req-54b6b0b4-1418-4b3d-beea-ba6d37b5d83a tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Task: {'id': 
task-2088112, 'name': PowerOnVM_Task} progress is 87%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2355.598408] env[63279]: DEBUG nova.compute.manager [None req-0092a195-2322-4776-92bb-3cd2e09aef15 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] [instance: 4ce17bdb-4bed-4e06-af13-e4097b55e17d] Start destroying the instance on the hypervisor. {{(pid=63279) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2355.598650] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-0092a195-2322-4776-92bb-3cd2e09aef15 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] [instance: 4ce17bdb-4bed-4e06-af13-e4097b55e17d] Destroying instance {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2355.599644] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d938458a-5119-4767-a5ad-95e123f6d783 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2355.607816] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-0092a195-2322-4776-92bb-3cd2e09aef15 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] [instance: 4ce17bdb-4bed-4e06-af13-e4097b55e17d] Powering off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2355.607816] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ddac6313-1fbc-4c75-ba7a-a531081b3746 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2355.614502] env[63279]: DEBUG oslo_vmware.api [None req-0092a195-2322-4776-92bb-3cd2e09aef15 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Waiting for the task: (returnval){ [ 2355.614502] env[63279]: value = "task-2088116" [ 2355.614502] env[63279]: _type = "Task" [ 2355.614502] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2355.623160] env[63279]: DEBUG oslo_vmware.api [None req-0092a195-2322-4776-92bb-3cd2e09aef15 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Task: {'id': task-2088116, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2355.659228] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2355.659474] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2355.659644] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2355.659800] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=63279) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2355.660726] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3eb22376-fbc2-44a8-abad-c16f4bf4669b {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2355.670147] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a858bdc-2e3b-44e3-9288-619010e5a649 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2355.689022] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-561fb7d7-b6e1-4895-888c-b2ab343fcc63 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2355.693096] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-792d2b62-bcdc-4df6-83bb-03025e4e18e8 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2355.730226] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=179216MB free_disk=170GB free_vcpus=48 pci_devices=None {{(pid=63279) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2355.733096] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2355.733096] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=63279) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2355.971563] env[63279]: DEBUG nova.network.neutron [req-3d2538a9-7152-4940-8d84-d3ca8ea71c86 req-dc036ddc-d790-4876-be88-ab40d251e20a service nova] [instance: befc985f-68e2-4a04-8de0-9ca9bb3fa504] Updated VIF entry in instance network info cache for port 48a709b0-3a67-4115-a178-0872536d2417. {{(pid=63279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2355.971943] env[63279]: DEBUG nova.network.neutron [req-3d2538a9-7152-4940-8d84-d3ca8ea71c86 req-dc036ddc-d790-4876-be88-ab40d251e20a service nova] [instance: befc985f-68e2-4a04-8de0-9ca9bb3fa504] Updating instance_info_cache with network_info: [{"id": "48a709b0-3a67-4115-a178-0872536d2417", "address": "fa:16:3e:75:52:17", "network": {"id": "1e35690a-f100-41b7-a461-7334966ef784", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1516696428-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.189", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f9427c264e8e41998f579af352cb48cf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "15922696-dc08-44ef-97be-0b09a9dfeae8", "external-id": "nsx-vlan-transportzone-791", "segmentation_id": 791, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap48a709b0-3a", "ovs_interfaceid": "48a709b0-3a67-4115-a178-0872536d2417", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2355.988852] env[63279]: DEBUG nova.network.neutron [-] [instance: df37c4f6-b492-4d6e-9ba1-950dbbb9a885] Updating instance_info_cache with network_info: [] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2356.015652] env[63279]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-73b8808a-0dc6-4eac-8621-89e3f809d423 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2356.023461] env[63279]: DEBUG oslo_vmware.api [None req-54b6b0b4-1418-4b3d-beea-ba6d37b5d83a tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Task: {'id': task-2088112, 'name': PowerOnVM_Task} progress is 87%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2356.027074] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16e6cb93-ce0e-40e2-8d09-1d444fb79c11 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2356.058072] env[63279]: DEBUG nova.compute.manager [req-f1a547b0-c7b0-40c0-a764-1ddee7501de5 req-cc071194-d79b-4ea0-b13d-5da11089239b service nova] [instance: df37c4f6-b492-4d6e-9ba1-950dbbb9a885] Detach interface failed, port_id=086b189c-99af-4fd5-a587-66ec372c40fe, reason: Instance df37c4f6-b492-4d6e-9ba1-950dbbb9a885 could not be found. {{(pid=63279) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11484}} [ 2356.108579] env[63279]: DEBUG nova.network.neutron [-] [instance: 686a7ce2-2d07-411e-91d6-0471c55c3728] Updating instance_info_cache with network_info: [] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2356.125245] env[63279]: DEBUG oslo_vmware.api [None req-0092a195-2322-4776-92bb-3cd2e09aef15 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Task: {'id': task-2088116, 'name': PowerOffVM_Task, 'duration_secs': 0.209322} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2356.125517] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-0092a195-2322-4776-92bb-3cd2e09aef15 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] [instance: 4ce17bdb-4bed-4e06-af13-e4097b55e17d] Powered off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2356.125689] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-0092a195-2322-4776-92bb-3cd2e09aef15 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] [instance: 4ce17bdb-4bed-4e06-af13-e4097b55e17d] Unregistering the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2356.125955] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d5c1317f-a14e-4bd9-916f-74fc52efea2d {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2356.243103] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-0092a195-2322-4776-92bb-3cd2e09aef15 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] [instance: 4ce17bdb-4bed-4e06-af13-e4097b55e17d] Unregistered the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2356.243308] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-0092a195-2322-4776-92bb-3cd2e09aef15 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] [instance: 4ce17bdb-4bed-4e06-af13-e4097b55e17d] Deleting contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2356.243378] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-0092a195-2322-4776-92bb-3cd2e09aef15 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Deleting the datastore file [datastore1] 4ce17bdb-4bed-4e06-af13-e4097b55e17d 
{{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2356.243903] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d5857f5c-63cb-432d-9df6-237abddf2914 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2356.253471] env[63279]: DEBUG oslo_vmware.api [None req-0092a195-2322-4776-92bb-3cd2e09aef15 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Waiting for the task: (returnval){ [ 2356.253471] env[63279]: value = "task-2088118" [ 2356.253471] env[63279]: _type = "Task" [ 2356.253471] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2356.261173] env[63279]: DEBUG oslo_vmware.api [None req-0092a195-2322-4776-92bb-3cd2e09aef15 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Task: {'id': task-2088118, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2356.475404] env[63279]: DEBUG oslo_concurrency.lockutils [req-3d2538a9-7152-4940-8d84-d3ca8ea71c86 req-dc036ddc-d790-4876-be88-ab40d251e20a service nova] Releasing lock "refresh_cache-befc985f-68e2-4a04-8de0-9ca9bb3fa504" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2356.491170] env[63279]: INFO nova.compute.manager [-] [instance: df37c4f6-b492-4d6e-9ba1-950dbbb9a885] Took 1.28 seconds to deallocate network for instance. [ 2356.523045] env[63279]: DEBUG oslo_vmware.api [None req-54b6b0b4-1418-4b3d-beea-ba6d37b5d83a tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Task: {'id': task-2088112, 'name': PowerOnVM_Task, 'duration_secs': 2.265105} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2356.523337] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-54b6b0b4-1418-4b3d-beea-ba6d37b5d83a tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] [instance: 36354325-dee0-406e-8eb6-bc3cf347a403] Powered on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2356.523542] env[63279]: INFO nova.compute.manager [None req-54b6b0b4-1418-4b3d-beea-ba6d37b5d83a tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] [instance: 36354325-dee0-406e-8eb6-bc3cf347a403] Took 16.81 seconds to spawn the instance on the hypervisor. 
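The task entries above (PowerOnVM_Task, Rename_Task, the repeated "_poll_task ... progress is N%" polls and the closing 'duration_secs') all reflect the same oslo.vmware call-and-poll pattern: a vSphere method is invoked, returns a task reference, and the session then polls that task until it finishes. A minimal sketch of that pattern follows, assuming an already-created oslo_vmware.api.VMwareAPISession (named `session` here) and a placeholder `vm_ref`; neither object is taken from this log.

```python
def power_on_and_wait(session, vm_ref):
    """Sketch of the invoke-then-poll pattern behind the task log entries.

    `session` is assumed to be an oslo_vmware.api.VMwareAPISession and
    `vm_ref` a managed object reference to a VirtualMachine; both are
    illustrative placeholders.
    """
    # Issues the SOAP call, logged above as
    # "Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-...".
    task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
    # Polls until the task reports success (or raises on error); each poll
    # shows up as "Task: {...} progress is N%" and the final poll carries
    # 'duration_secs', as in the PowerOnVM_Task completions above.
    return session.wait_for_task(task)
```

The same shape appears in this section for ReconfigVM_Task, PowerOffVM_Task and DeleteDatastoreFile_Task; only the invoked method and its arguments change.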
[ 2356.523760] env[63279]: DEBUG nova.compute.manager [None req-54b6b0b4-1418-4b3d-beea-ba6d37b5d83a tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] [instance: 36354325-dee0-406e-8eb6-bc3cf347a403] Checking state {{(pid=63279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2356.524603] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c99fc4c-daca-4978-82e1-fd07b5226e20 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2356.610776] env[63279]: INFO nova.compute.manager [-] [instance: 686a7ce2-2d07-411e-91d6-0471c55c3728] Took 1.46 seconds to deallocate network for instance. [ 2356.762048] env[63279]: DEBUG oslo_vmware.api [None req-0092a195-2322-4776-92bb-3cd2e09aef15 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Task: {'id': task-2088118, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.134729} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2356.762327] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-0092a195-2322-4776-92bb-3cd2e09aef15 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Deleted the datastore file {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2356.762515] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-0092a195-2322-4776-92bb-3cd2e09aef15 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] [instance: 4ce17bdb-4bed-4e06-af13-e4097b55e17d] Deleted contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2356.762710] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-0092a195-2322-4776-92bb-3cd2e09aef15 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] [instance: 4ce17bdb-4bed-4e06-af13-e4097b55e17d] Instance destroyed {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2356.762887] env[63279]: INFO nova.compute.manager [None req-0092a195-2322-4776-92bb-3cd2e09aef15 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] [instance: 4ce17bdb-4bed-4e06-af13-e4097b55e17d] Took 1.16 seconds to destroy the instance on the hypervisor. [ 2356.763126] env[63279]: DEBUG oslo.service.loopingcall [None req-0092a195-2322-4776-92bb-3cd2e09aef15 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2356.763323] env[63279]: DEBUG nova.compute.manager [-] [instance: 4ce17bdb-4bed-4e06-af13-e4097b55e17d] Deallocating network for instance {{(pid=63279) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2356.763419] env[63279]: DEBUG nova.network.neutron [-] [instance: 4ce17bdb-4bed-4e06-af13-e4097b55e17d] deallocate_for_instance() {{(pid=63279) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2356.765603] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Instance fcdd3076-2b53-4850-9730-2f877e2cabfd actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2356.765743] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Instance 4ce17bdb-4bed-4e06-af13-e4097b55e17d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2356.765866] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Instance 69b3269a-2ba3-4f5f-a29c-62518c93da3d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2356.766080] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Instance 1fca4e5c-fe2c-4b61-bed4-52c7770def7c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2356.766139] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Instance 19e10ee4-99d1-44b9-9354-4c162d541a1f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2356.766220] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Instance df37c4f6-b492-4d6e-9ba1-950dbbb9a885 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2356.766333] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Instance efddec10-b496-446e-a05a-72c9f2d86ed9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2356.766442] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Instance 6f839780-be92-4d99-a96d-1fc14c819599 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2356.766551] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Instance 686a7ce2-2d07-411e-91d6-0471c55c3728 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=63279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2356.766662] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Instance 36354325-dee0-406e-8eb6-bc3cf347a403 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2356.766772] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Instance befc985f-68e2-4a04-8de0-9ca9bb3fa504 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2356.766953] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Total usable vcpus: 48, total allocated vcpus: 11 {{(pid=63279) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2356.767108] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=2688MB phys_disk=200GB used_disk=11GB total_vcpus=48 used_vcpus=11 pci_stats=[] {{(pid=63279) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2356.902722] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6b7f6aa-b466-46c4-a296-b40f351d830b {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2356.910401] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a071f5c2-827f-4282-962f-0ca92c19385b {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2356.939858] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd1920f0-3347-4b43-bfb1-e0494ad2222b {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2356.947661] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8ac668b-f006-4ef5-8910-fc3c7708fc27 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2356.961665] env[63279]: DEBUG 
nova.compute.provider_tree [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Updating inventory in ProviderTree for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2356.996889] env[63279]: DEBUG oslo_concurrency.lockutils [None req-7112024f-e033-4da7-b2f9-cb0e024d7ba1 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2357.043009] env[63279]: INFO nova.compute.manager [None req-54b6b0b4-1418-4b3d-beea-ba6d37b5d83a tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] [instance: 36354325-dee0-406e-8eb6-bc3cf347a403] Took 22.42 seconds to build instance. [ 2357.116675] env[63279]: DEBUG oslo_concurrency.lockutils [None req-e7f52804-8c70-49ab-b511-982e52b419bf tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2357.253456] env[63279]: DEBUG nova.compute.manager [req-13370283-d90f-43b5-9244-0d210cc407ab req-7492413b-e11f-4582-84f1-15898fe8754e service nova] [instance: 686a7ce2-2d07-411e-91d6-0471c55c3728] Received event network-vif-deleted-55a22eb2-68fd-4cc1-8372-6fed483f16d0 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2357.253670] env[63279]: DEBUG nova.compute.manager [req-13370283-d90f-43b5-9244-0d210cc407ab req-7492413b-e11f-4582-84f1-15898fe8754e service nova] [instance: 4ce17bdb-4bed-4e06-af13-e4097b55e17d] Received event network-vif-deleted-72b975f4-3f92-4d65-8d8f-119eeb59c2ab {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2357.253842] env[63279]: INFO nova.compute.manager [req-13370283-d90f-43b5-9244-0d210cc407ab req-7492413b-e11f-4582-84f1-15898fe8754e service nova] [instance: 4ce17bdb-4bed-4e06-af13-e4097b55e17d] Neutron deleted interface 72b975f4-3f92-4d65-8d8f-119eeb59c2ab; detaching it from the instance and deleting it from the info cache [ 2357.254017] env[63279]: DEBUG nova.network.neutron [req-13370283-d90f-43b5-9244-0d210cc407ab req-7492413b-e11f-4582-84f1-15898fe8754e service nova] [instance: 4ce17bdb-4bed-4e06-af13-e4097b55e17d] Updating instance_info_cache with network_info: [] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2357.492667] env[63279]: DEBUG nova.scheduler.client.report [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Updated inventory for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 with generation 159 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 
'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 2357.492922] env[63279]: DEBUG nova.compute.provider_tree [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Updating resource provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 generation from 159 to 160 during operation: update_inventory {{(pid=63279) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 2357.493120] env[63279]: DEBUG nova.compute.provider_tree [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Updating inventory in ProviderTree for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2357.516511] env[63279]: DEBUG nova.network.neutron [-] [instance: 4ce17bdb-4bed-4e06-af13-e4097b55e17d] Updating instance_info_cache with network_info: [] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2357.544590] env[63279]: DEBUG oslo_concurrency.lockutils [None req-54b6b0b4-1418-4b3d-beea-ba6d37b5d83a tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Lock "36354325-dee0-406e-8eb6-bc3cf347a403" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 23.930s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2357.756841] env[63279]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f2e2abe3-e633-4f3e-9edc-7e5b2401976d {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2357.766344] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fea01943-0c8a-4258-8e95-b509cfc1c6e8 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2357.795457] env[63279]: DEBUG nova.compute.manager [req-13370283-d90f-43b5-9244-0d210cc407ab req-7492413b-e11f-4582-84f1-15898fe8754e service nova] [instance: 4ce17bdb-4bed-4e06-af13-e4097b55e17d] Detach interface failed, port_id=72b975f4-3f92-4d65-8d8f-119eeb59c2ab, reason: Instance 4ce17bdb-4bed-4e06-af13-e4097b55e17d could not be found. 
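The inventory update recorded above is the resource tracker pushing a full per-resource-class payload to Placement and bumping the provider generation (159 to 160) once the write succeeds. A minimal sketch of that payload shape, using the values from the log and hypothetical helper names (the HTTP call itself is omitted):

    # Illustrative only: rebuild the inventory dict the log shows for provider
    # 0ba7c625-a0fc-4d3c-b804-196d00f00137. Helper names are not from Nova.
    def build_inventory(total_vcpus, total_ram_mb, total_disk_gb,
                        cpu_ratio=4.0, ram_reserved_mb=512, max_unit_vcpu=16,
                        max_unit_ram_mb=65530, max_unit_disk_gb=170):
        def entry(total, reserved, max_unit, ratio):
            return {'total': total, 'reserved': reserved, 'min_unit': 1,
                    'max_unit': max_unit, 'step_size': 1,
                    'allocation_ratio': ratio}
        return {
            'VCPU': entry(total_vcpus, 0, max_unit_vcpu, cpu_ratio),
            'MEMORY_MB': entry(total_ram_mb, ram_reserved_mb, max_unit_ram_mb, 1.0),
            'DISK_GB': entry(total_disk_gb, 0, max_unit_disk_gb, 1.0),
        }

    inventory = build_inventory(48, 196590, 400)
    # Placement guards concurrent writers with the provider generation; the log
    # shows it advancing from 159 to 160 after this update was accepted.
    generation = 159
    generation += 1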
{{(pid=63279) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11484}} [ 2357.997864] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=63279) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2357.998120] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.267s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2357.998446] env[63279]: DEBUG oslo_concurrency.lockutils [None req-7112024f-e033-4da7-b2f9-cb0e024d7ba1 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.002s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2357.998758] env[63279]: DEBUG nova.objects.instance [None req-7112024f-e033-4da7-b2f9-cb0e024d7ba1 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Lazy-loading 'resources' on Instance uuid df37c4f6-b492-4d6e-9ba1-950dbbb9a885 {{(pid=63279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2358.018990] env[63279]: INFO nova.compute.manager [-] [instance: 4ce17bdb-4bed-4e06-af13-e4097b55e17d] Took 1.26 seconds to deallocate network for instance. [ 2358.526997] env[63279]: DEBUG oslo_concurrency.lockutils [None req-0092a195-2322-4776-92bb-3cd2e09aef15 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2358.656334] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d2bcf97-e7e8-452a-bad3-f47a18362458 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2358.664833] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5015e353-641a-4bcc-b516-0cbdc4227c54 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2358.696592] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9fae2c1-ff93-4a8d-8421-afdfe3266d46 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2358.704237] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d897c267-3f25-4340-8fd7-04062f1480ef {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2358.717589] env[63279]: DEBUG nova.compute.provider_tree [None req-7112024f-e033-4da7-b2f9-cb0e024d7ba1 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Inventory has not changed in ProviderTree for provider: 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) update_inventory 
/opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2359.222104] env[63279]: DEBUG nova.scheduler.client.report [None req-7112024f-e033-4da7-b2f9-cb0e024d7ba1 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Inventory has not changed for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2359.298366] env[63279]: DEBUG nova.compute.manager [req-4c243868-b260-4c59-9361-4bf006cf5911 req-3fa4342a-1446-4449-ac29-1133665fa202 service nova] [instance: 6f839780-be92-4d99-a96d-1fc14c819599] Received event network-changed-d0192def-50a0-40c0-9921-fbdf13e63ffb {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2359.298570] env[63279]: DEBUG nova.compute.manager [req-4c243868-b260-4c59-9361-4bf006cf5911 req-3fa4342a-1446-4449-ac29-1133665fa202 service nova] [instance: 6f839780-be92-4d99-a96d-1fc14c819599] Refreshing instance network info cache due to event network-changed-d0192def-50a0-40c0-9921-fbdf13e63ffb. {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11655}} [ 2359.298793] env[63279]: DEBUG oslo_concurrency.lockutils [req-4c243868-b260-4c59-9361-4bf006cf5911 req-3fa4342a-1446-4449-ac29-1133665fa202 service nova] Acquiring lock "refresh_cache-6f839780-be92-4d99-a96d-1fc14c819599" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2359.298941] env[63279]: DEBUG oslo_concurrency.lockutils [req-4c243868-b260-4c59-9361-4bf006cf5911 req-3fa4342a-1446-4449-ac29-1133665fa202 service nova] Acquired lock "refresh_cache-6f839780-be92-4d99-a96d-1fc14c819599" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2359.299118] env[63279]: DEBUG nova.network.neutron [req-4c243868-b260-4c59-9361-4bf006cf5911 req-3fa4342a-1446-4449-ac29-1133665fa202 service nova] [instance: 6f839780-be92-4d99-a96d-1fc14c819599] Refreshing network info cache for port d0192def-50a0-40c0-9921-fbdf13e63ffb {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2359.536118] env[63279]: DEBUG nova.compute.manager [req-c5c50fc9-5f4f-44c4-9a8f-222494cf1106 req-02f87c77-4bc1-4fea-b571-f19de25d7142 service nova] [instance: 36354325-dee0-406e-8eb6-bc3cf347a403] Received event network-changed-a05a3f68-d80e-46c9-9065-1f803e7c04ff {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2359.536431] env[63279]: DEBUG nova.compute.manager [req-c5c50fc9-5f4f-44c4-9a8f-222494cf1106 req-02f87c77-4bc1-4fea-b571-f19de25d7142 service nova] [instance: 36354325-dee0-406e-8eb6-bc3cf347a403] Refreshing instance network info cache due to event network-changed-a05a3f68-d80e-46c9-9065-1f803e7c04ff. 
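The "Acquiring lock / Acquired lock / Releasing lock refresh_cache-<uuid>" lines around each network-changed event come from oslo.concurrency's lock context manager, which serializes cache refreshes per instance. A small sketch of that pattern, with a placeholder body standing in for the Neutron re-query (the function name below is not Nova's):

    from oslo_concurrency import lockutils

    def refresh_network_info_cache(instance_uuid, port_id):
        # Placeholder for the real work: re-query Neutron for the port and
        # rewrite the instance_info_cache entry.
        print('refreshing %s for port %s' % (instance_uuid, port_id))

    def handle_network_changed(instance_uuid, port_id):
        # Lock name mirrors the "refresh_cache-<uuid>" locks seen in the log.
        with lockutils.lock('refresh_cache-%s' % instance_uuid):
            refresh_network_info_cache(instance_uuid, port_id)

    handle_network_changed('6f839780-be92-4d99-a96d-1fc14c819599',
                           'd0192def-50a0-40c0-9921-fbdf13e63ffb')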
{{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11655}} [ 2359.536564] env[63279]: DEBUG oslo_concurrency.lockutils [req-c5c50fc9-5f4f-44c4-9a8f-222494cf1106 req-02f87c77-4bc1-4fea-b571-f19de25d7142 service nova] Acquiring lock "refresh_cache-36354325-dee0-406e-8eb6-bc3cf347a403" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2359.536690] env[63279]: DEBUG oslo_concurrency.lockutils [req-c5c50fc9-5f4f-44c4-9a8f-222494cf1106 req-02f87c77-4bc1-4fea-b571-f19de25d7142 service nova] Acquired lock "refresh_cache-36354325-dee0-406e-8eb6-bc3cf347a403" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2359.536858] env[63279]: DEBUG nova.network.neutron [req-c5c50fc9-5f4f-44c4-9a8f-222494cf1106 req-02f87c77-4bc1-4fea-b571-f19de25d7142 service nova] [instance: 36354325-dee0-406e-8eb6-bc3cf347a403] Refreshing network info cache for port a05a3f68-d80e-46c9-9065-1f803e7c04ff {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2359.726841] env[63279]: DEBUG oslo_concurrency.lockutils [None req-7112024f-e033-4da7-b2f9-cb0e024d7ba1 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.728s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2359.729164] env[63279]: DEBUG oslo_concurrency.lockutils [None req-e7f52804-8c70-49ab-b511-982e52b419bf tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 2.613s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2359.729404] env[63279]: DEBUG nova.objects.instance [None req-e7f52804-8c70-49ab-b511-982e52b419bf tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Lazy-loading 'resources' on Instance uuid 686a7ce2-2d07-411e-91d6-0471c55c3728 {{(pid=63279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2359.749427] env[63279]: INFO nova.scheduler.client.report [None req-7112024f-e033-4da7-b2f9-cb0e024d7ba1 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Deleted allocations for instance df37c4f6-b492-4d6e-9ba1-950dbbb9a885 [ 2360.009725] env[63279]: DEBUG nova.network.neutron [req-4c243868-b260-4c59-9361-4bf006cf5911 req-3fa4342a-1446-4449-ac29-1133665fa202 service nova] [instance: 6f839780-be92-4d99-a96d-1fc14c819599] Updated VIF entry in instance network info cache for port d0192def-50a0-40c0-9921-fbdf13e63ffb. 
{{(pid=63279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2360.010112] env[63279]: DEBUG nova.network.neutron [req-4c243868-b260-4c59-9361-4bf006cf5911 req-3fa4342a-1446-4449-ac29-1133665fa202 service nova] [instance: 6f839780-be92-4d99-a96d-1fc14c819599] Updating instance_info_cache with network_info: [{"id": "d0192def-50a0-40c0-9921-fbdf13e63ffb", "address": "fa:16:3e:0c:f0:a6", "network": {"id": "e0e614b7-de4b-485e-8824-582faae8febd", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1102025073-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.183", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b5b21bc5072e4945a19a782dd9561709", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "41278529-8bd2-44a1-97c8-03967faa3ff7", "external-id": "nsx-vlan-transportzone-749", "segmentation_id": 749, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd0192def-50", "ovs_interfaceid": "d0192def-50a0-40c0-9921-fbdf13e63ffb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2360.228091] env[63279]: DEBUG nova.network.neutron [req-c5c50fc9-5f4f-44c4-9a8f-222494cf1106 req-02f87c77-4bc1-4fea-b571-f19de25d7142 service nova] [instance: 36354325-dee0-406e-8eb6-bc3cf347a403] Updated VIF entry in instance network info cache for port a05a3f68-d80e-46c9-9065-1f803e7c04ff. 
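The instance_info_cache entry logged above is a list of VIF dicts, each carrying its network, subnets, fixed IPs and any attached floating IPs. A plain-dict walk over that structure (abbreviated to the fields used here, copied from the log; Nova itself wraps this in network model objects):

    network_info = [{
        'id': 'd0192def-50a0-40c0-9921-fbdf13e63ffb',
        'address': 'fa:16:3e:0c:f0:a6',
        'network': {'subnets': [{
            'cidr': '192.168.128.0/28',
            'ips': [{'address': '192.168.128.9', 'type': 'fixed',
                     'floating_ips': [{'address': '10.180.180.183',
                                       'type': 'floating'}]}],
        }]},
    }]

    # Print each VIF's fixed address and any floating addresses bound to it.
    for vif in network_info:
        for subnet in vif['network']['subnets']:
            for ip in subnet['ips']:
                floats = [f['address'] for f in ip.get('floating_ips', [])]
                print(vif['id'], ip['address'], floats)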
{{(pid=63279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2360.228455] env[63279]: DEBUG nova.network.neutron [req-c5c50fc9-5f4f-44c4-9a8f-222494cf1106 req-02f87c77-4bc1-4fea-b571-f19de25d7142 service nova] [instance: 36354325-dee0-406e-8eb6-bc3cf347a403] Updating instance_info_cache with network_info: [{"id": "a05a3f68-d80e-46c9-9065-1f803e7c04ff", "address": "fa:16:3e:8e:d1:f4", "network": {"id": "e0e614b7-de4b-485e-8824-582faae8febd", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1102025073-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b5b21bc5072e4945a19a782dd9561709", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "41278529-8bd2-44a1-97c8-03967faa3ff7", "external-id": "nsx-vlan-transportzone-749", "segmentation_id": 749, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa05a3f68-d8", "ovs_interfaceid": "a05a3f68-d80e-46c9-9065-1f803e7c04ff", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2360.257219] env[63279]: DEBUG oslo_concurrency.lockutils [None req-7112024f-e033-4da7-b2f9-cb0e024d7ba1 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Lock "df37c4f6-b492-4d6e-9ba1-950dbbb9a885" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.184s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2360.367408] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c065040c-a8d6-4bf9-9f60-265aacbe2331 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2360.375309] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34b18405-7e0d-477a-ad37-c73b05c7d99b {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2360.405059] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2af7c16-61f9-4554-b88f-e38c36809428 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2360.412696] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ea5af93-4256-41b1-b501-1aacbe96ff94 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2360.425557] env[63279]: DEBUG nova.compute.provider_tree [None req-e7f52804-8c70-49ab-b511-982e52b419bf tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Inventory has not changed in ProviderTree for provider: 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 
2360.512901] env[63279]: DEBUG oslo_concurrency.lockutils [req-4c243868-b260-4c59-9361-4bf006cf5911 req-3fa4342a-1446-4449-ac29-1133665fa202 service nova] Releasing lock "refresh_cache-6f839780-be92-4d99-a96d-1fc14c819599" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2360.513159] env[63279]: DEBUG nova.compute.manager [req-4c243868-b260-4c59-9361-4bf006cf5911 req-3fa4342a-1446-4449-ac29-1133665fa202 service nova] [instance: 36354325-dee0-406e-8eb6-bc3cf347a403] Received event network-changed-a05a3f68-d80e-46c9-9065-1f803e7c04ff {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2360.513334] env[63279]: DEBUG nova.compute.manager [req-4c243868-b260-4c59-9361-4bf006cf5911 req-3fa4342a-1446-4449-ac29-1133665fa202 service nova] [instance: 36354325-dee0-406e-8eb6-bc3cf347a403] Refreshing instance network info cache due to event network-changed-a05a3f68-d80e-46c9-9065-1f803e7c04ff. {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11655}} [ 2360.513528] env[63279]: DEBUG oslo_concurrency.lockutils [req-4c243868-b260-4c59-9361-4bf006cf5911 req-3fa4342a-1446-4449-ac29-1133665fa202 service nova] Acquiring lock "refresh_cache-36354325-dee0-406e-8eb6-bc3cf347a403" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2360.730871] env[63279]: DEBUG oslo_concurrency.lockutils [req-c5c50fc9-5f4f-44c4-9a8f-222494cf1106 req-02f87c77-4bc1-4fea-b571-f19de25d7142 service nova] Releasing lock "refresh_cache-36354325-dee0-406e-8eb6-bc3cf347a403" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2360.731494] env[63279]: DEBUG oslo_concurrency.lockutils [req-4c243868-b260-4c59-9361-4bf006cf5911 req-3fa4342a-1446-4449-ac29-1133665fa202 service nova] Acquired lock "refresh_cache-36354325-dee0-406e-8eb6-bc3cf347a403" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2360.731794] env[63279]: DEBUG nova.network.neutron [req-4c243868-b260-4c59-9361-4bf006cf5911 req-3fa4342a-1446-4449-ac29-1133665fa202 service nova] [instance: 36354325-dee0-406e-8eb6-bc3cf347a403] Refreshing network info cache for port a05a3f68-d80e-46c9-9065-1f803e7c04ff {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2360.929238] env[63279]: DEBUG nova.scheduler.client.report [None req-e7f52804-8c70-49ab-b511-982e52b419bf tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Inventory has not changed for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2361.258540] env[63279]: DEBUG oslo_concurrency.lockutils [None req-cb9173fa-57f0-4288-972f-2634aba77f81 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Acquiring lock "211f30e0-f5ab-4c05-975a-63fdc4fed7c7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63279) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2361.258913] env[63279]: DEBUG oslo_concurrency.lockutils [None req-cb9173fa-57f0-4288-972f-2634aba77f81 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Lock "211f30e0-f5ab-4c05-975a-63fdc4fed7c7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2361.434029] env[63279]: DEBUG oslo_concurrency.lockutils [None req-e7f52804-8c70-49ab-b511-982e52b419bf tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.705s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2361.435970] env[63279]: DEBUG oslo_concurrency.lockutils [None req-0092a195-2322-4776-92bb-3cd2e09aef15 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 2.909s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2361.436222] env[63279]: DEBUG nova.objects.instance [None req-0092a195-2322-4776-92bb-3cd2e09aef15 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Lazy-loading 'resources' on Instance uuid 4ce17bdb-4bed-4e06-af13-e4097b55e17d {{(pid=63279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2361.457370] env[63279]: INFO nova.scheduler.client.report [None req-e7f52804-8c70-49ab-b511-982e52b419bf tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Deleted allocations for instance 686a7ce2-2d07-411e-91d6-0471c55c3728 [ 2361.463742] env[63279]: DEBUG nova.network.neutron [req-4c243868-b260-4c59-9361-4bf006cf5911 req-3fa4342a-1446-4449-ac29-1133665fa202 service nova] [instance: 36354325-dee0-406e-8eb6-bc3cf347a403] Updated VIF entry in instance network info cache for port a05a3f68-d80e-46c9-9065-1f803e7c04ff. 
{{(pid=63279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2361.464016] env[63279]: DEBUG nova.network.neutron [req-4c243868-b260-4c59-9361-4bf006cf5911 req-3fa4342a-1446-4449-ac29-1133665fa202 service nova] [instance: 36354325-dee0-406e-8eb6-bc3cf347a403] Updating instance_info_cache with network_info: [{"id": "a05a3f68-d80e-46c9-9065-1f803e7c04ff", "address": "fa:16:3e:8e:d1:f4", "network": {"id": "e0e614b7-de4b-485e-8824-582faae8febd", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1102025073-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b5b21bc5072e4945a19a782dd9561709", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "41278529-8bd2-44a1-97c8-03967faa3ff7", "external-id": "nsx-vlan-transportzone-749", "segmentation_id": 749, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa05a3f68-d8", "ovs_interfaceid": "a05a3f68-d80e-46c9-9065-1f803e7c04ff", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2361.567553] env[63279]: DEBUG nova.compute.manager [req-120b789b-d395-4b92-8b2c-1ce62e0ab479 req-a387f98b-d193-4b86-b42e-784942d47088 service nova] [instance: 6f839780-be92-4d99-a96d-1fc14c819599] Received event network-changed-d0192def-50a0-40c0-9921-fbdf13e63ffb {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2361.567792] env[63279]: DEBUG nova.compute.manager [req-120b789b-d395-4b92-8b2c-1ce62e0ab479 req-a387f98b-d193-4b86-b42e-784942d47088 service nova] [instance: 6f839780-be92-4d99-a96d-1fc14c819599] Refreshing instance network info cache due to event network-changed-d0192def-50a0-40c0-9921-fbdf13e63ffb. 
{{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11655}} [ 2361.568041] env[63279]: DEBUG oslo_concurrency.lockutils [req-120b789b-d395-4b92-8b2c-1ce62e0ab479 req-a387f98b-d193-4b86-b42e-784942d47088 service nova] Acquiring lock "refresh_cache-6f839780-be92-4d99-a96d-1fc14c819599" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2361.568195] env[63279]: DEBUG oslo_concurrency.lockutils [req-120b789b-d395-4b92-8b2c-1ce62e0ab479 req-a387f98b-d193-4b86-b42e-784942d47088 service nova] Acquired lock "refresh_cache-6f839780-be92-4d99-a96d-1fc14c819599" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2361.568378] env[63279]: DEBUG nova.network.neutron [req-120b789b-d395-4b92-8b2c-1ce62e0ab479 req-a387f98b-d193-4b86-b42e-784942d47088 service nova] [instance: 6f839780-be92-4d99-a96d-1fc14c819599] Refreshing network info cache for port d0192def-50a0-40c0-9921-fbdf13e63ffb {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2361.761472] env[63279]: DEBUG nova.compute.manager [None req-cb9173fa-57f0-4288-972f-2634aba77f81 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] [instance: 211f30e0-f5ab-4c05-975a-63fdc4fed7c7] Starting instance... {{(pid=63279) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 2361.965795] env[63279]: DEBUG oslo_concurrency.lockutils [req-4c243868-b260-4c59-9361-4bf006cf5911 req-3fa4342a-1446-4449-ac29-1133665fa202 service nova] Releasing lock "refresh_cache-36354325-dee0-406e-8eb6-bc3cf347a403" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2361.966492] env[63279]: DEBUG oslo_concurrency.lockutils [None req-e7f52804-8c70-49ab-b511-982e52b419bf tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Lock "686a7ce2-2d07-411e-91d6-0471c55c3728" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 10.199s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2362.070476] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8190385e-94c1-417c-9f09-94ecf4514045 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2362.080049] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86cfae2c-fc89-453d-83f7-4ed22ca9a25a {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2362.110527] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0ebf2b6-c8d1-49bd-9e53-ff2aaeefc4b3 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2362.117894] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f017140-bd50-46b7-84d8-26df2c3bfa43 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2362.131811] env[63279]: DEBUG nova.compute.provider_tree [None req-0092a195-2322-4776-92bb-3cd2e09aef15 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] 
Inventory has not changed in ProviderTree for provider: 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2362.282474] env[63279]: DEBUG oslo_concurrency.lockutils [None req-cb9173fa-57f0-4288-972f-2634aba77f81 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2362.303419] env[63279]: DEBUG nova.network.neutron [req-120b789b-d395-4b92-8b2c-1ce62e0ab479 req-a387f98b-d193-4b86-b42e-784942d47088 service nova] [instance: 6f839780-be92-4d99-a96d-1fc14c819599] Updated VIF entry in instance network info cache for port d0192def-50a0-40c0-9921-fbdf13e63ffb. {{(pid=63279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2362.303800] env[63279]: DEBUG nova.network.neutron [req-120b789b-d395-4b92-8b2c-1ce62e0ab479 req-a387f98b-d193-4b86-b42e-784942d47088 service nova] [instance: 6f839780-be92-4d99-a96d-1fc14c819599] Updating instance_info_cache with network_info: [{"id": "d0192def-50a0-40c0-9921-fbdf13e63ffb", "address": "fa:16:3e:0c:f0:a6", "network": {"id": "e0e614b7-de4b-485e-8824-582faae8febd", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1102025073-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.183", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b5b21bc5072e4945a19a782dd9561709", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "41278529-8bd2-44a1-97c8-03967faa3ff7", "external-id": "nsx-vlan-transportzone-749", "segmentation_id": 749, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd0192def-50", "ovs_interfaceid": "d0192def-50a0-40c0-9921-fbdf13e63ffb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2362.635354] env[63279]: DEBUG nova.scheduler.client.report [None req-0092a195-2322-4776-92bb-3cd2e09aef15 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Inventory has not changed for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2362.806330] env[63279]: DEBUG oslo_concurrency.lockutils [req-120b789b-d395-4b92-8b2c-1ce62e0ab479 req-a387f98b-d193-4b86-b42e-784942d47088 service nova] Releasing lock "refresh_cache-6f839780-be92-4d99-a96d-1fc14c819599" 
{{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2363.141515] env[63279]: DEBUG oslo_concurrency.lockutils [None req-0092a195-2322-4776-92bb-3cd2e09aef15 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.705s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2363.144133] env[63279]: DEBUG oslo_concurrency.lockutils [None req-cb9173fa-57f0-4288-972f-2634aba77f81 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.862s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2363.145736] env[63279]: INFO nova.compute.claims [None req-cb9173fa-57f0-4288-972f-2634aba77f81 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] [instance: 211f30e0-f5ab-4c05-975a-63fdc4fed7c7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2363.162119] env[63279]: INFO nova.scheduler.client.report [None req-0092a195-2322-4776-92bb-3cd2e09aef15 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Deleted allocations for instance 4ce17bdb-4bed-4e06-af13-e4097b55e17d [ 2363.465989] env[63279]: DEBUG oslo_concurrency.lockutils [None req-f6c4750e-6ea5-467e-9952-01821e2d9756 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Acquiring lock "246b5346-93b1-4a84-921c-d028f3554d3d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2363.466319] env[63279]: DEBUG oslo_concurrency.lockutils [None req-f6c4750e-6ea5-467e-9952-01821e2d9756 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Lock "246b5346-93b1-4a84-921c-d028f3554d3d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2363.668556] env[63279]: DEBUG oslo_concurrency.lockutils [None req-0092a195-2322-4776-92bb-3cd2e09aef15 tempest-ServerRescueNegativeTestJSON-287528819 tempest-ServerRescueNegativeTestJSON-287528819-project-member] Lock "4ce17bdb-4bed-4e06-af13-e4097b55e17d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 8.577s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2363.971197] env[63279]: DEBUG nova.compute.manager [None req-f6c4750e-6ea5-467e-9952-01821e2d9756 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 246b5346-93b1-4a84-921c-d028f3554d3d] Starting instance... 
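The "Deleted allocations for instance <uuid>" lines correspond to removing the instance's consumer record in Placement; allocations are keyed by consumer UUID. A rough sketch of that call shape, where the endpoint URL, token handling and microversion are placeholders rather than values taken from this log:

    import requests

    PLACEMENT = 'http://placement.example/placement'           # hypothetical
    HEADERS = {'X-Auth-Token': '<token>',                       # hypothetical
               'OpenStack-API-Version': 'placement 1.28'}

    def delete_allocations(consumer_uuid):
        resp = requests.delete('%s/allocations/%s' % (PLACEMENT, consumer_uuid),
                               headers=HEADERS)
        # 204 on success; 404 if the consumer has no allocations.
        return resp.status_code

    delete_allocations('4ce17bdb-4bed-4e06-af13-e4097b55e17d')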
{{(pid=63279) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 2364.301963] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d35fd8cf-00ea-4a45-8a96-39452d179aeb {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2364.311020] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b526fb4e-df02-408e-8b8f-9669344a2c09 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2364.342586] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c187eb1c-7e66-454a-ba3e-bf8f5dadd47e {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2364.350545] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08daada9-a0c6-45c9-b37a-47e5e2717ee0 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2364.364315] env[63279]: DEBUG nova.compute.provider_tree [None req-cb9173fa-57f0-4288-972f-2634aba77f81 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Inventory has not changed in ProviderTree for provider: 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2364.489912] env[63279]: DEBUG oslo_concurrency.lockutils [None req-f6c4750e-6ea5-467e-9952-01821e2d9756 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2364.664630] env[63279]: DEBUG oslo_concurrency.lockutils [None req-725b78cc-b60b-460f-94c6-55a5d58bf23a tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Acquiring lock "1fca4e5c-fe2c-4b61-bed4-52c7770def7c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2364.664958] env[63279]: DEBUG oslo_concurrency.lockutils [None req-725b78cc-b60b-460f-94c6-55a5d58bf23a tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Lock "1fca4e5c-fe2c-4b61-bed4-52c7770def7c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2364.665723] env[63279]: DEBUG oslo_concurrency.lockutils [None req-725b78cc-b60b-460f-94c6-55a5d58bf23a tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Acquiring lock "1fca4e5c-fe2c-4b61-bed4-52c7770def7c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2364.665994] env[63279]: DEBUG oslo_concurrency.lockutils [None req-725b78cc-b60b-460f-94c6-55a5d58bf23a tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Lock 
"1fca4e5c-fe2c-4b61-bed4-52c7770def7c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.001s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2364.666234] env[63279]: DEBUG oslo_concurrency.lockutils [None req-725b78cc-b60b-460f-94c6-55a5d58bf23a tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Lock "1fca4e5c-fe2c-4b61-bed4-52c7770def7c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2364.670062] env[63279]: INFO nova.compute.manager [None req-725b78cc-b60b-460f-94c6-55a5d58bf23a tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: 1fca4e5c-fe2c-4b61-bed4-52c7770def7c] Terminating instance [ 2364.868097] env[63279]: DEBUG nova.scheduler.client.report [None req-cb9173fa-57f0-4288-972f-2634aba77f81 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Inventory has not changed for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2365.174045] env[63279]: DEBUG nova.compute.manager [None req-725b78cc-b60b-460f-94c6-55a5d58bf23a tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: 1fca4e5c-fe2c-4b61-bed4-52c7770def7c] Start destroying the instance on the hypervisor. 
{{(pid=63279) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2365.174351] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-725b78cc-b60b-460f-94c6-55a5d58bf23a tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: 1fca4e5c-fe2c-4b61-bed4-52c7770def7c] Destroying instance {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2365.175316] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af356504-9b4a-4e84-a52e-6ff25991d454 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2365.183468] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-725b78cc-b60b-460f-94c6-55a5d58bf23a tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: 1fca4e5c-fe2c-4b61-bed4-52c7770def7c] Powering off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2365.183780] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b121aaa0-ef28-4838-85b5-bb1d2f3bb8ea {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2365.190035] env[63279]: DEBUG oslo_vmware.api [None req-725b78cc-b60b-460f-94c6-55a5d58bf23a tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Waiting for the task: (returnval){ [ 2365.190035] env[63279]: value = "task-2088119" [ 2365.190035] env[63279]: _type = "Task" [ 2365.190035] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2365.197760] env[63279]: DEBUG oslo_vmware.api [None req-725b78cc-b60b-460f-94c6-55a5d58bf23a tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Task: {'id': task-2088119, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2365.372908] env[63279]: DEBUG oslo_concurrency.lockutils [None req-cb9173fa-57f0-4288-972f-2634aba77f81 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.229s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2365.373521] env[63279]: DEBUG nova.compute.manager [None req-cb9173fa-57f0-4288-972f-2634aba77f81 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] [instance: 211f30e0-f5ab-4c05-975a-63fdc4fed7c7] Start building networks asynchronously for instance. 
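The power-off sequence above (PowerOffVM_Task issued, then "Waiting for the task" and repeated "_poll_task" progress lines until completion) follows oslo.vmware's invoke-then-poll pattern. A sketch under that assumption; the connection details are placeholders and vm_ref would be a managed object reference obtained from a lookup such as the FindAllByUuid call seen earlier:

    from oslo_vmware import api as vmware_api

    session = vmware_api.VMwareAPISession(
        'vc1.example', 'user', 'password',            # hypothetical credentials
        api_retry_count=10, task_poll_interval=0.5)

    def power_off(vm_ref):
        # Issue the asynchronous vSphere task...
        task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
        # ...then block while oslo.vmware polls it (the "progress is 0%" lines);
        # on success the log records the task's duration_secs.
        session.wait_for_task(task)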
{{(pid=63279) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 2365.375985] env[63279]: DEBUG oslo_concurrency.lockutils [None req-f6c4750e-6ea5-467e-9952-01821e2d9756 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.886s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2365.377392] env[63279]: INFO nova.compute.claims [None req-f6c4750e-6ea5-467e-9952-01821e2d9756 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 246b5346-93b1-4a84-921c-d028f3554d3d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2365.701226] env[63279]: DEBUG oslo_vmware.api [None req-725b78cc-b60b-460f-94c6-55a5d58bf23a tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Task: {'id': task-2088119, 'name': PowerOffVM_Task, 'duration_secs': 0.206796} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2365.701583] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-725b78cc-b60b-460f-94c6-55a5d58bf23a tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: 1fca4e5c-fe2c-4b61-bed4-52c7770def7c] Powered off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2365.701633] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-725b78cc-b60b-460f-94c6-55a5d58bf23a tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: 1fca4e5c-fe2c-4b61-bed4-52c7770def7c] Unregistering the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2365.701891] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-eded78a7-24b6-4d1e-bd9c-752500594915 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2365.840336] env[63279]: DEBUG oslo_concurrency.lockutils [None req-331cf398-3de2-41fb-ab80-1d976a7a1c78 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Acquiring lock "interface-6f839780-be92-4d99-a96d-1fc14c819599-5426dc4d-6f5e-46fc-9a7e-c3ba98bdc032" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2365.840597] env[63279]: DEBUG oslo_concurrency.lockutils [None req-331cf398-3de2-41fb-ab80-1d976a7a1c78 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Lock "interface-6f839780-be92-4d99-a96d-1fc14c819599-5426dc4d-6f5e-46fc-9a7e-c3ba98bdc032" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2365.841060] env[63279]: DEBUG nova.objects.instance [None req-331cf398-3de2-41fb-ab80-1d976a7a1c78 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Lazy-loading 'flavor' on Instance uuid 6f839780-be92-4d99-a96d-1fc14c819599 {{(pid=63279) obj_load_attr 
/opt/stack/nova/nova/objects/instance.py:1141}} [ 2365.881274] env[63279]: DEBUG nova.compute.utils [None req-cb9173fa-57f0-4288-972f-2634aba77f81 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Using /dev/sd instead of None {{(pid=63279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2365.884288] env[63279]: DEBUG nova.compute.manager [None req-cb9173fa-57f0-4288-972f-2634aba77f81 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] [instance: 211f30e0-f5ab-4c05-975a-63fdc4fed7c7] Allocating IP information in the background. {{(pid=63279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 2365.884450] env[63279]: DEBUG nova.network.neutron [None req-cb9173fa-57f0-4288-972f-2634aba77f81 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] [instance: 211f30e0-f5ab-4c05-975a-63fdc4fed7c7] allocate_for_instance() {{(pid=63279) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2365.923012] env[63279]: DEBUG nova.policy [None req-cb9173fa-57f0-4288-972f-2634aba77f81 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '55199c771de04067a936194078ef99f6', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4fcef39e334249afb9636455802059c5', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63279) authorize /opt/stack/nova/nova/policy.py:192}} [ 2366.192536] env[63279]: DEBUG nova.network.neutron [None req-cb9173fa-57f0-4288-972f-2634aba77f81 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] [instance: 211f30e0-f5ab-4c05-975a-63fdc4fed7c7] Successfully created port: a3f9bcf2-6c4a-42e2-867d-c0fe22f86ac9 {{(pid=63279) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2366.387494] env[63279]: DEBUG nova.compute.manager [None req-cb9173fa-57f0-4288-972f-2634aba77f81 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] [instance: 211f30e0-f5ab-4c05-975a-63fdc4fed7c7] Start building block device mappings for instance. 
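The "Policy check for network:attach_external_network failed with credentials {...}" line is an oslo.policy enforcement of that rule against the request context's credentials, and it fails here because the caller only holds member/reader roles. A minimal sketch of such a check; the rule default used below is an assumption for illustration, not Nova's registered default:

    from oslo_config import cfg
    from oslo_policy import policy

    enforcer = policy.Enforcer(cfg.CONF)
    # Hypothetical default: restrict the rule to admins for the example.
    enforcer.register_default(policy.RuleDefault(
        'network:attach_external_network', 'role:admin'))

    creds = {'roles': ['member', 'reader'],
             'project_id': '4fcef39e334249afb9636455802059c5'}
    allowed = enforcer.enforce('network:attach_external_network', {}, creds)
    print(allowed)   # False for this non-admin context, matching the DEBUG line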
{{(pid=63279) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 2366.446180] env[63279]: DEBUG nova.objects.instance [None req-331cf398-3de2-41fb-ab80-1d976a7a1c78 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Lazy-loading 'pci_requests' on Instance uuid 6f839780-be92-4d99-a96d-1fc14c819599 {{(pid=63279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2366.517583] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-543756fb-36ca-4407-8885-ad73d5d9f371 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2366.525305] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec8d51e0-da41-443a-93f3-4d89f097c9c5 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2366.553749] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f66ea756-4301-43fa-bce4-9b0975df8aa2 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2366.560560] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-acfb9ff8-e7f3-4b77-9821-692b5dab621a {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2366.574318] env[63279]: DEBUG nova.compute.provider_tree [None req-f6c4750e-6ea5-467e-9952-01821e2d9756 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Inventory has not changed in ProviderTree for provider: 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2366.948407] env[63279]: DEBUG nova.objects.base [None req-331cf398-3de2-41fb-ab80-1d976a7a1c78 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Object Instance<6f839780-be92-4d99-a96d-1fc14c819599> lazy-loaded attributes: flavor,pci_requests {{(pid=63279) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 2366.948746] env[63279]: DEBUG nova.network.neutron [None req-331cf398-3de2-41fb-ab80-1d976a7a1c78 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] [instance: 6f839780-be92-4d99-a96d-1fc14c819599] allocate_for_instance() {{(pid=63279) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2367.020932] env[63279]: DEBUG nova.policy [None req-331cf398-3de2-41fb-ab80-1d976a7a1c78 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6343b207f7294f5fa2a8111940083fb0', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b5b21bc5072e4945a19a782dd9561709', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63279) authorize /opt/stack/nova/nova/policy.py:192}} [ 2367.077070] env[63279]: DEBUG nova.scheduler.client.report [None req-f6c4750e-6ea5-467e-9952-01821e2d9756 tempest-ServerActionsTestOtherB-605016737 
tempest-ServerActionsTestOtherB-605016737-project-member] Inventory has not changed for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2367.398235] env[63279]: DEBUG nova.compute.manager [None req-cb9173fa-57f0-4288-972f-2634aba77f81 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] [instance: 211f30e0-f5ab-4c05-975a-63fdc4fed7c7] Start spawning the instance on the hypervisor. {{(pid=63279) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 2367.426851] env[63279]: DEBUG nova.virt.hardware [None req-cb9173fa-57f0-4288-972f-2634aba77f81 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-13T17:30:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-13T17:30:01Z,direct_url=,disk_format='vmdk',id=30887889-e45b-4f67-8b3c-16216e594a90,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a935e86eee0a4c38adfe0367d2097a61',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-13T17:30:02Z,virtual_size=,visibility=), allow threads: False {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2367.427129] env[63279]: DEBUG nova.virt.hardware [None req-cb9173fa-57f0-4288-972f-2634aba77f81 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Flavor limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2367.427292] env[63279]: DEBUG nova.virt.hardware [None req-cb9173fa-57f0-4288-972f-2634aba77f81 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Image limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2367.427476] env[63279]: DEBUG nova.virt.hardware [None req-cb9173fa-57f0-4288-972f-2634aba77f81 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Flavor pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2367.427625] env[63279]: DEBUG nova.virt.hardware [None req-cb9173fa-57f0-4288-972f-2634aba77f81 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Image pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2367.427773] env[63279]: DEBUG nova.virt.hardware [None req-cb9173fa-57f0-4288-972f-2634aba77f81 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63279) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:434}} [ 2367.427989] env[63279]: DEBUG nova.virt.hardware [None req-cb9173fa-57f0-4288-972f-2634aba77f81 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 2367.428174] env[63279]: DEBUG nova.virt.hardware [None req-cb9173fa-57f0-4288-972f-2634aba77f81 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 2367.428353] env[63279]: DEBUG nova.virt.hardware [None req-cb9173fa-57f0-4288-972f-2634aba77f81 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Got 1 possible topologies {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2367.428517] env[63279]: DEBUG nova.virt.hardware [None req-cb9173fa-57f0-4288-972f-2634aba77f81 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2367.428696] env[63279]: DEBUG nova.virt.hardware [None req-cb9173fa-57f0-4288-972f-2634aba77f81 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2367.429687] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84a29665-23d6-4ad3-a286-dd010e99c58b {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2367.438446] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25417fc7-4e9a-4285-b6bb-a5dec1a699e2 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2367.582221] env[63279]: DEBUG oslo_concurrency.lockutils [None req-f6c4750e-6ea5-467e-9952-01821e2d9756 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.206s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2367.582759] env[63279]: DEBUG nova.compute.manager [None req-f6c4750e-6ea5-467e-9952-01821e2d9756 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 246b5346-93b1-4a84-921c-d028f3554d3d] Start building networks asynchronously for instance. 
{{(pid=63279) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 2368.087663] env[63279]: DEBUG nova.compute.utils [None req-f6c4750e-6ea5-467e-9952-01821e2d9756 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Using /dev/sd instead of None {{(pid=63279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2368.089171] env[63279]: DEBUG nova.compute.manager [None req-f6c4750e-6ea5-467e-9952-01821e2d9756 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 246b5346-93b1-4a84-921c-d028f3554d3d] Allocating IP information in the background. {{(pid=63279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 2368.089335] env[63279]: DEBUG nova.network.neutron [None req-f6c4750e-6ea5-467e-9952-01821e2d9756 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 246b5346-93b1-4a84-921c-d028f3554d3d] allocate_for_instance() {{(pid=63279) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2368.126832] env[63279]: DEBUG nova.policy [None req-f6c4750e-6ea5-467e-9952-01821e2d9756 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'efd8e2a39ad843b79762189fcc8aa8ad', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '996f8d6e14a14ac39f207eced547ef33', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63279) authorize /opt/stack/nova/nova/policy.py:192}} [ 2368.381470] env[63279]: DEBUG nova.network.neutron [None req-f6c4750e-6ea5-467e-9952-01821e2d9756 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 246b5346-93b1-4a84-921c-d028f3554d3d] Successfully created port: be3c9abe-2016-4ca7-9982-16776729e694 {{(pid=63279) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2368.593221] env[63279]: DEBUG nova.compute.manager [None req-f6c4750e-6ea5-467e-9952-01821e2d9756 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 246b5346-93b1-4a84-921c-d028f3554d3d] Start building block device mappings for instance. {{(pid=63279) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 2369.603043] env[63279]: DEBUG nova.compute.manager [None req-f6c4750e-6ea5-467e-9952-01821e2d9756 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 246b5346-93b1-4a84-921c-d028f3554d3d] Start spawning the instance on the hypervisor. 
{{(pid=63279) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 2369.628813] env[63279]: DEBUG nova.virt.hardware [None req-f6c4750e-6ea5-467e-9952-01821e2d9756 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-13T17:30:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-13T17:30:01Z,direct_url=,disk_format='vmdk',id=30887889-e45b-4f67-8b3c-16216e594a90,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a935e86eee0a4c38adfe0367d2097a61',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-13T17:30:02Z,virtual_size=,visibility=), allow threads: False {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2369.629301] env[63279]: DEBUG nova.virt.hardware [None req-f6c4750e-6ea5-467e-9952-01821e2d9756 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Flavor limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2369.629484] env[63279]: DEBUG nova.virt.hardware [None req-f6c4750e-6ea5-467e-9952-01821e2d9756 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Image limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2369.629675] env[63279]: DEBUG nova.virt.hardware [None req-f6c4750e-6ea5-467e-9952-01821e2d9756 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Flavor pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2369.629886] env[63279]: DEBUG nova.virt.hardware [None req-f6c4750e-6ea5-467e-9952-01821e2d9756 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Image pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2369.629977] env[63279]: DEBUG nova.virt.hardware [None req-f6c4750e-6ea5-467e-9952-01821e2d9756 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2369.630523] env[63279]: DEBUG nova.virt.hardware [None req-f6c4750e-6ea5-467e-9952-01821e2d9756 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 2369.630523] env[63279]: DEBUG nova.virt.hardware [None req-f6c4750e-6ea5-467e-9952-01821e2d9756 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 2369.630523] env[63279]: DEBUG 
nova.virt.hardware [None req-f6c4750e-6ea5-467e-9952-01821e2d9756 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Got 1 possible topologies {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2369.630703] env[63279]: DEBUG nova.virt.hardware [None req-f6c4750e-6ea5-467e-9952-01821e2d9756 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2369.630949] env[63279]: DEBUG nova.virt.hardware [None req-f6c4750e-6ea5-467e-9952-01821e2d9756 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2369.632308] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e966285c-4dcc-4dd2-9530-8d1a5b6c274d {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2369.636238] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-725b78cc-b60b-460f-94c6-55a5d58bf23a tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: 1fca4e5c-fe2c-4b61-bed4-52c7770def7c] Unregistered the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2369.636375] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-725b78cc-b60b-460f-94c6-55a5d58bf23a tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: 1fca4e5c-fe2c-4b61-bed4-52c7770def7c] Deleting contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2369.636556] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-725b78cc-b60b-460f-94c6-55a5d58bf23a tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Deleting the datastore file [datastore1] 1fca4e5c-fe2c-4b61-bed4-52c7770def7c {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2369.637201] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ccae6989-79f5-47a3-b3fb-9de413401ad6 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2369.644276] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1339c67a-356f-4974-8409-af5c9f379c2f {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2369.649121] env[63279]: DEBUG oslo_vmware.api [None req-725b78cc-b60b-460f-94c6-55a5d58bf23a tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Waiting for the task: (returnval){ [ 2369.649121] env[63279]: value = "task-2088121" [ 2369.649121] env[63279]: _type = "Task" [ 2369.649121] env[63279]: } to complete. 
{{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2369.664452] env[63279]: DEBUG oslo_vmware.api [None req-725b78cc-b60b-460f-94c6-55a5d58bf23a tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Task: {'id': task-2088121, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2370.061474] env[63279]: DEBUG oslo_concurrency.lockutils [None req-43bd4868-7097-4981-87a8-1d823ea75ab9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Acquiring lock "efddec10-b496-446e-a05a-72c9f2d86ed9" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2370.061768] env[63279]: DEBUG oslo_concurrency.lockutils [None req-43bd4868-7097-4981-87a8-1d823ea75ab9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Lock "efddec10-b496-446e-a05a-72c9f2d86ed9" acquired by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2370.061946] env[63279]: INFO nova.compute.manager [None req-43bd4868-7097-4981-87a8-1d823ea75ab9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] [instance: efddec10-b496-446e-a05a-72c9f2d86ed9] Shelving [ 2370.159315] env[63279]: DEBUG oslo_vmware.api [None req-725b78cc-b60b-460f-94c6-55a5d58bf23a tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Task: {'id': task-2088121, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.125632} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2370.159595] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-725b78cc-b60b-460f-94c6-55a5d58bf23a tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Deleted the datastore file {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2370.159784] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-725b78cc-b60b-460f-94c6-55a5d58bf23a tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: 1fca4e5c-fe2c-4b61-bed4-52c7770def7c] Deleted contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2370.159967] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-725b78cc-b60b-460f-94c6-55a5d58bf23a tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: 1fca4e5c-fe2c-4b61-bed4-52c7770def7c] Instance destroyed {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2370.160158] env[63279]: INFO nova.compute.manager [None req-725b78cc-b60b-460f-94c6-55a5d58bf23a tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: 1fca4e5c-fe2c-4b61-bed4-52c7770def7c] Took 4.99 seconds to destroy the instance on the hypervisor. 
[ 2370.160400] env[63279]: DEBUG oslo.service.loopingcall [None req-725b78cc-b60b-460f-94c6-55a5d58bf23a tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2370.160591] env[63279]: DEBUG nova.compute.manager [-] [instance: 1fca4e5c-fe2c-4b61-bed4-52c7770def7c] Deallocating network for instance {{(pid=63279) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2370.160687] env[63279]: DEBUG nova.network.neutron [-] [instance: 1fca4e5c-fe2c-4b61-bed4-52c7770def7c] deallocate_for_instance() {{(pid=63279) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2370.411103] env[63279]: DEBUG nova.compute.manager [req-e824f4d6-8e3d-4f7c-8761-86368864cd44 req-aa0b66d3-0ed5-4d79-9917-dcd53c3e42cd service nova] [instance: 6f839780-be92-4d99-a96d-1fc14c819599] Received event network-vif-plugged-5426dc4d-6f5e-46fc-9a7e-c3ba98bdc032 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2370.411290] env[63279]: DEBUG oslo_concurrency.lockutils [req-e824f4d6-8e3d-4f7c-8761-86368864cd44 req-aa0b66d3-0ed5-4d79-9917-dcd53c3e42cd service nova] Acquiring lock "6f839780-be92-4d99-a96d-1fc14c819599-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2370.411595] env[63279]: DEBUG oslo_concurrency.lockutils [req-e824f4d6-8e3d-4f7c-8761-86368864cd44 req-aa0b66d3-0ed5-4d79-9917-dcd53c3e42cd service nova] Lock "6f839780-be92-4d99-a96d-1fc14c819599-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2370.411684] env[63279]: DEBUG oslo_concurrency.lockutils [req-e824f4d6-8e3d-4f7c-8761-86368864cd44 req-aa0b66d3-0ed5-4d79-9917-dcd53c3e42cd service nova] Lock "6f839780-be92-4d99-a96d-1fc14c819599-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2370.411837] env[63279]: DEBUG nova.compute.manager [req-e824f4d6-8e3d-4f7c-8761-86368864cd44 req-aa0b66d3-0ed5-4d79-9917-dcd53c3e42cd service nova] [instance: 6f839780-be92-4d99-a96d-1fc14c819599] No waiting events found dispatching network-vif-plugged-5426dc4d-6f5e-46fc-9a7e-c3ba98bdc032 {{(pid=63279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 2370.412039] env[63279]: WARNING nova.compute.manager [req-e824f4d6-8e3d-4f7c-8761-86368864cd44 req-aa0b66d3-0ed5-4d79-9917-dcd53c3e42cd service nova] [instance: 6f839780-be92-4d99-a96d-1fc14c819599] Received unexpected event network-vif-plugged-5426dc4d-6f5e-46fc-9a7e-c3ba98bdc032 for instance with vm_state active and task_state None. 
[ 2370.545073] env[63279]: DEBUG nova.network.neutron [None req-331cf398-3de2-41fb-ab80-1d976a7a1c78 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] [instance: 6f839780-be92-4d99-a96d-1fc14c819599] Successfully updated port: 5426dc4d-6f5e-46fc-9a7e-c3ba98bdc032 {{(pid=63279) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2370.689619] env[63279]: DEBUG nova.compute.manager [req-9614356e-429f-4061-99b9-71039931d9d8 req-b02d109f-7bfe-4804-ac26-6d02699cb772 service nova] [instance: 211f30e0-f5ab-4c05-975a-63fdc4fed7c7] Received event network-vif-plugged-a3f9bcf2-6c4a-42e2-867d-c0fe22f86ac9 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2370.690289] env[63279]: DEBUG oslo_concurrency.lockutils [req-9614356e-429f-4061-99b9-71039931d9d8 req-b02d109f-7bfe-4804-ac26-6d02699cb772 service nova] Acquiring lock "211f30e0-f5ab-4c05-975a-63fdc4fed7c7-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2370.690289] env[63279]: DEBUG oslo_concurrency.lockutils [req-9614356e-429f-4061-99b9-71039931d9d8 req-b02d109f-7bfe-4804-ac26-6d02699cb772 service nova] Lock "211f30e0-f5ab-4c05-975a-63fdc4fed7c7-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2370.690289] env[63279]: DEBUG oslo_concurrency.lockutils [req-9614356e-429f-4061-99b9-71039931d9d8 req-b02d109f-7bfe-4804-ac26-6d02699cb772 service nova] Lock "211f30e0-f5ab-4c05-975a-63fdc4fed7c7-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2370.690689] env[63279]: DEBUG nova.compute.manager [req-9614356e-429f-4061-99b9-71039931d9d8 req-b02d109f-7bfe-4804-ac26-6d02699cb772 service nova] [instance: 211f30e0-f5ab-4c05-975a-63fdc4fed7c7] No waiting events found dispatching network-vif-plugged-a3f9bcf2-6c4a-42e2-867d-c0fe22f86ac9 {{(pid=63279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 2370.690689] env[63279]: WARNING nova.compute.manager [req-9614356e-429f-4061-99b9-71039931d9d8 req-b02d109f-7bfe-4804-ac26-6d02699cb772 service nova] [instance: 211f30e0-f5ab-4c05-975a-63fdc4fed7c7] Received unexpected event network-vif-plugged-a3f9bcf2-6c4a-42e2-867d-c0fe22f86ac9 for instance with vm_state building and task_state spawning. 
[ 2370.867280] env[63279]: DEBUG nova.network.neutron [None req-f6c4750e-6ea5-467e-9952-01821e2d9756 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 246b5346-93b1-4a84-921c-d028f3554d3d] Successfully updated port: be3c9abe-2016-4ca7-9982-16776729e694 {{(pid=63279) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2370.898800] env[63279]: DEBUG nova.network.neutron [None req-cb9173fa-57f0-4288-972f-2634aba77f81 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] [instance: 211f30e0-f5ab-4c05-975a-63fdc4fed7c7] Successfully updated port: a3f9bcf2-6c4a-42e2-867d-c0fe22f86ac9 {{(pid=63279) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2371.049187] env[63279]: DEBUG oslo_concurrency.lockutils [None req-331cf398-3de2-41fb-ab80-1d976a7a1c78 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Acquiring lock "refresh_cache-6f839780-be92-4d99-a96d-1fc14c819599" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2371.049397] env[63279]: DEBUG oslo_concurrency.lockutils [None req-331cf398-3de2-41fb-ab80-1d976a7a1c78 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Acquired lock "refresh_cache-6f839780-be92-4d99-a96d-1fc14c819599" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2371.049585] env[63279]: DEBUG nova.network.neutron [None req-331cf398-3de2-41fb-ab80-1d976a7a1c78 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] [instance: 6f839780-be92-4d99-a96d-1fc14c819599] Building network info cache for instance {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2371.080280] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-43bd4868-7097-4981-87a8-1d823ea75ab9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] [instance: efddec10-b496-446e-a05a-72c9f2d86ed9] Powering off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2371.080280] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-bbf98362-4705-4bd6-bf82-8db85586880b {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2371.087529] env[63279]: DEBUG oslo_vmware.api [None req-43bd4868-7097-4981-87a8-1d823ea75ab9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Waiting for the task: (returnval){ [ 2371.087529] env[63279]: value = "task-2088122" [ 2371.087529] env[63279]: _type = "Task" [ 2371.087529] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2371.097601] env[63279]: DEBUG oslo_vmware.api [None req-43bd4868-7097-4981-87a8-1d823ea75ab9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Task: {'id': task-2088122, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2371.339250] env[63279]: DEBUG nova.network.neutron [-] [instance: 1fca4e5c-fe2c-4b61-bed4-52c7770def7c] Updating instance_info_cache with network_info: [] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2371.369090] env[63279]: DEBUG oslo_concurrency.lockutils [None req-f6c4750e-6ea5-467e-9952-01821e2d9756 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Acquiring lock "refresh_cache-246b5346-93b1-4a84-921c-d028f3554d3d" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2371.369621] env[63279]: DEBUG oslo_concurrency.lockutils [None req-f6c4750e-6ea5-467e-9952-01821e2d9756 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Acquired lock "refresh_cache-246b5346-93b1-4a84-921c-d028f3554d3d" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2371.369621] env[63279]: DEBUG nova.network.neutron [None req-f6c4750e-6ea5-467e-9952-01821e2d9756 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 246b5346-93b1-4a84-921c-d028f3554d3d] Building network info cache for instance {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2371.401273] env[63279]: DEBUG oslo_concurrency.lockutils [None req-cb9173fa-57f0-4288-972f-2634aba77f81 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Acquiring lock "refresh_cache-211f30e0-f5ab-4c05-975a-63fdc4fed7c7" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2371.401540] env[63279]: DEBUG oslo_concurrency.lockutils [None req-cb9173fa-57f0-4288-972f-2634aba77f81 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Acquired lock "refresh_cache-211f30e0-f5ab-4c05-975a-63fdc4fed7c7" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2371.401835] env[63279]: DEBUG nova.network.neutron [None req-cb9173fa-57f0-4288-972f-2634aba77f81 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] [instance: 211f30e0-f5ab-4c05-975a-63fdc4fed7c7] Building network info cache for instance {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2371.597689] env[63279]: DEBUG oslo_vmware.api [None req-43bd4868-7097-4981-87a8-1d823ea75ab9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Task: {'id': task-2088122, 'name': PowerOffVM_Task, 'duration_secs': 0.230869} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2371.597964] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-43bd4868-7097-4981-87a8-1d823ea75ab9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] [instance: efddec10-b496-446e-a05a-72c9f2d86ed9] Powered off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2371.598827] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc939eb3-1cb6-4335-a332-d547d65295d2 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2371.601855] env[63279]: WARNING nova.network.neutron [None req-331cf398-3de2-41fb-ab80-1d976a7a1c78 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] [instance: 6f839780-be92-4d99-a96d-1fc14c819599] e0e614b7-de4b-485e-8824-582faae8febd already exists in list: networks containing: ['e0e614b7-de4b-485e-8824-582faae8febd']. ignoring it [ 2371.618833] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77478904-91c8-4ac0-bb84-0a481a7a9bd8 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2371.842813] env[63279]: INFO nova.compute.manager [-] [instance: 1fca4e5c-fe2c-4b61-bed4-52c7770def7c] Took 1.68 seconds to deallocate network for instance. [ 2371.869470] env[63279]: DEBUG nova.network.neutron [None req-331cf398-3de2-41fb-ab80-1d976a7a1c78 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] [instance: 6f839780-be92-4d99-a96d-1fc14c819599] Updating instance_info_cache with network_info: [{"id": "d0192def-50a0-40c0-9921-fbdf13e63ffb", "address": "fa:16:3e:0c:f0:a6", "network": {"id": "e0e614b7-de4b-485e-8824-582faae8febd", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1102025073-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.183", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b5b21bc5072e4945a19a782dd9561709", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "41278529-8bd2-44a1-97c8-03967faa3ff7", "external-id": "nsx-vlan-transportzone-749", "segmentation_id": 749, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd0192def-50", "ovs_interfaceid": "d0192def-50a0-40c0-9921-fbdf13e63ffb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "5426dc4d-6f5e-46fc-9a7e-c3ba98bdc032", "address": "fa:16:3e:06:6c:96", "network": {"id": "e0e614b7-de4b-485e-8824-582faae8febd", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1102025073-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 
4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b5b21bc5072e4945a19a782dd9561709", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "41278529-8bd2-44a1-97c8-03967faa3ff7", "external-id": "nsx-vlan-transportzone-749", "segmentation_id": 749, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5426dc4d-6f", "ovs_interfaceid": "5426dc4d-6f5e-46fc-9a7e-c3ba98bdc032", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2371.900147] env[63279]: DEBUG nova.network.neutron [None req-f6c4750e-6ea5-467e-9952-01821e2d9756 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 246b5346-93b1-4a84-921c-d028f3554d3d] Instance cache missing network info. {{(pid=63279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2371.939449] env[63279]: DEBUG nova.network.neutron [None req-cb9173fa-57f0-4288-972f-2634aba77f81 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] [instance: 211f30e0-f5ab-4c05-975a-63fdc4fed7c7] Instance cache missing network info. {{(pid=63279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2372.059388] env[63279]: DEBUG oslo_concurrency.lockutils [None req-f41695c2-d3eb-4084-840a-8f818ab55fd2 tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Acquiring lock "19e10ee4-99d1-44b9-9354-4c162d541a1f" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2372.059882] env[63279]: DEBUG oslo_concurrency.lockutils [None req-f41695c2-d3eb-4084-840a-8f818ab55fd2 tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Lock "19e10ee4-99d1-44b9-9354-4c162d541a1f" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2372.074614] env[63279]: DEBUG nova.network.neutron [None req-f6c4750e-6ea5-467e-9952-01821e2d9756 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 246b5346-93b1-4a84-921c-d028f3554d3d] Updating instance_info_cache with network_info: [{"id": "be3c9abe-2016-4ca7-9982-16776729e694", "address": "fa:16:3e:95:a8:19", "network": {"id": "1237aa69-f92f-4996-893d-4007ba590d1d", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-544783943-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "996f8d6e14a14ac39f207eced547ef33", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, 
"nsx-logical-switch-id": "d88b750a-0e7d-4f16-8bd5-8e6d5743b720", "external-id": "nsx-vlan-transportzone-715", "segmentation_id": 715, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbe3c9abe-20", "ovs_interfaceid": "be3c9abe-2016-4ca7-9982-16776729e694", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2372.079892] env[63279]: DEBUG nova.network.neutron [None req-cb9173fa-57f0-4288-972f-2634aba77f81 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] [instance: 211f30e0-f5ab-4c05-975a-63fdc4fed7c7] Updating instance_info_cache with network_info: [{"id": "a3f9bcf2-6c4a-42e2-867d-c0fe22f86ac9", "address": "fa:16:3e:86:f4:53", "network": {"id": "2caeac4f-4d6f-49f6-ad75-055171bad9b0", "bridge": "br-int", "label": "tempest-ServersTestJSON-1264030443-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4fcef39e334249afb9636455802059c5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9fa933df-d66f-485e-8cf9-eda7f1a7f283", "external-id": "nsx-vlan-transportzone-87", "segmentation_id": 87, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa3f9bcf2-6c", "ovs_interfaceid": "a3f9bcf2-6c4a-42e2-867d-c0fe22f86ac9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2372.129204] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-43bd4868-7097-4981-87a8-1d823ea75ab9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] [instance: efddec10-b496-446e-a05a-72c9f2d86ed9] Creating Snapshot of the VM instance {{(pid=63279) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 2372.129509] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-a7c7383a-940e-4c0d-870e-98000d82c365 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2372.137200] env[63279]: DEBUG oslo_vmware.api [None req-43bd4868-7097-4981-87a8-1d823ea75ab9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Waiting for the task: (returnval){ [ 2372.137200] env[63279]: value = "task-2088123" [ 2372.137200] env[63279]: _type = "Task" [ 2372.137200] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2372.147339] env[63279]: DEBUG oslo_vmware.api [None req-43bd4868-7097-4981-87a8-1d823ea75ab9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Task: {'id': task-2088123, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2372.349395] env[63279]: DEBUG oslo_concurrency.lockutils [None req-725b78cc-b60b-460f-94c6-55a5d58bf23a tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2372.349670] env[63279]: DEBUG oslo_concurrency.lockutils [None req-725b78cc-b60b-460f-94c6-55a5d58bf23a tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2372.349889] env[63279]: DEBUG nova.objects.instance [None req-725b78cc-b60b-460f-94c6-55a5d58bf23a tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Lazy-loading 'resources' on Instance uuid 1fca4e5c-fe2c-4b61-bed4-52c7770def7c {{(pid=63279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2372.371801] env[63279]: DEBUG oslo_concurrency.lockutils [None req-331cf398-3de2-41fb-ab80-1d976a7a1c78 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Releasing lock "refresh_cache-6f839780-be92-4d99-a96d-1fc14c819599" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2372.372524] env[63279]: DEBUG oslo_concurrency.lockutils [None req-331cf398-3de2-41fb-ab80-1d976a7a1c78 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Acquiring lock "6f839780-be92-4d99-a96d-1fc14c819599" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2372.372699] env[63279]: DEBUG oslo_concurrency.lockutils [None req-331cf398-3de2-41fb-ab80-1d976a7a1c78 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Acquired lock "6f839780-be92-4d99-a96d-1fc14c819599" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2372.373590] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d910dc3-79ca-4136-b405-cb6eef584faa {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2372.392356] env[63279]: DEBUG nova.virt.hardware [None req-331cf398-3de2-41fb-ab80-1d976a7a1c78 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-13T17:30:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=,min_ram=,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} 
[ 2372.392600] env[63279]: DEBUG nova.virt.hardware [None req-331cf398-3de2-41fb-ab80-1d976a7a1c78 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Flavor limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2372.392760] env[63279]: DEBUG nova.virt.hardware [None req-331cf398-3de2-41fb-ab80-1d976a7a1c78 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Image limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2372.392949] env[63279]: DEBUG nova.virt.hardware [None req-331cf398-3de2-41fb-ab80-1d976a7a1c78 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Flavor pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2372.393121] env[63279]: DEBUG nova.virt.hardware [None req-331cf398-3de2-41fb-ab80-1d976a7a1c78 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Image pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2372.393281] env[63279]: DEBUG nova.virt.hardware [None req-331cf398-3de2-41fb-ab80-1d976a7a1c78 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2372.393501] env[63279]: DEBUG nova.virt.hardware [None req-331cf398-3de2-41fb-ab80-1d976a7a1c78 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 2372.393708] env[63279]: DEBUG nova.virt.hardware [None req-331cf398-3de2-41fb-ab80-1d976a7a1c78 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 2372.393824] env[63279]: DEBUG nova.virt.hardware [None req-331cf398-3de2-41fb-ab80-1d976a7a1c78 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Got 1 possible topologies {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2372.393989] env[63279]: DEBUG nova.virt.hardware [None req-331cf398-3de2-41fb-ab80-1d976a7a1c78 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2372.394178] env[63279]: DEBUG nova.virt.hardware [None req-331cf398-3de2-41fb-ab80-1d976a7a1c78 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2372.401033] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-331cf398-3de2-41fb-ab80-1d976a7a1c78 tempest-AttachInterfacesTestJSON-1494945392 
tempest-AttachInterfacesTestJSON-1494945392-project-member] [instance: 6f839780-be92-4d99-a96d-1fc14c819599] Reconfiguring VM to attach interface {{(pid=63279) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1923}} [ 2372.401183] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a54cf972-96e7-411e-89b0-d56e2c7cd420 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2372.418428] env[63279]: DEBUG oslo_vmware.api [None req-331cf398-3de2-41fb-ab80-1d976a7a1c78 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Waiting for the task: (returnval){ [ 2372.418428] env[63279]: value = "task-2088124" [ 2372.418428] env[63279]: _type = "Task" [ 2372.418428] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2372.426094] env[63279]: DEBUG oslo_vmware.api [None req-331cf398-3de2-41fb-ab80-1d976a7a1c78 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Task: {'id': task-2088124, 'name': ReconfigVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2372.471356] env[63279]: DEBUG nova.compute.manager [req-441afef1-5416-4142-9736-4afa26a0fd19 req-d5e2a4fb-1be3-409d-b1fd-8b277c39cf0f service nova] [instance: 6f839780-be92-4d99-a96d-1fc14c819599] Received event network-changed-5426dc4d-6f5e-46fc-9a7e-c3ba98bdc032 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2372.471356] env[63279]: DEBUG nova.compute.manager [req-441afef1-5416-4142-9736-4afa26a0fd19 req-d5e2a4fb-1be3-409d-b1fd-8b277c39cf0f service nova] [instance: 6f839780-be92-4d99-a96d-1fc14c819599] Refreshing instance network info cache due to event network-changed-5426dc4d-6f5e-46fc-9a7e-c3ba98bdc032. 
{{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11655}} [ 2372.471356] env[63279]: DEBUG oslo_concurrency.lockutils [req-441afef1-5416-4142-9736-4afa26a0fd19 req-d5e2a4fb-1be3-409d-b1fd-8b277c39cf0f service nova] Acquiring lock "refresh_cache-6f839780-be92-4d99-a96d-1fc14c819599" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2372.471478] env[63279]: DEBUG oslo_concurrency.lockutils [req-441afef1-5416-4142-9736-4afa26a0fd19 req-d5e2a4fb-1be3-409d-b1fd-8b277c39cf0f service nova] Acquired lock "refresh_cache-6f839780-be92-4d99-a96d-1fc14c819599" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2372.471602] env[63279]: DEBUG nova.network.neutron [req-441afef1-5416-4142-9736-4afa26a0fd19 req-d5e2a4fb-1be3-409d-b1fd-8b277c39cf0f service nova] [instance: 6f839780-be92-4d99-a96d-1fc14c819599] Refreshing network info cache for port 5426dc4d-6f5e-46fc-9a7e-c3ba98bdc032 {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2372.563366] env[63279]: INFO nova.compute.manager [None req-f41695c2-d3eb-4084-840a-8f818ab55fd2 tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] [instance: 19e10ee4-99d1-44b9-9354-4c162d541a1f] Detaching volume 7aed4a6d-1348-48e4-857d-eed399e897d8 [ 2372.577144] env[63279]: DEBUG oslo_concurrency.lockutils [None req-f6c4750e-6ea5-467e-9952-01821e2d9756 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Releasing lock "refresh_cache-246b5346-93b1-4a84-921c-d028f3554d3d" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2372.577446] env[63279]: DEBUG nova.compute.manager [None req-f6c4750e-6ea5-467e-9952-01821e2d9756 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 246b5346-93b1-4a84-921c-d028f3554d3d] Instance network_info: |[{"id": "be3c9abe-2016-4ca7-9982-16776729e694", "address": "fa:16:3e:95:a8:19", "network": {"id": "1237aa69-f92f-4996-893d-4007ba590d1d", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-544783943-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "996f8d6e14a14ac39f207eced547ef33", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d88b750a-0e7d-4f16-8bd5-8e6d5743b720", "external-id": "nsx-vlan-transportzone-715", "segmentation_id": 715, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbe3c9abe-20", "ovs_interfaceid": "be3c9abe-2016-4ca7-9982-16776729e694", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 2372.577880] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-f6c4750e-6ea5-467e-9952-01821e2d9756 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 
246b5346-93b1-4a84-921c-d028f3554d3d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:95:a8:19', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'd88b750a-0e7d-4f16-8bd5-8e6d5743b720', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'be3c9abe-2016-4ca7-9982-16776729e694', 'vif_model': 'vmxnet3'}] {{(pid=63279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2372.585557] env[63279]: DEBUG oslo.service.loopingcall [None req-f6c4750e-6ea5-467e-9952-01821e2d9756 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2372.585934] env[63279]: DEBUG oslo_concurrency.lockutils [None req-cb9173fa-57f0-4288-972f-2634aba77f81 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Releasing lock "refresh_cache-211f30e0-f5ab-4c05-975a-63fdc4fed7c7" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2372.586205] env[63279]: DEBUG nova.compute.manager [None req-cb9173fa-57f0-4288-972f-2634aba77f81 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] [instance: 211f30e0-f5ab-4c05-975a-63fdc4fed7c7] Instance network_info: |[{"id": "a3f9bcf2-6c4a-42e2-867d-c0fe22f86ac9", "address": "fa:16:3e:86:f4:53", "network": {"id": "2caeac4f-4d6f-49f6-ad75-055171bad9b0", "bridge": "br-int", "label": "tempest-ServersTestJSON-1264030443-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4fcef39e334249afb9636455802059c5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9fa933df-d66f-485e-8cf9-eda7f1a7f283", "external-id": "nsx-vlan-transportzone-87", "segmentation_id": 87, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa3f9bcf2-6c", "ovs_interfaceid": "a3f9bcf2-6c4a-42e2-867d-c0fe22f86ac9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 2372.586457] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 246b5346-93b1-4a84-921c-d028f3554d3d] Creating VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2372.586804] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-cb9173fa-57f0-4288-972f-2634aba77f81 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] [instance: 211f30e0-f5ab-4c05-975a-63fdc4fed7c7] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:86:f4:53', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '9fa933df-d66f-485e-8cf9-eda7f1a7f283', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a3f9bcf2-6c4a-42e2-867d-c0fe22f86ac9', 'vif_model': 'vmxnet3'}] {{(pid=63279) build_virtual_machine 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2372.593767] env[63279]: DEBUG oslo.service.loopingcall [None req-cb9173fa-57f0-4288-972f-2634aba77f81 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2372.593950] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f95a0f85-e34f-47e8-9b1e-866abb0de7c9 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2372.608961] env[63279]: INFO nova.virt.block_device [None req-f41695c2-d3eb-4084-840a-8f818ab55fd2 tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] [instance: 19e10ee4-99d1-44b9-9354-4c162d541a1f] Attempting to driver detach volume 7aed4a6d-1348-48e4-857d-eed399e897d8 from mountpoint /dev/sdb [ 2372.609196] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-f41695c2-d3eb-4084-840a-8f818ab55fd2 tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] [instance: 19e10ee4-99d1-44b9-9354-4c162d541a1f] Volume detach. Driver type: vmdk {{(pid=63279) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 2372.609381] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-f41695c2-d3eb-4084-840a-8f818ab55fd2 tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] [instance: 19e10ee4-99d1-44b9-9354-4c162d541a1f] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-427776', 'volume_id': '7aed4a6d-1348-48e4-857d-eed399e897d8', 'name': 'volume-7aed4a6d-1348-48e4-857d-eed399e897d8', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '19e10ee4-99d1-44b9-9354-4c162d541a1f', 'attached_at': '', 'detached_at': '', 'volume_id': '7aed4a6d-1348-48e4-857d-eed399e897d8', 'serial': '7aed4a6d-1348-48e4-857d-eed399e897d8'} {{(pid=63279) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 2372.609641] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 211f30e0-f5ab-4c05-975a-63fdc4fed7c7] Creating VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2372.610664] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8bd19dd-512c-4956-aace-7955131a6d8a {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2372.613324] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-bcc933e6-d08a-4bf9-890d-d2cdfb4a48fc {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2372.649505] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5228356-d9af-454c-9f4f-78c8d586bf7f {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2372.651764] env[63279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2372.651764] env[63279]: value = "task-2088126" [ 2372.651764] env[63279]: _type = "Task" [ 2372.651764] env[63279]: } to complete. 
{{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2372.651972] env[63279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2372.651972] env[63279]: value = "task-2088125" [ 2372.651972] env[63279]: _type = "Task" [ 2372.651972] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2372.662482] env[63279]: DEBUG oslo_vmware.api [None req-43bd4868-7097-4981-87a8-1d823ea75ab9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Task: {'id': task-2088123, 'name': CreateSnapshot_Task, 'duration_secs': 0.426936} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2372.663644] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9b61443-443c-4162-877a-4f2b44560d59 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2372.665831] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-43bd4868-7097-4981-87a8-1d823ea75ab9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] [instance: efddec10-b496-446e-a05a-72c9f2d86ed9] Created Snapshot of the VM instance {{(pid=63279) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 2372.671703] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c67b87f2-6f0c-4798-b37e-3b53c0f2b9d4 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2372.674111] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2088126, 'name': CreateVM_Task} progress is 15%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2372.674271] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2088125, 'name': CreateVM_Task} progress is 15%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2372.700521] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6d6fae0-83c6-417f-952e-77fb38009649 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2372.720163] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-f41695c2-d3eb-4084-840a-8f818ab55fd2 tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] The volume has not been displaced from its original location: [datastore1] volume-7aed4a6d-1348-48e4-857d-eed399e897d8/volume-7aed4a6d-1348-48e4-857d-eed399e897d8.vmdk. No consolidation needed. 
{{(pid=63279) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 2372.730029] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-f41695c2-d3eb-4084-840a-8f818ab55fd2 tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] [instance: 19e10ee4-99d1-44b9-9354-4c162d541a1f] Reconfiguring VM instance instance-00000062 to detach disk 2001 {{(pid=63279) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 2372.730823] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-af13411a-d258-4be0-ba3d-5eee568264b7 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2372.755590] env[63279]: DEBUG nova.compute.manager [req-c54932ac-ef38-499d-88e7-fc94b47fd362 req-a46b66e0-c482-43e4-8d22-c90f41ef92e3 service nova] [instance: 1fca4e5c-fe2c-4b61-bed4-52c7770def7c] Received event network-vif-deleted-1e0ac67c-3039-4c36-831a-d32977fcab32 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2372.755883] env[63279]: DEBUG nova.compute.manager [req-c54932ac-ef38-499d-88e7-fc94b47fd362 req-a46b66e0-c482-43e4-8d22-c90f41ef92e3 service nova] [instance: 211f30e0-f5ab-4c05-975a-63fdc4fed7c7] Received event network-changed-a3f9bcf2-6c4a-42e2-867d-c0fe22f86ac9 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2372.756139] env[63279]: DEBUG nova.compute.manager [req-c54932ac-ef38-499d-88e7-fc94b47fd362 req-a46b66e0-c482-43e4-8d22-c90f41ef92e3 service nova] [instance: 211f30e0-f5ab-4c05-975a-63fdc4fed7c7] Refreshing instance network info cache due to event network-changed-a3f9bcf2-6c4a-42e2-867d-c0fe22f86ac9. {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11655}} [ 2372.756426] env[63279]: DEBUG oslo_concurrency.lockutils [req-c54932ac-ef38-499d-88e7-fc94b47fd362 req-a46b66e0-c482-43e4-8d22-c90f41ef92e3 service nova] Acquiring lock "refresh_cache-211f30e0-f5ab-4c05-975a-63fdc4fed7c7" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2372.756634] env[63279]: DEBUG oslo_concurrency.lockutils [req-c54932ac-ef38-499d-88e7-fc94b47fd362 req-a46b66e0-c482-43e4-8d22-c90f41ef92e3 service nova] Acquired lock "refresh_cache-211f30e0-f5ab-4c05-975a-63fdc4fed7c7" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2372.756859] env[63279]: DEBUG nova.network.neutron [req-c54932ac-ef38-499d-88e7-fc94b47fd362 req-a46b66e0-c482-43e4-8d22-c90f41ef92e3 service nova] [instance: 211f30e0-f5ab-4c05-975a-63fdc4fed7c7] Refreshing network info cache for port a3f9bcf2-6c4a-42e2-867d-c0fe22f86ac9 {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2372.763920] env[63279]: DEBUG oslo_vmware.api [None req-f41695c2-d3eb-4084-840a-8f818ab55fd2 tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Waiting for the task: (returnval){ [ 2372.763920] env[63279]: value = "task-2088127" [ 2372.763920] env[63279]: _type = "Task" [ 2372.763920] env[63279]: } to complete. 
{{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2372.772807] env[63279]: DEBUG oslo_vmware.api [None req-f41695c2-d3eb-4084-840a-8f818ab55fd2 tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Task: {'id': task-2088127, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2372.929578] env[63279]: DEBUG oslo_vmware.api [None req-331cf398-3de2-41fb-ab80-1d976a7a1c78 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Task: {'id': task-2088124, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2373.004733] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0a3020c-d9e9-4e6c-9243-9e462198dcc1 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2373.014595] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03f94aa2-3da2-4cf0-854e-26e3762f6251 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2373.044857] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58e8e97b-7c77-45b2-afa4-bfd8b776095a {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2373.052361] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5b02371-a45b-43f1-ba6d-086ca17344df {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2373.070082] env[63279]: DEBUG nova.compute.provider_tree [None req-725b78cc-b60b-460f-94c6-55a5d58bf23a tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Inventory has not changed in ProviderTree for provider: 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2373.164289] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2088126, 'name': CreateVM_Task, 'duration_secs': 0.506617} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2373.169399] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 211f30e0-f5ab-4c05-975a-63fdc4fed7c7] Created VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2373.169620] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2088125, 'name': CreateVM_Task, 'duration_secs': 0.48579} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2373.170291] env[63279]: DEBUG oslo_concurrency.lockutils [None req-cb9173fa-57f0-4288-972f-2634aba77f81 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2373.170457] env[63279]: DEBUG oslo_concurrency.lockutils [None req-cb9173fa-57f0-4288-972f-2634aba77f81 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2373.170782] env[63279]: DEBUG oslo_concurrency.lockutils [None req-cb9173fa-57f0-4288-972f-2634aba77f81 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2373.170980] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 246b5346-93b1-4a84-921c-d028f3554d3d] Created VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2373.171478] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-27f7d668-b148-4a16-a569-233daf05b993 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2373.173299] env[63279]: DEBUG oslo_concurrency.lockutils [None req-f6c4750e-6ea5-467e-9952-01821e2d9756 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2373.176606] env[63279]: DEBUG oslo_vmware.api [None req-cb9173fa-57f0-4288-972f-2634aba77f81 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Waiting for the task: (returnval){ [ 2373.176606] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52f5e17f-0d4f-16ad-6aa3-359d58a5d50e" [ 2373.176606] env[63279]: _type = "Task" [ 2373.176606] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2373.183707] env[63279]: DEBUG oslo_vmware.api [None req-cb9173fa-57f0-4288-972f-2634aba77f81 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52f5e17f-0d4f-16ad-6aa3-359d58a5d50e, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2373.205530] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-43bd4868-7097-4981-87a8-1d823ea75ab9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] [instance: efddec10-b496-446e-a05a-72c9f2d86ed9] Creating linked-clone VM from snapshot {{(pid=63279) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 2373.206530] env[63279]: DEBUG nova.network.neutron [req-441afef1-5416-4142-9736-4afa26a0fd19 req-d5e2a4fb-1be3-409d-b1fd-8b277c39cf0f service nova] [instance: 6f839780-be92-4d99-a96d-1fc14c819599] Updated VIF entry in instance network info cache for port 5426dc4d-6f5e-46fc-9a7e-c3ba98bdc032. {{(pid=63279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2373.206947] env[63279]: DEBUG nova.network.neutron [req-441afef1-5416-4142-9736-4afa26a0fd19 req-d5e2a4fb-1be3-409d-b1fd-8b277c39cf0f service nova] [instance: 6f839780-be92-4d99-a96d-1fc14c819599] Updating instance_info_cache with network_info: [{"id": "d0192def-50a0-40c0-9921-fbdf13e63ffb", "address": "fa:16:3e:0c:f0:a6", "network": {"id": "e0e614b7-de4b-485e-8824-582faae8febd", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1102025073-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.183", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b5b21bc5072e4945a19a782dd9561709", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "41278529-8bd2-44a1-97c8-03967faa3ff7", "external-id": "nsx-vlan-transportzone-749", "segmentation_id": 749, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd0192def-50", "ovs_interfaceid": "d0192def-50a0-40c0-9921-fbdf13e63ffb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "5426dc4d-6f5e-46fc-9a7e-c3ba98bdc032", "address": "fa:16:3e:06:6c:96", "network": {"id": "e0e614b7-de4b-485e-8824-582faae8febd", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1102025073-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b5b21bc5072e4945a19a782dd9561709", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "41278529-8bd2-44a1-97c8-03967faa3ff7", "external-id": "nsx-vlan-transportzone-749", "segmentation_id": 749, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5426dc4d-6f", "ovs_interfaceid": "5426dc4d-6f5e-46fc-9a7e-c3ba98bdc032", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": 
true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2373.208097] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-bf24b39e-3928-42fe-b5f7-8df2e7c1f09a {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2373.216972] env[63279]: DEBUG oslo_vmware.api [None req-43bd4868-7097-4981-87a8-1d823ea75ab9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Waiting for the task: (returnval){ [ 2373.216972] env[63279]: value = "task-2088128" [ 2373.216972] env[63279]: _type = "Task" [ 2373.216972] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2373.226415] env[63279]: DEBUG oslo_vmware.api [None req-43bd4868-7097-4981-87a8-1d823ea75ab9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Task: {'id': task-2088128, 'name': CloneVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2373.274176] env[63279]: DEBUG oslo_vmware.api [None req-f41695c2-d3eb-4084-840a-8f818ab55fd2 tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Task: {'id': task-2088127, 'name': ReconfigVM_Task, 'duration_secs': 0.195494} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2373.274270] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-f41695c2-d3eb-4084-840a-8f818ab55fd2 tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] [instance: 19e10ee4-99d1-44b9-9354-4c162d541a1f] Reconfigured VM instance instance-00000062 to detach disk 2001 {{(pid=63279) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 2373.279411] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7e58f36d-e08a-4a36-8adc-a0d788c9f2d7 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2373.296255] env[63279]: DEBUG oslo_vmware.api [None req-f41695c2-d3eb-4084-840a-8f818ab55fd2 tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Waiting for the task: (returnval){ [ 2373.296255] env[63279]: value = "task-2088129" [ 2373.296255] env[63279]: _type = "Task" [ 2373.296255] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2373.304480] env[63279]: DEBUG oslo_vmware.api [None req-f41695c2-d3eb-4084-840a-8f818ab55fd2 tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Task: {'id': task-2088129, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2373.431855] env[63279]: DEBUG oslo_vmware.api [None req-331cf398-3de2-41fb-ab80-1d976a7a1c78 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Task: {'id': task-2088124, 'name': ReconfigVM_Task, 'duration_secs': 0.742223} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2373.432463] env[63279]: DEBUG oslo_concurrency.lockutils [None req-331cf398-3de2-41fb-ab80-1d976a7a1c78 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Releasing lock "6f839780-be92-4d99-a96d-1fc14c819599" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2373.432673] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-331cf398-3de2-41fb-ab80-1d976a7a1c78 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] [instance: 6f839780-be92-4d99-a96d-1fc14c819599] Reconfigured VM to attach interface {{(pid=63279) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1937}} [ 2373.484080] env[63279]: DEBUG nova.network.neutron [req-c54932ac-ef38-499d-88e7-fc94b47fd362 req-a46b66e0-c482-43e4-8d22-c90f41ef92e3 service nova] [instance: 211f30e0-f5ab-4c05-975a-63fdc4fed7c7] Updated VIF entry in instance network info cache for port a3f9bcf2-6c4a-42e2-867d-c0fe22f86ac9. {{(pid=63279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2373.484512] env[63279]: DEBUG nova.network.neutron [req-c54932ac-ef38-499d-88e7-fc94b47fd362 req-a46b66e0-c482-43e4-8d22-c90f41ef92e3 service nova] [instance: 211f30e0-f5ab-4c05-975a-63fdc4fed7c7] Updating instance_info_cache with network_info: [{"id": "a3f9bcf2-6c4a-42e2-867d-c0fe22f86ac9", "address": "fa:16:3e:86:f4:53", "network": {"id": "2caeac4f-4d6f-49f6-ad75-055171bad9b0", "bridge": "br-int", "label": "tempest-ServersTestJSON-1264030443-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4fcef39e334249afb9636455802059c5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9fa933df-d66f-485e-8cf9-eda7f1a7f283", "external-id": "nsx-vlan-transportzone-87", "segmentation_id": 87, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa3f9bcf2-6c", "ovs_interfaceid": "a3f9bcf2-6c4a-42e2-867d-c0fe22f86ac9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2373.573395] env[63279]: DEBUG nova.scheduler.client.report [None req-725b78cc-b60b-460f-94c6-55a5d58bf23a tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Inventory has not changed for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2373.687374] env[63279]: DEBUG oslo_vmware.api [None 
req-cb9173fa-57f0-4288-972f-2634aba77f81 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52f5e17f-0d4f-16ad-6aa3-359d58a5d50e, 'name': SearchDatastore_Task, 'duration_secs': 0.012533} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2373.687683] env[63279]: DEBUG oslo_concurrency.lockutils [None req-cb9173fa-57f0-4288-972f-2634aba77f81 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2373.687915] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-cb9173fa-57f0-4288-972f-2634aba77f81 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] [instance: 211f30e0-f5ab-4c05-975a-63fdc4fed7c7] Processing image 30887889-e45b-4f67-8b3c-16216e594a90 {{(pid=63279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2373.688170] env[63279]: DEBUG oslo_concurrency.lockutils [None req-cb9173fa-57f0-4288-972f-2634aba77f81 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2373.688323] env[63279]: DEBUG oslo_concurrency.lockutils [None req-cb9173fa-57f0-4288-972f-2634aba77f81 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2373.688503] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-cb9173fa-57f0-4288-972f-2634aba77f81 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2373.688791] env[63279]: DEBUG oslo_concurrency.lockutils [None req-f6c4750e-6ea5-467e-9952-01821e2d9756 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2373.689111] env[63279]: DEBUG oslo_concurrency.lockutils [None req-f6c4750e-6ea5-467e-9952-01821e2d9756 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2373.689340] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b3cef565-8126-44d7-9476-c3c6a036fee2 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2373.691184] env[63279]: DEBUG oslo_vmware.service [-] Invoking 
HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-eb12cf4f-a611-4e71-8494-22edb7ff65b9 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2373.695639] env[63279]: DEBUG oslo_vmware.api [None req-f6c4750e-6ea5-467e-9952-01821e2d9756 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Waiting for the task: (returnval){ [ 2373.695639] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]523c153a-8b83-3e06-2c8a-df4058a3210a" [ 2373.695639] env[63279]: _type = "Task" [ 2373.695639] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2373.699015] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-cb9173fa-57f0-4288-972f-2634aba77f81 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2373.699193] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-cb9173fa-57f0-4288-972f-2634aba77f81 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2373.700091] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-13fd7a0d-3951-4a48-84e6-7c3118428723 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2373.705928] env[63279]: DEBUG oslo_vmware.api [None req-f6c4750e-6ea5-467e-9952-01821e2d9756 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]523c153a-8b83-3e06-2c8a-df4058a3210a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2373.708810] env[63279]: DEBUG oslo_vmware.api [None req-cb9173fa-57f0-4288-972f-2634aba77f81 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Waiting for the task: (returnval){ [ 2373.708810] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52e45e8a-1971-ada9-a5d8-61746c6f684a" [ 2373.708810] env[63279]: _type = "Task" [ 2373.708810] env[63279]: } to complete. 
{{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2373.712017] env[63279]: DEBUG oslo_concurrency.lockutils [req-441afef1-5416-4142-9736-4afa26a0fd19 req-d5e2a4fb-1be3-409d-b1fd-8b277c39cf0f service nova] Releasing lock "refresh_cache-6f839780-be92-4d99-a96d-1fc14c819599" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2373.712222] env[63279]: DEBUG nova.compute.manager [req-441afef1-5416-4142-9736-4afa26a0fd19 req-d5e2a4fb-1be3-409d-b1fd-8b277c39cf0f service nova] [instance: 246b5346-93b1-4a84-921c-d028f3554d3d] Received event network-vif-plugged-be3c9abe-2016-4ca7-9982-16776729e694 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2373.712408] env[63279]: DEBUG oslo_concurrency.lockutils [req-441afef1-5416-4142-9736-4afa26a0fd19 req-d5e2a4fb-1be3-409d-b1fd-8b277c39cf0f service nova] Acquiring lock "246b5346-93b1-4a84-921c-d028f3554d3d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2373.712611] env[63279]: DEBUG oslo_concurrency.lockutils [req-441afef1-5416-4142-9736-4afa26a0fd19 req-d5e2a4fb-1be3-409d-b1fd-8b277c39cf0f service nova] Lock "246b5346-93b1-4a84-921c-d028f3554d3d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2373.712771] env[63279]: DEBUG oslo_concurrency.lockutils [req-441afef1-5416-4142-9736-4afa26a0fd19 req-d5e2a4fb-1be3-409d-b1fd-8b277c39cf0f service nova] Lock "246b5346-93b1-4a84-921c-d028f3554d3d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2373.712940] env[63279]: DEBUG nova.compute.manager [req-441afef1-5416-4142-9736-4afa26a0fd19 req-d5e2a4fb-1be3-409d-b1fd-8b277c39cf0f service nova] [instance: 246b5346-93b1-4a84-921c-d028f3554d3d] No waiting events found dispatching network-vif-plugged-be3c9abe-2016-4ca7-9982-16776729e694 {{(pid=63279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 2373.713145] env[63279]: WARNING nova.compute.manager [req-441afef1-5416-4142-9736-4afa26a0fd19 req-d5e2a4fb-1be3-409d-b1fd-8b277c39cf0f service nova] [instance: 246b5346-93b1-4a84-921c-d028f3554d3d] Received unexpected event network-vif-plugged-be3c9abe-2016-4ca7-9982-16776729e694 for instance with vm_state building and task_state spawning. [ 2373.713330] env[63279]: DEBUG nova.compute.manager [req-441afef1-5416-4142-9736-4afa26a0fd19 req-d5e2a4fb-1be3-409d-b1fd-8b277c39cf0f service nova] [instance: 246b5346-93b1-4a84-921c-d028f3554d3d] Received event network-changed-be3c9abe-2016-4ca7-9982-16776729e694 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2373.713489] env[63279]: DEBUG nova.compute.manager [req-441afef1-5416-4142-9736-4afa26a0fd19 req-d5e2a4fb-1be3-409d-b1fd-8b277c39cf0f service nova] [instance: 246b5346-93b1-4a84-921c-d028f3554d3d] Refreshing instance network info cache due to event network-changed-be3c9abe-2016-4ca7-9982-16776729e694. 
{{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11655}} [ 2373.713674] env[63279]: DEBUG oslo_concurrency.lockutils [req-441afef1-5416-4142-9736-4afa26a0fd19 req-d5e2a4fb-1be3-409d-b1fd-8b277c39cf0f service nova] Acquiring lock "refresh_cache-246b5346-93b1-4a84-921c-d028f3554d3d" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2373.713818] env[63279]: DEBUG oslo_concurrency.lockutils [req-441afef1-5416-4142-9736-4afa26a0fd19 req-d5e2a4fb-1be3-409d-b1fd-8b277c39cf0f service nova] Acquired lock "refresh_cache-246b5346-93b1-4a84-921c-d028f3554d3d" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2373.713972] env[63279]: DEBUG nova.network.neutron [req-441afef1-5416-4142-9736-4afa26a0fd19 req-d5e2a4fb-1be3-409d-b1fd-8b277c39cf0f service nova] [instance: 246b5346-93b1-4a84-921c-d028f3554d3d] Refreshing network info cache for port be3c9abe-2016-4ca7-9982-16776729e694 {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2373.717540] env[63279]: DEBUG oslo_vmware.api [None req-cb9173fa-57f0-4288-972f-2634aba77f81 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52e45e8a-1971-ada9-a5d8-61746c6f684a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2373.725421] env[63279]: DEBUG oslo_vmware.api [None req-43bd4868-7097-4981-87a8-1d823ea75ab9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Task: {'id': task-2088128, 'name': CloneVM_Task} progress is 94%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2373.805956] env[63279]: DEBUG oslo_vmware.api [None req-f41695c2-d3eb-4084-840a-8f818ab55fd2 tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Task: {'id': task-2088129, 'name': ReconfigVM_Task, 'duration_secs': 0.310422} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2373.806277] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-f41695c2-d3eb-4084-840a-8f818ab55fd2 tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] [instance: 19e10ee4-99d1-44b9-9354-4c162d541a1f] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-427776', 'volume_id': '7aed4a6d-1348-48e4-857d-eed399e897d8', 'name': 'volume-7aed4a6d-1348-48e4-857d-eed399e897d8', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '19e10ee4-99d1-44b9-9354-4c162d541a1f', 'attached_at': '', 'detached_at': '', 'volume_id': '7aed4a6d-1348-48e4-857d-eed399e897d8', 'serial': '7aed4a6d-1348-48e4-857d-eed399e897d8'} {{(pid=63279) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 2373.941582] env[63279]: DEBUG oslo_concurrency.lockutils [None req-331cf398-3de2-41fb-ab80-1d976a7a1c78 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Lock "interface-6f839780-be92-4d99-a96d-1fc14c819599-5426dc4d-6f5e-46fc-9a7e-c3ba98bdc032" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 8.101s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2373.987634] env[63279]: DEBUG oslo_concurrency.lockutils [req-c54932ac-ef38-499d-88e7-fc94b47fd362 req-a46b66e0-c482-43e4-8d22-c90f41ef92e3 service nova] Releasing lock "refresh_cache-211f30e0-f5ab-4c05-975a-63fdc4fed7c7" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2374.078847] env[63279]: DEBUG oslo_concurrency.lockutils [None req-725b78cc-b60b-460f-94c6-55a5d58bf23a tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.729s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2374.099614] env[63279]: INFO nova.scheduler.client.report [None req-725b78cc-b60b-460f-94c6-55a5d58bf23a tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Deleted allocations for instance 1fca4e5c-fe2c-4b61-bed4-52c7770def7c [ 2374.205846] env[63279]: DEBUG oslo_vmware.api [None req-f6c4750e-6ea5-467e-9952-01821e2d9756 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]523c153a-8b83-3e06-2c8a-df4058a3210a, 'name': SearchDatastore_Task, 'duration_secs': 0.00941} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2374.206174] env[63279]: DEBUG oslo_concurrency.lockutils [None req-f6c4750e-6ea5-467e-9952-01821e2d9756 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2374.206409] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-f6c4750e-6ea5-467e-9952-01821e2d9756 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 246b5346-93b1-4a84-921c-d028f3554d3d] Processing image 30887889-e45b-4f67-8b3c-16216e594a90 {{(pid=63279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2374.206623] env[63279]: DEBUG oslo_concurrency.lockutils [None req-f6c4750e-6ea5-467e-9952-01821e2d9756 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2374.220311] env[63279]: DEBUG oslo_vmware.api [None req-cb9173fa-57f0-4288-972f-2634aba77f81 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52e45e8a-1971-ada9-a5d8-61746c6f684a, 'name': SearchDatastore_Task, 'duration_secs': 0.007981} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2374.224075] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c79ca4a0-ed43-4570-a622-ab3d036e6792 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2374.233422] env[63279]: DEBUG oslo_vmware.api [None req-cb9173fa-57f0-4288-972f-2634aba77f81 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Waiting for the task: (returnval){ [ 2374.233422] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]529704ad-65d2-b320-600c-51b08cf436b0" [ 2374.233422] env[63279]: _type = "Task" [ 2374.233422] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2374.233679] env[63279]: DEBUG oslo_vmware.api [None req-43bd4868-7097-4981-87a8-1d823ea75ab9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Task: {'id': task-2088128, 'name': CloneVM_Task} progress is 94%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2374.241719] env[63279]: DEBUG oslo_vmware.api [None req-cb9173fa-57f0-4288-972f-2634aba77f81 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]529704ad-65d2-b320-600c-51b08cf436b0, 'name': SearchDatastore_Task, 'duration_secs': 0.010355} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2374.241956] env[63279]: DEBUG oslo_concurrency.lockutils [None req-cb9173fa-57f0-4288-972f-2634aba77f81 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2374.242239] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-cb9173fa-57f0-4288-972f-2634aba77f81 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] 211f30e0-f5ab-4c05-975a-63fdc4fed7c7/211f30e0-f5ab-4c05-975a-63fdc4fed7c7.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2374.242505] env[63279]: DEBUG oslo_concurrency.lockutils [None req-f6c4750e-6ea5-467e-9952-01821e2d9756 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2374.242693] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-f6c4750e-6ea5-467e-9952-01821e2d9756 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2374.242995] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9ccedd69-d2bb-4ae3-960f-29c1657b90be {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2374.244611] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-76122245-b166-4910-b5d2-43b43a6748fc {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2374.250691] env[63279]: DEBUG oslo_vmware.api [None req-cb9173fa-57f0-4288-972f-2634aba77f81 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Waiting for the task: (returnval){ [ 2374.250691] env[63279]: value = "task-2088130" [ 2374.250691] env[63279]: _type = "Task" [ 2374.250691] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2374.256569] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-f6c4750e-6ea5-467e-9952-01821e2d9756 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2374.256737] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-f6c4750e-6ea5-467e-9952-01821e2d9756 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2374.257758] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7b7aa21a-239e-4e7f-a2c0-3455d3734357 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2374.263421] env[63279]: DEBUG oslo_vmware.api [None req-cb9173fa-57f0-4288-972f-2634aba77f81 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Task: {'id': task-2088130, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2374.266030] env[63279]: DEBUG oslo_vmware.api [None req-f6c4750e-6ea5-467e-9952-01821e2d9756 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Waiting for the task: (returnval){ [ 2374.266030] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]523afaec-e4f7-ff71-7d20-142aa5570fa4" [ 2374.266030] env[63279]: _type = "Task" [ 2374.266030] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2374.273038] env[63279]: DEBUG oslo_vmware.api [None req-f6c4750e-6ea5-467e-9952-01821e2d9756 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]523afaec-e4f7-ff71-7d20-142aa5570fa4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2374.347072] env[63279]: DEBUG nova.objects.instance [None req-f41695c2-d3eb-4084-840a-8f818ab55fd2 tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Lazy-loading 'flavor' on Instance uuid 19e10ee4-99d1-44b9-9354-4c162d541a1f {{(pid=63279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2374.435194] env[63279]: DEBUG nova.network.neutron [req-441afef1-5416-4142-9736-4afa26a0fd19 req-d5e2a4fb-1be3-409d-b1fd-8b277c39cf0f service nova] [instance: 246b5346-93b1-4a84-921c-d028f3554d3d] Updated VIF entry in instance network info cache for port be3c9abe-2016-4ca7-9982-16776729e694. 
{{(pid=63279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2374.435583] env[63279]: DEBUG nova.network.neutron [req-441afef1-5416-4142-9736-4afa26a0fd19 req-d5e2a4fb-1be3-409d-b1fd-8b277c39cf0f service nova] [instance: 246b5346-93b1-4a84-921c-d028f3554d3d] Updating instance_info_cache with network_info: [{"id": "be3c9abe-2016-4ca7-9982-16776729e694", "address": "fa:16:3e:95:a8:19", "network": {"id": "1237aa69-f92f-4996-893d-4007ba590d1d", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-544783943-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "996f8d6e14a14ac39f207eced547ef33", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d88b750a-0e7d-4f16-8bd5-8e6d5743b720", "external-id": "nsx-vlan-transportzone-715", "segmentation_id": 715, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbe3c9abe-20", "ovs_interfaceid": "be3c9abe-2016-4ca7-9982-16776729e694", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2374.607727] env[63279]: DEBUG oslo_concurrency.lockutils [None req-725b78cc-b60b-460f-94c6-55a5d58bf23a tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Lock "1fca4e5c-fe2c-4b61-bed4-52c7770def7c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 9.943s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2374.732177] env[63279]: DEBUG oslo_vmware.api [None req-43bd4868-7097-4981-87a8-1d823ea75ab9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Task: {'id': task-2088128, 'name': CloneVM_Task} progress is 100%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2374.765388] env[63279]: DEBUG oslo_vmware.api [None req-cb9173fa-57f0-4288-972f-2634aba77f81 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Task: {'id': task-2088130, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2374.776051] env[63279]: DEBUG oslo_vmware.api [None req-f6c4750e-6ea5-467e-9952-01821e2d9756 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]523afaec-e4f7-ff71-7d20-142aa5570fa4, 'name': SearchDatastore_Task, 'duration_secs': 0.008464} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2374.776854] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-312f92c4-2626-4fa1-85fe-24bd768dd398 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2374.782009] env[63279]: DEBUG oslo_vmware.api [None req-f6c4750e-6ea5-467e-9952-01821e2d9756 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Waiting for the task: (returnval){ [ 2374.782009] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]523ab245-c6d0-d792-7ff9-36124b09ef66" [ 2374.782009] env[63279]: _type = "Task" [ 2374.782009] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2374.790346] env[63279]: DEBUG oslo_vmware.api [None req-f6c4750e-6ea5-467e-9952-01821e2d9756 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]523ab245-c6d0-d792-7ff9-36124b09ef66, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2374.939030] env[63279]: DEBUG oslo_concurrency.lockutils [req-441afef1-5416-4142-9736-4afa26a0fd19 req-d5e2a4fb-1be3-409d-b1fd-8b277c39cf0f service nova] Releasing lock "refresh_cache-246b5346-93b1-4a84-921c-d028f3554d3d" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2375.231329] env[63279]: DEBUG oslo_vmware.api [None req-43bd4868-7097-4981-87a8-1d823ea75ab9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Task: {'id': task-2088128, 'name': CloneVM_Task, 'duration_secs': 1.563537} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2375.231329] env[63279]: INFO nova.virt.vmwareapi.vmops [None req-43bd4868-7097-4981-87a8-1d823ea75ab9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] [instance: efddec10-b496-446e-a05a-72c9f2d86ed9] Created linked-clone VM from snapshot [ 2375.232298] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a96141b7-b5dd-4031-a7fd-9340fb90ce27 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2375.239567] env[63279]: DEBUG nova.virt.vmwareapi.images [None req-43bd4868-7097-4981-87a8-1d823ea75ab9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] [instance: efddec10-b496-446e-a05a-72c9f2d86ed9] Uploading image 4faf9f78-8c3c-4094-a5ab-792d6ef75b92 {{(pid=63279) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 2375.252809] env[63279]: DEBUG oslo_concurrency.lockutils [None req-2e79dbc4-afe0-41d9-8dcc-7c81bd12b678 tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Acquiring lock "19e10ee4-99d1-44b9-9354-4c162d541a1f" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2375.264234] env[63279]: DEBUG oslo_vmware.api [None req-cb9173fa-57f0-4288-972f-2634aba77f81 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Task: {'id': task-2088130, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.534113} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2375.265988] env[63279]: DEBUG oslo_vmware.rw_handles [None req-43bd4868-7097-4981-87a8-1d823ea75ab9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 2375.265988] env[63279]: value = "vm-427791" [ 2375.265988] env[63279]: _type = "VirtualMachine" [ 2375.265988] env[63279]: }. 
{{(pid=63279) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 2375.266268] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-cb9173fa-57f0-4288-972f-2634aba77f81 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] 211f30e0-f5ab-4c05-975a-63fdc4fed7c7/211f30e0-f5ab-4c05-975a-63fdc4fed7c7.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2375.266479] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-cb9173fa-57f0-4288-972f-2634aba77f81 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] [instance: 211f30e0-f5ab-4c05-975a-63fdc4fed7c7] Extending root virtual disk to 1048576 {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2375.266700] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-325114c3-afc2-46fc-a2f8-b6854a0e0dcf {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2375.268067] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-aa7f1fde-5324-466e-8034-e8c118a63bbd {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2375.274447] env[63279]: DEBUG oslo_vmware.api [None req-cb9173fa-57f0-4288-972f-2634aba77f81 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Waiting for the task: (returnval){ [ 2375.274447] env[63279]: value = "task-2088131" [ 2375.274447] env[63279]: _type = "Task" [ 2375.274447] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2375.275611] env[63279]: DEBUG oslo_vmware.rw_handles [None req-43bd4868-7097-4981-87a8-1d823ea75ab9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Lease: (returnval){ [ 2375.275611] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52aa3fd7-83f0-0871-49ef-4b50759a770a" [ 2375.275611] env[63279]: _type = "HttpNfcLease" [ 2375.275611] env[63279]: } obtained for exporting VM: (result){ [ 2375.275611] env[63279]: value = "vm-427791" [ 2375.275611] env[63279]: _type = "VirtualMachine" [ 2375.275611] env[63279]: }. {{(pid=63279) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 2375.275893] env[63279]: DEBUG oslo_vmware.api [None req-43bd4868-7097-4981-87a8-1d823ea75ab9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Waiting for the lease: (returnval){ [ 2375.275893] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52aa3fd7-83f0-0871-49ef-4b50759a770a" [ 2375.275893] env[63279]: _type = "HttpNfcLease" [ 2375.275893] env[63279]: } to be ready. {{(pid=63279) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 2375.288816] env[63279]: DEBUG oslo_vmware.api [None req-cb9173fa-57f0-4288-972f-2634aba77f81 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Task: {'id': task-2088131, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2375.290393] env[63279]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 2375.290393] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52aa3fd7-83f0-0871-49ef-4b50759a770a" [ 2375.290393] env[63279]: _type = "HttpNfcLease" [ 2375.290393] env[63279]: } is initializing. {{(pid=63279) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 2375.295629] env[63279]: DEBUG oslo_vmware.api [None req-f6c4750e-6ea5-467e-9952-01821e2d9756 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]523ab245-c6d0-d792-7ff9-36124b09ef66, 'name': SearchDatastore_Task, 'duration_secs': 0.012367} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2375.295880] env[63279]: DEBUG oslo_concurrency.lockutils [None req-f6c4750e-6ea5-467e-9952-01821e2d9756 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2375.296163] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-f6c4750e-6ea5-467e-9952-01821e2d9756 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] 246b5346-93b1-4a84-921c-d028f3554d3d/246b5346-93b1-4a84-921c-d028f3554d3d.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2375.296415] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-28dc6eea-1da2-43e3-ae53-744aa6f9a72b {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2375.302456] env[63279]: DEBUG oslo_vmware.api [None req-f6c4750e-6ea5-467e-9952-01821e2d9756 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Waiting for the task: (returnval){ [ 2375.302456] env[63279]: value = "task-2088133" [ 2375.302456] env[63279]: _type = "Task" [ 2375.302456] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2375.310196] env[63279]: DEBUG oslo_vmware.api [None req-f6c4750e-6ea5-467e-9952-01821e2d9756 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Task: {'id': task-2088133, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2375.355077] env[63279]: DEBUG oslo_concurrency.lockutils [None req-f41695c2-d3eb-4084-840a-8f818ab55fd2 tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Lock "19e10ee4-99d1-44b9-9354-4c162d541a1f" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.295s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2375.356104] env[63279]: DEBUG oslo_concurrency.lockutils [None req-2e79dbc4-afe0-41d9-8dcc-7c81bd12b678 tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Lock "19e10ee4-99d1-44b9-9354-4c162d541a1f" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.103s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2375.356313] env[63279]: DEBUG nova.compute.manager [None req-2e79dbc4-afe0-41d9-8dcc-7c81bd12b678 tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] [instance: 19e10ee4-99d1-44b9-9354-4c162d541a1f] Checking state {{(pid=63279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2375.357363] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a61bdd03-0257-4f45-a21d-ea7510bf2530 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2375.364017] env[63279]: DEBUG nova.compute.manager [None req-2e79dbc4-afe0-41d9-8dcc-7c81bd12b678 tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] [instance: 19e10ee4-99d1-44b9-9354-4c162d541a1f] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=63279) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 2375.364909] env[63279]: DEBUG nova.objects.instance [None req-2e79dbc4-afe0-41d9-8dcc-7c81bd12b678 tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Lazy-loading 'flavor' on Instance uuid 19e10ee4-99d1-44b9-9354-4c162d541a1f {{(pid=63279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2375.539130] env[63279]: DEBUG oslo_concurrency.lockutils [None req-58427c31-6c1a-456a-adc2-d8579ca1ddff tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Acquiring lock "interface-6f839780-be92-4d99-a96d-1fc14c819599-5426dc4d-6f5e-46fc-9a7e-c3ba98bdc032" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2375.539538] env[63279]: DEBUG oslo_concurrency.lockutils [None req-58427c31-6c1a-456a-adc2-d8579ca1ddff tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Lock "interface-6f839780-be92-4d99-a96d-1fc14c819599-5426dc4d-6f5e-46fc-9a7e-c3ba98bdc032" acquired by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: waited 0.001s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2375.792753] env[63279]: DEBUG oslo_vmware.api [None req-cb9173fa-57f0-4288-972f-2634aba77f81 
tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Task: {'id': task-2088131, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.061225} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2375.798630] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-cb9173fa-57f0-4288-972f-2634aba77f81 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] [instance: 211f30e0-f5ab-4c05-975a-63fdc4fed7c7] Extended root virtual disk {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2375.799009] env[63279]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 2375.799009] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52aa3fd7-83f0-0871-49ef-4b50759a770a" [ 2375.799009] env[63279]: _type = "HttpNfcLease" [ 2375.799009] env[63279]: } is ready. {{(pid=63279) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 2375.799700] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-baf52c0f-1f83-46cb-ab29-38641e35b679 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2375.803617] env[63279]: DEBUG oslo_vmware.rw_handles [None req-43bd4868-7097-4981-87a8-1d823ea75ab9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 2375.803617] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52aa3fd7-83f0-0871-49ef-4b50759a770a" [ 2375.803617] env[63279]: _type = "HttpNfcLease" [ 2375.803617] env[63279]: }. {{(pid=63279) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 2375.804673] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cce5ff57-b3cf-48ae-b0cb-addf018b1073 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2375.827264] env[63279]: DEBUG oslo_vmware.api [None req-f6c4750e-6ea5-467e-9952-01821e2d9756 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Task: {'id': task-2088133, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.503805} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2375.827529] env[63279]: DEBUG oslo_vmware.rw_handles [None req-43bd4868-7097-4981-87a8-1d823ea75ab9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52e214e2-c653-2d92-389f-35d8cc0e20ae/disk-0.vmdk from lease info. {{(pid=63279) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 2375.827695] env[63279]: DEBUG oslo_vmware.rw_handles [None req-43bd4868-7097-4981-87a8-1d823ea75ab9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52e214e2-c653-2d92-389f-35d8cc0e20ae/disk-0.vmdk for reading. 
{{(pid=63279) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 2375.837734] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-cb9173fa-57f0-4288-972f-2634aba77f81 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] [instance: 211f30e0-f5ab-4c05-975a-63fdc4fed7c7] Reconfiguring VM instance instance-0000006e to attach disk [datastore1] 211f30e0-f5ab-4c05-975a-63fdc4fed7c7/211f30e0-f5ab-4c05-975a-63fdc4fed7c7.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2375.838872] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-f6c4750e-6ea5-467e-9952-01821e2d9756 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] 246b5346-93b1-4a84-921c-d028f3554d3d/246b5346-93b1-4a84-921c-d028f3554d3d.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2375.839123] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-f6c4750e-6ea5-467e-9952-01821e2d9756 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 246b5346-93b1-4a84-921c-d028f3554d3d] Extending root virtual disk to 1048576 {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2375.839355] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a7d1e043-4609-4902-9fdf-e8695a632f5c {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2375.907365] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-9910b7d8-a089-48f3-96b3-92f9affa4288 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2375.918559] env[63279]: DEBUG oslo_vmware.api [None req-f6c4750e-6ea5-467e-9952-01821e2d9756 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Waiting for the task: (returnval){ [ 2375.918559] env[63279]: value = "task-2088135" [ 2375.918559] env[63279]: _type = "Task" [ 2375.918559] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2375.919733] env[63279]: DEBUG oslo_vmware.api [None req-cb9173fa-57f0-4288-972f-2634aba77f81 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Waiting for the task: (returnval){ [ 2375.919733] env[63279]: value = "task-2088134" [ 2375.919733] env[63279]: _type = "Task" [ 2375.919733] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2375.933137] env[63279]: DEBUG oslo_vmware.api [None req-cb9173fa-57f0-4288-972f-2634aba77f81 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Task: {'id': task-2088134, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2375.940452] env[63279]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-c49173dc-5654-4c77-a30b-4e75a240ed31 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2376.042354] env[63279]: DEBUG oslo_concurrency.lockutils [None req-58427c31-6c1a-456a-adc2-d8579ca1ddff tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Acquiring lock "6f839780-be92-4d99-a96d-1fc14c819599" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2376.042596] env[63279]: DEBUG oslo_concurrency.lockutils [None req-58427c31-6c1a-456a-adc2-d8579ca1ddff tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Acquired lock "6f839780-be92-4d99-a96d-1fc14c819599" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2376.043586] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-671f3b12-5ed0-4031-8aa9-3dd5ededd961 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2376.063379] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f87ff6a-6781-4394-afe6-4d1a641683e9 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2376.095625] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-58427c31-6c1a-456a-adc2-d8579ca1ddff tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] [instance: 6f839780-be92-4d99-a96d-1fc14c819599] Reconfiguring VM to detach interface {{(pid=63279) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1968}} [ 2376.096591] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d54c4b3f-c80c-45ee-b2c7-88c0b488a040 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2376.116224] env[63279]: DEBUG oslo_vmware.api [None req-58427c31-6c1a-456a-adc2-d8579ca1ddff tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Waiting for the task: (returnval){ [ 2376.116224] env[63279]: value = "task-2088136" [ 2376.116224] env[63279]: _type = "Task" [ 2376.116224] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2376.124537] env[63279]: DEBUG oslo_vmware.api [None req-58427c31-6c1a-456a-adc2-d8579ca1ddff tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Task: {'id': task-2088136, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2376.414673] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-2e79dbc4-afe0-41d9-8dcc-7c81bd12b678 tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] [instance: 19e10ee4-99d1-44b9-9354-4c162d541a1f] Powering off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2376.414955] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-768f4bdf-858e-4d0d-b5da-e82530174982 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2376.424395] env[63279]: DEBUG oslo_vmware.api [None req-2e79dbc4-afe0-41d9-8dcc-7c81bd12b678 tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Waiting for the task: (returnval){ [ 2376.424395] env[63279]: value = "task-2088137" [ 2376.424395] env[63279]: _type = "Task" [ 2376.424395] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2376.434314] env[63279]: DEBUG oslo_vmware.api [None req-f6c4750e-6ea5-467e-9952-01821e2d9756 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Task: {'id': task-2088135, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.091319} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2376.435082] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-f6c4750e-6ea5-467e-9952-01821e2d9756 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 246b5346-93b1-4a84-921c-d028f3554d3d] Extended root virtual disk {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2376.436060] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37e98f0f-af66-44c7-90d0-41c11dcfcfbf {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2376.444746] env[63279]: DEBUG oslo_vmware.api [None req-2e79dbc4-afe0-41d9-8dcc-7c81bd12b678 tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Task: {'id': task-2088137, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2376.445138] env[63279]: DEBUG oslo_vmware.api [None req-cb9173fa-57f0-4288-972f-2634aba77f81 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Task: {'id': task-2088134, 'name': ReconfigVM_Task, 'duration_secs': 0.286697} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2376.445840] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-cb9173fa-57f0-4288-972f-2634aba77f81 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] [instance: 211f30e0-f5ab-4c05-975a-63fdc4fed7c7] Reconfigured VM instance instance-0000006e to attach disk [datastore1] 211f30e0-f5ab-4c05-975a-63fdc4fed7c7/211f30e0-f5ab-4c05-975a-63fdc4fed7c7.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2376.446534] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-957c8aeb-034f-42ca-b4f1-14c47e720b3b {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2376.465805] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-f6c4750e-6ea5-467e-9952-01821e2d9756 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 246b5346-93b1-4a84-921c-d028f3554d3d] Reconfiguring VM instance instance-0000006f to attach disk [datastore1] 246b5346-93b1-4a84-921c-d028f3554d3d/246b5346-93b1-4a84-921c-d028f3554d3d.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2376.466632] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9a8e1613-1031-4a74-a517-7cde9e4e43ad {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2376.482525] env[63279]: DEBUG oslo_vmware.api [None req-cb9173fa-57f0-4288-972f-2634aba77f81 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Waiting for the task: (returnval){ [ 2376.482525] env[63279]: value = "task-2088138" [ 2376.482525] env[63279]: _type = "Task" [ 2376.482525] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2376.487796] env[63279]: DEBUG oslo_vmware.api [None req-f6c4750e-6ea5-467e-9952-01821e2d9756 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Waiting for the task: (returnval){ [ 2376.487796] env[63279]: value = "task-2088139" [ 2376.487796] env[63279]: _type = "Task" [ 2376.487796] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2376.493795] env[63279]: DEBUG oslo_vmware.api [None req-cb9173fa-57f0-4288-972f-2634aba77f81 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Task: {'id': task-2088138, 'name': Rename_Task} progress is 10%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2376.499118] env[63279]: DEBUG oslo_vmware.api [None req-f6c4750e-6ea5-467e-9952-01821e2d9756 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Task: {'id': task-2088139, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2376.575958] env[63279]: DEBUG nova.compute.manager [None req-cb50f17b-9545-4aef-91a6-f0a031fbfe1b tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: fcdd3076-2b53-4850-9730-2f877e2cabfd] Stashing vm_state: active {{(pid=63279) _prep_resize /opt/stack/nova/nova/compute/manager.py:6136}} [ 2376.627495] env[63279]: DEBUG oslo_vmware.api [None req-58427c31-6c1a-456a-adc2-d8579ca1ddff tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Task: {'id': task-2088136, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2376.938604] env[63279]: DEBUG oslo_vmware.api [None req-2e79dbc4-afe0-41d9-8dcc-7c81bd12b678 tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Task: {'id': task-2088137, 'name': PowerOffVM_Task} progress is 100%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2376.991986] env[63279]: DEBUG oslo_vmware.api [None req-cb9173fa-57f0-4288-972f-2634aba77f81 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Task: {'id': task-2088138, 'name': Rename_Task, 'duration_secs': 0.143167} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2376.996905] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-cb9173fa-57f0-4288-972f-2634aba77f81 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] [instance: 211f30e0-f5ab-4c05-975a-63fdc4fed7c7] Powering on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2376.997231] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5aececfc-8a62-4683-8f1c-9618f551e696 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2377.003741] env[63279]: DEBUG oslo_vmware.api [None req-f6c4750e-6ea5-467e-9952-01821e2d9756 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Task: {'id': task-2088139, 'name': ReconfigVM_Task, 'duration_secs': 0.277792} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2377.005046] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-f6c4750e-6ea5-467e-9952-01821e2d9756 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 246b5346-93b1-4a84-921c-d028f3554d3d] Reconfigured VM instance instance-0000006f to attach disk [datastore1] 246b5346-93b1-4a84-921c-d028f3554d3d/246b5346-93b1-4a84-921c-d028f3554d3d.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2377.005764] env[63279]: DEBUG oslo_vmware.api [None req-cb9173fa-57f0-4288-972f-2634aba77f81 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Waiting for the task: (returnval){ [ 2377.005764] env[63279]: value = "task-2088140" [ 2377.005764] env[63279]: _type = "Task" [ 2377.005764] env[63279]: } to complete. 
{{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2377.006016] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-56d31037-b8f0-4a29-956c-85c9b5ae2883 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2377.014251] env[63279]: DEBUG oslo_vmware.api [None req-f6c4750e-6ea5-467e-9952-01821e2d9756 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Waiting for the task: (returnval){ [ 2377.014251] env[63279]: value = "task-2088141" [ 2377.014251] env[63279]: _type = "Task" [ 2377.014251] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2377.696737] env[63279]: DEBUG oslo_vmware.api [None req-cb9173fa-57f0-4288-972f-2634aba77f81 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Task: {'id': task-2088140, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2377.697123] env[63279]: WARNING oslo_vmware.common.loopingcall [None req-cb9173fa-57f0-4288-972f-2634aba77f81 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] task run outlasted interval by 0.18925400000000003 sec [ 2377.709542] env[63279]: DEBUG oslo_vmware.api [None req-f6c4750e-6ea5-467e-9952-01821e2d9756 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Task: {'id': task-2088141, 'name': Rename_Task, 'duration_secs': 0.172895} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2377.710198] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-f6c4750e-6ea5-467e-9952-01821e2d9756 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 246b5346-93b1-4a84-921c-d028f3554d3d] Powering on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2377.716654] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-97f82503-4663-4a3b-afac-7defd80a8b18 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2377.717454] env[63279]: DEBUG oslo_vmware.api [None req-2e79dbc4-afe0-41d9-8dcc-7c81bd12b678 tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Task: {'id': task-2088137, 'name': PowerOffVM_Task, 'duration_secs': 1.228878} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2377.717779] env[63279]: DEBUG oslo_vmware.api [None req-58427c31-6c1a-456a-adc2-d8579ca1ddff tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Task: {'id': task-2088136, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2377.721384] env[63279]: DEBUG oslo_concurrency.lockutils [None req-cb50f17b-9545-4aef-91a6-f0a031fbfe1b tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2377.721699] env[63279]: DEBUG oslo_concurrency.lockutils [None req-cb50f17b-9545-4aef-91a6-f0a031fbfe1b tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2377.723826] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-2e79dbc4-afe0-41d9-8dcc-7c81bd12b678 tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] [instance: 19e10ee4-99d1-44b9-9354-4c162d541a1f] Powered off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2377.723826] env[63279]: DEBUG nova.compute.manager [None req-2e79dbc4-afe0-41d9-8dcc-7c81bd12b678 tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] [instance: 19e10ee4-99d1-44b9-9354-4c162d541a1f] Checking state {{(pid=63279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2377.724173] env[63279]: DEBUG oslo_vmware.api [None req-cb9173fa-57f0-4288-972f-2634aba77f81 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Task: {'id': task-2088140, 'name': PowerOnVM_Task, 'duration_secs': 0.453592} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2377.725614] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a28ef33a-2ca1-435e-a11f-1bf71f341f05 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2377.728245] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-cb9173fa-57f0-4288-972f-2634aba77f81 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] [instance: 211f30e0-f5ab-4c05-975a-63fdc4fed7c7] Powered on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2377.728478] env[63279]: INFO nova.compute.manager [None req-cb9173fa-57f0-4288-972f-2634aba77f81 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] [instance: 211f30e0-f5ab-4c05-975a-63fdc4fed7c7] Took 10.33 seconds to spawn the instance on the hypervisor. 
[ 2377.728694] env[63279]: DEBUG nova.compute.manager [None req-cb9173fa-57f0-4288-972f-2634aba77f81 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] [instance: 211f30e0-f5ab-4c05-975a-63fdc4fed7c7] Checking state {{(pid=63279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2377.730573] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-702f592f-2727-4cfa-9fb8-1f2408f97c72 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2377.733484] env[63279]: DEBUG oslo_vmware.api [None req-f6c4750e-6ea5-467e-9952-01821e2d9756 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Waiting for the task: (returnval){ [ 2377.733484] env[63279]: value = "task-2088142" [ 2377.733484] env[63279]: _type = "Task" [ 2377.733484] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2377.750273] env[63279]: DEBUG oslo_vmware.api [None req-f6c4750e-6ea5-467e-9952-01821e2d9756 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Task: {'id': task-2088142, 'name': PowerOnVM_Task} progress is 33%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2378.205484] env[63279]: DEBUG oslo_vmware.api [None req-58427c31-6c1a-456a-adc2-d8579ca1ddff tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Task: {'id': task-2088136, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2378.227786] env[63279]: INFO nova.compute.claims [None req-cb50f17b-9545-4aef-91a6-f0a031fbfe1b tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: fcdd3076-2b53-4850-9730-2f877e2cabfd] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2378.248992] env[63279]: DEBUG oslo_vmware.api [None req-f6c4750e-6ea5-467e-9952-01821e2d9756 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Task: {'id': task-2088142, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2378.252576] env[63279]: DEBUG oslo_concurrency.lockutils [None req-2e79dbc4-afe0-41d9-8dcc-7c81bd12b678 tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Lock "19e10ee4-99d1-44b9-9354-4c162d541a1f" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.896s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2378.253843] env[63279]: INFO nova.compute.manager [None req-cb9173fa-57f0-4288-972f-2634aba77f81 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] [instance: 211f30e0-f5ab-4c05-975a-63fdc4fed7c7] Took 15.99 seconds to build instance. [ 2378.707166] env[63279]: DEBUG oslo_vmware.api [None req-58427c31-6c1a-456a-adc2-d8579ca1ddff tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Task: {'id': task-2088136, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2378.735400] env[63279]: INFO nova.compute.resource_tracker [None req-cb50f17b-9545-4aef-91a6-f0a031fbfe1b tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: fcdd3076-2b53-4850-9730-2f877e2cabfd] Updating resource usage from migration 745b9969-60e3-4261-828f-50d29b1a6544 [ 2378.748398] env[63279]: DEBUG oslo_vmware.api [None req-f6c4750e-6ea5-467e-9952-01821e2d9756 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Task: {'id': task-2088142, 'name': PowerOnVM_Task, 'duration_secs': 0.683475} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2378.748535] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-f6c4750e-6ea5-467e-9952-01821e2d9756 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 246b5346-93b1-4a84-921c-d028f3554d3d] Powered on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2378.748871] env[63279]: INFO nova.compute.manager [None req-f6c4750e-6ea5-467e-9952-01821e2d9756 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 246b5346-93b1-4a84-921c-d028f3554d3d] Took 9.15 seconds to spawn the instance on the hypervisor. [ 2378.748967] env[63279]: DEBUG nova.compute.manager [None req-f6c4750e-6ea5-467e-9952-01821e2d9756 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 246b5346-93b1-4a84-921c-d028f3554d3d] Checking state {{(pid=63279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2378.751052] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81f9cf33-57dd-4af4-87ea-b6b188c6d862 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2378.761950] env[63279]: DEBUG oslo_concurrency.lockutils [None req-cb9173fa-57f0-4288-972f-2634aba77f81 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Lock "211f30e0-f5ab-4c05-975a-63fdc4fed7c7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 17.503s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2378.848253] env[63279]: DEBUG nova.objects.instance [None req-59def4ca-3d00-4f64-89f0-4fa6460408be tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Lazy-loading 'flavor' on Instance uuid 19e10ee4-99d1-44b9-9354-4c162d541a1f {{(pid=63279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2378.873522] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9c80663-e9dd-4c0f-9a92-a776046248c0 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2378.881914] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b36f8f9-36b1-405f-ba4b-fda74a79c784 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2378.912852] env[63279]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-245ea153-f1b3-4926-9f8d-1cb5d3b83a74 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2378.920733] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6415d22-acea-4750-87ca-25946a72eda8 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2378.934567] env[63279]: DEBUG nova.compute.provider_tree [None req-cb50f17b-9545-4aef-91a6-f0a031fbfe1b tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Inventory has not changed in ProviderTree for provider: 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2379.208314] env[63279]: DEBUG oslo_vmware.api [None req-58427c31-6c1a-456a-adc2-d8579ca1ddff tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Task: {'id': task-2088136, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2379.270076] env[63279]: INFO nova.compute.manager [None req-f6c4750e-6ea5-467e-9952-01821e2d9756 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 246b5346-93b1-4a84-921c-d028f3554d3d] Took 14.80 seconds to build instance. [ 2379.353773] env[63279]: DEBUG oslo_concurrency.lockutils [None req-59def4ca-3d00-4f64-89f0-4fa6460408be tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Acquiring lock "refresh_cache-19e10ee4-99d1-44b9-9354-4c162d541a1f" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2379.353992] env[63279]: DEBUG oslo_concurrency.lockutils [None req-59def4ca-3d00-4f64-89f0-4fa6460408be tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Acquired lock "refresh_cache-19e10ee4-99d1-44b9-9354-4c162d541a1f" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2379.355029] env[63279]: DEBUG nova.network.neutron [None req-59def4ca-3d00-4f64-89f0-4fa6460408be tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] [instance: 19e10ee4-99d1-44b9-9354-4c162d541a1f] Building network info cache for instance {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2379.355029] env[63279]: DEBUG nova.objects.instance [None req-59def4ca-3d00-4f64-89f0-4fa6460408be tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Lazy-loading 'info_cache' on Instance uuid 19e10ee4-99d1-44b9-9354-4c162d541a1f {{(pid=63279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2379.437678] env[63279]: DEBUG nova.scheduler.client.report [None req-cb50f17b-9545-4aef-91a6-f0a031fbfe1b tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Inventory has not changed for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 
400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2379.590395] env[63279]: DEBUG oslo_concurrency.lockutils [None req-c5259d80-0b0b-40a7-990d-a8a925e7909a tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Acquiring lock "211f30e0-f5ab-4c05-975a-63fdc4fed7c7" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2379.590616] env[63279]: DEBUG oslo_concurrency.lockutils [None req-c5259d80-0b0b-40a7-990d-a8a925e7909a tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Lock "211f30e0-f5ab-4c05-975a-63fdc4fed7c7" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2379.590868] env[63279]: DEBUG oslo_concurrency.lockutils [None req-c5259d80-0b0b-40a7-990d-a8a925e7909a tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Acquiring lock "211f30e0-f5ab-4c05-975a-63fdc4fed7c7-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2379.591105] env[63279]: DEBUG oslo_concurrency.lockutils [None req-c5259d80-0b0b-40a7-990d-a8a925e7909a tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Lock "211f30e0-f5ab-4c05-975a-63fdc4fed7c7-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2379.591287] env[63279]: DEBUG oslo_concurrency.lockutils [None req-c5259d80-0b0b-40a7-990d-a8a925e7909a tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Lock "211f30e0-f5ab-4c05-975a-63fdc4fed7c7-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2379.593427] env[63279]: INFO nova.compute.manager [None req-c5259d80-0b0b-40a7-990d-a8a925e7909a tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] [instance: 211f30e0-f5ab-4c05-975a-63fdc4fed7c7] Terminating instance [ 2379.707707] env[63279]: DEBUG oslo_vmware.api [None req-58427c31-6c1a-456a-adc2-d8579ca1ddff tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Task: {'id': task-2088136, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2379.774215] env[63279]: DEBUG oslo_concurrency.lockutils [None req-f6c4750e-6ea5-467e-9952-01821e2d9756 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Lock "246b5346-93b1-4a84-921c-d028f3554d3d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 16.308s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2379.814522] env[63279]: DEBUG nova.compute.manager [req-12cd1219-7f1b-4b67-a21b-3dda3659dde0 req-56f61346-bbb0-464e-8c3d-79c8ab41d75f service nova] [instance: 246b5346-93b1-4a84-921c-d028f3554d3d] Received event network-changed-be3c9abe-2016-4ca7-9982-16776729e694 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2379.814746] env[63279]: DEBUG nova.compute.manager [req-12cd1219-7f1b-4b67-a21b-3dda3659dde0 req-56f61346-bbb0-464e-8c3d-79c8ab41d75f service nova] [instance: 246b5346-93b1-4a84-921c-d028f3554d3d] Refreshing instance network info cache due to event network-changed-be3c9abe-2016-4ca7-9982-16776729e694. {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11655}} [ 2379.814943] env[63279]: DEBUG oslo_concurrency.lockutils [req-12cd1219-7f1b-4b67-a21b-3dda3659dde0 req-56f61346-bbb0-464e-8c3d-79c8ab41d75f service nova] Acquiring lock "refresh_cache-246b5346-93b1-4a84-921c-d028f3554d3d" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2379.815141] env[63279]: DEBUG oslo_concurrency.lockutils [req-12cd1219-7f1b-4b67-a21b-3dda3659dde0 req-56f61346-bbb0-464e-8c3d-79c8ab41d75f service nova] Acquired lock "refresh_cache-246b5346-93b1-4a84-921c-d028f3554d3d" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2379.815262] env[63279]: DEBUG nova.network.neutron [req-12cd1219-7f1b-4b67-a21b-3dda3659dde0 req-56f61346-bbb0-464e-8c3d-79c8ab41d75f service nova] [instance: 246b5346-93b1-4a84-921c-d028f3554d3d] Refreshing network info cache for port be3c9abe-2016-4ca7-9982-16776729e694 {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2379.857906] env[63279]: DEBUG nova.objects.base [None req-59def4ca-3d00-4f64-89f0-4fa6460408be tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Object Instance<19e10ee4-99d1-44b9-9354-4c162d541a1f> lazy-loaded attributes: flavor,info_cache {{(pid=63279) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 2379.943191] env[63279]: DEBUG oslo_concurrency.lockutils [None req-cb50f17b-9545-4aef-91a6-f0a031fbfe1b tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.221s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2379.943407] env[63279]: INFO nova.compute.manager [None req-cb50f17b-9545-4aef-91a6-f0a031fbfe1b tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: fcdd3076-2b53-4850-9730-2f877e2cabfd] Migrating [ 2380.097202] env[63279]: DEBUG nova.compute.manager [None req-c5259d80-0b0b-40a7-990d-a8a925e7909a tempest-ServersTestJSON-1420611653 
tempest-ServersTestJSON-1420611653-project-member] [instance: 211f30e0-f5ab-4c05-975a-63fdc4fed7c7] Start destroying the instance on the hypervisor. {{(pid=63279) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2380.097570] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-c5259d80-0b0b-40a7-990d-a8a925e7909a tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] [instance: 211f30e0-f5ab-4c05-975a-63fdc4fed7c7] Destroying instance {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2380.098722] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0fa3bcab-1a85-425f-9efe-dc09e8b963d7 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2380.108422] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-c5259d80-0b0b-40a7-990d-a8a925e7909a tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] [instance: 211f30e0-f5ab-4c05-975a-63fdc4fed7c7] Powering off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2380.108732] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7a7ba7cf-0ca0-49e2-b50a-e7ea664766c0 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2380.116019] env[63279]: DEBUG oslo_vmware.api [None req-c5259d80-0b0b-40a7-990d-a8a925e7909a tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Waiting for the task: (returnval){ [ 2380.116019] env[63279]: value = "task-2088143" [ 2380.116019] env[63279]: _type = "Task" [ 2380.116019] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2380.125819] env[63279]: DEBUG oslo_vmware.api [None req-c5259d80-0b0b-40a7-990d-a8a925e7909a tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Task: {'id': task-2088143, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2380.208637] env[63279]: DEBUG oslo_vmware.api [None req-58427c31-6c1a-456a-adc2-d8579ca1ddff tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Task: {'id': task-2088136, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2380.464120] env[63279]: DEBUG oslo_concurrency.lockutils [None req-cb50f17b-9545-4aef-91a6-f0a031fbfe1b tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Acquiring lock "refresh_cache-fcdd3076-2b53-4850-9730-2f877e2cabfd" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2380.464335] env[63279]: DEBUG oslo_concurrency.lockutils [None req-cb50f17b-9545-4aef-91a6-f0a031fbfe1b tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Acquired lock "refresh_cache-fcdd3076-2b53-4850-9730-2f877e2cabfd" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2380.464555] env[63279]: DEBUG nova.network.neutron [None req-cb50f17b-9545-4aef-91a6-f0a031fbfe1b tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: fcdd3076-2b53-4850-9730-2f877e2cabfd] Building network info cache for instance {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2380.548008] env[63279]: DEBUG nova.network.neutron [req-12cd1219-7f1b-4b67-a21b-3dda3659dde0 req-56f61346-bbb0-464e-8c3d-79c8ab41d75f service nova] [instance: 246b5346-93b1-4a84-921c-d028f3554d3d] Updated VIF entry in instance network info cache for port be3c9abe-2016-4ca7-9982-16776729e694. {{(pid=63279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2380.548345] env[63279]: DEBUG nova.network.neutron [req-12cd1219-7f1b-4b67-a21b-3dda3659dde0 req-56f61346-bbb0-464e-8c3d-79c8ab41d75f service nova] [instance: 246b5346-93b1-4a84-921c-d028f3554d3d] Updating instance_info_cache with network_info: [{"id": "be3c9abe-2016-4ca7-9982-16776729e694", "address": "fa:16:3e:95:a8:19", "network": {"id": "1237aa69-f92f-4996-893d-4007ba590d1d", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-544783943-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.237", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "996f8d6e14a14ac39f207eced547ef33", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d88b750a-0e7d-4f16-8bd5-8e6d5743b720", "external-id": "nsx-vlan-transportzone-715", "segmentation_id": 715, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbe3c9abe-20", "ovs_interfaceid": "be3c9abe-2016-4ca7-9982-16776729e694", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2380.586357] env[63279]: DEBUG nova.network.neutron [None req-59def4ca-3d00-4f64-89f0-4fa6460408be tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] [instance: 19e10ee4-99d1-44b9-9354-4c162d541a1f] Updating instance_info_cache with network_info: [{"id": "15cdfe62-d983-4e01-beb9-1947d51443e0", 
"address": "fa:16:3e:a4:65:02", "network": {"id": "f7000655-b20b-461d-9d08-f4cb8a85522e", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-686033866-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.230", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7674483744fe490b8cbe75532dfad77c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cbe1725d-6711-4e92-9a4e-d4802651e7d0", "external-id": "nsx-vlan-transportzone-679", "segmentation_id": 679, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap15cdfe62-d9", "ovs_interfaceid": "15cdfe62-d983-4e01-beb9-1947d51443e0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2380.625643] env[63279]: DEBUG oslo_vmware.api [None req-c5259d80-0b0b-40a7-990d-a8a925e7909a tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Task: {'id': task-2088143, 'name': PowerOffVM_Task, 'duration_secs': 0.286644} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2380.625918] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-c5259d80-0b0b-40a7-990d-a8a925e7909a tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] [instance: 211f30e0-f5ab-4c05-975a-63fdc4fed7c7] Powered off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2380.626101] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-c5259d80-0b0b-40a7-990d-a8a925e7909a tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] [instance: 211f30e0-f5ab-4c05-975a-63fdc4fed7c7] Unregistering the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2380.626361] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-39cfe2a1-2f59-41ba-a738-0a453f9d0f71 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2380.709355] env[63279]: DEBUG oslo_vmware.api [None req-58427c31-6c1a-456a-adc2-d8579ca1ddff tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Task: {'id': task-2088136, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2380.736617] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-c5259d80-0b0b-40a7-990d-a8a925e7909a tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] [instance: 211f30e0-f5ab-4c05-975a-63fdc4fed7c7] Unregistered the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2380.736851] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-c5259d80-0b0b-40a7-990d-a8a925e7909a tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] [instance: 211f30e0-f5ab-4c05-975a-63fdc4fed7c7] Deleting contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2380.737090] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-c5259d80-0b0b-40a7-990d-a8a925e7909a tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Deleting the datastore file [datastore1] 211f30e0-f5ab-4c05-975a-63fdc4fed7c7 {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2380.737457] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-667f992c-5aca-4bcc-b0a5-bd8476724c21 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2380.744192] env[63279]: DEBUG oslo_vmware.api [None req-c5259d80-0b0b-40a7-990d-a8a925e7909a tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Waiting for the task: (returnval){ [ 2380.744192] env[63279]: value = "task-2088145" [ 2380.744192] env[63279]: _type = "Task" [ 2380.744192] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2380.752528] env[63279]: DEBUG oslo_vmware.api [None req-c5259d80-0b0b-40a7-990d-a8a925e7909a tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Task: {'id': task-2088145, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2381.050966] env[63279]: DEBUG oslo_concurrency.lockutils [req-12cd1219-7f1b-4b67-a21b-3dda3659dde0 req-56f61346-bbb0-464e-8c3d-79c8ab41d75f service nova] Releasing lock "refresh_cache-246b5346-93b1-4a84-921c-d028f3554d3d" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2381.089366] env[63279]: DEBUG oslo_concurrency.lockutils [None req-59def4ca-3d00-4f64-89f0-4fa6460408be tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Releasing lock "refresh_cache-19e10ee4-99d1-44b9-9354-4c162d541a1f" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2381.200563] env[63279]: DEBUG nova.network.neutron [None req-cb50f17b-9545-4aef-91a6-f0a031fbfe1b tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: fcdd3076-2b53-4850-9730-2f877e2cabfd] Updating instance_info_cache with network_info: [{"id": "7baa1106-63c3-49cd-8d80-54c763d1b2b3", "address": "fa:16:3e:cc:16:0a", "network": {"id": "82ca068b-acea-4f60-b0b3-68ea1e08aebe", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-571699353-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.165", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f39174e9ff5649e0ade4391da383dfb2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "57d4be17-536f-4a81-bea9-6547bd50f4a3", "external-id": "nsx-vlan-transportzone-163", "segmentation_id": 163, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7baa1106-63", "ovs_interfaceid": "7baa1106-63c3-49cd-8d80-54c763d1b2b3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2381.211313] env[63279]: DEBUG oslo_vmware.api [None req-58427c31-6c1a-456a-adc2-d8579ca1ddff tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Task: {'id': task-2088136, 'name': ReconfigVM_Task} progress is 18%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2381.254060] env[63279]: DEBUG oslo_vmware.api [None req-c5259d80-0b0b-40a7-990d-a8a925e7909a tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Task: {'id': task-2088145, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.355532} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2381.254369] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-c5259d80-0b0b-40a7-990d-a8a925e7909a tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Deleted the datastore file {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2381.254577] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-c5259d80-0b0b-40a7-990d-a8a925e7909a tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] [instance: 211f30e0-f5ab-4c05-975a-63fdc4fed7c7] Deleted contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2381.254762] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-c5259d80-0b0b-40a7-990d-a8a925e7909a tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] [instance: 211f30e0-f5ab-4c05-975a-63fdc4fed7c7] Instance destroyed {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2381.254957] env[63279]: INFO nova.compute.manager [None req-c5259d80-0b0b-40a7-990d-a8a925e7909a tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] [instance: 211f30e0-f5ab-4c05-975a-63fdc4fed7c7] Took 1.16 seconds to destroy the instance on the hypervisor. [ 2381.255234] env[63279]: DEBUG oslo.service.loopingcall [None req-c5259d80-0b0b-40a7-990d-a8a925e7909a tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2381.255450] env[63279]: DEBUG nova.compute.manager [-] [instance: 211f30e0-f5ab-4c05-975a-63fdc4fed7c7] Deallocating network for instance {{(pid=63279) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2381.255555] env[63279]: DEBUG nova.network.neutron [-] [instance: 211f30e0-f5ab-4c05-975a-63fdc4fed7c7] deallocate_for_instance() {{(pid=63279) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2381.706417] env[63279]: DEBUG oslo_concurrency.lockutils [None req-cb50f17b-9545-4aef-91a6-f0a031fbfe1b tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Releasing lock "refresh_cache-fcdd3076-2b53-4850-9730-2f877e2cabfd" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2381.713624] env[63279]: DEBUG oslo_vmware.api [None req-58427c31-6c1a-456a-adc2-d8579ca1ddff tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Task: {'id': task-2088136, 'name': ReconfigVM_Task} progress is 18%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2381.842285] env[63279]: DEBUG nova.compute.manager [req-a0faf4a0-f9c7-4bc5-bc5e-727ce042bf53 req-4bd280e3-3cdf-44d8-a9d4-2581b93a8ef4 service nova] [instance: 211f30e0-f5ab-4c05-975a-63fdc4fed7c7] Received event network-vif-deleted-a3f9bcf2-6c4a-42e2-867d-c0fe22f86ac9 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2381.842578] env[63279]: INFO nova.compute.manager [req-a0faf4a0-f9c7-4bc5-bc5e-727ce042bf53 req-4bd280e3-3cdf-44d8-a9d4-2581b93a8ef4 service nova] [instance: 211f30e0-f5ab-4c05-975a-63fdc4fed7c7] Neutron deleted interface a3f9bcf2-6c4a-42e2-867d-c0fe22f86ac9; detaching it from the instance and deleting it from the info cache [ 2381.842817] env[63279]: DEBUG nova.network.neutron [req-a0faf4a0-f9c7-4bc5-bc5e-727ce042bf53 req-4bd280e3-3cdf-44d8-a9d4-2581b93a8ef4 service nova] [instance: 211f30e0-f5ab-4c05-975a-63fdc4fed7c7] Updating instance_info_cache with network_info: [] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2382.046783] env[63279]: DEBUG nova.network.neutron [-] [instance: 211f30e0-f5ab-4c05-975a-63fdc4fed7c7] Updating instance_info_cache with network_info: [] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2382.094780] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-59def4ca-3d00-4f64-89f0-4fa6460408be tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] [instance: 19e10ee4-99d1-44b9-9354-4c162d541a1f] Powering on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2382.095113] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-fd802f79-1679-46ad-83af-9650b2c11e64 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2382.101763] env[63279]: DEBUG oslo_vmware.api [None req-59def4ca-3d00-4f64-89f0-4fa6460408be tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Waiting for the task: (returnval){ [ 2382.101763] env[63279]: value = "task-2088146" [ 2382.101763] env[63279]: _type = "Task" [ 2382.101763] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2382.110216] env[63279]: DEBUG oslo_vmware.api [None req-59def4ca-3d00-4f64-89f0-4fa6460408be tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Task: {'id': task-2088146, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2382.216904] env[63279]: DEBUG oslo_vmware.api [None req-58427c31-6c1a-456a-adc2-d8579ca1ddff tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Task: {'id': task-2088136, 'name': ReconfigVM_Task, 'duration_secs': 5.886266} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2382.217415] env[63279]: DEBUG oslo_concurrency.lockutils [None req-58427c31-6c1a-456a-adc2-d8579ca1ddff tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Releasing lock "6f839780-be92-4d99-a96d-1fc14c819599" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2382.217626] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-58427c31-6c1a-456a-adc2-d8579ca1ddff tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] [instance: 6f839780-be92-4d99-a96d-1fc14c819599] Reconfigured VM to detach interface {{(pid=63279) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1978}} [ 2382.346198] env[63279]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-7ff98b7d-262c-4a5f-96d5-e8ff741f8796 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2382.355663] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-afdbfba4-549f-47da-b681-f1b35c875c68 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2382.384067] env[63279]: DEBUG nova.compute.manager [req-a0faf4a0-f9c7-4bc5-bc5e-727ce042bf53 req-4bd280e3-3cdf-44d8-a9d4-2581b93a8ef4 service nova] [instance: 211f30e0-f5ab-4c05-975a-63fdc4fed7c7] Detach interface failed, port_id=a3f9bcf2-6c4a-42e2-867d-c0fe22f86ac9, reason: Instance 211f30e0-f5ab-4c05-975a-63fdc4fed7c7 could not be found. {{(pid=63279) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11484}} [ 2382.551289] env[63279]: INFO nova.compute.manager [-] [instance: 211f30e0-f5ab-4c05-975a-63fdc4fed7c7] Took 1.29 seconds to deallocate network for instance. [ 2382.611749] env[63279]: DEBUG oslo_vmware.api [None req-59def4ca-3d00-4f64-89f0-4fa6460408be tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Task: {'id': task-2088146, 'name': PowerOnVM_Task, 'duration_secs': 0.494596} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2382.611749] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-59def4ca-3d00-4f64-89f0-4fa6460408be tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] [instance: 19e10ee4-99d1-44b9-9354-4c162d541a1f] Powered on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2382.611749] env[63279]: DEBUG nova.compute.manager [None req-59def4ca-3d00-4f64-89f0-4fa6460408be tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] [instance: 19e10ee4-99d1-44b9-9354-4c162d541a1f] Checking state {{(pid=63279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2382.612534] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94225147-dbe0-463e-950e-65c7f7654051 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2383.056720] env[63279]: DEBUG oslo_concurrency.lockutils [None req-c5259d80-0b0b-40a7-990d-a8a925e7909a tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2383.057089] env[63279]: DEBUG oslo_concurrency.lockutils [None req-c5259d80-0b0b-40a7-990d-a8a925e7909a tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2383.057209] env[63279]: DEBUG nova.objects.instance [None req-c5259d80-0b0b-40a7-990d-a8a925e7909a tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Lazy-loading 'resources' on Instance uuid 211f30e0-f5ab-4c05-975a-63fdc4fed7c7 {{(pid=63279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2383.225046] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36426e64-ca42-4ece-865d-add023191b39 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2383.244099] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-cb50f17b-9545-4aef-91a6-f0a031fbfe1b tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: fcdd3076-2b53-4850-9730-2f877e2cabfd] Updating instance 'fcdd3076-2b53-4850-9730-2f877e2cabfd' progress to 0 {{(pid=63279) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2383.596036] env[63279]: DEBUG oslo_concurrency.lockutils [None req-58427c31-6c1a-456a-adc2-d8579ca1ddff tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Acquiring lock "refresh_cache-6f839780-be92-4d99-a96d-1fc14c819599" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2383.596236] env[63279]: DEBUG oslo_concurrency.lockutils [None req-58427c31-6c1a-456a-adc2-d8579ca1ddff tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Acquired lock 
"refresh_cache-6f839780-be92-4d99-a96d-1fc14c819599" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2383.596418] env[63279]: DEBUG nova.network.neutron [None req-58427c31-6c1a-456a-adc2-d8579ca1ddff tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] [instance: 6f839780-be92-4d99-a96d-1fc14c819599] Building network info cache for instance {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2383.696414] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1bd3a176-5f34-44ba-bb84-dcce3bdc3b7d {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2383.704699] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b9e2f07-757f-4d52-ba81-770181a2a9dc {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2383.737308] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-543f99ed-bb7c-4e75-8067-2602aa7c549b {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2383.747059] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a05908b-378e-4a7b-867c-5f0077893225 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2383.752478] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-cb50f17b-9545-4aef-91a6-f0a031fbfe1b tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: fcdd3076-2b53-4850-9730-2f877e2cabfd] Powering off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2383.752748] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-62796828-f6aa-4e4a-9378-479b2031c0ee {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2383.764496] env[63279]: DEBUG nova.compute.provider_tree [None req-c5259d80-0b0b-40a7-990d-a8a925e7909a tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Inventory has not changed in ProviderTree for provider: 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2383.767199] env[63279]: DEBUG oslo_vmware.api [None req-cb50f17b-9545-4aef-91a6-f0a031fbfe1b tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Waiting for the task: (returnval){ [ 2383.767199] env[63279]: value = "task-2088147" [ 2383.767199] env[63279]: _type = "Task" [ 2383.767199] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2383.777115] env[63279]: DEBUG oslo_vmware.api [None req-cb50f17b-9545-4aef-91a6-f0a031fbfe1b tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Task: {'id': task-2088147, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2384.271146] env[63279]: DEBUG nova.scheduler.client.report [None req-c5259d80-0b0b-40a7-990d-a8a925e7909a tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Inventory has not changed for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2384.278602] env[63279]: DEBUG nova.compute.manager [req-d6b5f81f-05fc-43d5-b7a2-a70583455498 req-51c8676f-6def-4a06-b6ac-c897ec30fa8e service nova] [instance: 6f839780-be92-4d99-a96d-1fc14c819599] Received event network-changed-d0192def-50a0-40c0-9921-fbdf13e63ffb {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2384.278806] env[63279]: DEBUG nova.compute.manager [req-d6b5f81f-05fc-43d5-b7a2-a70583455498 req-51c8676f-6def-4a06-b6ac-c897ec30fa8e service nova] [instance: 6f839780-be92-4d99-a96d-1fc14c819599] Refreshing instance network info cache due to event network-changed-d0192def-50a0-40c0-9921-fbdf13e63ffb. {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11655}} [ 2384.280837] env[63279]: DEBUG oslo_concurrency.lockutils [req-d6b5f81f-05fc-43d5-b7a2-a70583455498 req-51c8676f-6def-4a06-b6ac-c897ec30fa8e service nova] Acquiring lock "refresh_cache-6f839780-be92-4d99-a96d-1fc14c819599" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2384.292375] env[63279]: DEBUG oslo_vmware.api [None req-cb50f17b-9545-4aef-91a6-f0a031fbfe1b tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Task: {'id': task-2088147, 'name': PowerOffVM_Task, 'duration_secs': 0.216703} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2384.292652] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-cb50f17b-9545-4aef-91a6-f0a031fbfe1b tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: fcdd3076-2b53-4850-9730-2f877e2cabfd] Powered off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2384.292827] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-cb50f17b-9545-4aef-91a6-f0a031fbfe1b tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: fcdd3076-2b53-4850-9730-2f877e2cabfd] Updating instance 'fcdd3076-2b53-4850-9730-2f877e2cabfd' progress to 17 {{(pid=63279) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2384.408320] env[63279]: INFO nova.network.neutron [None req-58427c31-6c1a-456a-adc2-d8579ca1ddff tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] [instance: 6f839780-be92-4d99-a96d-1fc14c819599] Port 5426dc4d-6f5e-46fc-9a7e-c3ba98bdc032 from network info_cache is no longer associated with instance in Neutron. Removing from network info_cache. 
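A minimal sketch (not part of the log) of how the scheduler-report inventory record logged above translates into usable capacity. It only uses the Placement-style formula (total - reserved) * allocation_ratio; the dict literal is copied from the log entry, trimmed to the fields used here.

```python
# Sketch: effective capacity from the provider inventory reported in the log.
inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
}

def effective_capacity(record: dict) -> float:
    """Usable units of one resource class: (total - reserved) * allocation_ratio."""
    return (record['total'] - record['reserved']) * record['allocation_ratio']

for rc, record in inventory.items():
    print(f"{rc}: {effective_capacity(record):.0f} allocatable units")
# VCPU: 192, MEMORY_MB: 196078, DISK_GB: 400
```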
[ 2384.408707] env[63279]: DEBUG nova.network.neutron [None req-58427c31-6c1a-456a-adc2-d8579ca1ddff tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] [instance: 6f839780-be92-4d99-a96d-1fc14c819599] Updating instance_info_cache with network_info: [{"id": "d0192def-50a0-40c0-9921-fbdf13e63ffb", "address": "fa:16:3e:0c:f0:a6", "network": {"id": "e0e614b7-de4b-485e-8824-582faae8febd", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1102025073-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b5b21bc5072e4945a19a782dd9561709", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "41278529-8bd2-44a1-97c8-03967faa3ff7", "external-id": "nsx-vlan-transportzone-749", "segmentation_id": 749, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd0192def-50", "ovs_interfaceid": "d0192def-50a0-40c0-9921-fbdf13e63ffb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2384.546562] env[63279]: DEBUG oslo_vmware.rw_handles [None req-43bd4868-7097-4981-87a8-1d823ea75ab9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52e214e2-c653-2d92-389f-35d8cc0e20ae/disk-0.vmdk. {{(pid=63279) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 2384.547598] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-965f5a15-7726-44dc-96a6-d3d593154b1b {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2384.555377] env[63279]: DEBUG oslo_vmware.rw_handles [None req-43bd4868-7097-4981-87a8-1d823ea75ab9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52e214e2-c653-2d92-389f-35d8cc0e20ae/disk-0.vmdk is in state: ready. {{(pid=63279) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 2384.555577] env[63279]: ERROR oslo_vmware.rw_handles [None req-43bd4868-7097-4981-87a8-1d823ea75ab9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52e214e2-c653-2d92-389f-35d8cc0e20ae/disk-0.vmdk due to incomplete transfer. 
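A minimal sketch (not part of the log) showing how a cached network_info entry like the one just logged can be read back into plain addresses. The nested structure is taken from the log; the helper name `vif_addresses` is made up for illustration.

```python
# Sketch: extract fixed and floating IPs from a network_info cache entry.
from typing import Iterable

network_info = [{
    "id": "d0192def-50a0-40c0-9921-fbdf13e63ffb",
    "address": "fa:16:3e:0c:f0:a6",
    "network": {
        "subnets": [{
            "cidr": "192.168.128.0/28",
            "ips": [{
                "address": "192.168.128.9",
                "type": "fixed",
                "floating_ips": [],   # an associated floating IP would appear here
            }],
        }],
    },
}]

def vif_addresses(nw_info: Iterable[dict]) -> list[tuple[str, str]]:
    """Return (address, kind) pairs for every fixed and floating IP in the entry."""
    pairs = []
    for vif in nw_info:
        for subnet in vif["network"]["subnets"]:
            for ip in subnet["ips"]:
                pairs.append((ip["address"], ip["type"]))
                for fip in ip.get("floating_ips", []):
                    pairs.append((fip["address"], fip["type"]))
    return pairs

print(vif_addresses(network_info))   # [('192.168.128.9', 'fixed')]
```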
[ 2384.555839] env[63279]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-db917308-8981-44de-907e-b2003b9f4669 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2384.563303] env[63279]: DEBUG oslo_vmware.rw_handles [None req-43bd4868-7097-4981-87a8-1d823ea75ab9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52e214e2-c653-2d92-389f-35d8cc0e20ae/disk-0.vmdk. {{(pid=63279) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 2384.563545] env[63279]: DEBUG nova.virt.vmwareapi.images [None req-43bd4868-7097-4981-87a8-1d823ea75ab9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] [instance: efddec10-b496-446e-a05a-72c9f2d86ed9] Uploaded image 4faf9f78-8c3c-4094-a5ab-792d6ef75b92 to the Glance image server {{(pid=63279) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 2384.566371] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-43bd4868-7097-4981-87a8-1d823ea75ab9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] [instance: efddec10-b496-446e-a05a-72c9f2d86ed9] Destroying the VM {{(pid=63279) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 2384.566768] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-80767c23-84ed-4f50-aba8-df04ca705335 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2384.572606] env[63279]: DEBUG oslo_vmware.api [None req-43bd4868-7097-4981-87a8-1d823ea75ab9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Waiting for the task: (returnval){ [ 2384.572606] env[63279]: value = "task-2088148" [ 2384.572606] env[63279]: _type = "Task" [ 2384.572606] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2384.581404] env[63279]: DEBUG oslo_vmware.api [None req-43bd4868-7097-4981-87a8-1d823ea75ab9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Task: {'id': task-2088148, 'name': Destroy_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2384.781147] env[63279]: DEBUG oslo_concurrency.lockutils [None req-c5259d80-0b0b-40a7-990d-a8a925e7909a tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.724s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2384.799441] env[63279]: DEBUG nova.virt.hardware [None req-cb50f17b-9545-4aef-91a6-f0a031fbfe1b tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-13T17:30:21Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=30887889-e45b-4f67-8b3c-16216e594a90,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2384.799758] env[63279]: DEBUG nova.virt.hardware [None req-cb50f17b-9545-4aef-91a6-f0a031fbfe1b tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Flavor limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2384.799964] env[63279]: DEBUG nova.virt.hardware [None req-cb50f17b-9545-4aef-91a6-f0a031fbfe1b tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Image limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2384.800226] env[63279]: DEBUG nova.virt.hardware [None req-cb50f17b-9545-4aef-91a6-f0a031fbfe1b tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Flavor pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2384.800412] env[63279]: DEBUG nova.virt.hardware [None req-cb50f17b-9545-4aef-91a6-f0a031fbfe1b tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Image pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2384.800635] env[63279]: DEBUG nova.virt.hardware [None req-cb50f17b-9545-4aef-91a6-f0a031fbfe1b tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2384.800956] env[63279]: DEBUG nova.virt.hardware [None req-cb50f17b-9545-4aef-91a6-f0a031fbfe1b tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 2384.801171] env[63279]: DEBUG nova.virt.hardware [None req-cb50f17b-9545-4aef-91a6-f0a031fbfe1b 
tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 2384.801397] env[63279]: DEBUG nova.virt.hardware [None req-cb50f17b-9545-4aef-91a6-f0a031fbfe1b tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Got 1 possible topologies {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2384.801651] env[63279]: DEBUG nova.virt.hardware [None req-cb50f17b-9545-4aef-91a6-f0a031fbfe1b tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2384.801972] env[63279]: DEBUG nova.virt.hardware [None req-cb50f17b-9545-4aef-91a6-f0a031fbfe1b tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2384.808194] env[63279]: INFO nova.scheduler.client.report [None req-c5259d80-0b0b-40a7-990d-a8a925e7909a tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Deleted allocations for instance 211f30e0-f5ab-4c05-975a-63fdc4fed7c7 [ 2384.809358] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-68dbb704-6cc8-4603-a64f-0fcf5539b8a4 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2384.828789] env[63279]: DEBUG oslo_vmware.api [None req-cb50f17b-9545-4aef-91a6-f0a031fbfe1b tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Waiting for the task: (returnval){ [ 2384.828789] env[63279]: value = "task-2088149" [ 2384.828789] env[63279]: _type = "Task" [ 2384.828789] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2384.837352] env[63279]: DEBUG oslo_vmware.api [None req-cb50f17b-9545-4aef-91a6-f0a031fbfe1b tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Task: {'id': task-2088149, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2384.911926] env[63279]: DEBUG oslo_concurrency.lockutils [None req-58427c31-6c1a-456a-adc2-d8579ca1ddff tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Releasing lock "refresh_cache-6f839780-be92-4d99-a96d-1fc14c819599" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2384.914307] env[63279]: DEBUG oslo_concurrency.lockutils [req-d6b5f81f-05fc-43d5-b7a2-a70583455498 req-51c8676f-6def-4a06-b6ac-c897ec30fa8e service nova] Acquired lock "refresh_cache-6f839780-be92-4d99-a96d-1fc14c819599" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2384.914542] env[63279]: DEBUG nova.network.neutron [req-d6b5f81f-05fc-43d5-b7a2-a70583455498 req-51c8676f-6def-4a06-b6ac-c897ec30fa8e service nova] [instance: 6f839780-be92-4d99-a96d-1fc14c819599] Refreshing network info cache for port d0192def-50a0-40c0-9921-fbdf13e63ffb {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2384.950727] env[63279]: DEBUG oslo_concurrency.lockutils [None req-5ffe0fdc-3da0-45de-bfdd-ef818739b8c0 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Acquiring lock "interface-36354325-dee0-406e-8eb6-bc3cf347a403-5426dc4d-6f5e-46fc-9a7e-c3ba98bdc032" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2384.950988] env[63279]: DEBUG oslo_concurrency.lockutils [None req-5ffe0fdc-3da0-45de-bfdd-ef818739b8c0 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Lock "interface-36354325-dee0-406e-8eb6-bc3cf347a403-5426dc4d-6f5e-46fc-9a7e-c3ba98bdc032" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2384.951383] env[63279]: DEBUG nova.objects.instance [None req-5ffe0fdc-3da0-45de-bfdd-ef818739b8c0 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Lazy-loading 'flavor' on Instance uuid 36354325-dee0-406e-8eb6-bc3cf347a403 {{(pid=63279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2385.082360] env[63279]: DEBUG oslo_vmware.api [None req-43bd4868-7097-4981-87a8-1d823ea75ab9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Task: {'id': task-2088148, 'name': Destroy_Task} progress is 33%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2385.327830] env[63279]: DEBUG oslo_concurrency.lockutils [None req-c5259d80-0b0b-40a7-990d-a8a925e7909a tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Lock "211f30e0-f5ab-4c05-975a-63fdc4fed7c7" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.736s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2385.339265] env[63279]: DEBUG oslo_vmware.api [None req-cb50f17b-9545-4aef-91a6-f0a031fbfe1b tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Task: {'id': task-2088149, 'name': ReconfigVM_Task, 'duration_secs': 0.172434} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2385.339886] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-cb50f17b-9545-4aef-91a6-f0a031fbfe1b tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: fcdd3076-2b53-4850-9730-2f877e2cabfd] Updating instance 'fcdd3076-2b53-4850-9730-2f877e2cabfd' progress to 33 {{(pid=63279) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2385.418309] env[63279]: DEBUG oslo_concurrency.lockutils [None req-58427c31-6c1a-456a-adc2-d8579ca1ddff tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Lock "interface-6f839780-be92-4d99-a96d-1fc14c819599-5426dc4d-6f5e-46fc-9a7e-c3ba98bdc032" "released" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: held 9.878s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2385.552326] env[63279]: DEBUG nova.objects.instance [None req-5ffe0fdc-3da0-45de-bfdd-ef818739b8c0 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Lazy-loading 'pci_requests' on Instance uuid 36354325-dee0-406e-8eb6-bc3cf347a403 {{(pid=63279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2385.585874] env[63279]: DEBUG oslo_vmware.api [None req-43bd4868-7097-4981-87a8-1d823ea75ab9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Task: {'id': task-2088148, 'name': Destroy_Task} progress is 33%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2385.844579] env[63279]: DEBUG nova.network.neutron [req-d6b5f81f-05fc-43d5-b7a2-a70583455498 req-51c8676f-6def-4a06-b6ac-c897ec30fa8e service nova] [instance: 6f839780-be92-4d99-a96d-1fc14c819599] Updated VIF entry in instance network info cache for port d0192def-50a0-40c0-9921-fbdf13e63ffb. 
{{(pid=63279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2385.844956] env[63279]: DEBUG nova.network.neutron [req-d6b5f81f-05fc-43d5-b7a2-a70583455498 req-51c8676f-6def-4a06-b6ac-c897ec30fa8e service nova] [instance: 6f839780-be92-4d99-a96d-1fc14c819599] Updating instance_info_cache with network_info: [{"id": "d0192def-50a0-40c0-9921-fbdf13e63ffb", "address": "fa:16:3e:0c:f0:a6", "network": {"id": "e0e614b7-de4b-485e-8824-582faae8febd", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1102025073-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b5b21bc5072e4945a19a782dd9561709", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "41278529-8bd2-44a1-97c8-03967faa3ff7", "external-id": "nsx-vlan-transportzone-749", "segmentation_id": 749, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd0192def-50", "ovs_interfaceid": "d0192def-50a0-40c0-9921-fbdf13e63ffb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2385.847908] env[63279]: DEBUG nova.virt.hardware [None req-cb50f17b-9545-4aef-91a6-f0a031fbfe1b tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-13T17:30:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=30887889-e45b-4f67-8b3c-16216e594a90,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2385.849446] env[63279]: DEBUG nova.virt.hardware [None req-cb50f17b-9545-4aef-91a6-f0a031fbfe1b tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Flavor limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2385.849446] env[63279]: DEBUG nova.virt.hardware [None req-cb50f17b-9545-4aef-91a6-f0a031fbfe1b tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Image limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2385.849446] env[63279]: DEBUG nova.virt.hardware [None req-cb50f17b-9545-4aef-91a6-f0a031fbfe1b tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Flavor pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2385.849446] env[63279]: DEBUG nova.virt.hardware [None req-cb50f17b-9545-4aef-91a6-f0a031fbfe1b 
tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Image pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2385.849446] env[63279]: DEBUG nova.virt.hardware [None req-cb50f17b-9545-4aef-91a6-f0a031fbfe1b tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2385.849446] env[63279]: DEBUG nova.virt.hardware [None req-cb50f17b-9545-4aef-91a6-f0a031fbfe1b tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 2385.849446] env[63279]: DEBUG nova.virt.hardware [None req-cb50f17b-9545-4aef-91a6-f0a031fbfe1b tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 2385.849446] env[63279]: DEBUG nova.virt.hardware [None req-cb50f17b-9545-4aef-91a6-f0a031fbfe1b tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Got 1 possible topologies {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2385.850105] env[63279]: DEBUG nova.virt.hardware [None req-cb50f17b-9545-4aef-91a6-f0a031fbfe1b tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2385.850105] env[63279]: DEBUG nova.virt.hardware [None req-cb50f17b-9545-4aef-91a6-f0a031fbfe1b tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2385.854994] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-cb50f17b-9545-4aef-91a6-f0a031fbfe1b tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: fcdd3076-2b53-4850-9730-2f877e2cabfd] Reconfiguring VM instance instance-00000047 to detach disk 2000 {{(pid=63279) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 2385.855515] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-07f1ff3a-c34a-479d-a573-c525fe185d51 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2385.875197] env[63279]: DEBUG oslo_vmware.api [None req-cb50f17b-9545-4aef-91a6-f0a031fbfe1b tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Waiting for the task: (returnval){ [ 2385.875197] env[63279]: value = "task-2088150" [ 2385.875197] env[63279]: _type = "Task" [ 2385.875197] env[63279]: } to complete. 
{{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2385.882979] env[63279]: DEBUG oslo_vmware.api [None req-cb50f17b-9545-4aef-91a6-f0a031fbfe1b tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Task: {'id': task-2088150, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2386.055305] env[63279]: DEBUG nova.objects.base [None req-5ffe0fdc-3da0-45de-bfdd-ef818739b8c0 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Object Instance<36354325-dee0-406e-8eb6-bc3cf347a403> lazy-loaded attributes: flavor,pci_requests {{(pid=63279) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 2386.055517] env[63279]: DEBUG nova.network.neutron [None req-5ffe0fdc-3da0-45de-bfdd-ef818739b8c0 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] [instance: 36354325-dee0-406e-8eb6-bc3cf347a403] allocate_for_instance() {{(pid=63279) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2386.084856] env[63279]: DEBUG oslo_vmware.api [None req-43bd4868-7097-4981-87a8-1d823ea75ab9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Task: {'id': task-2088148, 'name': Destroy_Task} progress is 33%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2386.123061] env[63279]: DEBUG nova.policy [None req-5ffe0fdc-3da0-45de-bfdd-ef818739b8c0 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6343b207f7294f5fa2a8111940083fb0', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b5b21bc5072e4945a19a782dd9561709', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63279) authorize /opt/stack/nova/nova/policy.py:192}} [ 2386.312722] env[63279]: DEBUG nova.compute.manager [req-dc78b875-a1b3-4d76-8e44-1ac81167a86a req-58378407-0874-47aa-b38d-63f2cab9f95d service nova] [instance: 36354325-dee0-406e-8eb6-bc3cf347a403] Received event network-changed-a05a3f68-d80e-46c9-9065-1f803e7c04ff {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2386.312722] env[63279]: DEBUG nova.compute.manager [req-dc78b875-a1b3-4d76-8e44-1ac81167a86a req-58378407-0874-47aa-b38d-63f2cab9f95d service nova] [instance: 36354325-dee0-406e-8eb6-bc3cf347a403] Refreshing instance network info cache due to event network-changed-a05a3f68-d80e-46c9-9065-1f803e7c04ff. 
{{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11655}} [ 2386.312940] env[63279]: DEBUG oslo_concurrency.lockutils [req-dc78b875-a1b3-4d76-8e44-1ac81167a86a req-58378407-0874-47aa-b38d-63f2cab9f95d service nova] Acquiring lock "refresh_cache-36354325-dee0-406e-8eb6-bc3cf347a403" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2386.313102] env[63279]: DEBUG oslo_concurrency.lockutils [req-dc78b875-a1b3-4d76-8e44-1ac81167a86a req-58378407-0874-47aa-b38d-63f2cab9f95d service nova] Acquired lock "refresh_cache-36354325-dee0-406e-8eb6-bc3cf347a403" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2386.313277] env[63279]: DEBUG nova.network.neutron [req-dc78b875-a1b3-4d76-8e44-1ac81167a86a req-58378407-0874-47aa-b38d-63f2cab9f95d service nova] [instance: 36354325-dee0-406e-8eb6-bc3cf347a403] Refreshing network info cache for port a05a3f68-d80e-46c9-9065-1f803e7c04ff {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2386.356968] env[63279]: DEBUG oslo_concurrency.lockutils [req-d6b5f81f-05fc-43d5-b7a2-a70583455498 req-51c8676f-6def-4a06-b6ac-c897ec30fa8e service nova] Releasing lock "refresh_cache-6f839780-be92-4d99-a96d-1fc14c819599" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2386.384707] env[63279]: DEBUG oslo_vmware.api [None req-cb50f17b-9545-4aef-91a6-f0a031fbfe1b tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Task: {'id': task-2088150, 'name': ReconfigVM_Task, 'duration_secs': 0.373383} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2386.384984] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-cb50f17b-9545-4aef-91a6-f0a031fbfe1b tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: fcdd3076-2b53-4850-9730-2f877e2cabfd] Reconfigured VM instance instance-00000047 to detach disk 2000 {{(pid=63279) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 2386.385760] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-506a2b81-2172-48b8-ba38-060c47b4670b {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2386.408283] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-cb50f17b-9545-4aef-91a6-f0a031fbfe1b tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: fcdd3076-2b53-4850-9730-2f877e2cabfd] Reconfiguring VM instance instance-00000047 to attach disk [datastore1] fcdd3076-2b53-4850-9730-2f877e2cabfd/fcdd3076-2b53-4850-9730-2f877e2cabfd.vmdk or device None with type thin {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2386.408579] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fd9d2e54-6c97-4f76-8bd2-8362eae7a6df {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2386.426388] env[63279]: DEBUG oslo_vmware.api [None req-cb50f17b-9545-4aef-91a6-f0a031fbfe1b tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Waiting for the task: (returnval){ [ 2386.426388] env[63279]: 
value = "task-2088151" [ 2386.426388] env[63279]: _type = "Task" [ 2386.426388] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2386.434245] env[63279]: DEBUG oslo_vmware.api [None req-cb50f17b-9545-4aef-91a6-f0a031fbfe1b tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Task: {'id': task-2088151, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2386.584859] env[63279]: DEBUG oslo_vmware.api [None req-43bd4868-7097-4981-87a8-1d823ea75ab9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Task: {'id': task-2088148, 'name': Destroy_Task} progress is 33%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2386.734039] env[63279]: DEBUG oslo_concurrency.lockutils [None req-263bc8f6-2264-4eb6-9d9d-c26895c369d4 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Acquiring lock "5961728f-c65a-425d-8f6d-5763c79bbe1a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2386.734039] env[63279]: DEBUG oslo_concurrency.lockutils [None req-263bc8f6-2264-4eb6-9d9d-c26895c369d4 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Lock "5961728f-c65a-425d-8f6d-5763c79bbe1a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2386.937750] env[63279]: DEBUG oslo_vmware.api [None req-cb50f17b-9545-4aef-91a6-f0a031fbfe1b tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Task: {'id': task-2088151, 'name': ReconfigVM_Task, 'duration_secs': 0.372917} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2386.938044] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-cb50f17b-9545-4aef-91a6-f0a031fbfe1b tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: fcdd3076-2b53-4850-9730-2f877e2cabfd] Reconfigured VM instance instance-00000047 to attach disk [datastore1] fcdd3076-2b53-4850-9730-2f877e2cabfd/fcdd3076-2b53-4850-9730-2f877e2cabfd.vmdk or device None with type thin {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2386.938319] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-cb50f17b-9545-4aef-91a6-f0a031fbfe1b tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: fcdd3076-2b53-4850-9730-2f877e2cabfd] Updating instance 'fcdd3076-2b53-4850-9730-2f877e2cabfd' progress to 50 {{(pid=63279) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2387.028573] env[63279]: DEBUG nova.network.neutron [req-dc78b875-a1b3-4d76-8e44-1ac81167a86a req-58378407-0874-47aa-b38d-63f2cab9f95d service nova] [instance: 36354325-dee0-406e-8eb6-bc3cf347a403] Updated VIF entry in instance network info cache for port a05a3f68-d80e-46c9-9065-1f803e7c04ff. 
{{(pid=63279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2387.028971] env[63279]: DEBUG nova.network.neutron [req-dc78b875-a1b3-4d76-8e44-1ac81167a86a req-58378407-0874-47aa-b38d-63f2cab9f95d service nova] [instance: 36354325-dee0-406e-8eb6-bc3cf347a403] Updating instance_info_cache with network_info: [{"id": "a05a3f68-d80e-46c9-9065-1f803e7c04ff", "address": "fa:16:3e:8e:d1:f4", "network": {"id": "e0e614b7-de4b-485e-8824-582faae8febd", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1102025073-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.183", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b5b21bc5072e4945a19a782dd9561709", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "41278529-8bd2-44a1-97c8-03967faa3ff7", "external-id": "nsx-vlan-transportzone-749", "segmentation_id": 749, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa05a3f68-d8", "ovs_interfaceid": "a05a3f68-d80e-46c9-9065-1f803e7c04ff", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2387.084398] env[63279]: DEBUG oslo_vmware.api [None req-43bd4868-7097-4981-87a8-1d823ea75ab9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Task: {'id': task-2088148, 'name': Destroy_Task} progress is 33%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2387.236972] env[63279]: DEBUG nova.compute.manager [None req-263bc8f6-2264-4eb6-9d9d-c26895c369d4 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] [instance: 5961728f-c65a-425d-8f6d-5763c79bbe1a] Starting instance... 
{{(pid=63279) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 2387.444413] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61e2c652-19cb-40fc-a229-7376892e74ff {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2387.463647] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2963644-4136-4cdf-92d7-f7e8f6404b7d {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2387.481186] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-cb50f17b-9545-4aef-91a6-f0a031fbfe1b tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: fcdd3076-2b53-4850-9730-2f877e2cabfd] Updating instance 'fcdd3076-2b53-4850-9730-2f877e2cabfd' progress to 67 {{(pid=63279) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2387.532250] env[63279]: DEBUG oslo_concurrency.lockutils [req-dc78b875-a1b3-4d76-8e44-1ac81167a86a req-58378407-0874-47aa-b38d-63f2cab9f95d service nova] Releasing lock "refresh_cache-36354325-dee0-406e-8eb6-bc3cf347a403" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2387.587768] env[63279]: DEBUG oslo_vmware.api [None req-43bd4868-7097-4981-87a8-1d823ea75ab9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Task: {'id': task-2088148, 'name': Destroy_Task} progress is 33%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2387.763503] env[63279]: DEBUG oslo_concurrency.lockutils [None req-263bc8f6-2264-4eb6-9d9d-c26895c369d4 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2387.763778] env[63279]: DEBUG oslo_concurrency.lockutils [None req-263bc8f6-2264-4eb6-9d9d-c26895c369d4 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2387.765355] env[63279]: INFO nova.compute.claims [None req-263bc8f6-2264-4eb6-9d9d-c26895c369d4 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] [instance: 5961728f-c65a-425d-8f6d-5763c79bbe1a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2388.020405] env[63279]: DEBUG nova.network.neutron [None req-cb50f17b-9545-4aef-91a6-f0a031fbfe1b tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: fcdd3076-2b53-4850-9730-2f877e2cabfd] Port 7baa1106-63c3-49cd-8d80-54c763d1b2b3 binding to destination host cpu-1 is already ACTIVE {{(pid=63279) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 2388.085997] env[63279]: DEBUG oslo_vmware.api [None req-43bd4868-7097-4981-87a8-1d823ea75ab9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Task: {'id': task-2088148, 'name': Destroy_Task} 
progress is 33%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2388.442208] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2388.442208] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2388.442208] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Starting heal instance info cache {{(pid=63279) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10447}} [ 2388.588606] env[63279]: DEBUG oslo_vmware.api [None req-43bd4868-7097-4981-87a8-1d823ea75ab9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Task: {'id': task-2088148, 'name': Destroy_Task} progress is 33%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2388.898549] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41636215-f083-4e5f-a8d9-18cc62ebeeb6 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2388.906074] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e17f949-d211-4418-9a9a-35df8fd8727a {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2388.935158] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a0a3321-4e13-4ba4-9b3e-f2f6f5a38eca {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2388.941800] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-908a5f1c-1666-4ff5-8a28-865371ed2fd4 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2388.955592] env[63279]: DEBUG nova.compute.provider_tree [None req-263bc8f6-2264-4eb6-9d9d-c26895c369d4 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Inventory has not changed in ProviderTree for provider: 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2389.042531] env[63279]: DEBUG oslo_concurrency.lockutils [None req-cb50f17b-9545-4aef-91a6-f0a031fbfe1b tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Acquiring lock "fcdd3076-2b53-4850-9730-2f877e2cabfd-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2389.042531] env[63279]: DEBUG oslo_concurrency.lockutils [None req-cb50f17b-9545-4aef-91a6-f0a031fbfe1b tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Lock "fcdd3076-2b53-4850-9730-2f877e2cabfd-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2389.042755] env[63279]: DEBUG oslo_concurrency.lockutils [None req-cb50f17b-9545-4aef-91a6-f0a031fbfe1b tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Lock "fcdd3076-2b53-4850-9730-2f877e2cabfd-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2389.086366] env[63279]: DEBUG oslo_vmware.api [None req-43bd4868-7097-4981-87a8-1d823ea75ab9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Task: {'id': task-2088148, 'name': Destroy_Task} progress is 33%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2389.458707] env[63279]: DEBUG nova.scheduler.client.report [None req-263bc8f6-2264-4eb6-9d9d-c26895c369d4 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Inventory has not changed for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2389.586888] env[63279]: DEBUG oslo_vmware.api [None req-43bd4868-7097-4981-87a8-1d823ea75ab9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Task: {'id': task-2088148, 'name': Destroy_Task} progress is 33%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2389.963621] env[63279]: DEBUG oslo_concurrency.lockutils [None req-263bc8f6-2264-4eb6-9d9d-c26895c369d4 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.200s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2389.964152] env[63279]: DEBUG nova.compute.manager [None req-263bc8f6-2264-4eb6-9d9d-c26895c369d4 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] [instance: 5961728f-c65a-425d-8f6d-5763c79bbe1a] Start building networks asynchronously for instance. 
{{(pid=63279) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 2390.075963] env[63279]: DEBUG oslo_concurrency.lockutils [None req-cb50f17b-9545-4aef-91a6-f0a031fbfe1b tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Acquiring lock "refresh_cache-fcdd3076-2b53-4850-9730-2f877e2cabfd" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2390.076175] env[63279]: DEBUG oslo_concurrency.lockutils [None req-cb50f17b-9545-4aef-91a6-f0a031fbfe1b tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Acquired lock "refresh_cache-fcdd3076-2b53-4850-9730-2f877e2cabfd" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2390.076355] env[63279]: DEBUG nova.network.neutron [None req-cb50f17b-9545-4aef-91a6-f0a031fbfe1b tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: fcdd3076-2b53-4850-9730-2f877e2cabfd] Building network info cache for instance {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2390.087889] env[63279]: DEBUG oslo_vmware.api [None req-43bd4868-7097-4981-87a8-1d823ea75ab9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Task: {'id': task-2088148, 'name': Destroy_Task} progress is 33%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2390.468465] env[63279]: DEBUG nova.compute.utils [None req-263bc8f6-2264-4eb6-9d9d-c26895c369d4 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Using /dev/sd instead of None {{(pid=63279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2390.470786] env[63279]: DEBUG nova.compute.manager [None req-263bc8f6-2264-4eb6-9d9d-c26895c369d4 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] [instance: 5961728f-c65a-425d-8f6d-5763c79bbe1a] Allocating IP information in the background. 
{{(pid=63279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 2390.470786] env[63279]: DEBUG nova.network.neutron [None req-263bc8f6-2264-4eb6-9d9d-c26895c369d4 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] [instance: 5961728f-c65a-425d-8f6d-5763c79bbe1a] allocate_for_instance() {{(pid=63279) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2390.508654] env[63279]: DEBUG nova.policy [None req-263bc8f6-2264-4eb6-9d9d-c26895c369d4 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '55199c771de04067a936194078ef99f6', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4fcef39e334249afb9636455802059c5', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63279) authorize /opt/stack/nova/nova/policy.py:192}} [ 2390.589462] env[63279]: DEBUG oslo_vmware.api [None req-43bd4868-7097-4981-87a8-1d823ea75ab9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Task: {'id': task-2088148, 'name': Destroy_Task} progress is 33%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2390.750079] env[63279]: DEBUG nova.network.neutron [None req-263bc8f6-2264-4eb6-9d9d-c26895c369d4 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] [instance: 5961728f-c65a-425d-8f6d-5763c79bbe1a] Successfully created port: 92ca2582-9c5e-4cf7-bdf0-a0135ce33065 {{(pid=63279) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2390.867571] env[63279]: DEBUG nova.network.neutron [None req-cb50f17b-9545-4aef-91a6-f0a031fbfe1b tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: fcdd3076-2b53-4850-9730-2f877e2cabfd] Updating instance_info_cache with network_info: [{"id": "7baa1106-63c3-49cd-8d80-54c763d1b2b3", "address": "fa:16:3e:cc:16:0a", "network": {"id": "82ca068b-acea-4f60-b0b3-68ea1e08aebe", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-571699353-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.165", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f39174e9ff5649e0ade4391da383dfb2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "57d4be17-536f-4a81-bea9-6547bd50f4a3", "external-id": "nsx-vlan-transportzone-163", "segmentation_id": 163, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7baa1106-63", "ovs_interfaceid": "7baa1106-63c3-49cd-8d80-54c763d1b2b3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 
2390.970972] env[63279]: DEBUG nova.compute.manager [None req-263bc8f6-2264-4eb6-9d9d-c26895c369d4 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] [instance: 5961728f-c65a-425d-8f6d-5763c79bbe1a] Start building block device mappings for instance. {{(pid=63279) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 2391.090169] env[63279]: DEBUG oslo_vmware.api [None req-43bd4868-7097-4981-87a8-1d823ea75ab9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Task: {'id': task-2088148, 'name': Destroy_Task} progress is 33%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2391.370530] env[63279]: DEBUG oslo_concurrency.lockutils [None req-cb50f17b-9545-4aef-91a6-f0a031fbfe1b tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Releasing lock "refresh_cache-fcdd3076-2b53-4850-9730-2f877e2cabfd" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2391.592630] env[63279]: DEBUG oslo_vmware.api [None req-43bd4868-7097-4981-87a8-1d823ea75ab9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Task: {'id': task-2088148, 'name': Destroy_Task, 'duration_secs': 6.902559} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2391.592963] env[63279]: INFO nova.virt.vmwareapi.vm_util [None req-43bd4868-7097-4981-87a8-1d823ea75ab9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] [instance: efddec10-b496-446e-a05a-72c9f2d86ed9] Destroyed the VM [ 2391.593246] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-43bd4868-7097-4981-87a8-1d823ea75ab9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] [instance: efddec10-b496-446e-a05a-72c9f2d86ed9] Deleting Snapshot of the VM instance {{(pid=63279) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 2391.593475] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-44e8d05d-68c6-4d43-a9ce-a4a9f18a0087 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2391.599045] env[63279]: DEBUG oslo_vmware.api [None req-43bd4868-7097-4981-87a8-1d823ea75ab9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Waiting for the task: (returnval){ [ 2391.599045] env[63279]: value = "task-2088152" [ 2391.599045] env[63279]: _type = "Task" [ 2391.599045] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2391.606481] env[63279]: DEBUG oslo_vmware.api [None req-43bd4868-7097-4981-87a8-1d823ea75ab9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Task: {'id': task-2088152, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2391.896459] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-114cf8f2-b7b1-4256-bedb-328cd559542c {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2391.915503] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23d451df-7223-4545-90ff-1fc64b96d6cb {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2391.922746] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-cb50f17b-9545-4aef-91a6-f0a031fbfe1b tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: fcdd3076-2b53-4850-9730-2f877e2cabfd] Updating instance 'fcdd3076-2b53-4850-9730-2f877e2cabfd' progress to 83 {{(pid=63279) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2391.979760] env[63279]: DEBUG nova.compute.manager [None req-263bc8f6-2264-4eb6-9d9d-c26895c369d4 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] [instance: 5961728f-c65a-425d-8f6d-5763c79bbe1a] Start spawning the instance on the hypervisor. {{(pid=63279) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 2392.005843] env[63279]: DEBUG nova.virt.hardware [None req-263bc8f6-2264-4eb6-9d9d-c26895c369d4 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-13T17:30:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-13T17:30:01Z,direct_url=,disk_format='vmdk',id=30887889-e45b-4f67-8b3c-16216e594a90,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a935e86eee0a4c38adfe0367d2097a61',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-13T17:30:02Z,virtual_size=,visibility=), allow threads: False {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2392.006122] env[63279]: DEBUG nova.virt.hardware [None req-263bc8f6-2264-4eb6-9d9d-c26895c369d4 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Flavor limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2392.006291] env[63279]: DEBUG nova.virt.hardware [None req-263bc8f6-2264-4eb6-9d9d-c26895c369d4 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Image limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2392.006484] env[63279]: DEBUG nova.virt.hardware [None req-263bc8f6-2264-4eb6-9d9d-c26895c369d4 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Flavor pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2392.006632] env[63279]: DEBUG nova.virt.hardware [None req-263bc8f6-2264-4eb6-9d9d-c26895c369d4 tempest-ServersTestJSON-1420611653 
tempest-ServersTestJSON-1420611653-project-member] Image pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2392.006784] env[63279]: DEBUG nova.virt.hardware [None req-263bc8f6-2264-4eb6-9d9d-c26895c369d4 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2392.006991] env[63279]: DEBUG nova.virt.hardware [None req-263bc8f6-2264-4eb6-9d9d-c26895c369d4 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 2392.007173] env[63279]: DEBUG nova.virt.hardware [None req-263bc8f6-2264-4eb6-9d9d-c26895c369d4 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 2392.007346] env[63279]: DEBUG nova.virt.hardware [None req-263bc8f6-2264-4eb6-9d9d-c26895c369d4 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Got 1 possible topologies {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2392.007509] env[63279]: DEBUG nova.virt.hardware [None req-263bc8f6-2264-4eb6-9d9d-c26895c369d4 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2392.007683] env[63279]: DEBUG nova.virt.hardware [None req-263bc8f6-2264-4eb6-9d9d-c26895c369d4 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2392.008541] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65731be7-123f-4623-8856-e2f8c905b9fc {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2392.016793] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ddbbccfb-d192-4738-a696-5e1e13a3c347 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2392.108698] env[63279]: DEBUG oslo_vmware.api [None req-43bd4868-7097-4981-87a8-1d823ea75ab9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Task: {'id': task-2088152, 'name': RemoveSnapshot_Task, 'duration_secs': 0.352233} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2392.109044] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-43bd4868-7097-4981-87a8-1d823ea75ab9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] [instance: efddec10-b496-446e-a05a-72c9f2d86ed9] Deleted Snapshot of the VM instance {{(pid=63279) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 2392.110033] env[63279]: DEBUG nova.compute.manager [None req-43bd4868-7097-4981-87a8-1d823ea75ab9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] [instance: efddec10-b496-446e-a05a-72c9f2d86ed9] Checking state {{(pid=63279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2392.110132] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56679ee4-b702-427f-b039-5363a40bf1ed {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2392.429151] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-cb50f17b-9545-4aef-91a6-f0a031fbfe1b tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: fcdd3076-2b53-4850-9730-2f877e2cabfd] Powering on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2392.429417] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2ce75486-c033-4751-93e0-15b8e6cd71f5 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2392.436840] env[63279]: DEBUG oslo_vmware.api [None req-cb50f17b-9545-4aef-91a6-f0a031fbfe1b tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Waiting for the task: (returnval){ [ 2392.436840] env[63279]: value = "task-2088153" [ 2392.436840] env[63279]: _type = "Task" [ 2392.436840] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2392.444774] env[63279]: DEBUG oslo_vmware.api [None req-cb50f17b-9545-4aef-91a6-f0a031fbfe1b tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Task: {'id': task-2088153, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2392.529866] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Acquiring lock "refresh_cache-69b3269a-2ba3-4f5f-a29c-62518c93da3d" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2392.530043] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Acquired lock "refresh_cache-69b3269a-2ba3-4f5f-a29c-62518c93da3d" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2392.530205] env[63279]: DEBUG nova.network.neutron [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] [instance: 69b3269a-2ba3-4f5f-a29c-62518c93da3d] Forcefully refreshing network info cache for instance {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}} [ 2392.621701] env[63279]: INFO nova.compute.manager [None req-43bd4868-7097-4981-87a8-1d823ea75ab9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] [instance: efddec10-b496-446e-a05a-72c9f2d86ed9] Shelve offloading [ 2392.662779] env[63279]: DEBUG nova.compute.manager [req-a132d823-2acc-4a96-87ed-7096b5d1473e req-25dff663-095c-4d32-920b-319e6c5ea16b service nova] [instance: 5961728f-c65a-425d-8f6d-5763c79bbe1a] Received event network-vif-plugged-92ca2582-9c5e-4cf7-bdf0-a0135ce33065 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2392.663062] env[63279]: DEBUG oslo_concurrency.lockutils [req-a132d823-2acc-4a96-87ed-7096b5d1473e req-25dff663-095c-4d32-920b-319e6c5ea16b service nova] Acquiring lock "5961728f-c65a-425d-8f6d-5763c79bbe1a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2392.663337] env[63279]: DEBUG oslo_concurrency.lockutils [req-a132d823-2acc-4a96-87ed-7096b5d1473e req-25dff663-095c-4d32-920b-319e6c5ea16b service nova] Lock "5961728f-c65a-425d-8f6d-5763c79bbe1a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2392.663558] env[63279]: DEBUG oslo_concurrency.lockutils [req-a132d823-2acc-4a96-87ed-7096b5d1473e req-25dff663-095c-4d32-920b-319e6c5ea16b service nova] Lock "5961728f-c65a-425d-8f6d-5763c79bbe1a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2392.663824] env[63279]: DEBUG nova.compute.manager [req-a132d823-2acc-4a96-87ed-7096b5d1473e req-25dff663-095c-4d32-920b-319e6c5ea16b service nova] [instance: 5961728f-c65a-425d-8f6d-5763c79bbe1a] No waiting events found dispatching network-vif-plugged-92ca2582-9c5e-4cf7-bdf0-a0135ce33065 {{(pid=63279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 2392.664071] env[63279]: WARNING nova.compute.manager [req-a132d823-2acc-4a96-87ed-7096b5d1473e req-25dff663-095c-4d32-920b-319e6c5ea16b service nova] [instance: 5961728f-c65a-425d-8f6d-5763c79bbe1a] Received unexpected event network-vif-plugged-92ca2582-9c5e-4cf7-bdf0-a0135ce33065 for instance with vm_state building and task_state spawning. 
[ 2392.685028] env[63279]: DEBUG nova.compute.manager [req-253ebf3a-60b5-4214-8cd7-5161ef4b36dc req-c8a5e3ad-cfe1-478c-bd37-326788b912a8 service nova] [instance: 36354325-dee0-406e-8eb6-bc3cf347a403] Received event network-vif-plugged-5426dc4d-6f5e-46fc-9a7e-c3ba98bdc032 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2392.685311] env[63279]: DEBUG oslo_concurrency.lockutils [req-253ebf3a-60b5-4214-8cd7-5161ef4b36dc req-c8a5e3ad-cfe1-478c-bd37-326788b912a8 service nova] Acquiring lock "36354325-dee0-406e-8eb6-bc3cf347a403-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2392.685522] env[63279]: DEBUG oslo_concurrency.lockutils [req-253ebf3a-60b5-4214-8cd7-5161ef4b36dc req-c8a5e3ad-cfe1-478c-bd37-326788b912a8 service nova] Lock "36354325-dee0-406e-8eb6-bc3cf347a403-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2392.685693] env[63279]: DEBUG oslo_concurrency.lockutils [req-253ebf3a-60b5-4214-8cd7-5161ef4b36dc req-c8a5e3ad-cfe1-478c-bd37-326788b912a8 service nova] Lock "36354325-dee0-406e-8eb6-bc3cf347a403-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2392.685860] env[63279]: DEBUG nova.compute.manager [req-253ebf3a-60b5-4214-8cd7-5161ef4b36dc req-c8a5e3ad-cfe1-478c-bd37-326788b912a8 service nova] [instance: 36354325-dee0-406e-8eb6-bc3cf347a403] No waiting events found dispatching network-vif-plugged-5426dc4d-6f5e-46fc-9a7e-c3ba98bdc032 {{(pid=63279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 2392.686042] env[63279]: WARNING nova.compute.manager [req-253ebf3a-60b5-4214-8cd7-5161ef4b36dc req-c8a5e3ad-cfe1-478c-bd37-326788b912a8 service nova] [instance: 36354325-dee0-406e-8eb6-bc3cf347a403] Received unexpected event network-vif-plugged-5426dc4d-6f5e-46fc-9a7e-c3ba98bdc032 for instance with vm_state active and task_state None. [ 2392.730641] env[63279]: DEBUG nova.network.neutron [None req-263bc8f6-2264-4eb6-9d9d-c26895c369d4 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] [instance: 5961728f-c65a-425d-8f6d-5763c79bbe1a] Successfully updated port: 92ca2582-9c5e-4cf7-bdf0-a0135ce33065 {{(pid=63279) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2392.773827] env[63279]: DEBUG nova.network.neutron [None req-5ffe0fdc-3da0-45de-bfdd-ef818739b8c0 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] [instance: 36354325-dee0-406e-8eb6-bc3cf347a403] Successfully updated port: 5426dc4d-6f5e-46fc-9a7e-c3ba98bdc032 {{(pid=63279) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2392.947517] env[63279]: DEBUG oslo_vmware.api [None req-cb50f17b-9545-4aef-91a6-f0a031fbfe1b tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Task: {'id': task-2088153, 'name': PowerOnVM_Task, 'duration_secs': 0.359367} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2392.947867] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-cb50f17b-9545-4aef-91a6-f0a031fbfe1b tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: fcdd3076-2b53-4850-9730-2f877e2cabfd] Powered on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2392.948107] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-cb50f17b-9545-4aef-91a6-f0a031fbfe1b tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: fcdd3076-2b53-4850-9730-2f877e2cabfd] Updating instance 'fcdd3076-2b53-4850-9730-2f877e2cabfd' progress to 100 {{(pid=63279) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2393.124970] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-43bd4868-7097-4981-87a8-1d823ea75ab9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] [instance: efddec10-b496-446e-a05a-72c9f2d86ed9] Powering off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2393.125335] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6d9f18fc-0131-4d85-b114-06a5e557a6f3 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2393.132812] env[63279]: DEBUG oslo_vmware.api [None req-43bd4868-7097-4981-87a8-1d823ea75ab9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Waiting for the task: (returnval){ [ 2393.132812] env[63279]: value = "task-2088154" [ 2393.132812] env[63279]: _type = "Task" [ 2393.132812] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2393.140216] env[63279]: DEBUG oslo_vmware.api [None req-43bd4868-7097-4981-87a8-1d823ea75ab9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Task: {'id': task-2088154, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2393.237027] env[63279]: DEBUG oslo_concurrency.lockutils [None req-263bc8f6-2264-4eb6-9d9d-c26895c369d4 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Acquiring lock "refresh_cache-5961728f-c65a-425d-8f6d-5763c79bbe1a" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2393.237126] env[63279]: DEBUG oslo_concurrency.lockutils [None req-263bc8f6-2264-4eb6-9d9d-c26895c369d4 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Acquired lock "refresh_cache-5961728f-c65a-425d-8f6d-5763c79bbe1a" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2393.237297] env[63279]: DEBUG nova.network.neutron [None req-263bc8f6-2264-4eb6-9d9d-c26895c369d4 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] [instance: 5961728f-c65a-425d-8f6d-5763c79bbe1a] Building network info cache for instance {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2393.283504] env[63279]: DEBUG oslo_concurrency.lockutils [None req-5ffe0fdc-3da0-45de-bfdd-ef818739b8c0 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Acquiring lock "refresh_cache-36354325-dee0-406e-8eb6-bc3cf347a403" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2393.283725] env[63279]: DEBUG oslo_concurrency.lockutils [None req-5ffe0fdc-3da0-45de-bfdd-ef818739b8c0 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Acquired lock "refresh_cache-36354325-dee0-406e-8eb6-bc3cf347a403" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2393.283973] env[63279]: DEBUG nova.network.neutron [None req-5ffe0fdc-3da0-45de-bfdd-ef818739b8c0 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] [instance: 36354325-dee0-406e-8eb6-bc3cf347a403] Building network info cache for instance {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2393.600288] env[63279]: DEBUG oslo_concurrency.lockutils [None req-9c0b8ad2-bbf9-437d-83a5-0240f7c3c8cc tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Acquiring lock "6e8d9959-7c9d-48ee-81a5-bbdc6234248f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2393.600441] env[63279]: DEBUG oslo_concurrency.lockutils [None req-9c0b8ad2-bbf9-437d-83a5-0240f7c3c8cc tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Lock "6e8d9959-7c9d-48ee-81a5-bbdc6234248f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2393.644301] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-43bd4868-7097-4981-87a8-1d823ea75ab9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] [instance: efddec10-b496-446e-a05a-72c9f2d86ed9] VM 
already powered off {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 2393.644525] env[63279]: DEBUG nova.compute.manager [None req-43bd4868-7097-4981-87a8-1d823ea75ab9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] [instance: efddec10-b496-446e-a05a-72c9f2d86ed9] Checking state {{(pid=63279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2393.645280] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-744e1de6-ede3-4dfb-a3eb-249535cf3fe2 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2393.651298] env[63279]: DEBUG oslo_concurrency.lockutils [None req-43bd4868-7097-4981-87a8-1d823ea75ab9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Acquiring lock "refresh_cache-efddec10-b496-446e-a05a-72c9f2d86ed9" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2393.651468] env[63279]: DEBUG oslo_concurrency.lockutils [None req-43bd4868-7097-4981-87a8-1d823ea75ab9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Acquired lock "refresh_cache-efddec10-b496-446e-a05a-72c9f2d86ed9" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2393.651639] env[63279]: DEBUG nova.network.neutron [None req-43bd4868-7097-4981-87a8-1d823ea75ab9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] [instance: efddec10-b496-446e-a05a-72c9f2d86ed9] Building network info cache for instance {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2393.754530] env[63279]: DEBUG nova.network.neutron [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] [instance: 69b3269a-2ba3-4f5f-a29c-62518c93da3d] Updating instance_info_cache with network_info: [{"id": "482c0084-ab15-4c03-beb3-585f9e66c683", "address": "fa:16:3e:41:26:b2", "network": {"id": "2caeac4f-4d6f-49f6-ad75-055171bad9b0", "bridge": "br-int", "label": "tempest-ServersTestJSON-1264030443-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4fcef39e334249afb9636455802059c5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9fa933df-d66f-485e-8cf9-eda7f1a7f283", "external-id": "nsx-vlan-transportzone-87", "segmentation_id": 87, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap482c0084-ab", "ovs_interfaceid": "482c0084-ab15-4c03-beb3-585f9e66c683", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2393.767847] env[63279]: DEBUG nova.network.neutron [None req-263bc8f6-2264-4eb6-9d9d-c26895c369d4 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] 
[instance: 5961728f-c65a-425d-8f6d-5763c79bbe1a] Instance cache missing network info. {{(pid=63279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2393.816821] env[63279]: WARNING nova.network.neutron [None req-5ffe0fdc-3da0-45de-bfdd-ef818739b8c0 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] [instance: 36354325-dee0-406e-8eb6-bc3cf347a403] e0e614b7-de4b-485e-8824-582faae8febd already exists in list: networks containing: ['e0e614b7-de4b-485e-8824-582faae8febd']. ignoring it [ 2394.092440] env[63279]: DEBUG nova.network.neutron [None req-5ffe0fdc-3da0-45de-bfdd-ef818739b8c0 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] [instance: 36354325-dee0-406e-8eb6-bc3cf347a403] Updating instance_info_cache with network_info: [{"id": "a05a3f68-d80e-46c9-9065-1f803e7c04ff", "address": "fa:16:3e:8e:d1:f4", "network": {"id": "e0e614b7-de4b-485e-8824-582faae8febd", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1102025073-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.183", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b5b21bc5072e4945a19a782dd9561709", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "41278529-8bd2-44a1-97c8-03967faa3ff7", "external-id": "nsx-vlan-transportzone-749", "segmentation_id": 749, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa05a3f68-d8", "ovs_interfaceid": "a05a3f68-d80e-46c9-9065-1f803e7c04ff", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "5426dc4d-6f5e-46fc-9a7e-c3ba98bdc032", "address": "fa:16:3e:06:6c:96", "network": {"id": "e0e614b7-de4b-485e-8824-582faae8febd", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1102025073-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b5b21bc5072e4945a19a782dd9561709", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "41278529-8bd2-44a1-97c8-03967faa3ff7", "external-id": "nsx-vlan-transportzone-749", "segmentation_id": 749, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5426dc4d-6f", "ovs_interfaceid": "5426dc4d-6f5e-46fc-9a7e-c3ba98bdc032", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2394.104026] env[63279]: DEBUG nova.compute.manager [None req-9c0b8ad2-bbf9-437d-83a5-0240f7c3c8cc tempest-ServerActionsTestOtherA-605125166 
tempest-ServerActionsTestOtherA-605125166-project-member] [instance: 6e8d9959-7c9d-48ee-81a5-bbdc6234248f] Starting instance... {{(pid=63279) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 2394.117972] env[63279]: DEBUG nova.network.neutron [None req-263bc8f6-2264-4eb6-9d9d-c26895c369d4 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] [instance: 5961728f-c65a-425d-8f6d-5763c79bbe1a] Updating instance_info_cache with network_info: [{"id": "92ca2582-9c5e-4cf7-bdf0-a0135ce33065", "address": "fa:16:3e:63:c7:89", "network": {"id": "2caeac4f-4d6f-49f6-ad75-055171bad9b0", "bridge": "br-int", "label": "tempest-ServersTestJSON-1264030443-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4fcef39e334249afb9636455802059c5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9fa933df-d66f-485e-8cf9-eda7f1a7f283", "external-id": "nsx-vlan-transportzone-87", "segmentation_id": 87, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap92ca2582-9c", "ovs_interfaceid": "92ca2582-9c5e-4cf7-bdf0-a0135ce33065", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2394.256921] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Releasing lock "refresh_cache-69b3269a-2ba3-4f5f-a29c-62518c93da3d" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2394.257219] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] [instance: 69b3269a-2ba3-4f5f-a29c-62518c93da3d] Updated the network info_cache for instance {{(pid=63279) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10518}} [ 2394.257327] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2394.257486] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2394.257636] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2394.257785] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 
2394.257928] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2394.258089] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2394.258220] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=63279) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11066}} [ 2394.258367] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager.update_available_resource {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2394.368021] env[63279]: DEBUG nova.network.neutron [None req-43bd4868-7097-4981-87a8-1d823ea75ab9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] [instance: efddec10-b496-446e-a05a-72c9f2d86ed9] Updating instance_info_cache with network_info: [{"id": "cfd0d031-3ccd-4ad0-9c99-6436a05c108d", "address": "fa:16:3e:ff:42:fd", "network": {"id": "4f906777-9da9-42b1-9146-359f04c7c47f", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-923457018-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.158", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "131db3d2daa24712b6e11592cf789b33", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "abe48956-848a-4e1f-b1f1-a27baa5390b9", "external-id": "nsx-vlan-transportzone-238", "segmentation_id": 238, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcfd0d031-3c", "ovs_interfaceid": "cfd0d031-3ccd-4ad0-9c99-6436a05c108d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2394.595431] env[63279]: DEBUG oslo_concurrency.lockutils [None req-5ffe0fdc-3da0-45de-bfdd-ef818739b8c0 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Releasing lock "refresh_cache-36354325-dee0-406e-8eb6-bc3cf347a403" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2394.595677] env[63279]: DEBUG oslo_concurrency.lockutils [None req-5ffe0fdc-3da0-45de-bfdd-ef818739b8c0 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Acquiring lock "36354325-dee0-406e-8eb6-bc3cf347a403" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 
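The Acquiring/Acquired/Releasing lock records throughout this section come from oslo.concurrency's lockutils module, keyed on names such as "refresh_cache-<instance uuid>" or "compute_resources". A small sketch of that pattern follows, assuming the standard lockutils.lock() context manager and lockutils.synchronized() decorator; the function bodies are placeholders, not nova code.

    from oslo_concurrency import lockutils

    INSTANCE_UUID = "36354325-dee0-406e-8eb6-bc3cf347a403"  # value taken from the log


    def refresh_network_cache(instance_uuid):
        # lockutils.lock() yields an in-process lock identified by name; the
        # "Acquiring lock ..." / "Releasing lock ..." debug lines bracket the
        # body of a block like this one.
        with lockutils.lock("refresh_cache-%s" % instance_uuid):
            pass  # rebuild and persist the instance network info cache here


    @lockutils.synchronized("compute_resources")
    def update_usage():
        # The decorator form serializes every caller sharing the lock name,
        # which is how the resource tracker guards "compute_resources".
        pass
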
2394.595858] env[63279]: DEBUG oslo_concurrency.lockutils [None req-5ffe0fdc-3da0-45de-bfdd-ef818739b8c0 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Acquired lock "36354325-dee0-406e-8eb6-bc3cf347a403" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2394.596733] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ddf4f06a-e529-420a-8bac-d9f937ab92ce {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2394.615446] env[63279]: DEBUG nova.virt.hardware [None req-5ffe0fdc-3da0-45de-bfdd-ef818739b8c0 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-13T17:30:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=,min_ram=,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2394.615657] env[63279]: DEBUG nova.virt.hardware [None req-5ffe0fdc-3da0-45de-bfdd-ef818739b8c0 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Flavor limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2394.615814] env[63279]: DEBUG nova.virt.hardware [None req-5ffe0fdc-3da0-45de-bfdd-ef818739b8c0 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Image limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2394.616007] env[63279]: DEBUG nova.virt.hardware [None req-5ffe0fdc-3da0-45de-bfdd-ef818739b8c0 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Flavor pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2394.616174] env[63279]: DEBUG nova.virt.hardware [None req-5ffe0fdc-3da0-45de-bfdd-ef818739b8c0 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Image pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2394.616323] env[63279]: DEBUG nova.virt.hardware [None req-5ffe0fdc-3da0-45de-bfdd-ef818739b8c0 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2394.616525] env[63279]: DEBUG nova.virt.hardware [None req-5ffe0fdc-3da0-45de-bfdd-ef818739b8c0 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 
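The nova.virt.hardware records above and below walk from the flavor/image limits (0 means "no preference", capped at 65536 per dimension) to the set of possible CPU topologies for the instance's vCPU count. The following is a simplified, self-contained sketch of that enumeration; the real logic in nova/virt/hardware.py additionally applies preferences and sorting.

    import itertools


    def possible_cpu_topologies(vcpus, max_sockets=65536, max_cores=65536,
                                max_threads=65536):
        """Enumerate (sockets, cores, threads) triples whose product equals
        the vCPU count and that respect the per-dimension limits.

        A simplified illustration of what the "Build topologies for N vcpu(s)"
        and "Got N possible topologies" lines are reporting.
        """
        found = []
        for sockets, cores, threads in itertools.product(
                range(1, min(vcpus, max_sockets) + 1),
                range(1, min(vcpus, max_cores) + 1),
                range(1, min(vcpus, max_threads) + 1)):
            if sockets * cores * threads == vcpus:
                found.append((sockets, cores, threads))
        return found


    # For the m1.nano flavor in the log (1 vCPU) this yields a single topology,
    # matching "Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)]":
    print(possible_cpu_topologies(1))  # [(1, 1, 1)]
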
2394.616684] env[63279]: DEBUG nova.virt.hardware [None req-5ffe0fdc-3da0-45de-bfdd-ef818739b8c0 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 2394.616853] env[63279]: DEBUG nova.virt.hardware [None req-5ffe0fdc-3da0-45de-bfdd-ef818739b8c0 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Got 1 possible topologies {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2394.617025] env[63279]: DEBUG nova.virt.hardware [None req-5ffe0fdc-3da0-45de-bfdd-ef818739b8c0 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2394.617212] env[63279]: DEBUG nova.virt.hardware [None req-5ffe0fdc-3da0-45de-bfdd-ef818739b8c0 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2394.623294] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-5ffe0fdc-3da0-45de-bfdd-ef818739b8c0 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] [instance: 36354325-dee0-406e-8eb6-bc3cf347a403] Reconfiguring VM to attach interface {{(pid=63279) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1923}} [ 2394.625610] env[63279]: DEBUG oslo_concurrency.lockutils [None req-263bc8f6-2264-4eb6-9d9d-c26895c369d4 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Releasing lock "refresh_cache-5961728f-c65a-425d-8f6d-5763c79bbe1a" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2394.625879] env[63279]: DEBUG nova.compute.manager [None req-263bc8f6-2264-4eb6-9d9d-c26895c369d4 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] [instance: 5961728f-c65a-425d-8f6d-5763c79bbe1a] Instance network_info: |[{"id": "92ca2582-9c5e-4cf7-bdf0-a0135ce33065", "address": "fa:16:3e:63:c7:89", "network": {"id": "2caeac4f-4d6f-49f6-ad75-055171bad9b0", "bridge": "br-int", "label": "tempest-ServersTestJSON-1264030443-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4fcef39e334249afb9636455802059c5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9fa933df-d66f-485e-8cf9-eda7f1a7f283", "external-id": "nsx-vlan-transportzone-87", "segmentation_id": 87, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap92ca2582-9c", "ovs_interfaceid": "92ca2582-9c5e-4cf7-bdf0-a0135ce33065", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| 
{{(pid=63279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 2394.626448] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-128c7f4e-aab8-4e4a-86bb-ff12a242fca2 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2394.637997] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-263bc8f6-2264-4eb6-9d9d-c26895c369d4 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] [instance: 5961728f-c65a-425d-8f6d-5763c79bbe1a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:63:c7:89', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '9fa933df-d66f-485e-8cf9-eda7f1a7f283', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '92ca2582-9c5e-4cf7-bdf0-a0135ce33065', 'vif_model': 'vmxnet3'}] {{(pid=63279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2394.644853] env[63279]: DEBUG oslo.service.loopingcall [None req-263bc8f6-2264-4eb6-9d9d-c26895c369d4 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2394.645308] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5961728f-c65a-425d-8f6d-5763c79bbe1a] Creating VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2394.645884] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f18962c5-dc09-409b-b849-6f6e4395aba4 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2394.660286] env[63279]: DEBUG oslo_concurrency.lockutils [None req-9c0b8ad2-bbf9-437d-83a5-0240f7c3c8cc tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2394.660522] env[63279]: DEBUG oslo_concurrency.lockutils [None req-9c0b8ad2-bbf9-437d-83a5-0240f7c3c8cc tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2394.661928] env[63279]: INFO nova.compute.claims [None req-9c0b8ad2-bbf9-437d-83a5-0240f7c3c8cc tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: 6e8d9959-7c9d-48ee-81a5-bbdc6234248f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2394.665356] env[63279]: DEBUG oslo_vmware.api [None req-5ffe0fdc-3da0-45de-bfdd-ef818739b8c0 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Waiting for the task: (returnval){ [ 2394.665356] env[63279]: value = "task-2088155" [ 2394.665356] env[63279]: _type = "Task" [ 2394.665356] env[63279]: } to complete. 
{{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2394.670098] env[63279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2394.670098] env[63279]: value = "task-2088156" [ 2394.670098] env[63279]: _type = "Task" [ 2394.670098] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2394.675680] env[63279]: DEBUG oslo_vmware.api [None req-5ffe0fdc-3da0-45de-bfdd-ef818739b8c0 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Task: {'id': task-2088155, 'name': ReconfigVM_Task} progress is 10%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2394.680542] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2088156, 'name': CreateVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2394.690606] env[63279]: DEBUG nova.compute.manager [req-eb45d37f-5201-475b-ac4d-18c77d04db69 req-f3fe805c-ce88-4ed7-8205-e02e060315f4 service nova] [instance: 5961728f-c65a-425d-8f6d-5763c79bbe1a] Received event network-changed-92ca2582-9c5e-4cf7-bdf0-a0135ce33065 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2394.691103] env[63279]: DEBUG nova.compute.manager [req-eb45d37f-5201-475b-ac4d-18c77d04db69 req-f3fe805c-ce88-4ed7-8205-e02e060315f4 service nova] [instance: 5961728f-c65a-425d-8f6d-5763c79bbe1a] Refreshing instance network info cache due to event network-changed-92ca2582-9c5e-4cf7-bdf0-a0135ce33065. {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11655}} [ 2394.691103] env[63279]: DEBUG oslo_concurrency.lockutils [req-eb45d37f-5201-475b-ac4d-18c77d04db69 req-f3fe805c-ce88-4ed7-8205-e02e060315f4 service nova] Acquiring lock "refresh_cache-5961728f-c65a-425d-8f6d-5763c79bbe1a" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2394.691437] env[63279]: DEBUG oslo_concurrency.lockutils [req-eb45d37f-5201-475b-ac4d-18c77d04db69 req-f3fe805c-ce88-4ed7-8205-e02e060315f4 service nova] Acquired lock "refresh_cache-5961728f-c65a-425d-8f6d-5763c79bbe1a" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2394.691437] env[63279]: DEBUG nova.network.neutron [req-eb45d37f-5201-475b-ac4d-18c77d04db69 req-f3fe805c-ce88-4ed7-8205-e02e060315f4 service nova] [instance: 5961728f-c65a-425d-8f6d-5763c79bbe1a] Refreshing network info cache for port 92ca2582-9c5e-4cf7-bdf0-a0135ce33065 {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2394.709873] env[63279]: DEBUG nova.compute.manager [req-a37ea268-3f6f-45c7-8a0e-4d034c43faf9 req-49a61642-59ae-4d0d-93ad-0e96ba8b1577 service nova] [instance: 36354325-dee0-406e-8eb6-bc3cf347a403] Received event network-changed-5426dc4d-6f5e-46fc-9a7e-c3ba98bdc032 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2394.710184] env[63279]: DEBUG nova.compute.manager [req-a37ea268-3f6f-45c7-8a0e-4d034c43faf9 req-49a61642-59ae-4d0d-93ad-0e96ba8b1577 service nova] [instance: 36354325-dee0-406e-8eb6-bc3cf347a403] Refreshing instance network info cache due to event network-changed-5426dc4d-6f5e-46fc-9a7e-c3ba98bdc032. 
{{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11655}} [ 2394.710404] env[63279]: DEBUG oslo_concurrency.lockutils [req-a37ea268-3f6f-45c7-8a0e-4d034c43faf9 req-49a61642-59ae-4d0d-93ad-0e96ba8b1577 service nova] Acquiring lock "refresh_cache-36354325-dee0-406e-8eb6-bc3cf347a403" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2394.710550] env[63279]: DEBUG oslo_concurrency.lockutils [req-a37ea268-3f6f-45c7-8a0e-4d034c43faf9 req-49a61642-59ae-4d0d-93ad-0e96ba8b1577 service nova] Acquired lock "refresh_cache-36354325-dee0-406e-8eb6-bc3cf347a403" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2394.710712] env[63279]: DEBUG nova.network.neutron [req-a37ea268-3f6f-45c7-8a0e-4d034c43faf9 req-49a61642-59ae-4d0d-93ad-0e96ba8b1577 service nova] [instance: 36354325-dee0-406e-8eb6-bc3cf347a403] Refreshing network info cache for port 5426dc4d-6f5e-46fc-9a7e-c3ba98bdc032 {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2394.761838] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2394.874517] env[63279]: DEBUG oslo_concurrency.lockutils [None req-43bd4868-7097-4981-87a8-1d823ea75ab9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Releasing lock "refresh_cache-efddec10-b496-446e-a05a-72c9f2d86ed9" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2394.995533] env[63279]: DEBUG oslo_concurrency.lockutils [None req-eef685fe-b99f-4100-84e0-11e59fa3a988 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Acquiring lock "fcdd3076-2b53-4850-9730-2f877e2cabfd" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2394.995890] env[63279]: DEBUG oslo_concurrency.lockutils [None req-eef685fe-b99f-4100-84e0-11e59fa3a988 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Lock "fcdd3076-2b53-4850-9730-2f877e2cabfd" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.001s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2394.996212] env[63279]: DEBUG nova.compute.manager [None req-eef685fe-b99f-4100-84e0-11e59fa3a988 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: fcdd3076-2b53-4850-9730-2f877e2cabfd] Going to confirm migration 7 {{(pid=63279) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:5250}} [ 2395.180799] env[63279]: DEBUG oslo_vmware.api [None req-5ffe0fdc-3da0-45de-bfdd-ef818739b8c0 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Task: {'id': task-2088155, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2395.184888] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2088156, 'name': CreateVM_Task, 'duration_secs': 0.299382} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2395.185059] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5961728f-c65a-425d-8f6d-5763c79bbe1a] Created VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2395.185720] env[63279]: DEBUG oslo_concurrency.lockutils [None req-263bc8f6-2264-4eb6-9d9d-c26895c369d4 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2395.185886] env[63279]: DEBUG oslo_concurrency.lockutils [None req-263bc8f6-2264-4eb6-9d9d-c26895c369d4 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2395.186221] env[63279]: DEBUG oslo_concurrency.lockutils [None req-263bc8f6-2264-4eb6-9d9d-c26895c369d4 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2395.186474] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c6a4d1f3-0c05-4e3d-bcaf-2147b191150e {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2395.191278] env[63279]: DEBUG oslo_vmware.api [None req-263bc8f6-2264-4eb6-9d9d-c26895c369d4 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Waiting for the task: (returnval){ [ 2395.191278] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]5251f3b8-1543-1565-e678-fcfc60b932bd" [ 2395.191278] env[63279]: _type = "Task" [ 2395.191278] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2395.201581] env[63279]: DEBUG oslo_vmware.api [None req-263bc8f6-2264-4eb6-9d9d-c26895c369d4 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]5251f3b8-1543-1565-e678-fcfc60b932bd, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2395.262583] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-43bd4868-7097-4981-87a8-1d823ea75ab9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] [instance: efddec10-b496-446e-a05a-72c9f2d86ed9] Destroying instance {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2395.263551] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f36b1a50-5156-473b-a895-65219af96d2a {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2395.271622] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-43bd4868-7097-4981-87a8-1d823ea75ab9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] [instance: efddec10-b496-446e-a05a-72c9f2d86ed9] Unregistering the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2395.271897] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8a8e83a4-1db7-47c5-8106-b85a7b91aa49 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2395.417056] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-43bd4868-7097-4981-87a8-1d823ea75ab9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] [instance: efddec10-b496-446e-a05a-72c9f2d86ed9] Unregistered the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2395.417278] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-43bd4868-7097-4981-87a8-1d823ea75ab9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] [instance: efddec10-b496-446e-a05a-72c9f2d86ed9] Deleting contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2395.417621] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-43bd4868-7097-4981-87a8-1d823ea75ab9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Deleting the datastore file [datastore1] efddec10-b496-446e-a05a-72c9f2d86ed9 {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2395.417819] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c3484a09-ff44-49ee-b363-5699048188f6 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2395.424831] env[63279]: DEBUG oslo_vmware.api [None req-43bd4868-7097-4981-87a8-1d823ea75ab9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Waiting for the task: (returnval){ [ 2395.424831] env[63279]: value = "task-2088158" [ 2395.424831] env[63279]: _type = "Task" [ 2395.424831] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2395.433052] env[63279]: DEBUG oslo_vmware.api [None req-43bd4868-7097-4981-87a8-1d823ea75ab9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Task: {'id': task-2088158, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2395.562329] env[63279]: DEBUG oslo_concurrency.lockutils [None req-eef685fe-b99f-4100-84e0-11e59fa3a988 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Acquiring lock "refresh_cache-fcdd3076-2b53-4850-9730-2f877e2cabfd" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2395.562551] env[63279]: DEBUG oslo_concurrency.lockutils [None req-eef685fe-b99f-4100-84e0-11e59fa3a988 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Acquired lock "refresh_cache-fcdd3076-2b53-4850-9730-2f877e2cabfd" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2395.562714] env[63279]: DEBUG nova.network.neutron [None req-eef685fe-b99f-4100-84e0-11e59fa3a988 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: fcdd3076-2b53-4850-9730-2f877e2cabfd] Building network info cache for instance {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2395.562953] env[63279]: DEBUG nova.objects.instance [None req-eef685fe-b99f-4100-84e0-11e59fa3a988 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Lazy-loading 'info_cache' on Instance uuid fcdd3076-2b53-4850-9730-2f877e2cabfd {{(pid=63279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2395.655208] env[63279]: DEBUG nova.network.neutron [req-eb45d37f-5201-475b-ac4d-18c77d04db69 req-f3fe805c-ce88-4ed7-8205-e02e060315f4 service nova] [instance: 5961728f-c65a-425d-8f6d-5763c79bbe1a] Updated VIF entry in instance network info cache for port 92ca2582-9c5e-4cf7-bdf0-a0135ce33065. 
{{(pid=63279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2395.655595] env[63279]: DEBUG nova.network.neutron [req-eb45d37f-5201-475b-ac4d-18c77d04db69 req-f3fe805c-ce88-4ed7-8205-e02e060315f4 service nova] [instance: 5961728f-c65a-425d-8f6d-5763c79bbe1a] Updating instance_info_cache with network_info: [{"id": "92ca2582-9c5e-4cf7-bdf0-a0135ce33065", "address": "fa:16:3e:63:c7:89", "network": {"id": "2caeac4f-4d6f-49f6-ad75-055171bad9b0", "bridge": "br-int", "label": "tempest-ServersTestJSON-1264030443-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4fcef39e334249afb9636455802059c5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9fa933df-d66f-485e-8cf9-eda7f1a7f283", "external-id": "nsx-vlan-transportzone-87", "segmentation_id": 87, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap92ca2582-9c", "ovs_interfaceid": "92ca2582-9c5e-4cf7-bdf0-a0135ce33065", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2395.659370] env[63279]: DEBUG nova.network.neutron [req-a37ea268-3f6f-45c7-8a0e-4d034c43faf9 req-49a61642-59ae-4d0d-93ad-0e96ba8b1577 service nova] [instance: 36354325-dee0-406e-8eb6-bc3cf347a403] Updated VIF entry in instance network info cache for port 5426dc4d-6f5e-46fc-9a7e-c3ba98bdc032. 
{{(pid=63279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2395.659370] env[63279]: DEBUG nova.network.neutron [req-a37ea268-3f6f-45c7-8a0e-4d034c43faf9 req-49a61642-59ae-4d0d-93ad-0e96ba8b1577 service nova] [instance: 36354325-dee0-406e-8eb6-bc3cf347a403] Updating instance_info_cache with network_info: [{"id": "a05a3f68-d80e-46c9-9065-1f803e7c04ff", "address": "fa:16:3e:8e:d1:f4", "network": {"id": "e0e614b7-de4b-485e-8824-582faae8febd", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1102025073-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.183", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b5b21bc5072e4945a19a782dd9561709", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "41278529-8bd2-44a1-97c8-03967faa3ff7", "external-id": "nsx-vlan-transportzone-749", "segmentation_id": 749, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa05a3f68-d8", "ovs_interfaceid": "a05a3f68-d80e-46c9-9065-1f803e7c04ff", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "5426dc4d-6f5e-46fc-9a7e-c3ba98bdc032", "address": "fa:16:3e:06:6c:96", "network": {"id": "e0e614b7-de4b-485e-8824-582faae8febd", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1102025073-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b5b21bc5072e4945a19a782dd9561709", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "41278529-8bd2-44a1-97c8-03967faa3ff7", "external-id": "nsx-vlan-transportzone-749", "segmentation_id": 749, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5426dc4d-6f", "ovs_interfaceid": "5426dc4d-6f5e-46fc-9a7e-c3ba98bdc032", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2395.680965] env[63279]: DEBUG oslo_vmware.api [None req-5ffe0fdc-3da0-45de-bfdd-ef818739b8c0 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Task: {'id': task-2088155, 'name': ReconfigVM_Task, 'duration_secs': 0.564732} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2395.684122] env[63279]: DEBUG oslo_concurrency.lockutils [None req-5ffe0fdc-3da0-45de-bfdd-ef818739b8c0 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Releasing lock "36354325-dee0-406e-8eb6-bc3cf347a403" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2395.684122] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-5ffe0fdc-3da0-45de-bfdd-ef818739b8c0 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] [instance: 36354325-dee0-406e-8eb6-bc3cf347a403] Reconfigured VM to attach interface {{(pid=63279) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1937}} [ 2395.704013] env[63279]: DEBUG oslo_vmware.api [None req-263bc8f6-2264-4eb6-9d9d-c26895c369d4 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]5251f3b8-1543-1565-e678-fcfc60b932bd, 'name': SearchDatastore_Task, 'duration_secs': 0.011279} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2395.704013] env[63279]: DEBUG oslo_concurrency.lockutils [None req-263bc8f6-2264-4eb6-9d9d-c26895c369d4 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2395.704364] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-263bc8f6-2264-4eb6-9d9d-c26895c369d4 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] [instance: 5961728f-c65a-425d-8f6d-5763c79bbe1a] Processing image 30887889-e45b-4f67-8b3c-16216e594a90 {{(pid=63279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2395.704616] env[63279]: DEBUG oslo_concurrency.lockutils [None req-263bc8f6-2264-4eb6-9d9d-c26895c369d4 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2395.704825] env[63279]: DEBUG oslo_concurrency.lockutils [None req-263bc8f6-2264-4eb6-9d9d-c26895c369d4 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2395.705097] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-263bc8f6-2264-4eb6-9d9d-c26895c369d4 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2395.705420] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-97b48685-af67-4ce3-9d08-928c1e680ca2 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
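Most of the surrounding records alternate between "Invoking <SomeTask>" and "_poll_task ... progress is N%" until "completed successfully", which is oslo.vmware's wait_for_task() polling a vCenter task object. The sketch below is a generic version of that polling loop built on oslo.service's FixedIntervalLoopingCall, not the actual oslo.vmware implementation; poll_fn is a hypothetical callable returning (done, result).

    from oslo_service import loopingcall


    def wait_for(poll_fn, interval=0.5):
        """Poll `poll_fn` at a fixed interval and return its result when done."""

        def _poll():
            done, result = poll_fn()
            if done:
                # LoopingCallDone stops the timer; its argument becomes the
                # value returned by .wait() below.
                raise loopingcall.LoopingCallDone(result)

        timer = loopingcall.FixedIntervalLoopingCall(_poll)
        return timer.start(interval=interval).wait()


    # Usage example with a fake task that "completes" on the third poll:
    state = {"calls": 0}

    def fake_task():
        state["calls"] += 1
        return state["calls"] >= 3, "task-done"

    print(wait_for(fake_task, interval=0.01))  # -> task-done
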
2395.716222] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-263bc8f6-2264-4eb6-9d9d-c26895c369d4 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2395.716438] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-263bc8f6-2264-4eb6-9d9d-c26895c369d4 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2395.717173] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-eaba27bd-486f-4538-ac31-9cf8e835d9a5 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2395.722236] env[63279]: DEBUG oslo_vmware.api [None req-263bc8f6-2264-4eb6-9d9d-c26895c369d4 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Waiting for the task: (returnval){ [ 2395.722236] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52d37077-3cb8-46aa-c52b-581dec00a2f9" [ 2395.722236] env[63279]: _type = "Task" [ 2395.722236] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2395.732510] env[63279]: DEBUG oslo_vmware.api [None req-263bc8f6-2264-4eb6-9d9d-c26895c369d4 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52d37077-3cb8-46aa-c52b-581dec00a2f9, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2395.815124] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-038ce6d6-2e9f-4c5e-8c9b-e8479a2ab5e0 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2395.822653] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e16211d-78af-4987-b026-ff465f6ff83c {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2395.852359] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aad1b1e7-2917-4a13-85e4-437bc24319eb {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2395.859525] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d2a7d37-fe88-4aa0-be83-aab38708b59d {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2395.872679] env[63279]: DEBUG nova.compute.provider_tree [None req-9c0b8ad2-bbf9-437d-83a5-0240f7c3c8cc tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Inventory has not changed in ProviderTree for provider: 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2395.933630] env[63279]: DEBUG oslo_vmware.api [None req-43bd4868-7097-4981-87a8-1d823ea75ab9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Task: {'id': task-2088158, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.135728} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2395.933867] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-43bd4868-7097-4981-87a8-1d823ea75ab9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Deleted the datastore file {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2395.934102] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-43bd4868-7097-4981-87a8-1d823ea75ab9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] [instance: efddec10-b496-446e-a05a-72c9f2d86ed9] Deleted contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2395.934300] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-43bd4868-7097-4981-87a8-1d823ea75ab9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] [instance: efddec10-b496-446e-a05a-72c9f2d86ed9] Instance destroyed {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2395.952113] env[63279]: INFO nova.scheduler.client.report [None req-43bd4868-7097-4981-87a8-1d823ea75ab9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Deleted allocations for instance efddec10-b496-446e-a05a-72c9f2d86ed9 [ 2396.160379] env[63279]: DEBUG oslo_concurrency.lockutils [req-eb45d37f-5201-475b-ac4d-18c77d04db69 req-f3fe805c-ce88-4ed7-8205-e02e060315f4 service nova] Releasing lock "refresh_cache-5961728f-c65a-425d-8f6d-5763c79bbe1a" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2396.160917] env[63279]: DEBUG oslo_concurrency.lockutils [req-a37ea268-3f6f-45c7-8a0e-4d034c43faf9 req-49a61642-59ae-4d0d-93ad-0e96ba8b1577 service nova] Releasing lock "refresh_cache-36354325-dee0-406e-8eb6-bc3cf347a403" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2396.186247] env[63279]: DEBUG oslo_concurrency.lockutils [None req-5ffe0fdc-3da0-45de-bfdd-ef818739b8c0 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Lock "interface-36354325-dee0-406e-8eb6-bc3cf347a403-5426dc4d-6f5e-46fc-9a7e-c3ba98bdc032" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 11.235s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2396.232567] env[63279]: DEBUG oslo_vmware.api [None req-263bc8f6-2264-4eb6-9d9d-c26895c369d4 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52d37077-3cb8-46aa-c52b-581dec00a2f9, 'name': SearchDatastore_Task, 'duration_secs': 0.009403} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2396.233289] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cfb6287a-4259-4625-a809-a0ee90609a45 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2396.238372] env[63279]: DEBUG oslo_vmware.api [None req-263bc8f6-2264-4eb6-9d9d-c26895c369d4 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Waiting for the task: (returnval){ [ 2396.238372] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]5277a758-e561-8e17-6792-758250fdb626" [ 2396.238372] env[63279]: _type = "Task" [ 2396.238372] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2396.245614] env[63279]: DEBUG oslo_vmware.api [None req-263bc8f6-2264-4eb6-9d9d-c26895c369d4 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]5277a758-e561-8e17-6792-758250fdb626, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2396.375690] env[63279]: DEBUG nova.scheduler.client.report [None req-9c0b8ad2-bbf9-437d-83a5-0240f7c3c8cc tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Inventory has not changed for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2396.456919] env[63279]: DEBUG oslo_concurrency.lockutils [None req-43bd4868-7097-4981-87a8-1d823ea75ab9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2396.748413] env[63279]: DEBUG oslo_vmware.api [None req-263bc8f6-2264-4eb6-9d9d-c26895c369d4 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]5277a758-e561-8e17-6792-758250fdb626, 'name': SearchDatastore_Task, 'duration_secs': 0.014534} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2396.748683] env[63279]: DEBUG oslo_concurrency.lockutils [None req-263bc8f6-2264-4eb6-9d9d-c26895c369d4 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2396.748935] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-263bc8f6-2264-4eb6-9d9d-c26895c369d4 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] 5961728f-c65a-425d-8f6d-5763c79bbe1a/5961728f-c65a-425d-8f6d-5763c79bbe1a.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2396.749207] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-36c34f3c-dd61-43fd-bbb8-cb1a15750a62 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2396.756082] env[63279]: DEBUG oslo_vmware.api [None req-263bc8f6-2264-4eb6-9d9d-c26895c369d4 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Waiting for the task: (returnval){ [ 2396.756082] env[63279]: value = "task-2088159" [ 2396.756082] env[63279]: _type = "Task" [ 2396.756082] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2396.763540] env[63279]: DEBUG oslo_vmware.api [None req-263bc8f6-2264-4eb6-9d9d-c26895c369d4 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Task: {'id': task-2088159, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2396.859857] env[63279]: DEBUG nova.network.neutron [None req-eef685fe-b99f-4100-84e0-11e59fa3a988 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: fcdd3076-2b53-4850-9730-2f877e2cabfd] Updating instance_info_cache with network_info: [{"id": "7baa1106-63c3-49cd-8d80-54c763d1b2b3", "address": "fa:16:3e:cc:16:0a", "network": {"id": "82ca068b-acea-4f60-b0b3-68ea1e08aebe", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-571699353-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.165", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f39174e9ff5649e0ade4391da383dfb2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "57d4be17-536f-4a81-bea9-6547bd50f4a3", "external-id": "nsx-vlan-transportzone-163", "segmentation_id": 163, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7baa1106-63", "ovs_interfaceid": "7baa1106-63c3-49cd-8d80-54c763d1b2b3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2396.880773] env[63279]: DEBUG oslo_concurrency.lockutils [None req-9c0b8ad2-bbf9-437d-83a5-0240f7c3c8cc tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.220s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2396.881459] env[63279]: DEBUG nova.compute.manager [None req-9c0b8ad2-bbf9-437d-83a5-0240f7c3c8cc tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: 6e8d9959-7c9d-48ee-81a5-bbdc6234248f] Start building networks asynchronously for instance. 
{{(pid=63279) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 2396.885082] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 2.123s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2396.885145] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2396.885311] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=63279) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2396.885638] env[63279]: DEBUG oslo_concurrency.lockutils [None req-43bd4868-7097-4981-87a8-1d823ea75ab9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.429s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2396.885876] env[63279]: DEBUG nova.objects.instance [None req-43bd4868-7097-4981-87a8-1d823ea75ab9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Lazy-loading 'resources' on Instance uuid efddec10-b496-446e-a05a-72c9f2d86ed9 {{(pid=63279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2396.887489] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-066ddf5f-cd57-43bd-b551-5fc3b148ebd5 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2396.894449] env[63279]: DEBUG nova.compute.manager [req-36edc0cf-8dbe-4f36-9ade-0fa6b7a6b413 req-e30f7d7d-c11e-4e59-a853-37704aef8d36 service nova] [instance: efddec10-b496-446e-a05a-72c9f2d86ed9] Received event network-vif-unplugged-cfd0d031-3ccd-4ad0-9c99-6436a05c108d {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2396.894693] env[63279]: DEBUG oslo_concurrency.lockutils [req-36edc0cf-8dbe-4f36-9ade-0fa6b7a6b413 req-e30f7d7d-c11e-4e59-a853-37704aef8d36 service nova] Acquiring lock "efddec10-b496-446e-a05a-72c9f2d86ed9-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2396.894934] env[63279]: DEBUG oslo_concurrency.lockutils [req-36edc0cf-8dbe-4f36-9ade-0fa6b7a6b413 req-e30f7d7d-c11e-4e59-a853-37704aef8d36 service nova] Lock "efddec10-b496-446e-a05a-72c9f2d86ed9-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2396.895141] env[63279]: DEBUG oslo_concurrency.lockutils [req-36edc0cf-8dbe-4f36-9ade-0fa6b7a6b413 req-e30f7d7d-c11e-4e59-a853-37704aef8d36 service nova] Lock 
"efddec10-b496-446e-a05a-72c9f2d86ed9-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2396.895357] env[63279]: DEBUG nova.compute.manager [req-36edc0cf-8dbe-4f36-9ade-0fa6b7a6b413 req-e30f7d7d-c11e-4e59-a853-37704aef8d36 service nova] [instance: efddec10-b496-446e-a05a-72c9f2d86ed9] No waiting events found dispatching network-vif-unplugged-cfd0d031-3ccd-4ad0-9c99-6436a05c108d {{(pid=63279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 2396.895616] env[63279]: WARNING nova.compute.manager [req-36edc0cf-8dbe-4f36-9ade-0fa6b7a6b413 req-e30f7d7d-c11e-4e59-a853-37704aef8d36 service nova] [instance: efddec10-b496-446e-a05a-72c9f2d86ed9] Received unexpected event network-vif-unplugged-cfd0d031-3ccd-4ad0-9c99-6436a05c108d for instance with vm_state shelved_offloaded and task_state None. [ 2396.895772] env[63279]: DEBUG nova.compute.manager [req-36edc0cf-8dbe-4f36-9ade-0fa6b7a6b413 req-e30f7d7d-c11e-4e59-a853-37704aef8d36 service nova] [instance: efddec10-b496-446e-a05a-72c9f2d86ed9] Received event network-changed-cfd0d031-3ccd-4ad0-9c99-6436a05c108d {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2396.896008] env[63279]: DEBUG nova.compute.manager [req-36edc0cf-8dbe-4f36-9ade-0fa6b7a6b413 req-e30f7d7d-c11e-4e59-a853-37704aef8d36 service nova] [instance: efddec10-b496-446e-a05a-72c9f2d86ed9] Refreshing instance network info cache due to event network-changed-cfd0d031-3ccd-4ad0-9c99-6436a05c108d. {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11655}} [ 2396.896207] env[63279]: DEBUG oslo_concurrency.lockutils [req-36edc0cf-8dbe-4f36-9ade-0fa6b7a6b413 req-e30f7d7d-c11e-4e59-a853-37704aef8d36 service nova] Acquiring lock "refresh_cache-efddec10-b496-446e-a05a-72c9f2d86ed9" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2396.896384] env[63279]: DEBUG oslo_concurrency.lockutils [req-36edc0cf-8dbe-4f36-9ade-0fa6b7a6b413 req-e30f7d7d-c11e-4e59-a853-37704aef8d36 service nova] Acquired lock "refresh_cache-efddec10-b496-446e-a05a-72c9f2d86ed9" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2396.896583] env[63279]: DEBUG nova.network.neutron [req-36edc0cf-8dbe-4f36-9ade-0fa6b7a6b413 req-e30f7d7d-c11e-4e59-a853-37704aef8d36 service nova] [instance: efddec10-b496-446e-a05a-72c9f2d86ed9] Refreshing network info cache for port cfd0d031-3ccd-4ad0-9c99-6436a05c108d {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2396.902263] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-565857d2-19a8-4012-9bf9-3c2c4a92f52c {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2396.919540] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-840741d3-603a-44e6-9042-8edb44c4b48f {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2396.927036] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c656051a-ad58-4f04-89b4-02ff32c7d872 {{(pid=63279) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2396.959388] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=179644MB free_disk=170GB free_vcpus=48 pci_devices=None {{(pid=63279) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2396.959534] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2397.265250] env[63279]: DEBUG oslo_vmware.api [None req-263bc8f6-2264-4eb6-9d9d-c26895c369d4 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Task: {'id': task-2088159, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.446144} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2397.265505] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-263bc8f6-2264-4eb6-9d9d-c26895c369d4 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] 5961728f-c65a-425d-8f6d-5763c79bbe1a/5961728f-c65a-425d-8f6d-5763c79bbe1a.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2397.265720] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-263bc8f6-2264-4eb6-9d9d-c26895c369d4 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] [instance: 5961728f-c65a-425d-8f6d-5763c79bbe1a] Extending root virtual disk to 1048576 {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2397.265965] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-79be7b26-afd0-4140-8dd4-3b9899c4b175 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2397.272681] env[63279]: DEBUG oslo_vmware.api [None req-263bc8f6-2264-4eb6-9d9d-c26895c369d4 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Waiting for the task: (returnval){ [ 2397.272681] env[63279]: value = "task-2088160" [ 2397.272681] env[63279]: _type = "Task" [ 2397.272681] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2397.279979] env[63279]: DEBUG oslo_vmware.api [None req-263bc8f6-2264-4eb6-9d9d-c26895c369d4 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Task: {'id': task-2088160, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2397.362519] env[63279]: DEBUG oslo_concurrency.lockutils [None req-eef685fe-b99f-4100-84e0-11e59fa3a988 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Releasing lock "refresh_cache-fcdd3076-2b53-4850-9730-2f877e2cabfd" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2397.362783] env[63279]: DEBUG nova.objects.instance [None req-eef685fe-b99f-4100-84e0-11e59fa3a988 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Lazy-loading 'migration_context' on Instance uuid fcdd3076-2b53-4850-9730-2f877e2cabfd {{(pid=63279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2397.388982] env[63279]: DEBUG nova.compute.utils [None req-9c0b8ad2-bbf9-437d-83a5-0240f7c3c8cc tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Using /dev/sd instead of None {{(pid=63279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2397.390279] env[63279]: DEBUG nova.compute.manager [None req-9c0b8ad2-bbf9-437d-83a5-0240f7c3c8cc tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: 6e8d9959-7c9d-48ee-81a5-bbdc6234248f] Allocating IP information in the background. {{(pid=63279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 2397.390453] env[63279]: DEBUG nova.network.neutron [None req-9c0b8ad2-bbf9-437d-83a5-0240f7c3c8cc tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: 6e8d9959-7c9d-48ee-81a5-bbdc6234248f] allocate_for_instance() {{(pid=63279) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2397.392645] env[63279]: DEBUG nova.objects.instance [None req-43bd4868-7097-4981-87a8-1d823ea75ab9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Lazy-loading 'numa_topology' on Instance uuid efddec10-b496-446e-a05a-72c9f2d86ed9 {{(pid=63279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2397.462206] env[63279]: DEBUG nova.policy [None req-9c0b8ad2-bbf9-437d-83a5-0240f7c3c8cc tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c7cd64afda9f4eb488bdf76a36f2fee6', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f9427c264e8e41998f579af352cb48cf', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63279) authorize /opt/stack/nova/nova/policy.py:192}} [ 2397.656990] env[63279]: DEBUG oslo_concurrency.lockutils [None req-c4be1975-3675-40a6-8f51-cbd564231322 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Acquiring lock "interface-36354325-dee0-406e-8eb6-bc3cf347a403-5426dc4d-6f5e-46fc-9a7e-c3ba98bdc032" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2397.657369] env[63279]: DEBUG oslo_concurrency.lockutils 
[None req-c4be1975-3675-40a6-8f51-cbd564231322 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Lock "interface-36354325-dee0-406e-8eb6-bc3cf347a403-5426dc4d-6f5e-46fc-9a7e-c3ba98bdc032" acquired by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: waited 0.001s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2397.747611] env[63279]: DEBUG nova.network.neutron [req-36edc0cf-8dbe-4f36-9ade-0fa6b7a6b413 req-e30f7d7d-c11e-4e59-a853-37704aef8d36 service nova] [instance: efddec10-b496-446e-a05a-72c9f2d86ed9] Updated VIF entry in instance network info cache for port cfd0d031-3ccd-4ad0-9c99-6436a05c108d. {{(pid=63279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2397.747976] env[63279]: DEBUG nova.network.neutron [req-36edc0cf-8dbe-4f36-9ade-0fa6b7a6b413 req-e30f7d7d-c11e-4e59-a853-37704aef8d36 service nova] [instance: efddec10-b496-446e-a05a-72c9f2d86ed9] Updating instance_info_cache with network_info: [{"id": "cfd0d031-3ccd-4ad0-9c99-6436a05c108d", "address": "fa:16:3e:ff:42:fd", "network": {"id": "4f906777-9da9-42b1-9146-359f04c7c47f", "bridge": null, "label": "tempest-AttachVolumeShelveTestJSON-923457018-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.158", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "131db3d2daa24712b6e11592cf789b33", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tapcfd0d031-3c", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2397.782952] env[63279]: DEBUG oslo_vmware.api [None req-263bc8f6-2264-4eb6-9d9d-c26895c369d4 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Task: {'id': task-2088160, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.058387} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2397.783352] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-263bc8f6-2264-4eb6-9d9d-c26895c369d4 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] [instance: 5961728f-c65a-425d-8f6d-5763c79bbe1a] Extended root virtual disk {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2397.784065] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bcf57f71-ba66-4906-b376-efd97e1929a3 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2397.807947] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-263bc8f6-2264-4eb6-9d9d-c26895c369d4 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] [instance: 5961728f-c65a-425d-8f6d-5763c79bbe1a] Reconfiguring VM instance instance-00000070 to attach disk [datastore1] 5961728f-c65a-425d-8f6d-5763c79bbe1a/5961728f-c65a-425d-8f6d-5763c79bbe1a.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2397.808545] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fb062927-27cf-4a7a-9109-5934b396591e {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2397.829695] env[63279]: DEBUG oslo_vmware.api [None req-263bc8f6-2264-4eb6-9d9d-c26895c369d4 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Waiting for the task: (returnval){ [ 2397.829695] env[63279]: value = "task-2088161" [ 2397.829695] env[63279]: _type = "Task" [ 2397.829695] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2397.842595] env[63279]: DEBUG oslo_vmware.api [None req-263bc8f6-2264-4eb6-9d9d-c26895c369d4 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Task: {'id': task-2088161, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2397.865679] env[63279]: DEBUG nova.objects.base [None req-eef685fe-b99f-4100-84e0-11e59fa3a988 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Object Instance lazy-loaded attributes: info_cache,migration_context {{(pid=63279) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 2397.866604] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ec4a77b-aefb-4f15-a361-8ce631f579d1 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2397.886640] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-445ef462-d7d4-4e02-8348-033515119cf8 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2397.892381] env[63279]: DEBUG oslo_vmware.api [None req-eef685fe-b99f-4100-84e0-11e59fa3a988 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Waiting for the task: (returnval){ [ 2397.892381] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]521fb0ed-fefd-b86d-105d-78e3a9f73f83" [ 2397.892381] env[63279]: _type = "Task" [ 2397.892381] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2397.892941] env[63279]: DEBUG nova.compute.manager [None req-9c0b8ad2-bbf9-437d-83a5-0240f7c3c8cc tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: 6e8d9959-7c9d-48ee-81a5-bbdc6234248f] Start building block device mappings for instance. {{(pid=63279) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 2397.896902] env[63279]: DEBUG nova.network.neutron [None req-9c0b8ad2-bbf9-437d-83a5-0240f7c3c8cc tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: 6e8d9959-7c9d-48ee-81a5-bbdc6234248f] Successfully created port: c72218a8-035f-4947-b145-503e00034664 {{(pid=63279) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2397.901727] env[63279]: DEBUG nova.objects.base [None req-43bd4868-7097-4981-87a8-1d823ea75ab9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Object Instance lazy-loaded attributes: resources,numa_topology {{(pid=63279) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 2397.912695] env[63279]: DEBUG oslo_vmware.api [None req-eef685fe-b99f-4100-84e0-11e59fa3a988 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]521fb0ed-fefd-b86d-105d-78e3a9f73f83, 'name': SearchDatastore_Task, 'duration_secs': 0.007087} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2397.912695] env[63279]: DEBUG oslo_concurrency.lockutils [None req-eef685fe-b99f-4100-84e0-11e59fa3a988 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2398.051804] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-104194f6-4917-47cc-957c-cbb43cff92cc {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2398.059162] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-332a7c07-5ee5-4bad-bbf2-adc6a345e1f0 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2398.101872] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4b52e97-bfcc-44fd-b690-2f9a205be152 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2398.109812] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d2431ac-f65d-476b-8ba7-e76703f63790 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2398.126660] env[63279]: DEBUG nova.compute.provider_tree [None req-43bd4868-7097-4981-87a8-1d823ea75ab9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Inventory has not changed in ProviderTree for provider: 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2398.163048] env[63279]: DEBUG oslo_concurrency.lockutils [None req-c4be1975-3675-40a6-8f51-cbd564231322 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Acquiring lock "36354325-dee0-406e-8eb6-bc3cf347a403" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2398.164213] env[63279]: DEBUG oslo_concurrency.lockutils [None req-c4be1975-3675-40a6-8f51-cbd564231322 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Acquired lock "36354325-dee0-406e-8eb6-bc3cf347a403" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2398.164213] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17d852bb-a4aa-46e7-905a-e2b386d72a04 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2398.183798] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58e3263c-d96d-4dbe-a6ad-b7855cd03a25 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2398.212145] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-c4be1975-3675-40a6-8f51-cbd564231322 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] [instance: 36354325-dee0-406e-8eb6-bc3cf347a403] 
Reconfiguring VM to detach interface {{(pid=63279) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1968}} [ 2398.212467] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9b34ae11-4669-4369-9333-fb71495ecce7 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2398.230316] env[63279]: DEBUG oslo_vmware.api [None req-c4be1975-3675-40a6-8f51-cbd564231322 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Waiting for the task: (returnval){ [ 2398.230316] env[63279]: value = "task-2088162" [ 2398.230316] env[63279]: _type = "Task" [ 2398.230316] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2398.240066] env[63279]: DEBUG oslo_vmware.api [None req-c4be1975-3675-40a6-8f51-cbd564231322 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Task: {'id': task-2088162, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2398.251102] env[63279]: DEBUG oslo_concurrency.lockutils [req-36edc0cf-8dbe-4f36-9ade-0fa6b7a6b413 req-e30f7d7d-c11e-4e59-a853-37704aef8d36 service nova] Releasing lock "refresh_cache-efddec10-b496-446e-a05a-72c9f2d86ed9" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2398.339027] env[63279]: DEBUG oslo_vmware.api [None req-263bc8f6-2264-4eb6-9d9d-c26895c369d4 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Task: {'id': task-2088161, 'name': ReconfigVM_Task, 'duration_secs': 0.289126} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2398.339362] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-263bc8f6-2264-4eb6-9d9d-c26895c369d4 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] [instance: 5961728f-c65a-425d-8f6d-5763c79bbe1a] Reconfigured VM instance instance-00000070 to attach disk [datastore1] 5961728f-c65a-425d-8f6d-5763c79bbe1a/5961728f-c65a-425d-8f6d-5763c79bbe1a.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2398.340058] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a4778ad9-02c8-49be-9ef3-b99859a17e9f {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2398.346193] env[63279]: DEBUG oslo_vmware.api [None req-263bc8f6-2264-4eb6-9d9d-c26895c369d4 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Waiting for the task: (returnval){ [ 2398.346193] env[63279]: value = "task-2088163" [ 2398.346193] env[63279]: _type = "Task" [ 2398.346193] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2398.353743] env[63279]: DEBUG oslo_vmware.api [None req-263bc8f6-2264-4eb6-9d9d-c26895c369d4 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Task: {'id': task-2088163, 'name': Rename_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2398.630399] env[63279]: DEBUG nova.scheduler.client.report [None req-43bd4868-7097-4981-87a8-1d823ea75ab9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Inventory has not changed for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2398.740071] env[63279]: DEBUG oslo_vmware.api [None req-c4be1975-3675-40a6-8f51-cbd564231322 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Task: {'id': task-2088162, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2398.855335] env[63279]: DEBUG oslo_vmware.api [None req-263bc8f6-2264-4eb6-9d9d-c26895c369d4 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Task: {'id': task-2088163, 'name': Rename_Task, 'duration_secs': 0.156232} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2398.855630] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-263bc8f6-2264-4eb6-9d9d-c26895c369d4 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] [instance: 5961728f-c65a-425d-8f6d-5763c79bbe1a] Powering on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2398.855877] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-22c631d7-a4f1-4860-8205-875ba27383a7 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2398.859032] env[63279]: DEBUG oslo_concurrency.lockutils [None req-fbd37efa-dbe8-4925-9302-efee8e5b5950 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Acquiring lock "efddec10-b496-446e-a05a-72c9f2d86ed9" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2398.862082] env[63279]: DEBUG oslo_vmware.api [None req-263bc8f6-2264-4eb6-9d9d-c26895c369d4 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Waiting for the task: (returnval){ [ 2398.862082] env[63279]: value = "task-2088164" [ 2398.862082] env[63279]: _type = "Task" [ 2398.862082] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2398.869290] env[63279]: DEBUG oslo_vmware.api [None req-263bc8f6-2264-4eb6-9d9d-c26895c369d4 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Task: {'id': task-2088164, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2398.911554] env[63279]: DEBUG nova.compute.manager [None req-9c0b8ad2-bbf9-437d-83a5-0240f7c3c8cc tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: 6e8d9959-7c9d-48ee-81a5-bbdc6234248f] Start spawning the instance on the hypervisor. {{(pid=63279) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 2398.938438] env[63279]: DEBUG nova.virt.hardware [None req-9c0b8ad2-bbf9-437d-83a5-0240f7c3c8cc tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-13T17:30:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-13T17:30:01Z,direct_url=,disk_format='vmdk',id=30887889-e45b-4f67-8b3c-16216e594a90,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a935e86eee0a4c38adfe0367d2097a61',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-13T17:30:02Z,virtual_size=,visibility=), allow threads: False {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2398.938681] env[63279]: DEBUG nova.virt.hardware [None req-9c0b8ad2-bbf9-437d-83a5-0240f7c3c8cc tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Flavor limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2398.938844] env[63279]: DEBUG nova.virt.hardware [None req-9c0b8ad2-bbf9-437d-83a5-0240f7c3c8cc tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Image limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2398.939066] env[63279]: DEBUG nova.virt.hardware [None req-9c0b8ad2-bbf9-437d-83a5-0240f7c3c8cc tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Flavor pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2398.939228] env[63279]: DEBUG nova.virt.hardware [None req-9c0b8ad2-bbf9-437d-83a5-0240f7c3c8cc tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Image pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2398.939367] env[63279]: DEBUG nova.virt.hardware [None req-9c0b8ad2-bbf9-437d-83a5-0240f7c3c8cc tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2398.939570] env[63279]: DEBUG nova.virt.hardware [None req-9c0b8ad2-bbf9-437d-83a5-0240f7c3c8cc tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63279) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:573}} [ 2398.939733] env[63279]: DEBUG nova.virt.hardware [None req-9c0b8ad2-bbf9-437d-83a5-0240f7c3c8cc tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 2398.939899] env[63279]: DEBUG nova.virt.hardware [None req-9c0b8ad2-bbf9-437d-83a5-0240f7c3c8cc tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Got 1 possible topologies {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2398.940076] env[63279]: DEBUG nova.virt.hardware [None req-9c0b8ad2-bbf9-437d-83a5-0240f7c3c8cc tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2398.940256] env[63279]: DEBUG nova.virt.hardware [None req-9c0b8ad2-bbf9-437d-83a5-0240f7c3c8cc tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2398.941226] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aebe6bff-7a73-4f90-844b-d9b4428a91e5 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2398.949494] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af4d2454-fe56-4f0e-b314-ff8dcbb02463 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2399.134843] env[63279]: DEBUG oslo_concurrency.lockutils [None req-43bd4868-7097-4981-87a8-1d823ea75ab9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.249s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2399.137688] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 2.178s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2399.242831] env[63279]: DEBUG oslo_vmware.api [None req-c4be1975-3675-40a6-8f51-cbd564231322 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Task: {'id': task-2088162, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2399.373474] env[63279]: DEBUG oslo_vmware.api [None req-263bc8f6-2264-4eb6-9d9d-c26895c369d4 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Task: {'id': task-2088164, 'name': PowerOnVM_Task, 'duration_secs': 0.412013} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2399.373908] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-263bc8f6-2264-4eb6-9d9d-c26895c369d4 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] [instance: 5961728f-c65a-425d-8f6d-5763c79bbe1a] Powered on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2399.374193] env[63279]: INFO nova.compute.manager [None req-263bc8f6-2264-4eb6-9d9d-c26895c369d4 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] [instance: 5961728f-c65a-425d-8f6d-5763c79bbe1a] Took 7.39 seconds to spawn the instance on the hypervisor. [ 2399.374954] env[63279]: DEBUG nova.compute.manager [None req-263bc8f6-2264-4eb6-9d9d-c26895c369d4 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] [instance: 5961728f-c65a-425d-8f6d-5763c79bbe1a] Checking state {{(pid=63279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2399.375939] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-224dd0e9-68a4-4b07-b3e2-32e46fbb9e89 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2399.387292] env[63279]: DEBUG nova.compute.manager [req-6668cf14-f8b5-477b-9acc-ad6bb90871c3 req-a25c0fbd-2431-42f8-91dc-b0d48eb8887c service nova] [instance: 6e8d9959-7c9d-48ee-81a5-bbdc6234248f] Received event network-vif-plugged-c72218a8-035f-4947-b145-503e00034664 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2399.387566] env[63279]: DEBUG oslo_concurrency.lockutils [req-6668cf14-f8b5-477b-9acc-ad6bb90871c3 req-a25c0fbd-2431-42f8-91dc-b0d48eb8887c service nova] Acquiring lock "6e8d9959-7c9d-48ee-81a5-bbdc6234248f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2399.387865] env[63279]: DEBUG oslo_concurrency.lockutils [req-6668cf14-f8b5-477b-9acc-ad6bb90871c3 req-a25c0fbd-2431-42f8-91dc-b0d48eb8887c service nova] Lock "6e8d9959-7c9d-48ee-81a5-bbdc6234248f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2399.388079] env[63279]: DEBUG oslo_concurrency.lockutils [req-6668cf14-f8b5-477b-9acc-ad6bb90871c3 req-a25c0fbd-2431-42f8-91dc-b0d48eb8887c service nova] Lock "6e8d9959-7c9d-48ee-81a5-bbdc6234248f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2399.388314] env[63279]: DEBUG nova.compute.manager [req-6668cf14-f8b5-477b-9acc-ad6bb90871c3 req-a25c0fbd-2431-42f8-91dc-b0d48eb8887c service nova] [instance: 6e8d9959-7c9d-48ee-81a5-bbdc6234248f] No waiting events found dispatching network-vif-plugged-c72218a8-035f-4947-b145-503e00034664 {{(pid=63279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 2399.388541] env[63279]: WARNING nova.compute.manager [req-6668cf14-f8b5-477b-9acc-ad6bb90871c3 req-a25c0fbd-2431-42f8-91dc-b0d48eb8887c service nova] [instance: 6e8d9959-7c9d-48ee-81a5-bbdc6234248f] Received unexpected event 
network-vif-plugged-c72218a8-035f-4947-b145-503e00034664 for instance with vm_state building and task_state spawning. [ 2399.471861] env[63279]: DEBUG nova.network.neutron [None req-9c0b8ad2-bbf9-437d-83a5-0240f7c3c8cc tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: 6e8d9959-7c9d-48ee-81a5-bbdc6234248f] Successfully updated port: c72218a8-035f-4947-b145-503e00034664 {{(pid=63279) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2399.651052] env[63279]: DEBUG oslo_concurrency.lockutils [None req-43bd4868-7097-4981-87a8-1d823ea75ab9 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Lock "efddec10-b496-446e-a05a-72c9f2d86ed9" "released" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: held 29.589s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2399.651877] env[63279]: DEBUG oslo_concurrency.lockutils [None req-fbd37efa-dbe8-4925-9302-efee8e5b5950 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Lock "efddec10-b496-446e-a05a-72c9f2d86ed9" acquired by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: waited 0.793s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2399.652180] env[63279]: INFO nova.compute.manager [None req-fbd37efa-dbe8-4925-9302-efee8e5b5950 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] [instance: efddec10-b496-446e-a05a-72c9f2d86ed9] Unshelving [ 2399.740844] env[63279]: DEBUG oslo_vmware.api [None req-c4be1975-3675-40a6-8f51-cbd564231322 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Task: {'id': task-2088162, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2399.897230] env[63279]: INFO nova.compute.manager [None req-263bc8f6-2264-4eb6-9d9d-c26895c369d4 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] [instance: 5961728f-c65a-425d-8f6d-5763c79bbe1a] Took 12.16 seconds to build instance. 
[ 2399.974686] env[63279]: DEBUG oslo_concurrency.lockutils [None req-9c0b8ad2-bbf9-437d-83a5-0240f7c3c8cc tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Acquiring lock "refresh_cache-6e8d9959-7c9d-48ee-81a5-bbdc6234248f" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2399.974985] env[63279]: DEBUG oslo_concurrency.lockutils [None req-9c0b8ad2-bbf9-437d-83a5-0240f7c3c8cc tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Acquired lock "refresh_cache-6e8d9959-7c9d-48ee-81a5-bbdc6234248f" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2399.974985] env[63279]: DEBUG nova.network.neutron [None req-9c0b8ad2-bbf9-437d-83a5-0240f7c3c8cc tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: 6e8d9959-7c9d-48ee-81a5-bbdc6234248f] Building network info cache for instance {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2400.150850] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Applying migration context for instance fcdd3076-2b53-4850-9730-2f877e2cabfd as it has an incoming, in-progress migration 745b9969-60e3-4261-828f-50d29b1a6544. Migration status is confirming {{(pid=63279) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1016}} [ 2400.152943] env[63279]: INFO nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] [instance: fcdd3076-2b53-4850-9730-2f877e2cabfd] Updating resource usage from migration 745b9969-60e3-4261-828f-50d29b1a6544 [ 2400.173620] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Instance 69b3269a-2ba3-4f5f-a29c-62518c93da3d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2400.173781] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Instance 19e10ee4-99d1-44b9-9354-4c162d541a1f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2400.173907] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Instance 6f839780-be92-4d99-a96d-1fc14c819599 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2400.174039] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Instance 36354325-dee0-406e-8eb6-bc3cf347a403 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2400.174165] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Instance befc985f-68e2-4a04-8de0-9ca9bb3fa504 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2400.174283] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Instance 246b5346-93b1-4a84-921c-d028f3554d3d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2400.174400] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Migration 745b9969-60e3-4261-828f-50d29b1a6544 is active on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1712}} [ 2400.174513] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Instance fcdd3076-2b53-4850-9730-2f877e2cabfd actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=63279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2400.174626] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Instance 5961728f-c65a-425d-8f6d-5763c79bbe1a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2400.174740] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Instance 6e8d9959-7c9d-48ee-81a5-bbdc6234248f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2400.174893] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] [instance: efddec10-b496-446e-a05a-72c9f2d86ed9] Instance with task_state "unshelving" is not being actively managed by this compute host but has allocations referencing this compute node (0ba7c625-a0fc-4d3c-b804-196d00f00137): {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocations during the task state transition. 
{{(pid=63279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1780}} [ 2400.175102] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=63279) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2400.175254] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=2496MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=63279) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2400.243974] env[63279]: DEBUG oslo_vmware.api [None req-c4be1975-3675-40a6-8f51-cbd564231322 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Task: {'id': task-2088162, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2400.324123] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca1115b6-1acb-4a7f-93c9-666b8d37478b {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2400.331183] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30ead79b-5991-4ca9-ae5c-1fd711550d61 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2400.362201] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-acfc37d0-a248-4736-84f2-4d3c31dc2e9d {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2400.369708] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24e07aee-8b69-4717-9dd5-2fc2ac7a1274 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2400.382780] env[63279]: DEBUG nova.compute.provider_tree [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Inventory has not changed in ProviderTree for provider: 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2400.398701] env[63279]: DEBUG oslo_concurrency.lockutils [None req-263bc8f6-2264-4eb6-9d9d-c26895c369d4 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Lock "5961728f-c65a-425d-8f6d-5763c79bbe1a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 13.665s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2400.507969] env[63279]: DEBUG nova.network.neutron [None req-9c0b8ad2-bbf9-437d-83a5-0240f7c3c8cc tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: 6e8d9959-7c9d-48ee-81a5-bbdc6234248f] Instance cache missing network info. 
{{(pid=63279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2400.645429] env[63279]: DEBUG nova.network.neutron [None req-9c0b8ad2-bbf9-437d-83a5-0240f7c3c8cc tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: 6e8d9959-7c9d-48ee-81a5-bbdc6234248f] Updating instance_info_cache with network_info: [{"id": "c72218a8-035f-4947-b145-503e00034664", "address": "fa:16:3e:4d:33:e4", "network": {"id": "1e35690a-f100-41b7-a461-7334966ef784", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1516696428-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f9427c264e8e41998f579af352cb48cf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "15922696-dc08-44ef-97be-0b09a9dfeae8", "external-id": "nsx-vlan-transportzone-791", "segmentation_id": 791, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc72218a8-03", "ovs_interfaceid": "c72218a8-035f-4947-b145-503e00034664", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2400.673494] env[63279]: DEBUG oslo_concurrency.lockutils [None req-fbd37efa-dbe8-4925-9302-efee8e5b5950 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2400.742933] env[63279]: DEBUG oslo_vmware.api [None req-c4be1975-3675-40a6-8f51-cbd564231322 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Task: {'id': task-2088162, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2400.886430] env[63279]: DEBUG nova.scheduler.client.report [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Inventory has not changed for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2401.052803] env[63279]: DEBUG oslo_concurrency.lockutils [None req-f0461e11-dee6-4d19-962c-f2eaf5b195c0 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Acquiring lock "5961728f-c65a-425d-8f6d-5763c79bbe1a" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2401.053101] env[63279]: DEBUG oslo_concurrency.lockutils [None req-f0461e11-dee6-4d19-962c-f2eaf5b195c0 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Lock "5961728f-c65a-425d-8f6d-5763c79bbe1a" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2401.053304] env[63279]: DEBUG nova.compute.manager [None req-f0461e11-dee6-4d19-962c-f2eaf5b195c0 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] [instance: 5961728f-c65a-425d-8f6d-5763c79bbe1a] Checking state {{(pid=63279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2401.054237] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6b30667-0df3-4dd6-af02-cd5a0d21e991 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2401.060824] env[63279]: DEBUG nova.compute.manager [None req-f0461e11-dee6-4d19-962c-f2eaf5b195c0 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] [instance: 5961728f-c65a-425d-8f6d-5763c79bbe1a] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=63279) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 2401.061451] env[63279]: DEBUG nova.objects.instance [None req-f0461e11-dee6-4d19-962c-f2eaf5b195c0 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Lazy-loading 'flavor' on Instance uuid 5961728f-c65a-425d-8f6d-5763c79bbe1a {{(pid=63279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2401.149199] env[63279]: DEBUG oslo_concurrency.lockutils [None req-9c0b8ad2-bbf9-437d-83a5-0240f7c3c8cc tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Releasing lock "refresh_cache-6e8d9959-7c9d-48ee-81a5-bbdc6234248f" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2401.149544] env[63279]: DEBUG nova.compute.manager [None req-9c0b8ad2-bbf9-437d-83a5-0240f7c3c8cc tempest-ServerActionsTestOtherA-605125166 
tempest-ServerActionsTestOtherA-605125166-project-member] [instance: 6e8d9959-7c9d-48ee-81a5-bbdc6234248f] Instance network_info: |[{"id": "c72218a8-035f-4947-b145-503e00034664", "address": "fa:16:3e:4d:33:e4", "network": {"id": "1e35690a-f100-41b7-a461-7334966ef784", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1516696428-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f9427c264e8e41998f579af352cb48cf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "15922696-dc08-44ef-97be-0b09a9dfeae8", "external-id": "nsx-vlan-transportzone-791", "segmentation_id": 791, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc72218a8-03", "ovs_interfaceid": "c72218a8-035f-4947-b145-503e00034664", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 2401.149985] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-9c0b8ad2-bbf9-437d-83a5-0240f7c3c8cc tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: 6e8d9959-7c9d-48ee-81a5-bbdc6234248f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:4d:33:e4', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '15922696-dc08-44ef-97be-0b09a9dfeae8', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c72218a8-035f-4947-b145-503e00034664', 'vif_model': 'vmxnet3'}] {{(pid=63279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2401.158747] env[63279]: DEBUG oslo.service.loopingcall [None req-9c0b8ad2-bbf9-437d-83a5-0240f7c3c8cc tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2401.159011] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6e8d9959-7c9d-48ee-81a5-bbdc6234248f] Creating VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2401.159383] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-75ed9585-0956-4f6b-8bb8-22dcc5b32086 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2401.178993] env[63279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2401.178993] env[63279]: value = "task-2088165" [ 2401.178993] env[63279]: _type = "Task" [ 2401.178993] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2401.188084] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2088165, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2401.245136] env[63279]: DEBUG oslo_vmware.api [None req-c4be1975-3675-40a6-8f51-cbd564231322 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Task: {'id': task-2088162, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2401.392435] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=63279) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2401.392435] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.254s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2401.392435] env[63279]: DEBUG oslo_concurrency.lockutils [None req-eef685fe-b99f-4100-84e0-11e59fa3a988 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 3.479s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2401.393551] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2401.393755] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Cleaning up deleted instances {{(pid=63279) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11747}} [ 2401.486418] env[63279]: DEBUG nova.compute.manager [req-5cfb418b-37b6-48d3-8038-2043a594f803 req-e09be566-e1d9-4e5c-8945-b3d31fab4894 service nova] [instance: 6e8d9959-7c9d-48ee-81a5-bbdc6234248f] Received event network-changed-c72218a8-035f-4947-b145-503e00034664 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2401.486602] env[63279]: DEBUG nova.compute.manager [req-5cfb418b-37b6-48d3-8038-2043a594f803 req-e09be566-e1d9-4e5c-8945-b3d31fab4894 service nova] [instance: 6e8d9959-7c9d-48ee-81a5-bbdc6234248f] Refreshing instance network info cache due to event network-changed-c72218a8-035f-4947-b145-503e00034664. 
{{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11655}} [ 2401.487201] env[63279]: DEBUG oslo_concurrency.lockutils [req-5cfb418b-37b6-48d3-8038-2043a594f803 req-e09be566-e1d9-4e5c-8945-b3d31fab4894 service nova] Acquiring lock "refresh_cache-6e8d9959-7c9d-48ee-81a5-bbdc6234248f" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2401.487420] env[63279]: DEBUG oslo_concurrency.lockutils [req-5cfb418b-37b6-48d3-8038-2043a594f803 req-e09be566-e1d9-4e5c-8945-b3d31fab4894 service nova] Acquired lock "refresh_cache-6e8d9959-7c9d-48ee-81a5-bbdc6234248f" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2401.487622] env[63279]: DEBUG nova.network.neutron [req-5cfb418b-37b6-48d3-8038-2043a594f803 req-e09be566-e1d9-4e5c-8945-b3d31fab4894 service nova] [instance: 6e8d9959-7c9d-48ee-81a5-bbdc6234248f] Refreshing network info cache for port c72218a8-035f-4947-b145-503e00034664 {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2401.689364] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2088165, 'name': CreateVM_Task, 'duration_secs': 0.369751} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2401.689364] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6e8d9959-7c9d-48ee-81a5-bbdc6234248f] Created VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2401.690015] env[63279]: DEBUG oslo_concurrency.lockutils [None req-9c0b8ad2-bbf9-437d-83a5-0240f7c3c8cc tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2401.690197] env[63279]: DEBUG oslo_concurrency.lockutils [None req-9c0b8ad2-bbf9-437d-83a5-0240f7c3c8cc tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2401.690510] env[63279]: DEBUG oslo_concurrency.lockutils [None req-9c0b8ad2-bbf9-437d-83a5-0240f7c3c8cc tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2401.690760] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a73f94f2-0583-4614-bba0-8c3515855e05 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2401.695778] env[63279]: DEBUG oslo_vmware.api [None req-9c0b8ad2-bbf9-437d-83a5-0240f7c3c8cc tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Waiting for the task: (returnval){ [ 2401.695778] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]5206d772-0e51-a8bc-f62d-a6897906f4db" [ 2401.695778] env[63279]: _type = "Task" [ 2401.695778] env[63279]: } to complete. 
{{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2401.703360] env[63279]: DEBUG oslo_vmware.api [None req-9c0b8ad2-bbf9-437d-83a5-0240f7c3c8cc tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]5206d772-0e51-a8bc-f62d-a6897906f4db, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2401.745068] env[63279]: DEBUG oslo_vmware.api [None req-c4be1975-3675-40a6-8f51-cbd564231322 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Task: {'id': task-2088162, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2401.910638] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] There are 54 instances to clean {{(pid=63279) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11756}} [ 2401.910792] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] [instance: 211f30e0-f5ab-4c05-975a-63fdc4fed7c7] Instance has had 0 of 5 cleanup attempts {{(pid=63279) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11760}} [ 2402.050747] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7caa23d8-c4ff-4632-99c2-56db38cb811f {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2402.058145] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2cd11a64-b0d5-4418-bee8-c28e30b937a5 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2402.088188] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-f0461e11-dee6-4d19-962c-f2eaf5b195c0 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] [instance: 5961728f-c65a-425d-8f6d-5763c79bbe1a] Powering off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2402.091195] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-fc662d46-034d-48cc-80f3-eadf54e80e48 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2402.093097] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-669f00ac-0d49-4c75-8fae-326ab3e52410 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2402.101542] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0169335c-17db-4080-9206-e5ea63c17b8e {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2402.105094] env[63279]: DEBUG oslo_vmware.api [None req-f0461e11-dee6-4d19-962c-f2eaf5b195c0 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Waiting for the task: (returnval){ [ 2402.105094] env[63279]: value = "task-2088166" [ 2402.105094] env[63279]: _type = "Task" [ 2402.105094] env[63279]: } to complete. 
{{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2402.115758] env[63279]: DEBUG nova.compute.provider_tree [None req-eef685fe-b99f-4100-84e0-11e59fa3a988 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Inventory has not changed in ProviderTree for provider: 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2402.123761] env[63279]: DEBUG oslo_vmware.api [None req-f0461e11-dee6-4d19-962c-f2eaf5b195c0 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Task: {'id': task-2088166, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2402.208337] env[63279]: DEBUG oslo_vmware.api [None req-9c0b8ad2-bbf9-437d-83a5-0240f7c3c8cc tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]5206d772-0e51-a8bc-f62d-a6897906f4db, 'name': SearchDatastore_Task, 'duration_secs': 0.008177} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2402.208677] env[63279]: DEBUG oslo_concurrency.lockutils [None req-9c0b8ad2-bbf9-437d-83a5-0240f7c3c8cc tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2402.208928] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-9c0b8ad2-bbf9-437d-83a5-0240f7c3c8cc tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: 6e8d9959-7c9d-48ee-81a5-bbdc6234248f] Processing image 30887889-e45b-4f67-8b3c-16216e594a90 {{(pid=63279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2402.209198] env[63279]: DEBUG oslo_concurrency.lockutils [None req-9c0b8ad2-bbf9-437d-83a5-0240f7c3c8cc tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2402.209366] env[63279]: DEBUG oslo_concurrency.lockutils [None req-9c0b8ad2-bbf9-437d-83a5-0240f7c3c8cc tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2402.209562] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-9c0b8ad2-bbf9-437d-83a5-0240f7c3c8cc tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2402.209830] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b633633a-2a5f-41b5-8ee8-3d6af8055c60 {{(pid=63279) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2402.218564] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-9c0b8ad2-bbf9-437d-83a5-0240f7c3c8cc tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2402.218746] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-9c0b8ad2-bbf9-437d-83a5-0240f7c3c8cc tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2402.219492] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fadeac12-05d0-4ec0-89cd-de7cae24759b {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2402.224807] env[63279]: DEBUG oslo_vmware.api [None req-9c0b8ad2-bbf9-437d-83a5-0240f7c3c8cc tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Waiting for the task: (returnval){ [ 2402.224807] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]5228d76b-2f29-b774-9cbb-a5701d386148" [ 2402.224807] env[63279]: _type = "Task" [ 2402.224807] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2402.232046] env[63279]: DEBUG oslo_vmware.api [None req-9c0b8ad2-bbf9-437d-83a5-0240f7c3c8cc tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]5228d76b-2f29-b774-9cbb-a5701d386148, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2402.234948] env[63279]: DEBUG nova.network.neutron [req-5cfb418b-37b6-48d3-8038-2043a594f803 req-e09be566-e1d9-4e5c-8945-b3d31fab4894 service nova] [instance: 6e8d9959-7c9d-48ee-81a5-bbdc6234248f] Updated VIF entry in instance network info cache for port c72218a8-035f-4947-b145-503e00034664. 
{{(pid=63279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2402.235309] env[63279]: DEBUG nova.network.neutron [req-5cfb418b-37b6-48d3-8038-2043a594f803 req-e09be566-e1d9-4e5c-8945-b3d31fab4894 service nova] [instance: 6e8d9959-7c9d-48ee-81a5-bbdc6234248f] Updating instance_info_cache with network_info: [{"id": "c72218a8-035f-4947-b145-503e00034664", "address": "fa:16:3e:4d:33:e4", "network": {"id": "1e35690a-f100-41b7-a461-7334966ef784", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1516696428-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f9427c264e8e41998f579af352cb48cf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "15922696-dc08-44ef-97be-0b09a9dfeae8", "external-id": "nsx-vlan-transportzone-791", "segmentation_id": 791, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc72218a8-03", "ovs_interfaceid": "c72218a8-035f-4947-b145-503e00034664", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2402.244417] env[63279]: DEBUG oslo_vmware.api [None req-c4be1975-3675-40a6-8f51-cbd564231322 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Task: {'id': task-2088162, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2402.414643] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] [instance: 1332faad-20be-4a81-b57e-171a49d5c427] Instance has had 0 of 5 cleanup attempts {{(pid=63279) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11760}} [ 2402.615925] env[63279]: DEBUG oslo_vmware.api [None req-f0461e11-dee6-4d19-962c-f2eaf5b195c0 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Task: {'id': task-2088166, 'name': PowerOffVM_Task, 'duration_secs': 0.182909} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2402.615925] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-f0461e11-dee6-4d19-962c-f2eaf5b195c0 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] [instance: 5961728f-c65a-425d-8f6d-5763c79bbe1a] Powered off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2402.616134] env[63279]: DEBUG nova.compute.manager [None req-f0461e11-dee6-4d19-962c-f2eaf5b195c0 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] [instance: 5961728f-c65a-425d-8f6d-5763c79bbe1a] Checking state {{(pid=63279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2402.616911] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57c81198-3763-4ad9-983b-be79b05fbd37 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2402.621277] env[63279]: DEBUG nova.scheduler.client.report [None req-eef685fe-b99f-4100-84e0-11e59fa3a988 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Inventory has not changed for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2402.734659] env[63279]: DEBUG oslo_vmware.api [None req-9c0b8ad2-bbf9-437d-83a5-0240f7c3c8cc tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]5228d76b-2f29-b774-9cbb-a5701d386148, 'name': SearchDatastore_Task, 'duration_secs': 0.007935} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2402.735446] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6eb14a97-74b5-4940-bc51-e5c77986a102 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2402.740756] env[63279]: DEBUG oslo_concurrency.lockutils [req-5cfb418b-37b6-48d3-8038-2043a594f803 req-e09be566-e1d9-4e5c-8945-b3d31fab4894 service nova] Releasing lock "refresh_cache-6e8d9959-7c9d-48ee-81a5-bbdc6234248f" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2402.743195] env[63279]: DEBUG oslo_vmware.api [None req-9c0b8ad2-bbf9-437d-83a5-0240f7c3c8cc tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Waiting for the task: (returnval){ [ 2402.743195] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52f7bde0-5500-a4ec-8798-026d3df9e8fe" [ 2402.743195] env[63279]: _type = "Task" [ 2402.743195] env[63279]: } to complete. 
{{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2402.746533] env[63279]: DEBUG oslo_vmware.api [None req-c4be1975-3675-40a6-8f51-cbd564231322 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Task: {'id': task-2088162, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2402.755233] env[63279]: DEBUG oslo_vmware.api [None req-9c0b8ad2-bbf9-437d-83a5-0240f7c3c8cc tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52f7bde0-5500-a4ec-8798-026d3df9e8fe, 'name': SearchDatastore_Task, 'duration_secs': 0.00876} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2402.755457] env[63279]: DEBUG oslo_concurrency.lockutils [None req-9c0b8ad2-bbf9-437d-83a5-0240f7c3c8cc tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2402.755701] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-9c0b8ad2-bbf9-437d-83a5-0240f7c3c8cc tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] 6e8d9959-7c9d-48ee-81a5-bbdc6234248f/6e8d9959-7c9d-48ee-81a5-bbdc6234248f.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2402.755930] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-673b4f2d-9fc6-4479-bcbe-2e61225d82dc {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2402.762500] env[63279]: DEBUG oslo_vmware.api [None req-9c0b8ad2-bbf9-437d-83a5-0240f7c3c8cc tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Waiting for the task: (returnval){ [ 2402.762500] env[63279]: value = "task-2088167" [ 2402.762500] env[63279]: _type = "Task" [ 2402.762500] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2402.769935] env[63279]: DEBUG oslo_vmware.api [None req-9c0b8ad2-bbf9-437d-83a5-0240f7c3c8cc tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Task: {'id': task-2088167, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2402.917914] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] [instance: b7123d9a-bcaa-43c4-ac9f-982b3b146eb0] Instance has had 0 of 5 cleanup attempts {{(pid=63279) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11760}} [ 2403.134257] env[63279]: DEBUG oslo_concurrency.lockutils [None req-f0461e11-dee6-4d19-962c-f2eaf5b195c0 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Lock "5961728f-c65a-425d-8f6d-5763c79bbe1a" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.081s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2403.252880] env[63279]: DEBUG oslo_vmware.api [None req-c4be1975-3675-40a6-8f51-cbd564231322 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Task: {'id': task-2088162, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2403.272284] env[63279]: DEBUG oslo_vmware.api [None req-9c0b8ad2-bbf9-437d-83a5-0240f7c3c8cc tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Task: {'id': task-2088167, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.463919} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2403.272552] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-9c0b8ad2-bbf9-437d-83a5-0240f7c3c8cc tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] 6e8d9959-7c9d-48ee-81a5-bbdc6234248f/6e8d9959-7c9d-48ee-81a5-bbdc6234248f.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2403.272773] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-9c0b8ad2-bbf9-437d-83a5-0240f7c3c8cc tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: 6e8d9959-7c9d-48ee-81a5-bbdc6234248f] Extending root virtual disk to 1048576 {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2403.273050] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-88d82c28-4136-44ed-b73e-c6c8837d3f6b {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2403.280277] env[63279]: DEBUG oslo_vmware.api [None req-9c0b8ad2-bbf9-437d-83a5-0240f7c3c8cc tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Waiting for the task: (returnval){ [ 2403.280277] env[63279]: value = "task-2088168" [ 2403.280277] env[63279]: _type = "Task" [ 2403.280277] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2403.289202] env[63279]: DEBUG oslo_vmware.api [None req-9c0b8ad2-bbf9-437d-83a5-0240f7c3c8cc tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Task: {'id': task-2088168, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2403.421105] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] [instance: df37c4f6-b492-4d6e-9ba1-950dbbb9a885] Instance has had 0 of 5 cleanup attempts {{(pid=63279) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11760}} [ 2403.636160] env[63279]: DEBUG oslo_concurrency.lockutils [None req-eef685fe-b99f-4100-84e0-11e59fa3a988 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.244s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2403.639184] env[63279]: DEBUG oslo_concurrency.lockutils [None req-fbd37efa-dbe8-4925-9302-efee8e5b5950 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 2.966s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2403.639411] env[63279]: DEBUG nova.objects.instance [None req-fbd37efa-dbe8-4925-9302-efee8e5b5950 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Lazy-loading 'pci_requests' on Instance uuid efddec10-b496-446e-a05a-72c9f2d86ed9 {{(pid=63279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2403.750032] env[63279]: DEBUG oslo_concurrency.lockutils [None req-dc2da8df-ea63-4b6a-b59c-8ee87f00a3f6 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Acquiring lock "5961728f-c65a-425d-8f6d-5763c79bbe1a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2403.750032] env[63279]: DEBUG oslo_concurrency.lockutils [None req-dc2da8df-ea63-4b6a-b59c-8ee87f00a3f6 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Lock "5961728f-c65a-425d-8f6d-5763c79bbe1a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2403.750032] env[63279]: DEBUG oslo_concurrency.lockutils [None req-dc2da8df-ea63-4b6a-b59c-8ee87f00a3f6 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Acquiring lock "5961728f-c65a-425d-8f6d-5763c79bbe1a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2403.750032] env[63279]: DEBUG oslo_concurrency.lockutils [None req-dc2da8df-ea63-4b6a-b59c-8ee87f00a3f6 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Lock "5961728f-c65a-425d-8f6d-5763c79bbe1a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2403.750032] env[63279]: DEBUG oslo_concurrency.lockutils [None req-dc2da8df-ea63-4b6a-b59c-8ee87f00a3f6 
tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Lock "5961728f-c65a-425d-8f6d-5763c79bbe1a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2403.751546] env[63279]: DEBUG oslo_vmware.api [None req-c4be1975-3675-40a6-8f51-cbd564231322 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Task: {'id': task-2088162, 'name': ReconfigVM_Task} progress is 18%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2403.751998] env[63279]: INFO nova.compute.manager [None req-dc2da8df-ea63-4b6a-b59c-8ee87f00a3f6 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] [instance: 5961728f-c65a-425d-8f6d-5763c79bbe1a] Terminating instance [ 2403.790063] env[63279]: DEBUG oslo_vmware.api [None req-9c0b8ad2-bbf9-437d-83a5-0240f7c3c8cc tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Task: {'id': task-2088168, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.064051} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2403.790292] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-9c0b8ad2-bbf9-437d-83a5-0240f7c3c8cc tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: 6e8d9959-7c9d-48ee-81a5-bbdc6234248f] Extended root virtual disk {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2403.791184] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82777c2b-3914-4b98-a1bd-afff644c80ae {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2403.813764] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-9c0b8ad2-bbf9-437d-83a5-0240f7c3c8cc tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: 6e8d9959-7c9d-48ee-81a5-bbdc6234248f] Reconfiguring VM instance instance-00000071 to attach disk [datastore1] 6e8d9959-7c9d-48ee-81a5-bbdc6234248f/6e8d9959-7c9d-48ee-81a5-bbdc6234248f.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2403.813998] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-658ca54e-6fc9-4496-a042-6937e7ae0801 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2403.834115] env[63279]: DEBUG oslo_vmware.api [None req-9c0b8ad2-bbf9-437d-83a5-0240f7c3c8cc tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Waiting for the task: (returnval){ [ 2403.834115] env[63279]: value = "task-2088169" [ 2403.834115] env[63279]: _type = "Task" [ 2403.834115] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2403.841572] env[63279]: DEBUG oslo_vmware.api [None req-9c0b8ad2-bbf9-437d-83a5-0240f7c3c8cc tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Task: {'id': task-2088169, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2403.924992] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] [instance: 3de52a94-c1de-4b37-985c-9101417260e1] Instance has had 0 of 5 cleanup attempts {{(pid=63279) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11760}} [ 2404.147769] env[63279]: DEBUG nova.objects.instance [None req-fbd37efa-dbe8-4925-9302-efee8e5b5950 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Lazy-loading 'numa_topology' on Instance uuid efddec10-b496-446e-a05a-72c9f2d86ed9 {{(pid=63279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2404.195963] env[63279]: INFO nova.scheduler.client.report [None req-eef685fe-b99f-4100-84e0-11e59fa3a988 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Deleted allocation for migration 745b9969-60e3-4261-828f-50d29b1a6544 [ 2404.248760] env[63279]: DEBUG oslo_vmware.api [None req-c4be1975-3675-40a6-8f51-cbd564231322 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Task: {'id': task-2088162, 'name': ReconfigVM_Task, 'duration_secs': 5.871339} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2404.249059] env[63279]: DEBUG oslo_concurrency.lockutils [None req-c4be1975-3675-40a6-8f51-cbd564231322 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Releasing lock "36354325-dee0-406e-8eb6-bc3cf347a403" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2404.249337] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-c4be1975-3675-40a6-8f51-cbd564231322 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] [instance: 36354325-dee0-406e-8eb6-bc3cf347a403] Reconfigured VM to detach interface {{(pid=63279) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1978}} [ 2404.255347] env[63279]: DEBUG nova.compute.manager [None req-dc2da8df-ea63-4b6a-b59c-8ee87f00a3f6 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] [instance: 5961728f-c65a-425d-8f6d-5763c79bbe1a] Start destroying the instance on the hypervisor. 
{{(pid=63279) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2404.255580] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-dc2da8df-ea63-4b6a-b59c-8ee87f00a3f6 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] [instance: 5961728f-c65a-425d-8f6d-5763c79bbe1a] Destroying instance {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2404.256380] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0be56b95-55e6-41cb-b095-482d199141e0 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2404.263098] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-dc2da8df-ea63-4b6a-b59c-8ee87f00a3f6 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] [instance: 5961728f-c65a-425d-8f6d-5763c79bbe1a] Unregistering the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2404.263343] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-149d763c-d958-4694-a06d-d50d3b297d1e {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2404.346966] env[63279]: DEBUG oslo_vmware.api [None req-9c0b8ad2-bbf9-437d-83a5-0240f7c3c8cc tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Task: {'id': task-2088169, 'name': ReconfigVM_Task, 'duration_secs': 0.442588} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2404.347268] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-9c0b8ad2-bbf9-437d-83a5-0240f7c3c8cc tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: 6e8d9959-7c9d-48ee-81a5-bbdc6234248f] Reconfigured VM instance instance-00000071 to attach disk [datastore1] 6e8d9959-7c9d-48ee-81a5-bbdc6234248f/6e8d9959-7c9d-48ee-81a5-bbdc6234248f.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2404.347881] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a909ab51-fc89-4a5e-826a-4669d6e4469e {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2404.353543] env[63279]: DEBUG oslo_vmware.api [None req-9c0b8ad2-bbf9-437d-83a5-0240f7c3c8cc tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Waiting for the task: (returnval){ [ 2404.353543] env[63279]: value = "task-2088171" [ 2404.353543] env[63279]: _type = "Task" [ 2404.353543] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2404.360902] env[63279]: DEBUG oslo_vmware.api [None req-9c0b8ad2-bbf9-437d-83a5-0240f7c3c8cc tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Task: {'id': task-2088171, 'name': Rename_Task} progress is 5%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2404.428197] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] [instance: 6e452711-a79c-4560-b38f-9414c87e6683] Instance has had 0 of 5 cleanup attempts {{(pid=63279) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11760}} [ 2404.434243] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-dc2da8df-ea63-4b6a-b59c-8ee87f00a3f6 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] [instance: 5961728f-c65a-425d-8f6d-5763c79bbe1a] Unregistered the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2404.434828] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-dc2da8df-ea63-4b6a-b59c-8ee87f00a3f6 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] [instance: 5961728f-c65a-425d-8f6d-5763c79bbe1a] Deleting contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2404.434828] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-dc2da8df-ea63-4b6a-b59c-8ee87f00a3f6 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Deleting the datastore file [datastore1] 5961728f-c65a-425d-8f6d-5763c79bbe1a {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2404.435054] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-44574565-1d0a-450e-942e-801415cff841 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2404.441924] env[63279]: DEBUG oslo_vmware.api [None req-dc2da8df-ea63-4b6a-b59c-8ee87f00a3f6 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Waiting for the task: (returnval){ [ 2404.441924] env[63279]: value = "task-2088172" [ 2404.441924] env[63279]: _type = "Task" [ 2404.441924] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2404.451647] env[63279]: DEBUG oslo_vmware.api [None req-dc2da8df-ea63-4b6a-b59c-8ee87f00a3f6 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Task: {'id': task-2088172, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2404.651318] env[63279]: INFO nova.compute.claims [None req-fbd37efa-dbe8-4925-9302-efee8e5b5950 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] [instance: efddec10-b496-446e-a05a-72c9f2d86ed9] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2404.702126] env[63279]: DEBUG oslo_concurrency.lockutils [None req-eef685fe-b99f-4100-84e0-11e59fa3a988 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Lock "fcdd3076-2b53-4850-9730-2f877e2cabfd" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 9.706s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2404.863034] env[63279]: DEBUG oslo_vmware.api [None req-9c0b8ad2-bbf9-437d-83a5-0240f7c3c8cc tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Task: {'id': task-2088171, 'name': Rename_Task, 'duration_secs': 0.131698} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2404.863313] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-9c0b8ad2-bbf9-437d-83a5-0240f7c3c8cc tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: 6e8d9959-7c9d-48ee-81a5-bbdc6234248f] Powering on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2404.863566] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-eb8ecde0-ef6c-4a92-9367-f464ba9ce103 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2404.870112] env[63279]: DEBUG oslo_vmware.api [None req-9c0b8ad2-bbf9-437d-83a5-0240f7c3c8cc tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Waiting for the task: (returnval){ [ 2404.870112] env[63279]: value = "task-2088173" [ 2404.870112] env[63279]: _type = "Task" [ 2404.870112] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2404.878531] env[63279]: DEBUG oslo_vmware.api [None req-9c0b8ad2-bbf9-437d-83a5-0240f7c3c8cc tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Task: {'id': task-2088173, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2404.931861] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] [instance: d0b8c9dd-c683-4f3a-b819-d9d57d96636b] Instance has had 0 of 5 cleanup attempts {{(pid=63279) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11760}} [ 2404.952282] env[63279]: DEBUG oslo_vmware.api [None req-dc2da8df-ea63-4b6a-b59c-8ee87f00a3f6 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Task: {'id': task-2088172, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.161069} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2404.952589] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-dc2da8df-ea63-4b6a-b59c-8ee87f00a3f6 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Deleted the datastore file {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2404.952781] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-dc2da8df-ea63-4b6a-b59c-8ee87f00a3f6 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] [instance: 5961728f-c65a-425d-8f6d-5763c79bbe1a] Deleted contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2404.952965] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-dc2da8df-ea63-4b6a-b59c-8ee87f00a3f6 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] [instance: 5961728f-c65a-425d-8f6d-5763c79bbe1a] Instance destroyed {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2404.953182] env[63279]: INFO nova.compute.manager [None req-dc2da8df-ea63-4b6a-b59c-8ee87f00a3f6 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] [instance: 5961728f-c65a-425d-8f6d-5763c79bbe1a] Took 0.70 seconds to destroy the instance on the hypervisor. [ 2404.953516] env[63279]: DEBUG oslo.service.loopingcall [None req-dc2da8df-ea63-4b6a-b59c-8ee87f00a3f6 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2404.953764] env[63279]: DEBUG nova.compute.manager [-] [instance: 5961728f-c65a-425d-8f6d-5763c79bbe1a] Deallocating network for instance {{(pid=63279) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2404.953864] env[63279]: DEBUG nova.network.neutron [-] [instance: 5961728f-c65a-425d-8f6d-5763c79bbe1a] deallocate_for_instance() {{(pid=63279) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2405.232500] env[63279]: DEBUG nova.compute.manager [req-1a6320fd-b438-400e-99b8-3cd5bde02525 req-a1e5cfb7-aa06-488f-90c8-6dc0427585bb service nova] [instance: 5961728f-c65a-425d-8f6d-5763c79bbe1a] Received event network-vif-deleted-92ca2582-9c5e-4cf7-bdf0-a0135ce33065 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2405.232719] env[63279]: INFO nova.compute.manager [req-1a6320fd-b438-400e-99b8-3cd5bde02525 req-a1e5cfb7-aa06-488f-90c8-6dc0427585bb service nova] [instance: 5961728f-c65a-425d-8f6d-5763c79bbe1a] Neutron deleted interface 92ca2582-9c5e-4cf7-bdf0-a0135ce33065; detaching it from the instance and deleting it from the info cache [ 2405.232896] env[63279]: DEBUG nova.network.neutron [req-1a6320fd-b438-400e-99b8-3cd5bde02525 req-a1e5cfb7-aa06-488f-90c8-6dc0427585bb service nova] [instance: 5961728f-c65a-425d-8f6d-5763c79bbe1a] Updating instance_info_cache with network_info: [] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2405.379307] env[63279]: DEBUG oslo_vmware.api [None req-9c0b8ad2-bbf9-437d-83a5-0240f7c3c8cc tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Task: {'id': task-2088173, 'name': PowerOnVM_Task, 
'duration_secs': 0.450148} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2405.379577] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-9c0b8ad2-bbf9-437d-83a5-0240f7c3c8cc tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: 6e8d9959-7c9d-48ee-81a5-bbdc6234248f] Powered on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2405.379784] env[63279]: INFO nova.compute.manager [None req-9c0b8ad2-bbf9-437d-83a5-0240f7c3c8cc tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: 6e8d9959-7c9d-48ee-81a5-bbdc6234248f] Took 6.47 seconds to spawn the instance on the hypervisor. [ 2405.379967] env[63279]: DEBUG nova.compute.manager [None req-9c0b8ad2-bbf9-437d-83a5-0240f7c3c8cc tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: 6e8d9959-7c9d-48ee-81a5-bbdc6234248f] Checking state {{(pid=63279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2405.380739] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56225c32-68c6-459a-bd76-5d539204a79f {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2405.437041] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] [instance: 69c88844-84c7-4272-a2c4-051f1499df84] Instance has had 0 of 5 cleanup attempts {{(pid=63279) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11760}} [ 2405.643645] env[63279]: DEBUG oslo_concurrency.lockutils [None req-127fac77-4046-48cd-a53b-0f31a24b9d1c tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Acquiring lock "fcdd3076-2b53-4850-9730-2f877e2cabfd" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2405.643941] env[63279]: DEBUG oslo_concurrency.lockutils [None req-127fac77-4046-48cd-a53b-0f31a24b9d1c tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Lock "fcdd3076-2b53-4850-9730-2f877e2cabfd" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2405.644186] env[63279]: DEBUG oslo_concurrency.lockutils [None req-127fac77-4046-48cd-a53b-0f31a24b9d1c tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Acquiring lock "fcdd3076-2b53-4850-9730-2f877e2cabfd-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2405.644437] env[63279]: DEBUG oslo_concurrency.lockutils [None req-127fac77-4046-48cd-a53b-0f31a24b9d1c tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Lock "fcdd3076-2b53-4850-9730-2f877e2cabfd-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 
2405.644626] env[63279]: DEBUG oslo_concurrency.lockutils [None req-127fac77-4046-48cd-a53b-0f31a24b9d1c tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Lock "fcdd3076-2b53-4850-9730-2f877e2cabfd-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2405.647517] env[63279]: INFO nova.compute.manager [None req-127fac77-4046-48cd-a53b-0f31a24b9d1c tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: fcdd3076-2b53-4850-9730-2f877e2cabfd] Terminating instance [ 2405.711103] env[63279]: DEBUG nova.network.neutron [-] [instance: 5961728f-c65a-425d-8f6d-5763c79bbe1a] Updating instance_info_cache with network_info: [] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2405.736332] env[63279]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b18fc64f-5753-4ed3-8cb3-ce8fa58d98eb {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2405.747867] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ed736e7-3e77-468c-afc0-690549a58163 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2405.784111] env[63279]: DEBUG nova.compute.manager [req-1a6320fd-b438-400e-99b8-3cd5bde02525 req-a1e5cfb7-aa06-488f-90c8-6dc0427585bb service nova] [instance: 5961728f-c65a-425d-8f6d-5763c79bbe1a] Detach interface failed, port_id=92ca2582-9c5e-4cf7-bdf0-a0135ce33065, reason: Instance 5961728f-c65a-425d-8f6d-5763c79bbe1a could not be found. 
{{(pid=63279) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11484}} [ 2405.817920] env[63279]: DEBUG oslo_concurrency.lockutils [None req-c4be1975-3675-40a6-8f51-cbd564231322 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Acquiring lock "refresh_cache-36354325-dee0-406e-8eb6-bc3cf347a403" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2405.817920] env[63279]: DEBUG oslo_concurrency.lockutils [None req-c4be1975-3675-40a6-8f51-cbd564231322 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Acquired lock "refresh_cache-36354325-dee0-406e-8eb6-bc3cf347a403" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2405.817920] env[63279]: DEBUG nova.network.neutron [None req-c4be1975-3675-40a6-8f51-cbd564231322 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] [instance: 36354325-dee0-406e-8eb6-bc3cf347a403] Building network info cache for instance {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2405.820280] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-815093fe-06fc-4cb5-bcc9-2aa32ac38054 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2405.829262] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f2e1bd0-ba1c-4741-8175-08aa8b39373a {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2405.860557] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55c87797-4906-4938-94c4-787fb8cb5759 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2405.869440] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5f97224-40e9-410c-a655-58a08ad9af73 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2405.883163] env[63279]: DEBUG nova.compute.provider_tree [None req-fbd37efa-dbe8-4925-9302-efee8e5b5950 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Inventory has not changed in ProviderTree for provider: 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2405.900451] env[63279]: INFO nova.compute.manager [None req-9c0b8ad2-bbf9-437d-83a5-0240f7c3c8cc tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: 6e8d9959-7c9d-48ee-81a5-bbdc6234248f] Took 11.29 seconds to build instance. 
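The entries above trace the complete VMware spawn path for instance 6e8d9959-7c9d-48ee-81a5-bbdc6234248f: Folder.CreateVM_Task, SearchDatastore_Task against devstack-image-cache_base, CopyVirtualDisk_Task, ExtendVirtualDisk_Task, ReconfigVM_Task to attach the copied disk, Rename_Task and finally PowerOnVM_Task, each driven through oslo.vmware's invoke-then-poll pattern (wait_for_task at api.py:397, _poll_task at api.py:434), which is what produces the repeated "progress is N%" records. The Python below is only a minimal sketch of that pattern, not Nova's implementation; the managed-object references and credentials (vm_folder_ref, res_pool_ref, config_spec, the usage values) are hypothetical placeholders.

# Minimal sketch (not Nova's code) of the invoke-then-poll pattern behind the
# "Waiting for the task ... progress is N%" entries above. All managed-object
# references passed in are hypothetical placeholders.
from oslo_vmware import api


def create_and_power_on(session: api.VMwareAPISession,
                        vm_folder_ref, res_pool_ref, config_spec):
    # Folder.CreateVM_Task returns a Task moref (e.g. "task-2088165" above);
    # wait_for_task() polls it until success and emits the progress lines.
    create_task = session.invoke_api(
        session.vim, 'CreateVM_Task',
        vm_folder_ref, config=config_spec, pool=res_pool_ref)
    task_info = session.wait_for_task(create_task)
    vm_ref = task_info.result  # the created VirtualMachine moref

    # VirtualMachine.PowerOnVM_Task follows the same invoke-then-poll shape.
    power_on_task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
    session.wait_for_task(power_on_task)
    return vm_ref


# Usage (requires a reachable vCenter; host, credentials and morefs are
# illustrative placeholders only):
# session = api.VMwareAPISession('vcenter.example.test', 'user', 'secret',
#                                api_retry_count=10, task_poll_interval=0.5)
# create_and_power_on(session, folder_moref, pool_moref, vm_config_spec)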
[ 2405.940166] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] [instance: 1fca4e5c-fe2c-4b61-bed4-52c7770def7c] Instance has had 0 of 5 cleanup attempts {{(pid=63279) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11760}} [ 2406.081798] env[63279]: DEBUG oslo_concurrency.lockutils [None req-e2edff00-0842-48f8-a733-a2bb5f6b092c tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Acquiring lock "36354325-dee0-406e-8eb6-bc3cf347a403" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2406.082061] env[63279]: DEBUG oslo_concurrency.lockutils [None req-e2edff00-0842-48f8-a733-a2bb5f6b092c tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Lock "36354325-dee0-406e-8eb6-bc3cf347a403" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2406.082303] env[63279]: DEBUG oslo_concurrency.lockutils [None req-e2edff00-0842-48f8-a733-a2bb5f6b092c tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Acquiring lock "36354325-dee0-406e-8eb6-bc3cf347a403-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2406.082500] env[63279]: DEBUG oslo_concurrency.lockutils [None req-e2edff00-0842-48f8-a733-a2bb5f6b092c tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Lock "36354325-dee0-406e-8eb6-bc3cf347a403-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2406.082767] env[63279]: DEBUG oslo_concurrency.lockutils [None req-e2edff00-0842-48f8-a733-a2bb5f6b092c tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Lock "36354325-dee0-406e-8eb6-bc3cf347a403-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2406.085112] env[63279]: INFO nova.compute.manager [None req-e2edff00-0842-48f8-a733-a2bb5f6b092c tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] [instance: 36354325-dee0-406e-8eb6-bc3cf347a403] Terminating instance [ 2406.151522] env[63279]: DEBUG nova.compute.manager [None req-127fac77-4046-48cd-a53b-0f31a24b9d1c tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: fcdd3076-2b53-4850-9730-2f877e2cabfd] Start destroying the instance on the hypervisor. 
{{(pid=63279) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2406.151753] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-127fac77-4046-48cd-a53b-0f31a24b9d1c tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: fcdd3076-2b53-4850-9730-2f877e2cabfd] Destroying instance {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2406.152739] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bff2c041-85b8-47ce-be01-82099da1c508 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2406.161094] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-127fac77-4046-48cd-a53b-0f31a24b9d1c tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: fcdd3076-2b53-4850-9730-2f877e2cabfd] Powering off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2406.161350] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-640b0fb8-79fa-4776-889e-848063109b24 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2406.167836] env[63279]: DEBUG oslo_vmware.api [None req-127fac77-4046-48cd-a53b-0f31a24b9d1c tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Waiting for the task: (returnval){ [ 2406.167836] env[63279]: value = "task-2088174" [ 2406.167836] env[63279]: _type = "Task" [ 2406.167836] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2406.177511] env[63279]: DEBUG oslo_vmware.api [None req-127fac77-4046-48cd-a53b-0f31a24b9d1c tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Task: {'id': task-2088174, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2406.214299] env[63279]: INFO nova.compute.manager [-] [instance: 5961728f-c65a-425d-8f6d-5763c79bbe1a] Took 1.26 seconds to deallocate network for instance. 
[ 2406.386872] env[63279]: DEBUG nova.scheduler.client.report [None req-fbd37efa-dbe8-4925-9302-efee8e5b5950 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Inventory has not changed for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2406.402257] env[63279]: DEBUG oslo_concurrency.lockutils [None req-9c0b8ad2-bbf9-437d-83a5-0240f7c3c8cc tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Lock "6e8d9959-7c9d-48ee-81a5-bbdc6234248f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 12.802s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2406.443234] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] [instance: ecec02e8-8ddf-4997-9547-ccee0db1938b] Instance has had 0 of 5 cleanup attempts {{(pid=63279) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11760}} [ 2406.539420] env[63279]: DEBUG oslo_concurrency.lockutils [None req-ad4a251b-add7-4880-b3fc-b1f92eae186e tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Acquiring lock "6e8d9959-7c9d-48ee-81a5-bbdc6234248f" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2406.539705] env[63279]: DEBUG oslo_concurrency.lockutils [None req-ad4a251b-add7-4880-b3fc-b1f92eae186e tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Lock "6e8d9959-7c9d-48ee-81a5-bbdc6234248f" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.001s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2406.539896] env[63279]: DEBUG nova.compute.manager [None req-ad4a251b-add7-4880-b3fc-b1f92eae186e tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: 6e8d9959-7c9d-48ee-81a5-bbdc6234248f] Checking state {{(pid=63279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2406.540970] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c04bec9b-b95d-4e40-8088-fee69d6b5fca {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2406.548164] env[63279]: DEBUG nova.compute.manager [None req-ad4a251b-add7-4880-b3fc-b1f92eae186e tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: 6e8d9959-7c9d-48ee-81a5-bbdc6234248f] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=63279) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 2406.548754] env[63279]: DEBUG 
nova.objects.instance [None req-ad4a251b-add7-4880-b3fc-b1f92eae186e tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Lazy-loading 'flavor' on Instance uuid 6e8d9959-7c9d-48ee-81a5-bbdc6234248f {{(pid=63279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2406.570102] env[63279]: INFO nova.network.neutron [None req-c4be1975-3675-40a6-8f51-cbd564231322 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] [instance: 36354325-dee0-406e-8eb6-bc3cf347a403] Port 5426dc4d-6f5e-46fc-9a7e-c3ba98bdc032 from network info_cache is no longer associated with instance in Neutron. Removing from network info_cache. [ 2406.570275] env[63279]: DEBUG nova.network.neutron [None req-c4be1975-3675-40a6-8f51-cbd564231322 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] [instance: 36354325-dee0-406e-8eb6-bc3cf347a403] Updating instance_info_cache with network_info: [{"id": "a05a3f68-d80e-46c9-9065-1f803e7c04ff", "address": "fa:16:3e:8e:d1:f4", "network": {"id": "e0e614b7-de4b-485e-8824-582faae8febd", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1102025073-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.183", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b5b21bc5072e4945a19a782dd9561709", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "41278529-8bd2-44a1-97c8-03967faa3ff7", "external-id": "nsx-vlan-transportzone-749", "segmentation_id": 749, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa05a3f68-d8", "ovs_interfaceid": "a05a3f68-d80e-46c9-9065-1f803e7c04ff", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2406.588541] env[63279]: DEBUG nova.compute.manager [None req-e2edff00-0842-48f8-a733-a2bb5f6b092c tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] [instance: 36354325-dee0-406e-8eb6-bc3cf347a403] Start destroying the instance on the hypervisor. 
{{(pid=63279) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2406.588752] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-e2edff00-0842-48f8-a733-a2bb5f6b092c tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] [instance: 36354325-dee0-406e-8eb6-bc3cf347a403] Destroying instance {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2406.589883] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80faa1f8-87c3-4f5e-997c-6bc23d0f2c91 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2406.597936] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-e2edff00-0842-48f8-a733-a2bb5f6b092c tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] [instance: 36354325-dee0-406e-8eb6-bc3cf347a403] Powering off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2406.598199] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-000ebef7-12f2-4992-8dae-6ec224802d34 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2406.605399] env[63279]: DEBUG oslo_vmware.api [None req-e2edff00-0842-48f8-a733-a2bb5f6b092c tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Waiting for the task: (returnval){ [ 2406.605399] env[63279]: value = "task-2088175" [ 2406.605399] env[63279]: _type = "Task" [ 2406.605399] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2406.613801] env[63279]: DEBUG oslo_vmware.api [None req-e2edff00-0842-48f8-a733-a2bb5f6b092c tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Task: {'id': task-2088175, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2406.678220] env[63279]: DEBUG oslo_vmware.api [None req-127fac77-4046-48cd-a53b-0f31a24b9d1c tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Task: {'id': task-2088174, 'name': PowerOffVM_Task, 'duration_secs': 0.226898} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2406.678501] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-127fac77-4046-48cd-a53b-0f31a24b9d1c tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: fcdd3076-2b53-4850-9730-2f877e2cabfd] Powered off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2406.678695] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-127fac77-4046-48cd-a53b-0f31a24b9d1c tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: fcdd3076-2b53-4850-9730-2f877e2cabfd] Unregistering the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2406.678972] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f475846c-81be-4ef2-9840-13978becbe3d {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2406.720880] env[63279]: DEBUG oslo_concurrency.lockutils [None req-dc2da8df-ea63-4b6a-b59c-8ee87f00a3f6 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2406.893058] env[63279]: DEBUG oslo_concurrency.lockutils [None req-fbd37efa-dbe8-4925-9302-efee8e5b5950 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.253s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2406.894970] env[63279]: DEBUG oslo_concurrency.lockutils [None req-dc2da8df-ea63-4b6a-b59c-8ee87f00a3f6 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.174s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2406.895245] env[63279]: DEBUG nova.objects.instance [None req-dc2da8df-ea63-4b6a-b59c-8ee87f00a3f6 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Lazy-loading 'resources' on Instance uuid 5961728f-c65a-425d-8f6d-5763c79bbe1a {{(pid=63279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2406.921778] env[63279]: INFO nova.network.neutron [None req-fbd37efa-dbe8-4925-9302-efee8e5b5950 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] [instance: efddec10-b496-446e-a05a-72c9f2d86ed9] Updating port cfd0d031-3ccd-4ad0-9c99-6436a05c108d with attributes {'binding:host_id': 'cpu-1', 'device_owner': 'compute:nova'} [ 2406.942384] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-127fac77-4046-48cd-a53b-0f31a24b9d1c tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: fcdd3076-2b53-4850-9730-2f877e2cabfd] Unregistered the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2406.942609] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-127fac77-4046-48cd-a53b-0f31a24b9d1c 
tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: fcdd3076-2b53-4850-9730-2f877e2cabfd] Deleting contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2406.942797] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-127fac77-4046-48cd-a53b-0f31a24b9d1c tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Deleting the datastore file [datastore1] fcdd3076-2b53-4850-9730-2f877e2cabfd {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2406.943067] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7f81d968-a6cc-4936-97ba-6b37ba5b9484 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2406.946496] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] [instance: 21aca39e-8513-49bd-93e9-0d101cee591f] Instance has had 0 of 5 cleanup attempts {{(pid=63279) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11760}} [ 2406.949455] env[63279]: DEBUG oslo_vmware.api [None req-127fac77-4046-48cd-a53b-0f31a24b9d1c tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Waiting for the task: (returnval){ [ 2406.949455] env[63279]: value = "task-2088177" [ 2406.949455] env[63279]: _type = "Task" [ 2406.949455] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2406.958306] env[63279]: DEBUG oslo_vmware.api [None req-127fac77-4046-48cd-a53b-0f31a24b9d1c tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Task: {'id': task-2088177, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2407.073023] env[63279]: DEBUG oslo_concurrency.lockutils [None req-c4be1975-3675-40a6-8f51-cbd564231322 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Releasing lock "refresh_cache-36354325-dee0-406e-8eb6-bc3cf347a403" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2407.115739] env[63279]: DEBUG oslo_vmware.api [None req-e2edff00-0842-48f8-a733-a2bb5f6b092c tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Task: {'id': task-2088175, 'name': PowerOffVM_Task, 'duration_secs': 0.200736} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2407.116016] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-e2edff00-0842-48f8-a733-a2bb5f6b092c tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] [instance: 36354325-dee0-406e-8eb6-bc3cf347a403] Powered off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2407.116224] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-e2edff00-0842-48f8-a733-a2bb5f6b092c tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] [instance: 36354325-dee0-406e-8eb6-bc3cf347a403] Unregistering the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2407.116460] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-344e3d72-398a-47c0-a4cd-6085d7eb743a {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2407.451911] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] [instance: 6b4a94f9-1cd9-466f-844f-8d692b732abc] Instance has had 0 of 5 cleanup attempts {{(pid=63279) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11760}} [ 2407.462763] env[63279]: DEBUG oslo_vmware.api [None req-127fac77-4046-48cd-a53b-0f31a24b9d1c tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Task: {'id': task-2088177, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.181351} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2407.465041] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-127fac77-4046-48cd-a53b-0f31a24b9d1c tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Deleted the datastore file {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2407.465248] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-127fac77-4046-48cd-a53b-0f31a24b9d1c tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: fcdd3076-2b53-4850-9730-2f877e2cabfd] Deleted contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2407.465430] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-127fac77-4046-48cd-a53b-0f31a24b9d1c tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: fcdd3076-2b53-4850-9730-2f877e2cabfd] Instance destroyed {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2407.465606] env[63279]: INFO nova.compute.manager [None req-127fac77-4046-48cd-a53b-0f31a24b9d1c tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: fcdd3076-2b53-4850-9730-2f877e2cabfd] Took 1.31 seconds to destroy the instance on the hypervisor. [ 2407.465847] env[63279]: DEBUG oslo.service.loopingcall [None req-127fac77-4046-48cd-a53b-0f31a24b9d1c tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2407.466214] env[63279]: DEBUG nova.compute.manager [-] [instance: fcdd3076-2b53-4850-9730-2f877e2cabfd] Deallocating network for instance {{(pid=63279) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2407.466314] env[63279]: DEBUG nova.network.neutron [-] [instance: fcdd3076-2b53-4850-9730-2f877e2cabfd] deallocate_for_instance() {{(pid=63279) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2407.529817] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a03c2fef-124b-47fc-89ca-fc98f2de3709 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2407.537369] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94b04a1c-00e2-469e-8021-ff43e0c4f01d {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2407.570455] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-ad4a251b-add7-4880-b3fc-b1f92eae186e tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: 6e8d9959-7c9d-48ee-81a5-bbdc6234248f] Powering off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2407.570997] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b3d8c996-0360-44d6-a24d-83ced062b463 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2407.572976] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-184d29e8-3416-49e4-acb3-a5e6f5032fbe {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2407.576463] env[63279]: DEBUG oslo_concurrency.lockutils [None req-c4be1975-3675-40a6-8f51-cbd564231322 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Lock "interface-36354325-dee0-406e-8eb6-bc3cf347a403-5426dc4d-6f5e-46fc-9a7e-c3ba98bdc032" "released" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: held 9.919s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2407.581108] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8359648e-f7b3-4ce3-8ad2-35e8c9c844bf {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2407.585595] env[63279]: DEBUG oslo_vmware.api [None req-ad4a251b-add7-4880-b3fc-b1f92eae186e tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Waiting for the task: (returnval){ [ 2407.585595] env[63279]: value = "task-2088179" [ 2407.585595] env[63279]: _type = "Task" [ 2407.585595] env[63279]: } to complete. 
{{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2407.598778] env[63279]: DEBUG nova.compute.provider_tree [None req-dc2da8df-ea63-4b6a-b59c-8ee87f00a3f6 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Inventory has not changed in ProviderTree for provider: 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2407.605589] env[63279]: DEBUG oslo_vmware.api [None req-ad4a251b-add7-4880-b3fc-b1f92eae186e tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Task: {'id': task-2088179, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2407.957873] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] [instance: 48794f65-355c-4cad-a83f-6b8cc327dd4d] Instance has had 0 of 5 cleanup attempts {{(pid=63279) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11760}} [ 2408.015644] env[63279]: DEBUG nova.compute.manager [req-de61ca9f-0c24-45c8-b3b6-9757c3a99fce req-90e4e34b-04ec-4a24-b044-b8f444be067b service nova] [instance: fcdd3076-2b53-4850-9730-2f877e2cabfd] Received event network-vif-deleted-7baa1106-63c3-49cd-8d80-54c763d1b2b3 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2408.015761] env[63279]: INFO nova.compute.manager [req-de61ca9f-0c24-45c8-b3b6-9757c3a99fce req-90e4e34b-04ec-4a24-b044-b8f444be067b service nova] [instance: fcdd3076-2b53-4850-9730-2f877e2cabfd] Neutron deleted interface 7baa1106-63c3-49cd-8d80-54c763d1b2b3; detaching it from the instance and deleting it from the info cache [ 2408.015889] env[63279]: DEBUG nova.network.neutron [req-de61ca9f-0c24-45c8-b3b6-9757c3a99fce req-90e4e34b-04ec-4a24-b044-b8f444be067b service nova] [instance: fcdd3076-2b53-4850-9730-2f877e2cabfd] Updating instance_info_cache with network_info: [] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2408.095725] env[63279]: DEBUG oslo_vmware.api [None req-ad4a251b-add7-4880-b3fc-b1f92eae186e tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Task: {'id': task-2088179, 'name': PowerOffVM_Task, 'duration_secs': 0.186295} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2408.095967] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-ad4a251b-add7-4880-b3fc-b1f92eae186e tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: 6e8d9959-7c9d-48ee-81a5-bbdc6234248f] Powered off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2408.096188] env[63279]: DEBUG nova.compute.manager [None req-ad4a251b-add7-4880-b3fc-b1f92eae186e tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: 6e8d9959-7c9d-48ee-81a5-bbdc6234248f] Checking state {{(pid=63279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2408.097108] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f45758e-4ba8-4554-9a90-664947285d54 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2408.103231] env[63279]: DEBUG nova.scheduler.client.report [None req-dc2da8df-ea63-4b6a-b59c-8ee87f00a3f6 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Inventory has not changed for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2408.423153] env[63279]: DEBUG nova.network.neutron [-] [instance: fcdd3076-2b53-4850-9730-2f877e2cabfd] Updating instance_info_cache with network_info: [] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2408.461503] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] [instance: 9a6abf10-ed12-49c6-aa16-f428da9f9f9d] Instance has had 0 of 5 cleanup attempts {{(pid=63279) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11760}} [ 2408.518920] env[63279]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-0130d76e-1900-45d5-98e6-b9760d8d97b0 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2408.527979] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb5017d2-c064-424b-9542-7dd7e22fa75d {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2408.561255] env[63279]: DEBUG nova.compute.manager [req-de61ca9f-0c24-45c8-b3b6-9757c3a99fce req-90e4e34b-04ec-4a24-b044-b8f444be067b service nova] [instance: fcdd3076-2b53-4850-9730-2f877e2cabfd] Detach interface failed, port_id=7baa1106-63c3-49cd-8d80-54c763d1b2b3, reason: Instance fcdd3076-2b53-4850-9730-2f877e2cabfd could not be found. 
{{(pid=63279) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11484}} [ 2408.609389] env[63279]: DEBUG oslo_concurrency.lockutils [None req-dc2da8df-ea63-4b6a-b59c-8ee87f00a3f6 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.714s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2408.614400] env[63279]: DEBUG oslo_concurrency.lockutils [None req-ad4a251b-add7-4880-b3fc-b1f92eae186e tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Lock "6e8d9959-7c9d-48ee-81a5-bbdc6234248f" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.074s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2408.635416] env[63279]: INFO nova.scheduler.client.report [None req-dc2da8df-ea63-4b6a-b59c-8ee87f00a3f6 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Deleted allocations for instance 5961728f-c65a-425d-8f6d-5763c79bbe1a [ 2408.926514] env[63279]: INFO nova.compute.manager [-] [instance: fcdd3076-2b53-4850-9730-2f877e2cabfd] Took 1.46 seconds to deallocate network for instance. [ 2408.965129] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] [instance: f418ff3d-8a5f-4d78-994a-e40b62cfcdd6] Instance has had 0 of 5 cleanup attempts {{(pid=63279) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11760}} [ 2409.044660] env[63279]: INFO nova.compute.manager [None req-37cb8674-bf68-4b1b-ae68-296c49ed5420 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: 6e8d9959-7c9d-48ee-81a5-bbdc6234248f] Rebuilding instance [ 2409.085638] env[63279]: DEBUG nova.compute.manager [None req-37cb8674-bf68-4b1b-ae68-296c49ed5420 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: 6e8d9959-7c9d-48ee-81a5-bbdc6234248f] Checking state {{(pid=63279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2409.086530] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e0b727c-bd18-4333-a41d-36a9278fe2e8 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2409.143123] env[63279]: DEBUG oslo_concurrency.lockutils [None req-dc2da8df-ea63-4b6a-b59c-8ee87f00a3f6 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Lock "5961728f-c65a-425d-8f6d-5763c79bbe1a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.394s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2409.432976] env[63279]: DEBUG oslo_concurrency.lockutils [None req-127fac77-4046-48cd-a53b-0f31a24b9d1c tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2409.433162] env[63279]: DEBUG oslo_concurrency.lockutils [None req-127fac77-4046-48cd-a53b-0f31a24b9d1c 
tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2409.433392] env[63279]: DEBUG oslo_concurrency.lockutils [None req-127fac77-4046-48cd-a53b-0f31a24b9d1c tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2409.452393] env[63279]: INFO nova.scheduler.client.report [None req-127fac77-4046-48cd-a53b-0f31a24b9d1c tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Deleted allocations for instance fcdd3076-2b53-4850-9730-2f877e2cabfd [ 2409.468155] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] [instance: 59ad6180-e561-41e3-86e4-37fc20819578] Instance has had 0 of 5 cleanup attempts {{(pid=63279) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11760}} [ 2409.901107] env[63279]: DEBUG oslo_concurrency.lockutils [None req-5a994934-1fd4-4f30-8016-3d5f78a5cf31 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Acquiring lock "69b3269a-2ba3-4f5f-a29c-62518c93da3d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2409.901414] env[63279]: DEBUG oslo_concurrency.lockutils [None req-5a994934-1fd4-4f30-8016-3d5f78a5cf31 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Lock "69b3269a-2ba3-4f5f-a29c-62518c93da3d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2409.901637] env[63279]: DEBUG oslo_concurrency.lockutils [None req-5a994934-1fd4-4f30-8016-3d5f78a5cf31 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Acquiring lock "69b3269a-2ba3-4f5f-a29c-62518c93da3d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2409.901845] env[63279]: DEBUG oslo_concurrency.lockutils [None req-5a994934-1fd4-4f30-8016-3d5f78a5cf31 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Lock "69b3269a-2ba3-4f5f-a29c-62518c93da3d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2409.902037] env[63279]: DEBUG oslo_concurrency.lockutils [None req-5a994934-1fd4-4f30-8016-3d5f78a5cf31 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Lock "69b3269a-2ba3-4f5f-a29c-62518c93da3d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 
2409.904113] env[63279]: INFO nova.compute.manager [None req-5a994934-1fd4-4f30-8016-3d5f78a5cf31 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] [instance: 69b3269a-2ba3-4f5f-a29c-62518c93da3d] Terminating instance [ 2409.958445] env[63279]: DEBUG oslo_concurrency.lockutils [None req-127fac77-4046-48cd-a53b-0f31a24b9d1c tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Lock "fcdd3076-2b53-4850-9730-2f877e2cabfd" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 4.314s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2409.970980] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] [instance: 9fd3ea14-3e25-47f4-ae84-1b1fb8b1bbf6] Instance has had 0 of 5 cleanup attempts {{(pid=63279) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11760}} [ 2410.103846] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-37cb8674-bf68-4b1b-ae68-296c49ed5420 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: 6e8d9959-7c9d-48ee-81a5-bbdc6234248f] Powering off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2410.104202] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1246933f-e23a-422d-bcab-52e9398a4559 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2410.111818] env[63279]: DEBUG oslo_vmware.api [None req-37cb8674-bf68-4b1b-ae68-296c49ed5420 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Waiting for the task: (returnval){ [ 2410.111818] env[63279]: value = "task-2088180" [ 2410.111818] env[63279]: _type = "Task" [ 2410.111818] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2410.119939] env[63279]: DEBUG oslo_vmware.api [None req-37cb8674-bf68-4b1b-ae68-296c49ed5420 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Task: {'id': task-2088180, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2410.408791] env[63279]: DEBUG nova.compute.manager [None req-5a994934-1fd4-4f30-8016-3d5f78a5cf31 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] [instance: 69b3269a-2ba3-4f5f-a29c-62518c93da3d] Start destroying the instance on the hypervisor. 
{{(pid=63279) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2410.409092] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-5a994934-1fd4-4f30-8016-3d5f78a5cf31 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] [instance: 69b3269a-2ba3-4f5f-a29c-62518c93da3d] Destroying instance {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2410.410106] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f11a8225-a19e-4f29-899c-44146b29831a {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2410.418361] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-5a994934-1fd4-4f30-8016-3d5f78a5cf31 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] [instance: 69b3269a-2ba3-4f5f-a29c-62518c93da3d] Powering off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2410.418663] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ca8de374-fbf3-4ad2-8c29-06267211ce07 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2410.424438] env[63279]: DEBUG oslo_vmware.api [None req-5a994934-1fd4-4f30-8016-3d5f78a5cf31 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Waiting for the task: (returnval){ [ 2410.424438] env[63279]: value = "task-2088181" [ 2410.424438] env[63279]: _type = "Task" [ 2410.424438] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2410.433327] env[63279]: DEBUG oslo_vmware.api [None req-5a994934-1fd4-4f30-8016-3d5f78a5cf31 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Task: {'id': task-2088181, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2410.474461] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] [instance: f18d3019-bf1f-4519-a824-7ca80458d793] Instance has had 0 of 5 cleanup attempts {{(pid=63279) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11760}} [ 2410.624017] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-37cb8674-bf68-4b1b-ae68-296c49ed5420 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: 6e8d9959-7c9d-48ee-81a5-bbdc6234248f] VM already powered off {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 2410.624339] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-37cb8674-bf68-4b1b-ae68-296c49ed5420 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: 6e8d9959-7c9d-48ee-81a5-bbdc6234248f] Destroying instance {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2410.625139] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ef3ea90-3b5f-40c2-b6f0-4dd55ad30d95 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2410.632393] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-37cb8674-bf68-4b1b-ae68-296c49ed5420 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: 6e8d9959-7c9d-48ee-81a5-bbdc6234248f] Unregistering the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2410.632570] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-953645ed-c73d-4bae-b404-f74742f07506 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2410.641587] env[63279]: DEBUG oslo_concurrency.lockutils [None req-72d4b0a6-0bb4-4141-80e3-ffb0203f86df tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Acquiring lock "6c4d6006-656b-4d30-a595-8985d7f6b1e7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2410.641810] env[63279]: DEBUG oslo_concurrency.lockutils [None req-72d4b0a6-0bb4-4141-80e3-ffb0203f86df tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Lock "6c4d6006-656b-4d30-a595-8985d7f6b1e7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2410.934877] env[63279]: DEBUG oslo_vmware.api [None req-5a994934-1fd4-4f30-8016-3d5f78a5cf31 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Task: {'id': task-2088181, 'name': PowerOffVM_Task, 'duration_secs': 0.212404} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2410.935066] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-5a994934-1fd4-4f30-8016-3d5f78a5cf31 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] [instance: 69b3269a-2ba3-4f5f-a29c-62518c93da3d] Powered off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2410.935117] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-5a994934-1fd4-4f30-8016-3d5f78a5cf31 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] [instance: 69b3269a-2ba3-4f5f-a29c-62518c93da3d] Unregistering the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2410.935340] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1ebcad64-b0f9-40ac-a950-91359354cf9e {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2410.977537] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] [instance: 14c698c8-7459-4843-bb19-f915742e3e53] Instance has had 0 of 5 cleanup attempts {{(pid=63279) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11760}} [ 2411.144569] env[63279]: DEBUG nova.compute.manager [None req-72d4b0a6-0bb4-4141-80e3-ffb0203f86df tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: 6c4d6006-656b-4d30-a595-8985d7f6b1e7] Starting instance... {{(pid=63279) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 2411.481320] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] [instance: 27e2917d-3cd0-4ad3-ab65-f85f7d97225f] Instance has had 0 of 5 cleanup attempts {{(pid=63279) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11760}} [ 2411.666909] env[63279]: DEBUG oslo_concurrency.lockutils [None req-72d4b0a6-0bb4-4141-80e3-ffb0203f86df tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2411.667203] env[63279]: DEBUG oslo_concurrency.lockutils [None req-72d4b0a6-0bb4-4141-80e3-ffb0203f86df tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2411.669025] env[63279]: INFO nova.compute.claims [None req-72d4b0a6-0bb4-4141-80e3-ffb0203f86df tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: 6c4d6006-656b-4d30-a595-8985d7f6b1e7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2411.984848] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] [instance: 9b98a316-71da-45fb-b895-553f179fe7d9] Instance has had 0 of 5 cleanup attempts {{(pid=63279) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11760}} [ 2412.487857] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] [instance: 
2fba60ba-7ef0-4e61-9f7d-147cad8ab7ed] Instance has had 0 of 5 cleanup attempts {{(pid=63279) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11760}} [ 2412.792576] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2abee48-be34-4ead-a3d9-c1f165cb4635 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2412.801479] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7e48f72-82aa-4625-a1d1-cbd0cfa06ff7 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2412.830817] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0103ca2-c2e0-4b61-98bf-965e01485201 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2412.837937] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53e2db5b-5215-4f66-b18e-d98b586ca242 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2412.850734] env[63279]: DEBUG nova.compute.provider_tree [None req-72d4b0a6-0bb4-4141-80e3-ffb0203f86df tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Inventory has not changed in ProviderTree for provider: 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2412.991690] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] [instance: 75f5b49f-14e7-4a8e-a0cb-b955edc13dd5] Instance has had 0 of 5 cleanup attempts {{(pid=63279) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11760}} [ 2413.353993] env[63279]: DEBUG nova.scheduler.client.report [None req-72d4b0a6-0bb4-4141-80e3-ffb0203f86df tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Inventory has not changed for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2413.495592] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] [instance: 686a7ce2-2d07-411e-91d6-0471c55c3728] Instance has had 0 of 5 cleanup attempts {{(pid=63279) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11760}} [ 2413.729500] env[63279]: DEBUG oslo_concurrency.lockutils [None req-9fbef605-6d62-4620-b451-b960750507a1 tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Acquiring lock "19e10ee4-99d1-44b9-9354-4c162d541a1f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2413.729780] env[63279]: DEBUG oslo_concurrency.lockutils [None req-9fbef605-6d62-4620-b451-b960750507a1 tempest-AttachVolumeTestJSON-1872027629 
tempest-AttachVolumeTestJSON-1872027629-project-member] Lock "19e10ee4-99d1-44b9-9354-4c162d541a1f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2413.729979] env[63279]: DEBUG oslo_concurrency.lockutils [None req-9fbef605-6d62-4620-b451-b960750507a1 tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Acquiring lock "19e10ee4-99d1-44b9-9354-4c162d541a1f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2413.730226] env[63279]: DEBUG oslo_concurrency.lockutils [None req-9fbef605-6d62-4620-b451-b960750507a1 tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Lock "19e10ee4-99d1-44b9-9354-4c162d541a1f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2413.730447] env[63279]: DEBUG oslo_concurrency.lockutils [None req-9fbef605-6d62-4620-b451-b960750507a1 tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Lock "19e10ee4-99d1-44b9-9354-4c162d541a1f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2413.734147] env[63279]: INFO nova.compute.manager [None req-9fbef605-6d62-4620-b451-b960750507a1 tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] [instance: 19e10ee4-99d1-44b9-9354-4c162d541a1f] Terminating instance [ 2413.858440] env[63279]: DEBUG oslo_concurrency.lockutils [None req-72d4b0a6-0bb4-4141-80e3-ffb0203f86df tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.191s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2413.858970] env[63279]: DEBUG nova.compute.manager [None req-72d4b0a6-0bb4-4141-80e3-ffb0203f86df tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: 6c4d6006-656b-4d30-a595-8985d7f6b1e7] Start building networks asynchronously for instance. {{(pid=63279) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 2413.998662] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] [instance: 736ab3ac-b9e0-4f9e-885b-765ca7a92ed0] Instance has had 0 of 5 cleanup attempts {{(pid=63279) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11760}} [ 2414.238192] env[63279]: DEBUG nova.compute.manager [None req-9fbef605-6d62-4620-b451-b960750507a1 tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] [instance: 19e10ee4-99d1-44b9-9354-4c162d541a1f] Start destroying the instance on the hypervisor. 
{{(pid=63279) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2414.238443] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-9fbef605-6d62-4620-b451-b960750507a1 tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] [instance: 19e10ee4-99d1-44b9-9354-4c162d541a1f] Destroying instance {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2414.239389] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc182339-1e99-4914-88ff-c6113b0ca41d {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2414.247153] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-9fbef605-6d62-4620-b451-b960750507a1 tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] [instance: 19e10ee4-99d1-44b9-9354-4c162d541a1f] Powering off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2414.247417] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6698aae0-e9e7-487d-8840-1c52e1362b47 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2414.254052] env[63279]: DEBUG oslo_vmware.api [None req-9fbef605-6d62-4620-b451-b960750507a1 tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Waiting for the task: (returnval){ [ 2414.254052] env[63279]: value = "task-2088184" [ 2414.254052] env[63279]: _type = "Task" [ 2414.254052] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2414.262651] env[63279]: DEBUG oslo_vmware.api [None req-9fbef605-6d62-4620-b451-b960750507a1 tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Task: {'id': task-2088184, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2414.268081] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-e2edff00-0842-48f8-a733-a2bb5f6b092c tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] [instance: 36354325-dee0-406e-8eb6-bc3cf347a403] Unregistered the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2414.268296] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-e2edff00-0842-48f8-a733-a2bb5f6b092c tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] [instance: 36354325-dee0-406e-8eb6-bc3cf347a403] Deleting contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2414.268486] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-e2edff00-0842-48f8-a733-a2bb5f6b092c tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Deleting the datastore file [datastore1] 36354325-dee0-406e-8eb6-bc3cf347a403 {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2414.268763] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1fb37438-7880-47c4-a16d-8da60a5d4520 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2414.275019] env[63279]: DEBUG oslo_vmware.api [None req-e2edff00-0842-48f8-a733-a2bb5f6b092c tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Waiting for the task: (returnval){ [ 2414.275019] env[63279]: value = "task-2088185" [ 2414.275019] env[63279]: _type = "Task" [ 2414.275019] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2414.283120] env[63279]: DEBUG oslo_vmware.api [None req-e2edff00-0842-48f8-a733-a2bb5f6b092c tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Task: {'id': task-2088185, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2414.364404] env[63279]: DEBUG nova.compute.utils [None req-72d4b0a6-0bb4-4141-80e3-ffb0203f86df tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Using /dev/sd instead of None {{(pid=63279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2414.365648] env[63279]: DEBUG nova.compute.manager [None req-72d4b0a6-0bb4-4141-80e3-ffb0203f86df tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: 6c4d6006-656b-4d30-a595-8985d7f6b1e7] Allocating IP information in the background. 
{{(pid=63279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 2414.365798] env[63279]: DEBUG nova.network.neutron [None req-72d4b0a6-0bb4-4141-80e3-ffb0203f86df tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: 6c4d6006-656b-4d30-a595-8985d7f6b1e7] allocate_for_instance() {{(pid=63279) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2414.405583] env[63279]: DEBUG nova.policy [None req-72d4b0a6-0bb4-4141-80e3-ffb0203f86df tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ae6ae670025f406e8bd0362124749c43', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f39174e9ff5649e0ade4391da383dfb2', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63279) authorize /opt/stack/nova/nova/policy.py:192}} [ 2414.501648] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] [instance: 4acfb474-d861-467a-983c-0dd5641e66f3] Instance has had 0 of 5 cleanup attempts {{(pid=63279) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11760}} [ 2414.546891] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-37cb8674-bf68-4b1b-ae68-296c49ed5420 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: 6e8d9959-7c9d-48ee-81a5-bbdc6234248f] Unregistered the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2414.547228] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-37cb8674-bf68-4b1b-ae68-296c49ed5420 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: 6e8d9959-7c9d-48ee-81a5-bbdc6234248f] Deleting contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2414.547508] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-37cb8674-bf68-4b1b-ae68-296c49ed5420 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Deleting the datastore file [datastore1] 6e8d9959-7c9d-48ee-81a5-bbdc6234248f {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2414.547817] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5fd6a8c5-b0fb-4f12-9159-92d76faaa4f5 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2414.556679] env[63279]: DEBUG oslo_vmware.api [None req-37cb8674-bf68-4b1b-ae68-296c49ed5420 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Waiting for the task: (returnval){ [ 2414.556679] env[63279]: value = "task-2088186" [ 2414.556679] env[63279]: _type = "Task" [ 2414.556679] env[63279]: } to complete. 
{{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2414.567416] env[63279]: DEBUG oslo_vmware.api [None req-37cb8674-bf68-4b1b-ae68-296c49ed5420 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Task: {'id': task-2088186, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2414.568738] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-5a994934-1fd4-4f30-8016-3d5f78a5cf31 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] [instance: 69b3269a-2ba3-4f5f-a29c-62518c93da3d] Unregistered the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2414.568963] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-5a994934-1fd4-4f30-8016-3d5f78a5cf31 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] [instance: 69b3269a-2ba3-4f5f-a29c-62518c93da3d] Deleting contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2414.569181] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-5a994934-1fd4-4f30-8016-3d5f78a5cf31 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Deleting the datastore file [datastore1] 69b3269a-2ba3-4f5f-a29c-62518c93da3d {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2414.569447] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-87ef97f3-5513-499c-bd11-c0c51667c572 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2414.576428] env[63279]: DEBUG oslo_vmware.api [None req-5a994934-1fd4-4f30-8016-3d5f78a5cf31 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Waiting for the task: (returnval){ [ 2414.576428] env[63279]: value = "task-2088187" [ 2414.576428] env[63279]: _type = "Task" [ 2414.576428] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2414.584561] env[63279]: DEBUG oslo_vmware.api [None req-5a994934-1fd4-4f30-8016-3d5f78a5cf31 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Task: {'id': task-2088187, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2414.725204] env[63279]: DEBUG nova.network.neutron [None req-72d4b0a6-0bb4-4141-80e3-ffb0203f86df tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: 6c4d6006-656b-4d30-a595-8985d7f6b1e7] Successfully created port: 548d4882-d674-412a-9b89-4691e9eda165 {{(pid=63279) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2414.764404] env[63279]: DEBUG oslo_vmware.api [None req-9fbef605-6d62-4620-b451-b960750507a1 tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Task: {'id': task-2088184, 'name': PowerOffVM_Task, 'duration_secs': 0.203924} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2414.764687] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-9fbef605-6d62-4620-b451-b960750507a1 tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] [instance: 19e10ee4-99d1-44b9-9354-4c162d541a1f] Powered off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2414.764862] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-9fbef605-6d62-4620-b451-b960750507a1 tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] [instance: 19e10ee4-99d1-44b9-9354-4c162d541a1f] Unregistering the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2414.765171] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6ff2234a-f89d-4baa-8d38-f7529391b59f {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2414.784754] env[63279]: DEBUG oslo_vmware.api [None req-e2edff00-0842-48f8-a733-a2bb5f6b092c tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Task: {'id': task-2088185, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.239562} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2414.784988] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-e2edff00-0842-48f8-a733-a2bb5f6b092c tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Deleted the datastore file {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2414.785207] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-e2edff00-0842-48f8-a733-a2bb5f6b092c tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] [instance: 36354325-dee0-406e-8eb6-bc3cf347a403] Deleted contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2414.785399] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-e2edff00-0842-48f8-a733-a2bb5f6b092c tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] [instance: 36354325-dee0-406e-8eb6-bc3cf347a403] Instance destroyed {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2414.785580] env[63279]: INFO nova.compute.manager [None req-e2edff00-0842-48f8-a733-a2bb5f6b092c tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] [instance: 36354325-dee0-406e-8eb6-bc3cf347a403] Took 8.20 seconds to destroy the instance on the hypervisor. [ 2414.785820] env[63279]: DEBUG oslo.service.loopingcall [None req-e2edff00-0842-48f8-a733-a2bb5f6b092c tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2414.786016] env[63279]: DEBUG nova.compute.manager [-] [instance: 36354325-dee0-406e-8eb6-bc3cf347a403] Deallocating network for instance {{(pid=63279) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2414.786116] env[63279]: DEBUG nova.network.neutron [-] [instance: 36354325-dee0-406e-8eb6-bc3cf347a403] deallocate_for_instance() {{(pid=63279) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2414.865256] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-9fbef605-6d62-4620-b451-b960750507a1 tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] [instance: 19e10ee4-99d1-44b9-9354-4c162d541a1f] Unregistered the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2414.865485] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-9fbef605-6d62-4620-b451-b960750507a1 tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] [instance: 19e10ee4-99d1-44b9-9354-4c162d541a1f] Deleting contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2414.865670] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-9fbef605-6d62-4620-b451-b960750507a1 tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Deleting the datastore file [datastore1] 19e10ee4-99d1-44b9-9354-4c162d541a1f {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2414.865945] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-10891539-efad-473b-bca4-a8bf49a69c77 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2414.868090] env[63279]: DEBUG nova.compute.manager [None req-72d4b0a6-0bb4-4141-80e3-ffb0203f86df tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: 6c4d6006-656b-4d30-a595-8985d7f6b1e7] Start building block device mappings for instance. {{(pid=63279) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 2414.877698] env[63279]: DEBUG oslo_vmware.api [None req-9fbef605-6d62-4620-b451-b960750507a1 tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Waiting for the task: (returnval){ [ 2414.877698] env[63279]: value = "task-2088189" [ 2414.877698] env[63279]: _type = "Task" [ 2414.877698] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2414.886128] env[63279]: DEBUG oslo_vmware.api [None req-9fbef605-6d62-4620-b451-b960750507a1 tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Task: {'id': task-2088189, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2415.006030] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] [instance: 861e4118-6134-40cf-91cb-865b6ee9f347] Instance has had 0 of 5 cleanup attempts {{(pid=63279) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11760}} [ 2415.067885] env[63279]: DEBUG oslo_vmware.api [None req-37cb8674-bf68-4b1b-ae68-296c49ed5420 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Task: {'id': task-2088186, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.157874} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2415.068188] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-37cb8674-bf68-4b1b-ae68-296c49ed5420 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Deleted the datastore file {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2415.068441] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-37cb8674-bf68-4b1b-ae68-296c49ed5420 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: 6e8d9959-7c9d-48ee-81a5-bbdc6234248f] Deleted contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2415.068677] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-37cb8674-bf68-4b1b-ae68-296c49ed5420 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: 6e8d9959-7c9d-48ee-81a5-bbdc6234248f] Instance destroyed {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2415.090601] env[63279]: DEBUG oslo_vmware.api [None req-5a994934-1fd4-4f30-8016-3d5f78a5cf31 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Task: {'id': task-2088187, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.161044} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2415.090601] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-5a994934-1fd4-4f30-8016-3d5f78a5cf31 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Deleted the datastore file {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2415.090601] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-5a994934-1fd4-4f30-8016-3d5f78a5cf31 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] [instance: 69b3269a-2ba3-4f5f-a29c-62518c93da3d] Deleted contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2415.090842] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-5a994934-1fd4-4f30-8016-3d5f78a5cf31 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] [instance: 69b3269a-2ba3-4f5f-a29c-62518c93da3d] Instance destroyed {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2415.091159] env[63279]: INFO nova.compute.manager [None req-5a994934-1fd4-4f30-8016-3d5f78a5cf31 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] [instance: 69b3269a-2ba3-4f5f-a29c-62518c93da3d] Took 4.68 seconds to destroy the instance on the hypervisor. 
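The PowerOffVM_Task and DeleteDatastoreFile_Task entries above follow oslo.vmware's invoke-then-poll pattern: a vCenter task is started through invoke_api() and wait_for_task() polls it (the "progress is 0%" lines) until it reaches a terminal state. Below is a minimal sketch of that pattern against oslo.vmware's public API; the vCenter host, credentials and managed-object id are placeholders, not values from this deployment, and a real session logs into vCenter on construction.

    # Sketch of the invoke-then-poll pattern seen above (PowerOffVM_Task,
    # DeleteDatastoreFile_Task). Host, credentials and the moref id are
    # placeholders only.
    from oslo_vmware import api as vmware_api
    from oslo_vmware import vim_util

    session = vmware_api.VMwareAPISession(
        'vcenter.example.org',          # placeholder vCenter host
        'administrator@vsphere.local',  # placeholder user
        'secret',                       # placeholder password
        api_retry_count=10,
        task_poll_interval=0.5)

    # Build a reference to an existing VM (placeholder moref id) and power it off.
    vm_ref = vim_util.get_moref('vm-123', 'VirtualMachine')
    task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)

    # wait_for_task() polls the task state, producing the periodic
    # "progress is N%" debug lines, and raises if the task fails.
    session.wait_for_task(task)
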
[ 2415.091570] env[63279]: DEBUG oslo.service.loopingcall [None req-5a994934-1fd4-4f30-8016-3d5f78a5cf31 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2415.091893] env[63279]: DEBUG nova.compute.manager [-] [instance: 69b3269a-2ba3-4f5f-a29c-62518c93da3d] Deallocating network for instance {{(pid=63279) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2415.092091] env[63279]: DEBUG nova.network.neutron [-] [instance: 69b3269a-2ba3-4f5f-a29c-62518c93da3d] deallocate_for_instance() {{(pid=63279) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2415.393075] env[63279]: DEBUG oslo_vmware.api [None req-9fbef605-6d62-4620-b451-b960750507a1 tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Task: {'id': task-2088189, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.22491} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2415.393487] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-9fbef605-6d62-4620-b451-b960750507a1 tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Deleted the datastore file {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2415.393598] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-9fbef605-6d62-4620-b451-b960750507a1 tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] [instance: 19e10ee4-99d1-44b9-9354-4c162d541a1f] Deleted contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2415.393916] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-9fbef605-6d62-4620-b451-b960750507a1 tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] [instance: 19e10ee4-99d1-44b9-9354-4c162d541a1f] Instance destroyed {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2415.394056] env[63279]: INFO nova.compute.manager [None req-9fbef605-6d62-4620-b451-b960750507a1 tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] [instance: 19e10ee4-99d1-44b9-9354-4c162d541a1f] Took 1.16 seconds to destroy the instance on the hypervisor. [ 2415.395783] env[63279]: DEBUG oslo.service.loopingcall [None req-9fbef605-6d62-4620-b451-b960750507a1 tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2415.395783] env[63279]: DEBUG nova.compute.manager [-] [instance: 19e10ee4-99d1-44b9-9354-4c162d541a1f] Deallocating network for instance {{(pid=63279) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2415.395783] env[63279]: DEBUG nova.network.neutron [-] [instance: 19e10ee4-99d1-44b9-9354-4c162d541a1f] deallocate_for_instance() {{(pid=63279) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2415.426487] env[63279]: DEBUG nova.compute.manager [req-57aab6f8-ab43-4931-90f9-641aeb66054f req-0dcc266e-7792-465a-a0e1-e39d013585af service nova] [instance: efddec10-b496-446e-a05a-72c9f2d86ed9] Received event network-vif-plugged-cfd0d031-3ccd-4ad0-9c99-6436a05c108d {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2415.426717] env[63279]: DEBUG oslo_concurrency.lockutils [req-57aab6f8-ab43-4931-90f9-641aeb66054f req-0dcc266e-7792-465a-a0e1-e39d013585af service nova] Acquiring lock "efddec10-b496-446e-a05a-72c9f2d86ed9-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2415.426937] env[63279]: DEBUG oslo_concurrency.lockutils [req-57aab6f8-ab43-4931-90f9-641aeb66054f req-0dcc266e-7792-465a-a0e1-e39d013585af service nova] Lock "efddec10-b496-446e-a05a-72c9f2d86ed9-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2415.427129] env[63279]: DEBUG oslo_concurrency.lockutils [req-57aab6f8-ab43-4931-90f9-641aeb66054f req-0dcc266e-7792-465a-a0e1-e39d013585af service nova] Lock "efddec10-b496-446e-a05a-72c9f2d86ed9-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2415.427313] env[63279]: DEBUG nova.compute.manager [req-57aab6f8-ab43-4931-90f9-641aeb66054f req-0dcc266e-7792-465a-a0e1-e39d013585af service nova] [instance: efddec10-b496-446e-a05a-72c9f2d86ed9] No waiting events found dispatching network-vif-plugged-cfd0d031-3ccd-4ad0-9c99-6436a05c108d {{(pid=63279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 2415.427488] env[63279]: WARNING nova.compute.manager [req-57aab6f8-ab43-4931-90f9-641aeb66054f req-0dcc266e-7792-465a-a0e1-e39d013585af service nova] [instance: efddec10-b496-446e-a05a-72c9f2d86ed9] Received unexpected event network-vif-plugged-cfd0d031-3ccd-4ad0-9c99-6436a05c108d for instance with vm_state shelved_offloaded and task_state spawning. 
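The repeated "Acquiring lock ... / Lock ... acquired by ... / released" triplets around the per-instance "-events" lock above come from oslo.concurrency's named-lock helpers, which log wait and hold times at DEBUG. A minimal sketch of that locking pattern follows; the guarded function body is illustrative only and is not nova's actual event handling, while the instance uuid is taken from the log above.

    # Sketch of the oslo.concurrency named-lock pattern behind the
    # "Acquiring lock ... acquired ... released" entries. The guarded body
    # is illustrative; only the instance uuid comes from the log above.
    from oslo_concurrency import lockutils

    instance_uuid = '19e10ee4-99d1-44b9-9354-4c162d541a1f'

    @lockutils.synchronized(instance_uuid + '-events')
    def _clear_events():
        # Runs with the per-instance "-events" lock held; lockutils emits
        # the waited/held timing debug lines while doing so.
        return {}

    _clear_events()

    # The same primitive is available as a context manager:
    with lockutils.lock(instance_uuid + '-events'):
        pass  # critical section
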
[ 2415.509310] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] [instance: a6758131-030e-4b33-a2c9-8864055a5bec] Instance has had 0 of 5 cleanup attempts {{(pid=63279) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11760}} [ 2415.539105] env[63279]: DEBUG oslo_concurrency.lockutils [None req-fbd37efa-dbe8-4925-9302-efee8e5b5950 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Acquiring lock "refresh_cache-efddec10-b496-446e-a05a-72c9f2d86ed9" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2415.539320] env[63279]: DEBUG oslo_concurrency.lockutils [None req-fbd37efa-dbe8-4925-9302-efee8e5b5950 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Acquired lock "refresh_cache-efddec10-b496-446e-a05a-72c9f2d86ed9" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2415.539887] env[63279]: DEBUG nova.network.neutron [None req-fbd37efa-dbe8-4925-9302-efee8e5b5950 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] [instance: efddec10-b496-446e-a05a-72c9f2d86ed9] Building network info cache for instance {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2415.837845] env[63279]: DEBUG nova.compute.manager [req-1f6574c6-5859-40c6-92f1-9d73704f69d3 req-fc6f4d14-41a3-40d0-b6ed-6c81860dfcf8 service nova] [instance: 69b3269a-2ba3-4f5f-a29c-62518c93da3d] Received event network-vif-deleted-482c0084-ab15-4c03-beb3-585f9e66c683 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2415.838043] env[63279]: INFO nova.compute.manager [req-1f6574c6-5859-40c6-92f1-9d73704f69d3 req-fc6f4d14-41a3-40d0-b6ed-6c81860dfcf8 service nova] [instance: 69b3269a-2ba3-4f5f-a29c-62518c93da3d] Neutron deleted interface 482c0084-ab15-4c03-beb3-585f9e66c683; detaching it from the instance and deleting it from the info cache [ 2415.838206] env[63279]: DEBUG nova.network.neutron [req-1f6574c6-5859-40c6-92f1-9d73704f69d3 req-fc6f4d14-41a3-40d0-b6ed-6c81860dfcf8 service nova] [instance: 69b3269a-2ba3-4f5f-a29c-62518c93da3d] Updating instance_info_cache with network_info: [] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2415.883825] env[63279]: DEBUG nova.compute.manager [None req-72d4b0a6-0bb4-4141-80e3-ffb0203f86df tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: 6c4d6006-656b-4d30-a595-8985d7f6b1e7] Start spawning the instance on the hypervisor. 
{{(pid=63279) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 2415.911361] env[63279]: DEBUG nova.virt.hardware [None req-72d4b0a6-0bb4-4141-80e3-ffb0203f86df tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-13T17:30:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-13T17:30:01Z,direct_url=,disk_format='vmdk',id=30887889-e45b-4f67-8b3c-16216e594a90,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a935e86eee0a4c38adfe0367d2097a61',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-13T17:30:02Z,virtual_size=,visibility=), allow threads: False {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2415.911640] env[63279]: DEBUG nova.virt.hardware [None req-72d4b0a6-0bb4-4141-80e3-ffb0203f86df tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Flavor limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2415.911804] env[63279]: DEBUG nova.virt.hardware [None req-72d4b0a6-0bb4-4141-80e3-ffb0203f86df tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Image limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2415.911991] env[63279]: DEBUG nova.virt.hardware [None req-72d4b0a6-0bb4-4141-80e3-ffb0203f86df tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Flavor pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2415.912225] env[63279]: DEBUG nova.virt.hardware [None req-72d4b0a6-0bb4-4141-80e3-ffb0203f86df tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Image pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2415.912379] env[63279]: DEBUG nova.virt.hardware [None req-72d4b0a6-0bb4-4141-80e3-ffb0203f86df tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2415.912614] env[63279]: DEBUG nova.virt.hardware [None req-72d4b0a6-0bb4-4141-80e3-ffb0203f86df tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 2415.912796] env[63279]: DEBUG nova.virt.hardware [None req-72d4b0a6-0bb4-4141-80e3-ffb0203f86df tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 2415.912974] env[63279]: DEBUG nova.virt.hardware [None 
req-72d4b0a6-0bb4-4141-80e3-ffb0203f86df tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Got 1 possible topologies {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2415.913155] env[63279]: DEBUG nova.virt.hardware [None req-72d4b0a6-0bb4-4141-80e3-ffb0203f86df tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2415.913343] env[63279]: DEBUG nova.virt.hardware [None req-72d4b0a6-0bb4-4141-80e3-ffb0203f86df tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2415.914678] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa0aad0e-b04b-4eb0-826d-8b94650feee4 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2415.923256] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24b69bdd-f1c9-4919-bfc9-81828d03a637 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2416.012846] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] [instance: 4ce17bdb-4bed-4e06-af13-e4097b55e17d] Instance has had 0 of 5 cleanup attempts {{(pid=63279) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11760}} [ 2416.107155] env[63279]: DEBUG nova.virt.hardware [None req-37cb8674-bf68-4b1b-ae68-296c49ed5420 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-13T17:30:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-13T17:30:01Z,direct_url=,disk_format='vmdk',id=30887889-e45b-4f67-8b3c-16216e594a90,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a935e86eee0a4c38adfe0367d2097a61',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-13T17:30:02Z,virtual_size=,visibility=), allow threads: False {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2416.107403] env[63279]: DEBUG nova.virt.hardware [None req-37cb8674-bf68-4b1b-ae68-296c49ed5420 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Flavor limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2416.107603] env[63279]: DEBUG nova.virt.hardware [None req-37cb8674-bf68-4b1b-ae68-296c49ed5420 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Image limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2416.107747] env[63279]: DEBUG nova.virt.hardware [None 
req-37cb8674-bf68-4b1b-ae68-296c49ed5420 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Flavor pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2416.107898] env[63279]: DEBUG nova.virt.hardware [None req-37cb8674-bf68-4b1b-ae68-296c49ed5420 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Image pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2416.108063] env[63279]: DEBUG nova.virt.hardware [None req-37cb8674-bf68-4b1b-ae68-296c49ed5420 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2416.108577] env[63279]: DEBUG nova.virt.hardware [None req-37cb8674-bf68-4b1b-ae68-296c49ed5420 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 2416.108786] env[63279]: DEBUG nova.virt.hardware [None req-37cb8674-bf68-4b1b-ae68-296c49ed5420 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 2416.108967] env[63279]: DEBUG nova.virt.hardware [None req-37cb8674-bf68-4b1b-ae68-296c49ed5420 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Got 1 possible topologies {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2416.109158] env[63279]: DEBUG nova.virt.hardware [None req-37cb8674-bf68-4b1b-ae68-296c49ed5420 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2416.109339] env[63279]: DEBUG nova.virt.hardware [None req-37cb8674-bf68-4b1b-ae68-296c49ed5420 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2416.110476] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-507790be-60f9-493a-8363-6b3c8a2991fb {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2416.121296] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2408182d-8038-4ff5-9a41-6e362fa655f3 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2416.139270] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-37cb8674-bf68-4b1b-ae68-296c49ed5420 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: 6e8d9959-7c9d-48ee-81a5-bbdc6234248f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 
'fa:16:3e:4d:33:e4', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '15922696-dc08-44ef-97be-0b09a9dfeae8', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c72218a8-035f-4947-b145-503e00034664', 'vif_model': 'vmxnet3'}] {{(pid=63279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2416.146936] env[63279]: DEBUG oslo.service.loopingcall [None req-37cb8674-bf68-4b1b-ae68-296c49ed5420 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2416.147773] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6e8d9959-7c9d-48ee-81a5-bbdc6234248f] Creating VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2416.148009] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-713bab93-335a-4ca2-bba9-80de84a2f457 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2416.170229] env[63279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2416.170229] env[63279]: value = "task-2088190" [ 2416.170229] env[63279]: _type = "Task" [ 2416.170229] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2416.181196] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2088190, 'name': CreateVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2416.212915] env[63279]: DEBUG nova.network.neutron [None req-72d4b0a6-0bb4-4141-80e3-ffb0203f86df tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: 6c4d6006-656b-4d30-a595-8985d7f6b1e7] Successfully updated port: 548d4882-d674-412a-9b89-4691e9eda165 {{(pid=63279) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2416.285740] env[63279]: DEBUG nova.network.neutron [-] [instance: 69b3269a-2ba3-4f5f-a29c-62518c93da3d] Updating instance_info_cache with network_info: [] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2416.341644] env[63279]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-1355a0c7-7b4a-416b-92ac-c79ff1db9dbb {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2416.355792] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b5f208c-5ddd-40ae-87a4-22b4bda787c0 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2416.392248] env[63279]: DEBUG nova.compute.manager [req-1f6574c6-5859-40c6-92f1-9d73704f69d3 req-fc6f4d14-41a3-40d0-b6ed-6c81860dfcf8 service nova] [instance: 69b3269a-2ba3-4f5f-a29c-62518c93da3d] Detach interface failed, port_id=482c0084-ab15-4c03-beb3-585f9e66c683, reason: Instance 69b3269a-2ba3-4f5f-a29c-62518c93da3d could not be found. 
{{(pid=63279) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11484}} [ 2416.421420] env[63279]: DEBUG nova.network.neutron [None req-fbd37efa-dbe8-4925-9302-efee8e5b5950 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] [instance: efddec10-b496-446e-a05a-72c9f2d86ed9] Updating instance_info_cache with network_info: [{"id": "cfd0d031-3ccd-4ad0-9c99-6436a05c108d", "address": "fa:16:3e:ff:42:fd", "network": {"id": "4f906777-9da9-42b1-9146-359f04c7c47f", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-923457018-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.158", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "131db3d2daa24712b6e11592cf789b33", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "abe48956-848a-4e1f-b1f1-a27baa5390b9", "external-id": "nsx-vlan-transportzone-238", "segmentation_id": 238, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcfd0d031-3c", "ovs_interfaceid": "cfd0d031-3ccd-4ad0-9c99-6436a05c108d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2416.519977] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] [instance: a0697601-46ae-48ce-a3e1-3c4b81fc1f95] Instance has had 0 of 5 cleanup attempts {{(pid=63279) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11760}} [ 2416.629703] env[63279]: DEBUG nova.network.neutron [-] [instance: 36354325-dee0-406e-8eb6-bc3cf347a403] Updating instance_info_cache with network_info: [] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2416.680952] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2088190, 'name': CreateVM_Task, 'duration_secs': 0.493815} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2416.681204] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6e8d9959-7c9d-48ee-81a5-bbdc6234248f] Created VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2416.691534] env[63279]: DEBUG oslo_concurrency.lockutils [None req-37cb8674-bf68-4b1b-ae68-296c49ed5420 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2416.691729] env[63279]: DEBUG oslo_concurrency.lockutils [None req-37cb8674-bf68-4b1b-ae68-296c49ed5420 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2416.692081] env[63279]: DEBUG oslo_concurrency.lockutils [None req-37cb8674-bf68-4b1b-ae68-296c49ed5420 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2416.692982] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0245a02c-3d10-4190-98a9-f973758de6b3 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2416.697433] env[63279]: DEBUG oslo_vmware.api [None req-37cb8674-bf68-4b1b-ae68-296c49ed5420 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Waiting for the task: (returnval){ [ 2416.697433] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52839cfa-88cb-1e27-c700-77be8907b4f4" [ 2416.697433] env[63279]: _type = "Task" [ 2416.697433] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2416.705587] env[63279]: DEBUG oslo_vmware.api [None req-37cb8674-bf68-4b1b-ae68-296c49ed5420 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52839cfa-88cb-1e27-c700-77be8907b4f4, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2416.715684] env[63279]: DEBUG oslo_concurrency.lockutils [None req-72d4b0a6-0bb4-4141-80e3-ffb0203f86df tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Acquiring lock "refresh_cache-6c4d6006-656b-4d30-a595-8985d7f6b1e7" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2416.715825] env[63279]: DEBUG oslo_concurrency.lockutils [None req-72d4b0a6-0bb4-4141-80e3-ffb0203f86df tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Acquired lock "refresh_cache-6c4d6006-656b-4d30-a595-8985d7f6b1e7" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2416.715974] env[63279]: DEBUG nova.network.neutron [None req-72d4b0a6-0bb4-4141-80e3-ffb0203f86df tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: 6c4d6006-656b-4d30-a595-8985d7f6b1e7] Building network info cache for instance {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2416.792713] env[63279]: INFO nova.compute.manager [-] [instance: 69b3269a-2ba3-4f5f-a29c-62518c93da3d] Took 1.70 seconds to deallocate network for instance. [ 2416.794553] env[63279]: DEBUG nova.network.neutron [-] [instance: 19e10ee4-99d1-44b9-9354-4c162d541a1f] Updating instance_info_cache with network_info: [] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2416.925287] env[63279]: DEBUG oslo_concurrency.lockutils [None req-fbd37efa-dbe8-4925-9302-efee8e5b5950 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Releasing lock "refresh_cache-efddec10-b496-446e-a05a-72c9f2d86ed9" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2416.954250] env[63279]: DEBUG nova.virt.hardware [None req-fbd37efa-dbe8-4925-9302-efee8e5b5950 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-13T17:30:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='777c4696595954786762239098a11abc',container_format='bare',created_at=2025-01-13T18:01:19Z,direct_url=,disk_format='vmdk',id=4faf9f78-8c3c-4094-a5ab-792d6ef75b92,min_disk=1,min_ram=0,name='tempest-AttachVolumeShelveTestJSON-server-1034349099-shelved',owner='131db3d2daa24712b6e11592cf789b33',properties=ImageMetaProps,protected=,size=31670272,status='active',tags=,updated_at=2025-01-13T18:01:34Z,virtual_size=,visibility=), allow threads: False {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2416.954574] env[63279]: DEBUG nova.virt.hardware [None req-fbd37efa-dbe8-4925-9302-efee8e5b5950 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Flavor limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2416.954752] env[63279]: DEBUG nova.virt.hardware [None req-fbd37efa-dbe8-4925-9302-efee8e5b5950 
tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Image limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2416.954957] env[63279]: DEBUG nova.virt.hardware [None req-fbd37efa-dbe8-4925-9302-efee8e5b5950 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Flavor pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2416.955178] env[63279]: DEBUG nova.virt.hardware [None req-fbd37efa-dbe8-4925-9302-efee8e5b5950 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Image pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2416.955281] env[63279]: DEBUG nova.virt.hardware [None req-fbd37efa-dbe8-4925-9302-efee8e5b5950 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2416.955499] env[63279]: DEBUG nova.virt.hardware [None req-fbd37efa-dbe8-4925-9302-efee8e5b5950 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 2416.955665] env[63279]: DEBUG nova.virt.hardware [None req-fbd37efa-dbe8-4925-9302-efee8e5b5950 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 2416.955835] env[63279]: DEBUG nova.virt.hardware [None req-fbd37efa-dbe8-4925-9302-efee8e5b5950 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Got 1 possible topologies {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2416.956009] env[63279]: DEBUG nova.virt.hardware [None req-fbd37efa-dbe8-4925-9302-efee8e5b5950 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2416.956200] env[63279]: DEBUG nova.virt.hardware [None req-fbd37efa-dbe8-4925-9302-efee8e5b5950 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2416.957093] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d42cd46-0669-42f4-b1e9-c736f039595b {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2416.965487] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99e451bb-f704-43f6-a0ff-371314be42a7 {{(pid=63279) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2416.978617] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-fbd37efa-dbe8-4925-9302-efee8e5b5950 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] [instance: efddec10-b496-446e-a05a-72c9f2d86ed9] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ff:42:fd', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'abe48956-848a-4e1f-b1f1-a27baa5390b9', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'cfd0d031-3ccd-4ad0-9c99-6436a05c108d', 'vif_model': 'vmxnet3'}] {{(pid=63279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2416.986135] env[63279]: DEBUG oslo.service.loopingcall [None req-fbd37efa-dbe8-4925-9302-efee8e5b5950 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2416.986367] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: efddec10-b496-446e-a05a-72c9f2d86ed9] Creating VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2416.986573] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e84856e3-6fdc-44f8-a87d-e112aa33f2ee {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2417.005489] env[63279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2417.005489] env[63279]: value = "task-2088191" [ 2417.005489] env[63279]: _type = "Task" [ 2417.005489] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2417.012456] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2088191, 'name': CreateVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2417.024027] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] [instance: 668436f9-94e9-48c2-a3d4-3df7bbcf5775] Instance has had 0 of 5 cleanup attempts {{(pid=63279) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11760}} [ 2417.135018] env[63279]: INFO nova.compute.manager [-] [instance: 36354325-dee0-406e-8eb6-bc3cf347a403] Took 2.35 seconds to deallocate network for instance. [ 2417.207249] env[63279]: DEBUG oslo_vmware.api [None req-37cb8674-bf68-4b1b-ae68-296c49ed5420 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52839cfa-88cb-1e27-c700-77be8907b4f4, 'name': SearchDatastore_Task, 'duration_secs': 0.011415} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2417.207551] env[63279]: DEBUG oslo_concurrency.lockutils [None req-37cb8674-bf68-4b1b-ae68-296c49ed5420 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2417.207776] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-37cb8674-bf68-4b1b-ae68-296c49ed5420 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: 6e8d9959-7c9d-48ee-81a5-bbdc6234248f] Processing image 30887889-e45b-4f67-8b3c-16216e594a90 {{(pid=63279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2417.208023] env[63279]: DEBUG oslo_concurrency.lockutils [None req-37cb8674-bf68-4b1b-ae68-296c49ed5420 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2417.208182] env[63279]: DEBUG oslo_concurrency.lockutils [None req-37cb8674-bf68-4b1b-ae68-296c49ed5420 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2417.208365] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-37cb8674-bf68-4b1b-ae68-296c49ed5420 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2417.208617] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-db14e33b-8c6e-4be1-afbf-b48074285ca2 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2417.216302] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-37cb8674-bf68-4b1b-ae68-296c49ed5420 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2417.216443] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-37cb8674-bf68-4b1b-ae68-296c49ed5420 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2417.217106] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-61d6f577-5ed5-4f20-9c4b-94001d7e1b17 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2417.223557] env[63279]: DEBUG oslo_vmware.api [None req-37cb8674-bf68-4b1b-ae68-296c49ed5420 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Waiting for the task: (returnval){ [ 2417.223557] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52c9c945-b018-5db9-b6b2-74ef50ee8df5" [ 2417.223557] env[63279]: _type = "Task" [ 2417.223557] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2417.231142] env[63279]: DEBUG oslo_vmware.api [None req-37cb8674-bf68-4b1b-ae68-296c49ed5420 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52c9c945-b018-5db9-b6b2-74ef50ee8df5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2417.249310] env[63279]: DEBUG nova.network.neutron [None req-72d4b0a6-0bb4-4141-80e3-ffb0203f86df tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: 6c4d6006-656b-4d30-a595-8985d7f6b1e7] Instance cache missing network info. {{(pid=63279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2417.296732] env[63279]: INFO nova.compute.manager [-] [instance: 19e10ee4-99d1-44b9-9354-4c162d541a1f] Took 1.90 seconds to deallocate network for instance. 
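The "Creating directory ... / Created directory ... / Folder ... created" sequence above is an idempotent ensure-exists step performed before the datastore search. A generic sketch of that idea; the `mkdir` call and `FileAlreadyExists` fault are stand-ins for the real datastore API:

class FileAlreadyExists(Exception):
    """Stand-in for the datastore's 'already exists' fault."""

def create_folder_if_missing(ds_client, datastore, path):
    # MakeDirectory is fired unconditionally; an "already exists" fault is
    # treated as success so concurrent builders do not race on the cache dir.
    try:
        ds_client.mkdir(datastore, path, create_parents=True)  # assumed call
    except FileAlreadyExists:
        pass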
[ 2417.301961] env[63279]: DEBUG oslo_concurrency.lockutils [None req-5a994934-1fd4-4f30-8016-3d5f78a5cf31 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2417.302231] env[63279]: DEBUG oslo_concurrency.lockutils [None req-5a994934-1fd4-4f30-8016-3d5f78a5cf31 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2417.302457] env[63279]: DEBUG nova.objects.instance [None req-5a994934-1fd4-4f30-8016-3d5f78a5cf31 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Lazy-loading 'resources' on Instance uuid 69b3269a-2ba3-4f5f-a29c-62518c93da3d {{(pid=63279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2417.380613] env[63279]: DEBUG nova.network.neutron [None req-72d4b0a6-0bb4-4141-80e3-ffb0203f86df tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: 6c4d6006-656b-4d30-a595-8985d7f6b1e7] Updating instance_info_cache with network_info: [{"id": "548d4882-d674-412a-9b89-4691e9eda165", "address": "fa:16:3e:50:5f:f2", "network": {"id": "82ca068b-acea-4f60-b0b3-68ea1e08aebe", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-571699353-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f39174e9ff5649e0ade4391da383dfb2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "57d4be17-536f-4a81-bea9-6547bd50f4a3", "external-id": "nsx-vlan-transportzone-163", "segmentation_id": 163, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap548d4882-d6", "ovs_interfaceid": "548d4882-d674-412a-9b89-4691e9eda165", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2417.455448] env[63279]: DEBUG nova.compute.manager [req-c6aa3802-5122-42c5-b85f-5f0f662c1efd req-afde28b9-25fc-43b1-8804-7aa60b297293 service nova] [instance: efddec10-b496-446e-a05a-72c9f2d86ed9] Received event network-changed-cfd0d031-3ccd-4ad0-9c99-6436a05c108d {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2417.455650] env[63279]: DEBUG nova.compute.manager [req-c6aa3802-5122-42c5-b85f-5f0f662c1efd req-afde28b9-25fc-43b1-8804-7aa60b297293 service nova] [instance: efddec10-b496-446e-a05a-72c9f2d86ed9] Refreshing instance network info cache due to event network-changed-cfd0d031-3ccd-4ad0-9c99-6436a05c108d. 
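"Lazy-loading 'resources' on Instance uuid ..." above reflects objects that fetch heavy fields only on first access instead of at construction time. A toy illustration of that pattern (this is not Nova's versioned-object machinery; the loader callable is an assumption):

class LazyInstance:
    """Toy lazy loader: heavy fields are fetched only on first access."""

    def __init__(self, uuid, loader):
        self.uuid = uuid
        self._loader = loader          # callable(uuid, field_name) -> value

    def __getattr__(self, name):
        # Only reached when normal attribute lookup fails, i.e. not loaded yet.
        value = self._loader(self.uuid, name)
        setattr(self, name, value)     # cache so later reads are plain lookups
        return value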
{{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11655}} [ 2417.455870] env[63279]: DEBUG oslo_concurrency.lockutils [req-c6aa3802-5122-42c5-b85f-5f0f662c1efd req-afde28b9-25fc-43b1-8804-7aa60b297293 service nova] Acquiring lock "refresh_cache-efddec10-b496-446e-a05a-72c9f2d86ed9" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2417.456041] env[63279]: DEBUG oslo_concurrency.lockutils [req-c6aa3802-5122-42c5-b85f-5f0f662c1efd req-afde28b9-25fc-43b1-8804-7aa60b297293 service nova] Acquired lock "refresh_cache-efddec10-b496-446e-a05a-72c9f2d86ed9" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2417.456662] env[63279]: DEBUG nova.network.neutron [req-c6aa3802-5122-42c5-b85f-5f0f662c1efd req-afde28b9-25fc-43b1-8804-7aa60b297293 service nova] [instance: efddec10-b496-446e-a05a-72c9f2d86ed9] Refreshing network info cache for port cfd0d031-3ccd-4ad0-9c99-6436a05c108d {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2417.515688] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2088191, 'name': CreateVM_Task, 'duration_secs': 0.502316} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2417.515860] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: efddec10-b496-446e-a05a-72c9f2d86ed9] Created VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2417.516542] env[63279]: DEBUG oslo_concurrency.lockutils [None req-fbd37efa-dbe8-4925-9302-efee8e5b5950 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/4faf9f78-8c3c-4094-a5ab-792d6ef75b92" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2417.516718] env[63279]: DEBUG oslo_concurrency.lockutils [None req-fbd37efa-dbe8-4925-9302-efee8e5b5950 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Acquired lock "[datastore1] devstack-image-cache_base/4faf9f78-8c3c-4094-a5ab-792d6ef75b92" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2417.517113] env[63279]: DEBUG oslo_concurrency.lockutils [None req-fbd37efa-dbe8-4925-9302-efee8e5b5950 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/4faf9f78-8c3c-4094-a5ab-792d6ef75b92" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2417.517365] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-545555d8-81fb-47c9-9126-92633c22d551 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2417.522379] env[63279]: DEBUG oslo_vmware.api [None req-fbd37efa-dbe8-4925-9302-efee8e5b5950 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Waiting for the task: (returnval){ [ 2417.522379] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52285714-b152-1d53-bb88-76d696745e07" [ 2417.522379] env[63279]: _type = "Task" [ 2417.522379] env[63279]: } to complete. 
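The req-c6aa3802 entries show a network-changed event arriving for port cfd0d031-... and the handler taking the per-instance "refresh_cache-<uuid>" lock before refreshing the cached network info. A condensed sketch of that flow; the lock name mirrors the log, while `query_port` and the cache object are illustrative placeholders:

from oslo_concurrency import lockutils

def handle_network_changed(cache, instance_uuid, port_id, query_port):
    # One lock per instance keeps concurrent refreshes from interleaving.
    with lockutils.lock(f'refresh_cache-{instance_uuid}'):
        fresh = query_port(port_id)            # assumed Neutron lookup
        cache.update_vif(instance_uuid, port_id, fresh)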
{{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2417.527129] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] [instance: ee1b4746-49ac-425c-8219-4d54cb34abe0] Instance has had 0 of 5 cleanup attempts {{(pid=63279) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11760}} [ 2417.533467] env[63279]: DEBUG oslo_vmware.api [None req-fbd37efa-dbe8-4925-9302-efee8e5b5950 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52285714-b152-1d53-bb88-76d696745e07, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2417.641665] env[63279]: DEBUG oslo_concurrency.lockutils [None req-e2edff00-0842-48f8-a733-a2bb5f6b092c tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2417.734520] env[63279]: DEBUG oslo_vmware.api [None req-37cb8674-bf68-4b1b-ae68-296c49ed5420 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52c9c945-b018-5db9-b6b2-74ef50ee8df5, 'name': SearchDatastore_Task, 'duration_secs': 0.009797} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2417.735328] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0541d003-d6a6-4105-9025-3c23395621f5 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2417.740677] env[63279]: DEBUG oslo_vmware.api [None req-37cb8674-bf68-4b1b-ae68-296c49ed5420 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Waiting for the task: (returnval){ [ 2417.740677] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52c57ef1-fe2c-e2f7-e863-ccf6c6134692" [ 2417.740677] env[63279]: _type = "Task" [ 2417.740677] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2417.748382] env[63279]: DEBUG oslo_vmware.api [None req-37cb8674-bf68-4b1b-ae68-296c49ed5420 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52c57ef1-fe2c-e2f7-e863-ccf6c6134692, 'name': SearchDatastore_Task} progress is 0%. 
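"Instance has had 0 of 5 cleanup attempts" above comes from a periodic pass that retries deferred deletes a bounded number of times. A minimal sketch of that bookkeeping, with the counter field and limit named only for illustration:

MAX_CLEANUP_ATTEMPTS = 5

def run_pending_deletes(instances, cleanup):
    for inst in instances:
        attempts = inst.get('cleanup_attempts', 0)
        if attempts >= MAX_CLEANUP_ATTEMPTS:
            continue                      # give up after the configured limit
        try:
            cleanup(inst)
        except Exception:
            inst['cleanup_attempts'] = attempts + 1   # retry on a later pass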
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2417.775229] env[63279]: DEBUG oslo_concurrency.lockutils [None req-a0b37831-7bbd-440a-8f01-2fbcf0166010 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Acquiring lock "246b5346-93b1-4a84-921c-d028f3554d3d" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2417.775468] env[63279]: DEBUG oslo_concurrency.lockutils [None req-a0b37831-7bbd-440a-8f01-2fbcf0166010 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Lock "246b5346-93b1-4a84-921c-d028f3554d3d" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2417.804549] env[63279]: DEBUG oslo_concurrency.lockutils [None req-9fbef605-6d62-4620-b451-b960750507a1 tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2417.886735] env[63279]: DEBUG oslo_concurrency.lockutils [None req-72d4b0a6-0bb4-4141-80e3-ffb0203f86df tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Releasing lock "refresh_cache-6c4d6006-656b-4d30-a595-8985d7f6b1e7" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2417.886735] env[63279]: DEBUG nova.compute.manager [None req-72d4b0a6-0bb4-4141-80e3-ffb0203f86df tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: 6c4d6006-656b-4d30-a595-8985d7f6b1e7] Instance network_info: |[{"id": "548d4882-d674-412a-9b89-4691e9eda165", "address": "fa:16:3e:50:5f:f2", "network": {"id": "82ca068b-acea-4f60-b0b3-68ea1e08aebe", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-571699353-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f39174e9ff5649e0ade4391da383dfb2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "57d4be17-536f-4a81-bea9-6547bd50f4a3", "external-id": "nsx-vlan-transportzone-163", "segmentation_id": 163, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap548d4882-d6", "ovs_interfaceid": "548d4882-d674-412a-9b89-4691e9eda165", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 2417.886735] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-72d4b0a6-0bb4-4141-80e3-ffb0203f86df tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: 
6c4d6006-656b-4d30-a595-8985d7f6b1e7] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:50:5f:f2', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '57d4be17-536f-4a81-bea9-6547bd50f4a3', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '548d4882-d674-412a-9b89-4691e9eda165', 'vif_model': 'vmxnet3'}] {{(pid=63279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2417.892051] env[63279]: DEBUG oslo.service.loopingcall [None req-72d4b0a6-0bb4-4141-80e3-ffb0203f86df tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2417.894762] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6c4d6006-656b-4d30-a595-8985d7f6b1e7] Creating VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2417.895385] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-97a54ae2-4475-4ceb-8f4b-001e6e16d25d {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2417.917929] env[63279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2417.917929] env[63279]: value = "task-2088192" [ 2417.917929] env[63279]: _type = "Task" [ 2417.917929] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2417.927961] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2088192, 'name': CreateVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2417.933898] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c533621-d66a-421c-babb-422a524a622a {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2417.940607] env[63279]: DEBUG nova.compute.manager [req-6f416998-51bc-4c4e-80cf-e66723759bde req-a0441d8e-7a6a-4a24-86fe-005f5c779145 service nova] [instance: 36354325-dee0-406e-8eb6-bc3cf347a403] Received event network-vif-deleted-a05a3f68-d80e-46c9-9065-1f803e7c04ff {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2417.940607] env[63279]: DEBUG nova.compute.manager [req-6f416998-51bc-4c4e-80cf-e66723759bde req-a0441d8e-7a6a-4a24-86fe-005f5c779145 service nova] [instance: 19e10ee4-99d1-44b9-9354-4c162d541a1f] Received event network-vif-deleted-15cdfe62-d983-4e01-beb9-1947d51443e0 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2417.942972] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26b26456-e9ef-415a-82f0-68811e4d095a {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2417.977477] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98e9ae7d-c70f-46b9-9b7c-1f221bc0c937 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2417.986523] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7eb0136-99b8-496d-a581-21dfbdd823a6 {{(pid=63279) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2418.002021] env[63279]: DEBUG nova.compute.provider_tree [None req-5a994934-1fd4-4f30-8016-3d5f78a5cf31 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Updating inventory in ProviderTree for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2418.032154] env[63279]: DEBUG oslo_concurrency.lockutils [None req-fbd37efa-dbe8-4925-9302-efee8e5b5950 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Releasing lock "[datastore1] devstack-image-cache_base/4faf9f78-8c3c-4094-a5ab-792d6ef75b92" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2418.032498] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-fbd37efa-dbe8-4925-9302-efee8e5b5950 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] [instance: efddec10-b496-446e-a05a-72c9f2d86ed9] Processing image 4faf9f78-8c3c-4094-a5ab-792d6ef75b92 {{(pid=63279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2418.032647] env[63279]: DEBUG oslo_concurrency.lockutils [None req-fbd37efa-dbe8-4925-9302-efee8e5b5950 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/4faf9f78-8c3c-4094-a5ab-792d6ef75b92/4faf9f78-8c3c-4094-a5ab-792d6ef75b92.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2418.032799] env[63279]: DEBUG oslo_concurrency.lockutils [None req-fbd37efa-dbe8-4925-9302-efee8e5b5950 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Acquired lock "[datastore1] devstack-image-cache_base/4faf9f78-8c3c-4094-a5ab-792d6ef75b92/4faf9f78-8c3c-4094-a5ab-792d6ef75b92.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2418.032980] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-fbd37efa-dbe8-4925-9302-efee8e5b5950 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2418.033236] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6a574619-7e00-463f-8349-55748669f834 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2418.035282] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] [instance: f2a68d73-49d6-4b38-aff1-c2eb850f2ca6] Instance has had 0 of 5 cleanup attempts {{(pid=63279) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11760}} [ 2418.043440] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None 
req-fbd37efa-dbe8-4925-9302-efee8e5b5950 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2418.043643] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-fbd37efa-dbe8-4925-9302-efee8e5b5950 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2418.044358] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9898d14f-bd9e-4abe-abb9-2fa292c3b24c {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2418.051893] env[63279]: DEBUG oslo_vmware.api [None req-fbd37efa-dbe8-4925-9302-efee8e5b5950 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Waiting for the task: (returnval){ [ 2418.051893] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]524fb3c7-07d3-81bd-ae8f-72847db28f82" [ 2418.051893] env[63279]: _type = "Task" [ 2418.051893] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2418.059244] env[63279]: DEBUG oslo_vmware.api [None req-fbd37efa-dbe8-4925-9302-efee8e5b5950 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]524fb3c7-07d3-81bd-ae8f-72847db28f82, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2418.194392] env[63279]: DEBUG nova.network.neutron [req-c6aa3802-5122-42c5-b85f-5f0f662c1efd req-afde28b9-25fc-43b1-8804-7aa60b297293 service nova] [instance: efddec10-b496-446e-a05a-72c9f2d86ed9] Updated VIF entry in instance network info cache for port cfd0d031-3ccd-4ad0-9c99-6436a05c108d. 
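"Updated VIF entry in instance network info cache for port ..." above is a targeted replacement of one element of the cached VIF list rather than a full rebuild. A simple sketch of that update, using the data shape visible in the network_info blobs of this log:

def update_vif_entry(network_info, new_vif):
    # network_info is a list of VIF dicts keyed by 'id', as seen in the log.
    for i, vif in enumerate(network_info):
        if vif['id'] == new_vif['id']:
            network_info[i] = new_vif
            return network_info
    network_info.append(new_vif)          # unknown port: add it
    return network_info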
{{(pid=63279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2418.194786] env[63279]: DEBUG nova.network.neutron [req-c6aa3802-5122-42c5-b85f-5f0f662c1efd req-afde28b9-25fc-43b1-8804-7aa60b297293 service nova] [instance: efddec10-b496-446e-a05a-72c9f2d86ed9] Updating instance_info_cache with network_info: [{"id": "cfd0d031-3ccd-4ad0-9c99-6436a05c108d", "address": "fa:16:3e:ff:42:fd", "network": {"id": "4f906777-9da9-42b1-9146-359f04c7c47f", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-923457018-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.158", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "131db3d2daa24712b6e11592cf789b33", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "abe48956-848a-4e1f-b1f1-a27baa5390b9", "external-id": "nsx-vlan-transportzone-238", "segmentation_id": 238, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcfd0d031-3c", "ovs_interfaceid": "cfd0d031-3ccd-4ad0-9c99-6436a05c108d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2418.250424] env[63279]: DEBUG oslo_vmware.api [None req-37cb8674-bf68-4b1b-ae68-296c49ed5420 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52c57ef1-fe2c-e2f7-e863-ccf6c6134692, 'name': SearchDatastore_Task, 'duration_secs': 0.00993} completed successfully. 
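The cached entry above records one VIF with fixed address 192.168.128.10 and floating address 10.180.180.158. A small helper showing how those addresses can be pulled out of that structure; it is written against the exact shape printed here, which may differ between releases:

def extract_addresses(network_info):
    """Return (fixed, floating) IP lists from a network_info blob like the one above."""
    fixed, floating = [], []
    for vif in network_info:
        for subnet in vif['network']['subnets']:
            for ip in subnet['ips']:
                fixed.append(ip['address'])
                floating.extend(f['address'] for f in ip.get('floating_ips', []))
    return fixed, floating

# For the entry above this yields (['192.168.128.10'], ['10.180.180.158']).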
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2418.250692] env[63279]: DEBUG oslo_concurrency.lockutils [None req-37cb8674-bf68-4b1b-ae68-296c49ed5420 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2418.250952] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-37cb8674-bf68-4b1b-ae68-296c49ed5420 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] 6e8d9959-7c9d-48ee-81a5-bbdc6234248f/6e8d9959-7c9d-48ee-81a5-bbdc6234248f.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2418.251253] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3f609ded-a580-4eb7-b578-ebb0fd5b5c54 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2418.259442] env[63279]: DEBUG oslo_vmware.api [None req-37cb8674-bf68-4b1b-ae68-296c49ed5420 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Waiting for the task: (returnval){ [ 2418.259442] env[63279]: value = "task-2088193" [ 2418.259442] env[63279]: _type = "Task" [ 2418.259442] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2418.266804] env[63279]: DEBUG oslo_vmware.api [None req-37cb8674-bf68-4b1b-ae68-296c49ed5420 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Task: {'id': task-2088193, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2418.278680] env[63279]: DEBUG nova.compute.utils [None req-a0b37831-7bbd-440a-8f01-2fbcf0166010 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Using /dev/sd instead of None {{(pid=63279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2418.431242] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2088192, 'name': CreateVM_Task, 'duration_secs': 0.37232} completed successfully. 
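"Using /dev/sd instead of None" above, together with the later "Attaching volume ... to /dev/sdb", reflects the device-name picker: with no prefix supplied it falls back to /dev/sd and then takes the first unused letter. A rough sketch of that selection, simplified to single-letter suffixes:

import string

def get_next_device_name(used, prefix=None):
    prefix = prefix or '/dev/sd'          # the "instead of None" fallback
    for letter in string.ascii_lowercase:
        candidate = prefix + letter
        if candidate not in used:
            return candidate
    raise RuntimeError('no free device names left')

# get_next_device_name({'/dev/sda'}) -> '/dev/sdb'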
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2418.431242] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6c4d6006-656b-4d30-a595-8985d7f6b1e7] Created VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2418.431911] env[63279]: DEBUG oslo_concurrency.lockutils [None req-72d4b0a6-0bb4-4141-80e3-ffb0203f86df tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2418.432108] env[63279]: DEBUG oslo_concurrency.lockutils [None req-72d4b0a6-0bb4-4141-80e3-ffb0203f86df tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2418.432469] env[63279]: DEBUG oslo_concurrency.lockutils [None req-72d4b0a6-0bb4-4141-80e3-ffb0203f86df tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2418.432725] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6f0f766c-05ff-4841-8666-138af2739ea3 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2418.439130] env[63279]: DEBUG oslo_vmware.api [None req-72d4b0a6-0bb4-4141-80e3-ffb0203f86df tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Waiting for the task: (returnval){ [ 2418.439130] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]522b528a-0ea4-d6f8-a424-416e0d6d049c" [ 2418.439130] env[63279]: _type = "Task" [ 2418.439130] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2418.447279] env[63279]: DEBUG oslo_vmware.api [None req-72d4b0a6-0bb4-4141-80e3-ffb0203f86df tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]522b528a-0ea4-d6f8-a424-416e0d6d049c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2418.524164] env[63279]: ERROR nova.scheduler.client.report [None req-5a994934-1fd4-4f30-8016-3d5f78a5cf31 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] [req-0597eb78-683b-421b-99b6-ff69384f6d9d] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 0ba7c625-a0fc-4d3c-b804-196d00f00137. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-0597eb78-683b-421b-99b6-ff69384f6d9d"}]} [ 2418.538159] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] [instance: f7f88f1a-a81a-4208-88d7-6a264e642ab1] Instance has had 0 of 5 cleanup attempts {{(pid=63279) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11760}} [ 2418.541859] env[63279]: DEBUG nova.scheduler.client.report [None req-5a994934-1fd4-4f30-8016-3d5f78a5cf31 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Refreshing inventories for resource provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 2418.559573] env[63279]: DEBUG nova.scheduler.client.report [None req-5a994934-1fd4-4f30-8016-3d5f78a5cf31 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Updating ProviderTree inventory for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 2418.559824] env[63279]: DEBUG nova.compute.provider_tree [None req-5a994934-1fd4-4f30-8016-3d5f78a5cf31 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Updating inventory in ProviderTree for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2418.566633] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-fbd37efa-dbe8-4925-9302-efee8e5b5950 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] [instance: efddec10-b496-446e-a05a-72c9f2d86ed9] Preparing fetch location {{(pid=63279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2418.566900] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-fbd37efa-dbe8-4925-9302-efee8e5b5950 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] [instance: efddec10-b496-446e-a05a-72c9f2d86ed9] Fetch image to [datastore1] OSTACK_IMG_c5003216-8903-45f3-a4c0-f43c47b0f25c/OSTACK_IMG_c5003216-8903-45f3-a4c0-f43c47b0f25c.vmdk {{(pid=63279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2418.567109] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-fbd37efa-dbe8-4925-9302-efee8e5b5950 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] [instance: efddec10-b496-446e-a05a-72c9f2d86ed9] Downloading 
stream optimized image 4faf9f78-8c3c-4094-a5ab-792d6ef75b92 to [datastore1] OSTACK_IMG_c5003216-8903-45f3-a4c0-f43c47b0f25c/OSTACK_IMG_c5003216-8903-45f3-a4c0-f43c47b0f25c.vmdk on the data store datastore1 as vApp {{(pid=63279) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 2418.567286] env[63279]: DEBUG nova.virt.vmwareapi.images [None req-fbd37efa-dbe8-4925-9302-efee8e5b5950 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] [instance: efddec10-b496-446e-a05a-72c9f2d86ed9] Downloading image file data 4faf9f78-8c3c-4094-a5ab-792d6ef75b92 to the ESX as VM named 'OSTACK_IMG_c5003216-8903-45f3-a4c0-f43c47b0f25c' {{(pid=63279) fetch_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 2418.576111] env[63279]: DEBUG nova.scheduler.client.report [None req-5a994934-1fd4-4f30-8016-3d5f78a5cf31 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Refreshing aggregate associations for resource provider 0ba7c625-a0fc-4d3c-b804-196d00f00137, aggregates: None {{(pid=63279) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 2418.613357] env[63279]: DEBUG nova.scheduler.client.report [None req-5a994934-1fd4-4f30-8016-3d5f78a5cf31 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Refreshing trait associations for resource provider 0ba7c625-a0fc-4d3c-b804-196d00f00137, traits: COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK {{(pid=63279) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 2418.654015] env[63279]: DEBUG oslo_vmware.rw_handles [None req-fbd37efa-dbe8-4925-9302-efee8e5b5950 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 2418.654015] env[63279]: value = "resgroup-9" [ 2418.654015] env[63279]: _type = "ResourcePool" [ 2418.654015] env[63279]: }. {{(pid=63279) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 2418.654351] env[63279]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-0a24e662-3d04-4cd3-8dd0-43a46653ed29 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2418.681857] env[63279]: DEBUG oslo_vmware.rw_handles [None req-fbd37efa-dbe8-4925-9302-efee8e5b5950 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Lease: (returnval){ [ 2418.681857] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]526b4dbc-9c61-db17-bb64-7ab50e6945bf" [ 2418.681857] env[63279]: _type = "HttpNfcLease" [ 2418.681857] env[63279]: } obtained for vApp import into resource pool (val){ [ 2418.681857] env[63279]: value = "resgroup-9" [ 2418.681857] env[63279]: _type = "ResourcePool" [ 2418.681857] env[63279]: }. 
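The ERROR above ("placement.concurrent_update", HTTP 409) and the refresh entries that follow (inventories, aggregates, traits) show the usual reaction to a resource-provider generation conflict: re-read the provider's state, then retry the update against the new generation. A hedged sketch of that loop; the client methods are placeholders for the Placement REST API, not a real SDK:

def set_inventory_with_retry(client, provider_uuid, inventory, max_tries=3):
    for _ in range(max_tries):
        generation = client.get_provider_generation(provider_uuid)  # assumed
        if client.put_inventory(provider_uuid, inventory, generation):
            return True
        # 409 concurrent_update: someone else bumped the generation; refresh
        # the local view and try again, as the log does above.
        client.refresh_provider(provider_uuid)
    return False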
{{(pid=63279) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 2418.682228] env[63279]: DEBUG oslo_vmware.api [None req-fbd37efa-dbe8-4925-9302-efee8e5b5950 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Waiting for the lease: (returnval){ [ 2418.682228] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]526b4dbc-9c61-db17-bb64-7ab50e6945bf" [ 2418.682228] env[63279]: _type = "HttpNfcLease" [ 2418.682228] env[63279]: } to be ready. {{(pid=63279) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 2418.690228] env[63279]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 2418.690228] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]526b4dbc-9c61-db17-bb64-7ab50e6945bf" [ 2418.690228] env[63279]: _type = "HttpNfcLease" [ 2418.690228] env[63279]: } is initializing. {{(pid=63279) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 2418.696918] env[63279]: DEBUG oslo_concurrency.lockutils [req-c6aa3802-5122-42c5-b85f-5f0f662c1efd req-afde28b9-25fc-43b1-8804-7aa60b297293 service nova] Releasing lock "refresh_cache-efddec10-b496-446e-a05a-72c9f2d86ed9" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2418.697192] env[63279]: DEBUG nova.compute.manager [req-c6aa3802-5122-42c5-b85f-5f0f662c1efd req-afde28b9-25fc-43b1-8804-7aa60b297293 service nova] [instance: 6c4d6006-656b-4d30-a595-8985d7f6b1e7] Received event network-vif-plugged-548d4882-d674-412a-9b89-4691e9eda165 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2418.697393] env[63279]: DEBUG oslo_concurrency.lockutils [req-c6aa3802-5122-42c5-b85f-5f0f662c1efd req-afde28b9-25fc-43b1-8804-7aa60b297293 service nova] Acquiring lock "6c4d6006-656b-4d30-a595-8985d7f6b1e7-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2418.697601] env[63279]: DEBUG oslo_concurrency.lockutils [req-c6aa3802-5122-42c5-b85f-5f0f662c1efd req-afde28b9-25fc-43b1-8804-7aa60b297293 service nova] Lock "6c4d6006-656b-4d30-a595-8985d7f6b1e7-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2418.697769] env[63279]: DEBUG oslo_concurrency.lockutils [req-c6aa3802-5122-42c5-b85f-5f0f662c1efd req-afde28b9-25fc-43b1-8804-7aa60b297293 service nova] Lock "6c4d6006-656b-4d30-a595-8985d7f6b1e7-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2418.697942] env[63279]: DEBUG nova.compute.manager [req-c6aa3802-5122-42c5-b85f-5f0f662c1efd req-afde28b9-25fc-43b1-8804-7aa60b297293 service nova] [instance: 6c4d6006-656b-4d30-a595-8985d7f6b1e7] No waiting events found dispatching network-vif-plugged-548d4882-d674-412a-9b89-4691e9eda165 {{(pid=63279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 2418.698134] env[63279]: WARNING nova.compute.manager [req-c6aa3802-5122-42c5-b85f-5f0f662c1efd req-afde28b9-25fc-43b1-8804-7aa60b297293 service nova] [instance: 6c4d6006-656b-4d30-a595-8985d7f6b1e7] 
Received unexpected event network-vif-plugged-548d4882-d674-412a-9b89-4691e9eda165 for instance with vm_state building and task_state spawning. [ 2418.698306] env[63279]: DEBUG nova.compute.manager [req-c6aa3802-5122-42c5-b85f-5f0f662c1efd req-afde28b9-25fc-43b1-8804-7aa60b297293 service nova] [instance: 6c4d6006-656b-4d30-a595-8985d7f6b1e7] Received event network-changed-548d4882-d674-412a-9b89-4691e9eda165 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2418.698468] env[63279]: DEBUG nova.compute.manager [req-c6aa3802-5122-42c5-b85f-5f0f662c1efd req-afde28b9-25fc-43b1-8804-7aa60b297293 service nova] [instance: 6c4d6006-656b-4d30-a595-8985d7f6b1e7] Refreshing instance network info cache due to event network-changed-548d4882-d674-412a-9b89-4691e9eda165. {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11655}} [ 2418.698655] env[63279]: DEBUG oslo_concurrency.lockutils [req-c6aa3802-5122-42c5-b85f-5f0f662c1efd req-afde28b9-25fc-43b1-8804-7aa60b297293 service nova] Acquiring lock "refresh_cache-6c4d6006-656b-4d30-a595-8985d7f6b1e7" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2418.698812] env[63279]: DEBUG oslo_concurrency.lockutils [req-c6aa3802-5122-42c5-b85f-5f0f662c1efd req-afde28b9-25fc-43b1-8804-7aa60b297293 service nova] Acquired lock "refresh_cache-6c4d6006-656b-4d30-a595-8985d7f6b1e7" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2418.698995] env[63279]: DEBUG nova.network.neutron [req-c6aa3802-5122-42c5-b85f-5f0f662c1efd req-afde28b9-25fc-43b1-8804-7aa60b297293 service nova] [instance: 6c4d6006-656b-4d30-a595-8985d7f6b1e7] Refreshing network info cache for port 548d4882-d674-412a-9b89-4691e9eda165 {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2418.761773] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61593d3f-eea6-42d8-9093-532b6d0f1ffe {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2418.774930] env[63279]: DEBUG oslo_vmware.api [None req-37cb8674-bf68-4b1b-ae68-296c49ed5420 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Task: {'id': task-2088193, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.454647} completed successfully. 
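The network-vif-plugged handling above either matches a registered waiter or, as here ("No waiting events found ..."), logs a WARNING about an unexpected event while the instance is still building/spawning. A compact sketch of that dispatch, with the waiter registry as an illustrative stand-in:

import logging

LOG = logging.getLogger(__name__)

def pop_instance_event(waiters, instance_uuid, event_name):
    # waiters maps (instance_uuid, event_name) -> callback registered by the
    # code path that is blocked waiting for the event.
    callback = waiters.pop((instance_uuid, event_name), None)
    if callback is None:
        LOG.warning('Received unexpected event %s for instance %s',
                    event_name, instance_uuid)
        return False
    callback()
    return True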
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2418.776093] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f59d4566-2015-4f9d-bf93-fa880d98dd4e {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2418.779114] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-37cb8674-bf68-4b1b-ae68-296c49ed5420 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] 6e8d9959-7c9d-48ee-81a5-bbdc6234248f/6e8d9959-7c9d-48ee-81a5-bbdc6234248f.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2418.779392] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-37cb8674-bf68-4b1b-ae68-296c49ed5420 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: 6e8d9959-7c9d-48ee-81a5-bbdc6234248f] Extending root virtual disk to 1048576 {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2418.779920] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-0df08e6c-6c18-4b55-9bab-2e8d93618f5b {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2418.782455] env[63279]: DEBUG oslo_concurrency.lockutils [None req-a0b37831-7bbd-440a-8f01-2fbcf0166010 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Lock "246b5346-93b1-4a84-921c-d028f3554d3d" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.007s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2418.813337] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-666738ce-c20e-471c-8675-77f5fb2a25f1 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2418.816172] env[63279]: DEBUG oslo_vmware.api [None req-37cb8674-bf68-4b1b-ae68-296c49ed5420 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Waiting for the task: (returnval){ [ 2418.816172] env[63279]: value = "task-2088195" [ 2418.816172] env[63279]: _type = "Task" [ 2418.816172] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2418.822864] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d3f6ede-a120-410e-a2c7-77718ff08bb8 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2418.829577] env[63279]: DEBUG oslo_vmware.api [None req-37cb8674-bf68-4b1b-ae68-296c49ed5420 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Task: {'id': task-2088195, 'name': ExtendVirtualDisk_Task} progress is 0%. 
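"Extending root virtual disk to 1048576" followed by "Extended root virtual disk" above is the resize of the freshly copied image-cache disk up to the flavor's root size; the figure is in KB, so 1048576 equals 1 GiB. A small sketch of that decision, with the extend call standing in for the virtual-disk API:

def maybe_extend_root_disk(disk_client, vmdk_path, current_kb, root_gb):
    requested_kb = root_gb * 1024 * 1024       # flavor size in KB, e.g. 1048576
    if requested_kb <= current_kb:
        return current_kb                      # never shrink an existing disk
    disk_client.extend_virtual_disk(vmdk_path, requested_kb)  # assumed call
    return requested_kb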
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2418.840023] env[63279]: DEBUG nova.compute.provider_tree [None req-5a994934-1fd4-4f30-8016-3d5f78a5cf31 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Inventory has not changed in ProviderTree for provider: 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2418.949744] env[63279]: DEBUG oslo_vmware.api [None req-72d4b0a6-0bb4-4141-80e3-ffb0203f86df tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]522b528a-0ea4-d6f8-a424-416e0d6d049c, 'name': SearchDatastore_Task, 'duration_secs': 0.057355} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2418.949744] env[63279]: DEBUG oslo_concurrency.lockutils [None req-72d4b0a6-0bb4-4141-80e3-ffb0203f86df tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2418.949937] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-72d4b0a6-0bb4-4141-80e3-ffb0203f86df tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: 6c4d6006-656b-4d30-a595-8985d7f6b1e7] Processing image 30887889-e45b-4f67-8b3c-16216e594a90 {{(pid=63279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2418.950184] env[63279]: DEBUG oslo_concurrency.lockutils [None req-72d4b0a6-0bb4-4141-80e3-ffb0203f86df tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2418.950334] env[63279]: DEBUG oslo_concurrency.lockutils [None req-72d4b0a6-0bb4-4141-80e3-ffb0203f86df tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2418.950517] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-72d4b0a6-0bb4-4141-80e3-ffb0203f86df tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2418.950771] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e6c7f122-7cf6-4135-a935-95015a685fc3 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2418.958514] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-72d4b0a6-0bb4-4141-80e3-ffb0203f86df tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2418.958719] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-72d4b0a6-0bb4-4141-80e3-ffb0203f86df tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2418.959468] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c7ba73ad-579d-4bf8-9124-e3d81849c2a4 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2418.964483] env[63279]: DEBUG oslo_vmware.api [None req-72d4b0a6-0bb4-4141-80e3-ffb0203f86df tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Waiting for the task: (returnval){ [ 2418.964483] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52950171-bf32-183a-7de0-0b05546acdf8" [ 2418.964483] env[63279]: _type = "Task" [ 2418.964483] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2418.973461] env[63279]: DEBUG oslo_vmware.api [None req-72d4b0a6-0bb4-4141-80e3-ffb0203f86df tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52950171-bf32-183a-7de0-0b05546acdf8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2419.042089] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] [instance: c8b42e3b-b841-4b79-a4f3-ef62577d4902] Instance has had 0 of 5 cleanup attempts {{(pid=63279) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11760}} [ 2419.190138] env[63279]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 2419.190138] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]526b4dbc-9c61-db17-bb64-7ab50e6945bf" [ 2419.190138] env[63279]: _type = "HttpNfcLease" [ 2419.190138] env[63279]: } is initializing. {{(pid=63279) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 2419.326504] env[63279]: DEBUG oslo_vmware.api [None req-37cb8674-bf68-4b1b-ae68-296c49ed5420 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Task: {'id': task-2088195, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.069259} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2419.326811] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-37cb8674-bf68-4b1b-ae68-296c49ed5420 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: 6e8d9959-7c9d-48ee-81a5-bbdc6234248f] Extended root virtual disk {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2419.327672] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dfdfb148-0274-4e3b-832a-80e9c4d0dae2 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2419.349447] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-37cb8674-bf68-4b1b-ae68-296c49ed5420 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: 6e8d9959-7c9d-48ee-81a5-bbdc6234248f] Reconfiguring VM instance instance-00000071 to attach disk [datastore1] 6e8d9959-7c9d-48ee-81a5-bbdc6234248f/6e8d9959-7c9d-48ee-81a5-bbdc6234248f.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2419.354510] env[63279]: DEBUG nova.scheduler.client.report [None req-5a994934-1fd4-4f30-8016-3d5f78a5cf31 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Inventory has not changed for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2419.357440] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-78aa4e95-376b-4e77-b349-c9a461e0724e {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2419.380249] env[63279]: DEBUG oslo_vmware.api [None req-37cb8674-bf68-4b1b-ae68-296c49ed5420 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Waiting for the task: (returnval){ [ 2419.380249] env[63279]: value = "task-2088196" [ 2419.380249] env[63279]: _type = "Task" [ 2419.380249] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2419.388925] env[63279]: DEBUG oslo_vmware.api [None req-37cb8674-bf68-4b1b-ae68-296c49ed5420 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Task: {'id': task-2088196, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2419.438830] env[63279]: DEBUG nova.network.neutron [req-c6aa3802-5122-42c5-b85f-5f0f662c1efd req-afde28b9-25fc-43b1-8804-7aa60b297293 service nova] [instance: 6c4d6006-656b-4d30-a595-8985d7f6b1e7] Updated VIF entry in instance network info cache for port 548d4882-d674-412a-9b89-4691e9eda165. 
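"Inventory has not changed for provider ... based on inventory data: {...}" above means the locally built inventory matched what Placement already holds, so no update (and no fresh generation conflict) was needed. A trivial sketch of that short-circuit; the dict shapes follow the VCPU/MEMORY_MB/DISK_GB records printed in the log, and the client call is a placeholder:

def inventory_changed(current, desired):
    # Both sides are dicts of resource class -> fields (total, reserved,
    # min_unit, max_unit, step_size, allocation_ratio), as printed above.
    return current != desired

def sync_inventory(client, provider_uuid, current, desired):
    if not inventory_changed(current, desired):
        return False                     # skip the update entirely
    return client.put_inventory(provider_uuid, desired)   # placeholder call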
{{(pid=63279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2419.439258] env[63279]: DEBUG nova.network.neutron [req-c6aa3802-5122-42c5-b85f-5f0f662c1efd req-afde28b9-25fc-43b1-8804-7aa60b297293 service nova] [instance: 6c4d6006-656b-4d30-a595-8985d7f6b1e7] Updating instance_info_cache with network_info: [{"id": "548d4882-d674-412a-9b89-4691e9eda165", "address": "fa:16:3e:50:5f:f2", "network": {"id": "82ca068b-acea-4f60-b0b3-68ea1e08aebe", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-571699353-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f39174e9ff5649e0ade4391da383dfb2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "57d4be17-536f-4a81-bea9-6547bd50f4a3", "external-id": "nsx-vlan-transportzone-163", "segmentation_id": 163, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap548d4882-d6", "ovs_interfaceid": "548d4882-d674-412a-9b89-4691e9eda165", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2419.473923] env[63279]: DEBUG oslo_vmware.api [None req-72d4b0a6-0bb4-4141-80e3-ffb0203f86df tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52950171-bf32-183a-7de0-0b05546acdf8, 'name': SearchDatastore_Task, 'duration_secs': 0.009254} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2419.474678] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-efe76b19-adb8-456f-a560-0e58516e37b7 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2419.479458] env[63279]: DEBUG oslo_vmware.api [None req-72d4b0a6-0bb4-4141-80e3-ffb0203f86df tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Waiting for the task: (returnval){ [ 2419.479458] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52a8063f-ff75-3bf7-9291-6309da18112e" [ 2419.479458] env[63279]: _type = "Task" [ 2419.479458] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2419.486620] env[63279]: DEBUG oslo_vmware.api [None req-72d4b0a6-0bb4-4141-80e3-ffb0203f86df tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52a8063f-ff75-3bf7-9291-6309da18112e, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2419.545120] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] [instance: 246f0945-7290-4cb7-a982-b17cb1573002] Instance has had 0 of 5 cleanup attempts {{(pid=63279) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11760}} [ 2419.691080] env[63279]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 2419.691080] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]526b4dbc-9c61-db17-bb64-7ab50e6945bf" [ 2419.691080] env[63279]: _type = "HttpNfcLease" [ 2419.691080] env[63279]: } is initializing. {{(pid=63279) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 2419.876125] env[63279]: DEBUG oslo_concurrency.lockutils [None req-5a994934-1fd4-4f30-8016-3d5f78a5cf31 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.574s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2419.878608] env[63279]: DEBUG oslo_concurrency.lockutils [None req-a0b37831-7bbd-440a-8f01-2fbcf0166010 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Acquiring lock "246b5346-93b1-4a84-921c-d028f3554d3d" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2419.878836] env[63279]: DEBUG oslo_concurrency.lockutils [None req-a0b37831-7bbd-440a-8f01-2fbcf0166010 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Lock "246b5346-93b1-4a84-921c-d028f3554d3d" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2419.879096] env[63279]: INFO nova.compute.manager [None req-a0b37831-7bbd-440a-8f01-2fbcf0166010 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 246b5346-93b1-4a84-921c-d028f3554d3d] Attaching volume 30fc3415-0254-4b9f-a6bb-e4447562cfab to /dev/sdb [ 2419.881202] env[63279]: DEBUG oslo_concurrency.lockutils [None req-e2edff00-0842-48f8-a733-a2bb5f6b092c tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 2.240s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2419.881448] env[63279]: DEBUG nova.objects.instance [None req-e2edff00-0842-48f8-a733-a2bb5f6b092c tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Lazy-loading 'resources' on Instance uuid 36354325-dee0-406e-8eb6-bc3cf347a403 {{(pid=63279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2419.894439] env[63279]: DEBUG oslo_vmware.api [None req-37cb8674-bf68-4b1b-ae68-296c49ed5420 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Task: {'id': task-2088196, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2419.907586] env[63279]: INFO nova.scheduler.client.report [None req-5a994934-1fd4-4f30-8016-3d5f78a5cf31 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Deleted allocations for instance 69b3269a-2ba3-4f5f-a29c-62518c93da3d [ 2419.915011] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b082447-0472-4e21-b538-8e834361edec {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2419.921843] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72a9a366-9027-4660-803e-3207733a8095 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2419.935394] env[63279]: DEBUG nova.virt.block_device [None req-a0b37831-7bbd-440a-8f01-2fbcf0166010 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 246b5346-93b1-4a84-921c-d028f3554d3d] Updating existing volume attachment record: 8ae57810-cb03-4205-912b-395bc90dbc06 {{(pid=63279) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 2419.942065] env[63279]: DEBUG oslo_concurrency.lockutils [req-c6aa3802-5122-42c5-b85f-5f0f662c1efd req-afde28b9-25fc-43b1-8804-7aa60b297293 service nova] Releasing lock "refresh_cache-6c4d6006-656b-4d30-a595-8985d7f6b1e7" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2419.989512] env[63279]: DEBUG oslo_vmware.api [None req-72d4b0a6-0bb4-4141-80e3-ffb0203f86df tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52a8063f-ff75-3bf7-9291-6309da18112e, 'name': SearchDatastore_Task, 'duration_secs': 0.009522} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2419.990066] env[63279]: DEBUG oslo_concurrency.lockutils [None req-72d4b0a6-0bb4-4141-80e3-ffb0203f86df tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2419.990358] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-72d4b0a6-0bb4-4141-80e3-ffb0203f86df tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] 6c4d6006-656b-4d30-a595-8985d7f6b1e7/6c4d6006-656b-4d30-a595-8985d7f6b1e7.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2419.990644] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1c1f64cc-e54c-4763-b8ce-63feb597e2b3 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2419.998068] env[63279]: DEBUG oslo_vmware.api [None req-72d4b0a6-0bb4-4141-80e3-ffb0203f86df tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Waiting for the task: (returnval){ [ 2419.998068] env[63279]: value = "task-2088197" [ 2419.998068] env[63279]: _type = "Task" [ 2419.998068] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2420.006431] env[63279]: DEBUG oslo_vmware.api [None req-72d4b0a6-0bb4-4141-80e3-ffb0203f86df tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Task: {'id': task-2088197, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2420.048922] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] [instance: 4871421f-0015-4973-bb5f-c9042d411c82] Instance has had 0 of 5 cleanup attempts {{(pid=63279) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11760}} [ 2420.192749] env[63279]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 2420.192749] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]526b4dbc-9c61-db17-bb64-7ab50e6945bf" [ 2420.192749] env[63279]: _type = "HttpNfcLease" [ 2420.192749] env[63279]: } is initializing. {{(pid=63279) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 2420.396885] env[63279]: DEBUG oslo_vmware.api [None req-37cb8674-bf68-4b1b-ae68-296c49ed5420 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Task: {'id': task-2088196, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2420.421608] env[63279]: DEBUG oslo_concurrency.lockutils [None req-5a994934-1fd4-4f30-8016-3d5f78a5cf31 tempest-ServersTestJSON-1420611653 tempest-ServersTestJSON-1420611653-project-member] Lock "69b3269a-2ba3-4f5f-a29c-62518c93da3d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 10.520s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2420.497876] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73e0ed32-e031-4bc6-b267-5986ea2ddc00 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2420.509546] env[63279]: DEBUG oslo_vmware.api [None req-72d4b0a6-0bb4-4141-80e3-ffb0203f86df tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Task: {'id': task-2088197, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.439621} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2420.511635] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-72d4b0a6-0bb4-4141-80e3-ffb0203f86df tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] 6c4d6006-656b-4d30-a595-8985d7f6b1e7/6c4d6006-656b-4d30-a595-8985d7f6b1e7.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2420.511863] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-72d4b0a6-0bb4-4141-80e3-ffb0203f86df tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: 6c4d6006-656b-4d30-a595-8985d7f6b1e7] Extending root virtual disk to 1048576 {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2420.512170] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-464c71c0-0f0e-4fb3-a1e8-e9297199a13e {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2420.514497] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b327c529-2c47-41fc-bbb5-e2b8d0375d0d {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2420.546604] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10f518dc-e2ff-4ece-ae66-b0595bd62f43 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2420.549250] env[63279]: DEBUG oslo_vmware.api [None req-72d4b0a6-0bb4-4141-80e3-ffb0203f86df tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Waiting for the task: (returnval){ [ 2420.549250] env[63279]: value = "task-2088201" [ 2420.549250] env[63279]: _type = "Task" [ 2420.549250] env[63279]: } to complete. 
{{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2420.554677] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] [instance: df963c29-a1c4-4f28-be95-cafe3af4d2fa] Instance has had 0 of 5 cleanup attempts {{(pid=63279) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11760}} [ 2420.558113] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65221af6-92a8-4e44-bf3a-9d5be6624138 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2420.564998] env[63279]: DEBUG oslo_vmware.api [None req-72d4b0a6-0bb4-4141-80e3-ffb0203f86df tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Task: {'id': task-2088201, 'name': ExtendVirtualDisk_Task} progress is 50%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2420.575654] env[63279]: DEBUG nova.compute.provider_tree [None req-e2edff00-0842-48f8-a733-a2bb5f6b092c tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Inventory has not changed in ProviderTree for provider: 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2420.692337] env[63279]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 2420.692337] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]526b4dbc-9c61-db17-bb64-7ab50e6945bf" [ 2420.692337] env[63279]: _type = "HttpNfcLease" [ 2420.692337] env[63279]: } is ready. {{(pid=63279) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 2420.692719] env[63279]: DEBUG oslo_vmware.rw_handles [None req-fbd37efa-dbe8-4925-9302-efee8e5b5950 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 2420.692719] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]526b4dbc-9c61-db17-bb64-7ab50e6945bf" [ 2420.692719] env[63279]: _type = "HttpNfcLease" [ 2420.692719] env[63279]: }. {{(pid=63279) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 2420.693532] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a75a1ef-63ef-4424-8424-da49b59cf918 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2420.701165] env[63279]: DEBUG oslo_vmware.rw_handles [None req-fbd37efa-dbe8-4925-9302-efee8e5b5950 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52293b35-5256-68b6-ea1c-761c86aea288/disk-0.vmdk from lease info. {{(pid=63279) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 2420.701350] env[63279]: DEBUG oslo_vmware.rw_handles [None req-fbd37efa-dbe8-4925-9302-efee8e5b5950 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Creating HTTP connection to write to file with size = 31670272 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52293b35-5256-68b6-ea1c-761c86aea288/disk-0.vmdk. 
{{(pid=63279) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2420.765733] env[63279]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-cf522916-6ac8-4044-ab76-8074310ace7d {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2420.893239] env[63279]: DEBUG oslo_vmware.api [None req-37cb8674-bf68-4b1b-ae68-296c49ed5420 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Task: {'id': task-2088196, 'name': ReconfigVM_Task, 'duration_secs': 1.12994} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2420.894198] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-37cb8674-bf68-4b1b-ae68-296c49ed5420 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: 6e8d9959-7c9d-48ee-81a5-bbdc6234248f] Reconfigured VM instance instance-00000071 to attach disk [datastore1] 6e8d9959-7c9d-48ee-81a5-bbdc6234248f/6e8d9959-7c9d-48ee-81a5-bbdc6234248f.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2420.894287] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-75789193-2c71-422e-b027-2737fa5facec {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2420.901176] env[63279]: DEBUG oslo_vmware.api [None req-37cb8674-bf68-4b1b-ae68-296c49ed5420 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Waiting for the task: (returnval){ [ 2420.901176] env[63279]: value = "task-2088202" [ 2420.901176] env[63279]: _type = "Task" [ 2420.901176] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2420.909537] env[63279]: DEBUG oslo_vmware.api [None req-37cb8674-bf68-4b1b-ae68-296c49ed5420 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Task: {'id': task-2088202, 'name': Rename_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2421.062207] env[63279]: DEBUG oslo_vmware.api [None req-72d4b0a6-0bb4-4141-80e3-ffb0203f86df tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Task: {'id': task-2088201, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.064643} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2421.064422] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] [instance: a15141bc-424d-48ca-a6d5-c859a3639a0b] Instance has had 0 of 5 cleanup attempts {{(pid=63279) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11760}} [ 2421.066404] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-72d4b0a6-0bb4-4141-80e3-ffb0203f86df tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: 6c4d6006-656b-4d30-a595-8985d7f6b1e7] Extended root virtual disk {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2421.067479] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63f95de0-e11a-4689-83c7-30ee3312ac8f {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2421.085700] env[63279]: DEBUG nova.scheduler.client.report [None req-e2edff00-0842-48f8-a733-a2bb5f6b092c tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Inventory has not changed for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2421.097979] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-72d4b0a6-0bb4-4141-80e3-ffb0203f86df tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: 6c4d6006-656b-4d30-a595-8985d7f6b1e7] Reconfiguring VM instance instance-00000072 to attach disk [datastore1] 6c4d6006-656b-4d30-a595-8985d7f6b1e7/6c4d6006-656b-4d30-a595-8985d7f6b1e7.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2421.104713] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f26c5fb0-9f46-4aa4-ad7e-7d7d52b40f97 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2421.128908] env[63279]: DEBUG oslo_vmware.api [None req-72d4b0a6-0bb4-4141-80e3-ffb0203f86df tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Waiting for the task: (returnval){ [ 2421.128908] env[63279]: value = "task-2088203" [ 2421.128908] env[63279]: _type = "Task" [ 2421.128908] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2421.139839] env[63279]: DEBUG oslo_vmware.api [None req-72d4b0a6-0bb4-4141-80e3-ffb0203f86df tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Task: {'id': task-2088203, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2421.411854] env[63279]: DEBUG oslo_vmware.api [None req-37cb8674-bf68-4b1b-ae68-296c49ed5420 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Task: {'id': task-2088202, 'name': Rename_Task, 'duration_secs': 0.210185} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2421.412190] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-37cb8674-bf68-4b1b-ae68-296c49ed5420 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: 6e8d9959-7c9d-48ee-81a5-bbdc6234248f] Powering on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2421.412609] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c0415a2d-e3f9-4c2c-8715-cb6fab565a48 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2421.419350] env[63279]: DEBUG oslo_vmware.api [None req-37cb8674-bf68-4b1b-ae68-296c49ed5420 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Waiting for the task: (returnval){ [ 2421.419350] env[63279]: value = "task-2088204" [ 2421.419350] env[63279]: _type = "Task" [ 2421.419350] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2421.431373] env[63279]: DEBUG oslo_vmware.api [None req-37cb8674-bf68-4b1b-ae68-296c49ed5420 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Task: {'id': task-2088204, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2421.567916] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] [instance: 6699de0a-b3f8-4d84-9c9b-d0f6899a606e] Instance has had 0 of 5 cleanup attempts {{(pid=63279) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11760}} [ 2421.602029] env[63279]: DEBUG oslo_concurrency.lockutils [None req-e2edff00-0842-48f8-a733-a2bb5f6b092c tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.718s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2421.603238] env[63279]: DEBUG oslo_concurrency.lockutils [None req-9fbef605-6d62-4620-b451-b960750507a1 tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 3.798s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2421.603623] env[63279]: DEBUG nova.objects.instance [None req-9fbef605-6d62-4620-b451-b960750507a1 tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Lazy-loading 'resources' on Instance uuid 19e10ee4-99d1-44b9-9354-4c162d541a1f {{(pid=63279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2421.636225] env[63279]: INFO nova.scheduler.client.report [None req-e2edff00-0842-48f8-a733-a2bb5f6b092c tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Deleted allocations for instance 36354325-dee0-406e-8eb6-bc3cf347a403 [ 2421.642773] env[63279]: DEBUG oslo_vmware.api [None req-72d4b0a6-0bb4-4141-80e3-ffb0203f86df tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Task: {'id': task-2088203, 'name': ReconfigVM_Task, 'duration_secs': 0.346463} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2421.644768] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-72d4b0a6-0bb4-4141-80e3-ffb0203f86df tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: 6c4d6006-656b-4d30-a595-8985d7f6b1e7] Reconfigured VM instance instance-00000072 to attach disk [datastore1] 6c4d6006-656b-4d30-a595-8985d7f6b1e7/6c4d6006-656b-4d30-a595-8985d7f6b1e7.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2421.647257] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-42f8e239-990f-4314-bbce-22da0a7f1e52 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2421.653591] env[63279]: DEBUG oslo_vmware.api [None req-72d4b0a6-0bb4-4141-80e3-ffb0203f86df tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Waiting for the task: (returnval){ [ 2421.653591] env[63279]: value = "task-2088205" [ 2421.653591] env[63279]: _type = "Task" [ 2421.653591] env[63279]: } to complete. 
{{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2421.663400] env[63279]: DEBUG oslo_vmware.api [None req-72d4b0a6-0bb4-4141-80e3-ffb0203f86df tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Task: {'id': task-2088205, 'name': Rename_Task} progress is 5%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2421.894508] env[63279]: DEBUG oslo_vmware.rw_handles [None req-fbd37efa-dbe8-4925-9302-efee8e5b5950 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Completed reading data from the image iterator. {{(pid=63279) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2421.894810] env[63279]: DEBUG oslo_vmware.rw_handles [None req-fbd37efa-dbe8-4925-9302-efee8e5b5950 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52293b35-5256-68b6-ea1c-761c86aea288/disk-0.vmdk. {{(pid=63279) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 2421.895787] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-082caa70-b053-4ff4-8cdf-41ea86d48779 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2421.902678] env[63279]: DEBUG oslo_vmware.rw_handles [None req-fbd37efa-dbe8-4925-9302-efee8e5b5950 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52293b35-5256-68b6-ea1c-761c86aea288/disk-0.vmdk is in state: ready. {{(pid=63279) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 2421.902902] env[63279]: DEBUG oslo_vmware.rw_handles [None req-fbd37efa-dbe8-4925-9302-efee8e5b5950 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Releasing lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52293b35-5256-68b6-ea1c-761c86aea288/disk-0.vmdk. {{(pid=63279) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 2421.903215] env[63279]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-3742cc6d-4c46-4f51-a4c5-d9a4630f063a {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2421.929221] env[63279]: DEBUG oslo_vmware.api [None req-37cb8674-bf68-4b1b-ae68-296c49ed5420 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Task: {'id': task-2088204, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2422.072339] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] [instance: fd9b1666-8e06-4ed0-9187-05a40e136a1d] Instance has had 0 of 5 cleanup attempts {{(pid=63279) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11760}} [ 2422.150774] env[63279]: DEBUG oslo_concurrency.lockutils [None req-e2edff00-0842-48f8-a733-a2bb5f6b092c tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Lock "36354325-dee0-406e-8eb6-bc3cf347a403" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 16.069s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2422.164691] env[63279]: DEBUG oslo_vmware.api [None req-72d4b0a6-0bb4-4141-80e3-ffb0203f86df tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Task: {'id': task-2088205, 'name': Rename_Task, 'duration_secs': 0.155239} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2422.166467] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-72d4b0a6-0bb4-4141-80e3-ffb0203f86df tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: 6c4d6006-656b-4d30-a595-8985d7f6b1e7] Powering on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2422.166467] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a6a24e93-37b1-4805-a1ac-7ed5f583506c {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2422.172757] env[63279]: DEBUG oslo_vmware.api [None req-72d4b0a6-0bb4-4141-80e3-ffb0203f86df tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Waiting for the task: (returnval){ [ 2422.172757] env[63279]: value = "task-2088206" [ 2422.172757] env[63279]: _type = "Task" [ 2422.172757] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2422.183343] env[63279]: DEBUG oslo_vmware.api [None req-72d4b0a6-0bb4-4141-80e3-ffb0203f86df tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Task: {'id': task-2088206, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2422.249672] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7b7cf43-a24f-4138-96cf-5ba002fa7c3b {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2422.254956] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d6bb2f6-84ba-4649-ab6b-1d9e5e360cae {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2422.294635] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-200886a4-6379-4548-996b-1ebd34745af4 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2422.309678] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d50e5104-aa23-4a45-b5f3-39d8c81d95e4 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2422.327846] env[63279]: DEBUG nova.compute.provider_tree [None req-9fbef605-6d62-4620-b451-b960750507a1 tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Inventory has not changed in ProviderTree for provider: 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2422.357664] env[63279]: DEBUG oslo_vmware.rw_handles [None req-fbd37efa-dbe8-4925-9302-efee8e5b5950 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Closed VMDK write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52293b35-5256-68b6-ea1c-761c86aea288/disk-0.vmdk. 
{{(pid=63279) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 2422.357970] env[63279]: INFO nova.virt.vmwareapi.images [None req-fbd37efa-dbe8-4925-9302-efee8e5b5950 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] [instance: efddec10-b496-446e-a05a-72c9f2d86ed9] Downloaded image file data 4faf9f78-8c3c-4094-a5ab-792d6ef75b92 [ 2422.358733] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e547124-6c88-44c3-b595-7a79d815fa86 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2422.377223] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4d9ad683-3db4-4584-9992-05a51f8b163a {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2422.409526] env[63279]: INFO nova.virt.vmwareapi.images [None req-fbd37efa-dbe8-4925-9302-efee8e5b5950 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] [instance: efddec10-b496-446e-a05a-72c9f2d86ed9] The imported VM was unregistered [ 2422.412196] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-fbd37efa-dbe8-4925-9302-efee8e5b5950 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] [instance: efddec10-b496-446e-a05a-72c9f2d86ed9] Caching image {{(pid=63279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2422.412683] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-fbd37efa-dbe8-4925-9302-efee8e5b5950 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Creating directory with path [datastore1] devstack-image-cache_base/4faf9f78-8c3c-4094-a5ab-792d6ef75b92 {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2422.412911] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6957f83b-e03a-45ef-ba44-63b4cec6cc3b {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2422.426230] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-fbd37efa-dbe8-4925-9302-efee8e5b5950 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Created directory with path [datastore1] devstack-image-cache_base/4faf9f78-8c3c-4094-a5ab-792d6ef75b92 {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2422.426425] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-fbd37efa-dbe8-4925-9302-efee8e5b5950 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Moving virtual disk from [datastore1] OSTACK_IMG_c5003216-8903-45f3-a4c0-f43c47b0f25c/OSTACK_IMG_c5003216-8903-45f3-a4c0-f43c47b0f25c.vmdk to [datastore1] devstack-image-cache_base/4faf9f78-8c3c-4094-a5ab-792d6ef75b92/4faf9f78-8c3c-4094-a5ab-792d6ef75b92.vmdk. 
{{(pid=63279) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 2422.426676] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-cec9cd72-e1ad-4933-aca9-f2ea39920840 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2422.433016] env[63279]: DEBUG oslo_vmware.api [None req-37cb8674-bf68-4b1b-ae68-296c49ed5420 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Task: {'id': task-2088204, 'name': PowerOnVM_Task, 'duration_secs': 0.554794} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2422.433811] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-37cb8674-bf68-4b1b-ae68-296c49ed5420 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: 6e8d9959-7c9d-48ee-81a5-bbdc6234248f] Powered on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2422.434358] env[63279]: DEBUG nova.compute.manager [None req-37cb8674-bf68-4b1b-ae68-296c49ed5420 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: 6e8d9959-7c9d-48ee-81a5-bbdc6234248f] Checking state {{(pid=63279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2422.435435] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6680ead7-397d-424b-94b6-683e88fd061b {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2422.439191] env[63279]: DEBUG oslo_vmware.api [None req-fbd37efa-dbe8-4925-9302-efee8e5b5950 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Waiting for the task: (returnval){ [ 2422.439191] env[63279]: value = "task-2088209" [ 2422.439191] env[63279]: _type = "Task" [ 2422.439191] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2422.453176] env[63279]: DEBUG oslo_vmware.api [None req-fbd37efa-dbe8-4925-9302-efee8e5b5950 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Task: {'id': task-2088209, 'name': MoveVirtualDisk_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2422.579889] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] [instance: cd00cb0e-30e5-4a0c-8612-ea92e5e32edd] Instance has had 0 of 5 cleanup attempts {{(pid=63279) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11760}} [ 2422.685270] env[63279]: DEBUG oslo_vmware.api [None req-72d4b0a6-0bb4-4141-80e3-ffb0203f86df tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Task: {'id': task-2088206, 'name': PowerOnVM_Task, 'duration_secs': 0.465105} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2422.685565] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-72d4b0a6-0bb4-4141-80e3-ffb0203f86df tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: 6c4d6006-656b-4d30-a595-8985d7f6b1e7] Powered on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2422.685779] env[63279]: INFO nova.compute.manager [None req-72d4b0a6-0bb4-4141-80e3-ffb0203f86df tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: 6c4d6006-656b-4d30-a595-8985d7f6b1e7] Took 6.80 seconds to spawn the instance on the hypervisor. [ 2422.685967] env[63279]: DEBUG nova.compute.manager [None req-72d4b0a6-0bb4-4141-80e3-ffb0203f86df tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: 6c4d6006-656b-4d30-a595-8985d7f6b1e7] Checking state {{(pid=63279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2422.686777] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-169b0a8e-86a2-44a6-b05d-7461a29aaf5c {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2422.805457] env[63279]: DEBUG oslo_concurrency.lockutils [None req-4ae06451-8348-4fa0-89b9-c29961ec9216 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Acquiring lock "6f839780-be92-4d99-a96d-1fc14c819599" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2422.805761] env[63279]: DEBUG oslo_concurrency.lockutils [None req-4ae06451-8348-4fa0-89b9-c29961ec9216 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Lock "6f839780-be92-4d99-a96d-1fc14c819599" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2422.805972] env[63279]: DEBUG oslo_concurrency.lockutils [None req-4ae06451-8348-4fa0-89b9-c29961ec9216 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Acquiring lock "6f839780-be92-4d99-a96d-1fc14c819599-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2422.806187] env[63279]: DEBUG oslo_concurrency.lockutils [None req-4ae06451-8348-4fa0-89b9-c29961ec9216 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Lock "6f839780-be92-4d99-a96d-1fc14c819599-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2422.806362] env[63279]: DEBUG oslo_concurrency.lockutils [None req-4ae06451-8348-4fa0-89b9-c29961ec9216 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Lock "6f839780-be92-4d99-a96d-1fc14c819599-events" "released" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2422.808818] env[63279]: INFO nova.compute.manager [None req-4ae06451-8348-4fa0-89b9-c29961ec9216 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] [instance: 6f839780-be92-4d99-a96d-1fc14c819599] Terminating instance [ 2422.831767] env[63279]: DEBUG nova.scheduler.client.report [None req-9fbef605-6d62-4620-b451-b960750507a1 tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Inventory has not changed for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2422.952364] env[63279]: INFO nova.compute.manager [None req-37cb8674-bf68-4b1b-ae68-296c49ed5420 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: 6e8d9959-7c9d-48ee-81a5-bbdc6234248f] bringing vm to original state: 'stopped' [ 2422.955278] env[63279]: DEBUG oslo_vmware.api [None req-fbd37efa-dbe8-4925-9302-efee8e5b5950 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Task: {'id': task-2088209, 'name': MoveVirtualDisk_Task} progress is 21%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2423.083954] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] [instance: b981ac83-6c23-4d44-bd28-12da30d746bd] Instance has had 0 of 5 cleanup attempts {{(pid=63279) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11760}} [ 2423.206637] env[63279]: INFO nova.compute.manager [None req-72d4b0a6-0bb4-4141-80e3-ffb0203f86df tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: 6c4d6006-656b-4d30-a595-8985d7f6b1e7] Took 11.56 seconds to build instance. [ 2423.314196] env[63279]: DEBUG nova.compute.manager [None req-4ae06451-8348-4fa0-89b9-c29961ec9216 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] [instance: 6f839780-be92-4d99-a96d-1fc14c819599] Start destroying the instance on the hypervisor. 
{{(pid=63279) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2423.314196] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-4ae06451-8348-4fa0-89b9-c29961ec9216 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] [instance: 6f839780-be92-4d99-a96d-1fc14c819599] Destroying instance {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2423.314441] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d08646bf-872f-477c-93b9-f8ce1bf82f00 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2423.323936] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-4ae06451-8348-4fa0-89b9-c29961ec9216 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] [instance: 6f839780-be92-4d99-a96d-1fc14c819599] Powering off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2423.324246] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f4b29193-8632-4688-a1ed-ea7fe40d902d {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2423.332247] env[63279]: DEBUG oslo_vmware.api [None req-4ae06451-8348-4fa0-89b9-c29961ec9216 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Waiting for the task: (returnval){ [ 2423.332247] env[63279]: value = "task-2088210" [ 2423.332247] env[63279]: _type = "Task" [ 2423.332247] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2423.336388] env[63279]: DEBUG oslo_concurrency.lockutils [None req-9fbef605-6d62-4620-b451-b960750507a1 tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.734s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2423.346269] env[63279]: DEBUG oslo_vmware.api [None req-4ae06451-8348-4fa0-89b9-c29961ec9216 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Task: {'id': task-2088210, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2423.361420] env[63279]: INFO nova.scheduler.client.report [None req-9fbef605-6d62-4620-b451-b960750507a1 tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Deleted allocations for instance 19e10ee4-99d1-44b9-9354-4c162d541a1f [ 2423.451775] env[63279]: DEBUG oslo_vmware.api [None req-fbd37efa-dbe8-4925-9302-efee8e5b5950 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Task: {'id': task-2088209, 'name': MoveVirtualDisk_Task} progress is 43%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2423.587712] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] [instance: 5b5f87cb-cf35-418f-b5bd-b953524a285c] Instance has had 0 of 5 cleanup attempts {{(pid=63279) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11760}} [ 2423.708540] env[63279]: DEBUG oslo_concurrency.lockutils [None req-72d4b0a6-0bb4-4141-80e3-ffb0203f86df tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Lock "6c4d6006-656b-4d30-a595-8985d7f6b1e7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 13.066s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2423.844126] env[63279]: DEBUG oslo_vmware.api [None req-4ae06451-8348-4fa0-89b9-c29961ec9216 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Task: {'id': task-2088210, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2423.871622] env[63279]: DEBUG oslo_concurrency.lockutils [None req-9fbef605-6d62-4620-b451-b960750507a1 tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Lock "19e10ee4-99d1-44b9-9354-4c162d541a1f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 10.142s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2423.954703] env[63279]: DEBUG oslo_vmware.api [None req-fbd37efa-dbe8-4925-9302-efee8e5b5950 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Task: {'id': task-2088209, 'name': MoveVirtualDisk_Task} progress is 66%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2423.960483] env[63279]: DEBUG oslo_concurrency.lockutils [None req-37cb8674-bf68-4b1b-ae68-296c49ed5420 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Acquiring lock "6e8d9959-7c9d-48ee-81a5-bbdc6234248f" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2423.960806] env[63279]: DEBUG oslo_concurrency.lockutils [None req-37cb8674-bf68-4b1b-ae68-296c49ed5420 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Lock "6e8d9959-7c9d-48ee-81a5-bbdc6234248f" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.001s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2423.961030] env[63279]: DEBUG nova.compute.manager [None req-37cb8674-bf68-4b1b-ae68-296c49ed5420 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: 6e8d9959-7c9d-48ee-81a5-bbdc6234248f] Checking state {{(pid=63279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2423.962486] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb4cb96f-0729-4041-9758-f95febbfc296 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2423.971922] env[63279]: DEBUG nova.compute.manager [None req-37cb8674-bf68-4b1b-ae68-296c49ed5420 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: 6e8d9959-7c9d-48ee-81a5-bbdc6234248f] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=63279) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 2424.091830] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] [instance: 0e12ab9b-a701-4e0f-9d96-939090f50494] Instance has had 0 of 5 cleanup attempts {{(pid=63279) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11760}} [ 2424.130644] env[63279]: DEBUG nova.compute.manager [req-0923240b-b71b-43b0-9e71-58f844b44b50 req-b19748fe-3fa0-4247-8d74-c5d6fed458e8 service nova] [instance: 6c4d6006-656b-4d30-a595-8985d7f6b1e7] Received event network-changed-548d4882-d674-412a-9b89-4691e9eda165 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2424.132063] env[63279]: DEBUG nova.compute.manager [req-0923240b-b71b-43b0-9e71-58f844b44b50 req-b19748fe-3fa0-4247-8d74-c5d6fed458e8 service nova] [instance: 6c4d6006-656b-4d30-a595-8985d7f6b1e7] Refreshing instance network info cache due to event network-changed-548d4882-d674-412a-9b89-4691e9eda165. 
{{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11655}} [ 2424.132539] env[63279]: DEBUG oslo_concurrency.lockutils [req-0923240b-b71b-43b0-9e71-58f844b44b50 req-b19748fe-3fa0-4247-8d74-c5d6fed458e8 service nova] Acquiring lock "refresh_cache-6c4d6006-656b-4d30-a595-8985d7f6b1e7" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2424.132748] env[63279]: DEBUG oslo_concurrency.lockutils [req-0923240b-b71b-43b0-9e71-58f844b44b50 req-b19748fe-3fa0-4247-8d74-c5d6fed458e8 service nova] Acquired lock "refresh_cache-6c4d6006-656b-4d30-a595-8985d7f6b1e7" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2424.132937] env[63279]: DEBUG nova.network.neutron [req-0923240b-b71b-43b0-9e71-58f844b44b50 req-b19748fe-3fa0-4247-8d74-c5d6fed458e8 service nova] [instance: 6c4d6006-656b-4d30-a595-8985d7f6b1e7] Refreshing network info cache for port 548d4882-d674-412a-9b89-4691e9eda165 {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2424.349542] env[63279]: DEBUG oslo_vmware.api [None req-4ae06451-8348-4fa0-89b9-c29961ec9216 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Task: {'id': task-2088210, 'name': PowerOffVM_Task, 'duration_secs': 0.982338} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2424.349698] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-4ae06451-8348-4fa0-89b9-c29961ec9216 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] [instance: 6f839780-be92-4d99-a96d-1fc14c819599] Powered off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2424.349790] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-4ae06451-8348-4fa0-89b9-c29961ec9216 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] [instance: 6f839780-be92-4d99-a96d-1fc14c819599] Unregistering the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2424.350069] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-091a23c2-d457-4181-aa6d-8790d9fffdeb {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2424.451346] env[63279]: DEBUG oslo_vmware.api [None req-fbd37efa-dbe8-4925-9302-efee8e5b5950 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Task: {'id': task-2088209, 'name': MoveVirtualDisk_Task} progress is 85%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2424.477339] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-37cb8674-bf68-4b1b-ae68-296c49ed5420 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: 6e8d9959-7c9d-48ee-81a5-bbdc6234248f] Powering off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2424.477776] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-bb16519b-0669-4d32-9cec-3ba6131fb702 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2424.481287] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-a0b37831-7bbd-440a-8f01-2fbcf0166010 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 246b5346-93b1-4a84-921c-d028f3554d3d] Volume attach. Driver type: vmdk {{(pid=63279) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 2424.481634] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-a0b37831-7bbd-440a-8f01-2fbcf0166010 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 246b5346-93b1-4a84-921c-d028f3554d3d] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-427799', 'volume_id': '30fc3415-0254-4b9f-a6bb-e4447562cfab', 'name': 'volume-30fc3415-0254-4b9f-a6bb-e4447562cfab', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '246b5346-93b1-4a84-921c-d028f3554d3d', 'attached_at': '', 'detached_at': '', 'volume_id': '30fc3415-0254-4b9f-a6bb-e4447562cfab', 'serial': '30fc3415-0254-4b9f-a6bb-e4447562cfab'} {{(pid=63279) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 2424.482580] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18d43110-700b-47c9-91a3-4c373f3bcd90 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2424.486884] env[63279]: DEBUG oslo_vmware.api [None req-37cb8674-bf68-4b1b-ae68-296c49ed5420 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Waiting for the task: (returnval){ [ 2424.486884] env[63279]: value = "task-2088212" [ 2424.486884] env[63279]: _type = "Task" [ 2424.486884] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2424.503521] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31e17405-4175-4607-93b0-10d7ba651095 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2424.509534] env[63279]: DEBUG oslo_vmware.api [None req-37cb8674-bf68-4b1b-ae68-296c49ed5420 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Task: {'id': task-2088212, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2424.532749] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-a0b37831-7bbd-440a-8f01-2fbcf0166010 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 246b5346-93b1-4a84-921c-d028f3554d3d] Reconfiguring VM instance instance-0000006f to attach disk [datastore1] volume-30fc3415-0254-4b9f-a6bb-e4447562cfab/volume-30fc3415-0254-4b9f-a6bb-e4447562cfab.vmdk or device None with type thin {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2424.533093] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8c7948ac-ed1c-4fc3-9e2e-0bdb4478094a {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2424.548842] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-4ae06451-8348-4fa0-89b9-c29961ec9216 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] [instance: 6f839780-be92-4d99-a96d-1fc14c819599] Unregistered the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2424.549221] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-4ae06451-8348-4fa0-89b9-c29961ec9216 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] [instance: 6f839780-be92-4d99-a96d-1fc14c819599] Deleting contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2424.549423] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-4ae06451-8348-4fa0-89b9-c29961ec9216 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Deleting the datastore file [datastore1] 6f839780-be92-4d99-a96d-1fc14c819599 {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2424.550291] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ac8c3c37-67cd-4f40-bf51-f747056b0ec6 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2424.554928] env[63279]: DEBUG oslo_vmware.api [None req-a0b37831-7bbd-440a-8f01-2fbcf0166010 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Waiting for the task: (returnval){ [ 2424.554928] env[63279]: value = "task-2088213" [ 2424.554928] env[63279]: _type = "Task" [ 2424.554928] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2424.561651] env[63279]: DEBUG oslo_vmware.api [None req-4ae06451-8348-4fa0-89b9-c29961ec9216 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Waiting for the task: (returnval){ [ 2424.561651] env[63279]: value = "task-2088214" [ 2424.561651] env[63279]: _type = "Task" [ 2424.561651] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2424.565410] env[63279]: DEBUG oslo_vmware.api [None req-a0b37831-7bbd-440a-8f01-2fbcf0166010 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Task: {'id': task-2088213, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2424.576024] env[63279]: DEBUG oslo_vmware.api [None req-4ae06451-8348-4fa0-89b9-c29961ec9216 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Task: {'id': task-2088214, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2424.595728] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] [instance: f6a5d157-d58c-4c7e-b6e8-f2dc7aaebd9b] Instance has had 0 of 5 cleanup attempts {{(pid=63279) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11760}} [ 2424.951182] env[63279]: DEBUG oslo_vmware.api [None req-fbd37efa-dbe8-4925-9302-efee8e5b5950 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Task: {'id': task-2088209, 'name': MoveVirtualDisk_Task} progress is 100%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2424.997213] env[63279]: DEBUG oslo_vmware.api [None req-37cb8674-bf68-4b1b-ae68-296c49ed5420 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Task: {'id': task-2088212, 'name': PowerOffVM_Task, 'duration_secs': 0.192361} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2424.998131] env[63279]: DEBUG nova.network.neutron [req-0923240b-b71b-43b0-9e71-58f844b44b50 req-b19748fe-3fa0-4247-8d74-c5d6fed458e8 service nova] [instance: 6c4d6006-656b-4d30-a595-8985d7f6b1e7] Updated VIF entry in instance network info cache for port 548d4882-d674-412a-9b89-4691e9eda165. 
{{(pid=63279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2424.998481] env[63279]: DEBUG nova.network.neutron [req-0923240b-b71b-43b0-9e71-58f844b44b50 req-b19748fe-3fa0-4247-8d74-c5d6fed458e8 service nova] [instance: 6c4d6006-656b-4d30-a595-8985d7f6b1e7] Updating instance_info_cache with network_info: [{"id": "548d4882-d674-412a-9b89-4691e9eda165", "address": "fa:16:3e:50:5f:f2", "network": {"id": "82ca068b-acea-4f60-b0b3-68ea1e08aebe", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-571699353-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.165", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f39174e9ff5649e0ade4391da383dfb2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "57d4be17-536f-4a81-bea9-6547bd50f4a3", "external-id": "nsx-vlan-transportzone-163", "segmentation_id": 163, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap548d4882-d6", "ovs_interfaceid": "548d4882-d674-412a-9b89-4691e9eda165", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2424.999801] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-37cb8674-bf68-4b1b-ae68-296c49ed5420 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: 6e8d9959-7c9d-48ee-81a5-bbdc6234248f] Powered off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2425.000035] env[63279]: DEBUG nova.compute.manager [None req-37cb8674-bf68-4b1b-ae68-296c49ed5420 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: 6e8d9959-7c9d-48ee-81a5-bbdc6234248f] Checking state {{(pid=63279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2425.001347] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7d332ba-7981-4f46-b889-88b807d88bdf {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2425.066535] env[63279]: DEBUG oslo_vmware.api [None req-a0b37831-7bbd-440a-8f01-2fbcf0166010 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Task: {'id': task-2088213, 'name': ReconfigVM_Task, 'duration_secs': 0.428662} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2425.069443] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-a0b37831-7bbd-440a-8f01-2fbcf0166010 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 246b5346-93b1-4a84-921c-d028f3554d3d] Reconfigured VM instance instance-0000006f to attach disk [datastore1] volume-30fc3415-0254-4b9f-a6bb-e4447562cfab/volume-30fc3415-0254-4b9f-a6bb-e4447562cfab.vmdk or device None with type thin {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2425.074069] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-db892664-e968-402d-b702-972227f4e931 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2425.089489] env[63279]: DEBUG oslo_vmware.api [None req-4ae06451-8348-4fa0-89b9-c29961ec9216 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Task: {'id': task-2088214, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2425.090804] env[63279]: DEBUG oslo_vmware.api [None req-a0b37831-7bbd-440a-8f01-2fbcf0166010 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Waiting for the task: (returnval){ [ 2425.090804] env[63279]: value = "task-2088215" [ 2425.090804] env[63279]: _type = "Task" [ 2425.090804] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2425.098893] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] [instance: eca98392-98be-405b-b799-463ef9ee3dc8] Instance has had 0 of 5 cleanup attempts {{(pid=63279) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11760}} [ 2425.100671] env[63279]: DEBUG oslo_vmware.api [None req-a0b37831-7bbd-440a-8f01-2fbcf0166010 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Task: {'id': task-2088215, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2425.451911] env[63279]: DEBUG oslo_vmware.api [None req-fbd37efa-dbe8-4925-9302-efee8e5b5950 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Task: {'id': task-2088209, 'name': MoveVirtualDisk_Task, 'duration_secs': 2.675672} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2425.452239] env[63279]: INFO nova.virt.vmwareapi.ds_util [None req-fbd37efa-dbe8-4925-9302-efee8e5b5950 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Moved virtual disk from [datastore1] OSTACK_IMG_c5003216-8903-45f3-a4c0-f43c47b0f25c/OSTACK_IMG_c5003216-8903-45f3-a4c0-f43c47b0f25c.vmdk to [datastore1] devstack-image-cache_base/4faf9f78-8c3c-4094-a5ab-792d6ef75b92/4faf9f78-8c3c-4094-a5ab-792d6ef75b92.vmdk. 
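The task-polling entries around this point (PowerOffVM_Task, MoveVirtualDisk_Task, DeleteDatastoreFile_Task, ReconfigVM_Task) all follow the standard oslo.vmware pattern: the driver invokes a vSphere call that returns a Task moref, then blocks in wait_for_task(), which is what emits the repeated "Task: {'id': ..., 'name': ...} progress is N%" lines, while oslo_concurrency.lockutils produces the surrounding "Acquiring lock" / "released" entries. A minimal sketch of that pattern is shown below; the host, credentials, lock name, and VM moref value are placeholders for illustration and are not taken from this log, and the code is not Nova's own implementation.

    # Minimal sketch of the invoke-then-wait pattern visible in the log above.
    # Assumptions: placeholder host/credentials/moref; not Nova driver code.
    from oslo_concurrency import lockutils
    from oslo_vmware import api as vmware_api
    from oslo_vmware import vim_util

    def power_off_vm(host, user, password, vm_moref_value):
        # Create a vSphere session; positional args are host, username,
        # password, API retry count and task poll interval (assumed defaults).
        session = vmware_api.VMwareAPISession(host, user, password, 10, 0.5)
        vm_ref = vim_util.get_moref(vm_moref_value, 'VirtualMachine')
        # Serialize the operation under a named lock, which is what produces
        # the "Acquiring lock ... / released" pairs in the log.
        with lockutils.lock('vm-%s' % vm_moref_value):
            # The API call returns a Task moref immediately ...
            task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
            # ... and wait_for_task() polls it, logging the periodic
            # "progress is N%" lines until it completes or raises on error.
            return session.wait_for_task(task)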
[ 2425.452434] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-fbd37efa-dbe8-4925-9302-efee8e5b5950 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] [instance: efddec10-b496-446e-a05a-72c9f2d86ed9] Cleaning up location [datastore1] OSTACK_IMG_c5003216-8903-45f3-a4c0-f43c47b0f25c {{(pid=63279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 2425.452605] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-fbd37efa-dbe8-4925-9302-efee8e5b5950 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Deleting the datastore file [datastore1] OSTACK_IMG_c5003216-8903-45f3-a4c0-f43c47b0f25c {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2425.452933] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e35de436-fc84-4572-910c-359292230106 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2425.459369] env[63279]: DEBUG oslo_vmware.api [None req-fbd37efa-dbe8-4925-9302-efee8e5b5950 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Waiting for the task: (returnval){ [ 2425.459369] env[63279]: value = "task-2088216" [ 2425.459369] env[63279]: _type = "Task" [ 2425.459369] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2425.467451] env[63279]: DEBUG oslo_vmware.api [None req-fbd37efa-dbe8-4925-9302-efee8e5b5950 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Task: {'id': task-2088216, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2425.501289] env[63279]: DEBUG oslo_concurrency.lockutils [req-0923240b-b71b-43b0-9e71-58f844b44b50 req-b19748fe-3fa0-4247-8d74-c5d6fed458e8 service nova] Releasing lock "refresh_cache-6c4d6006-656b-4d30-a595-8985d7f6b1e7" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2425.512699] env[63279]: DEBUG oslo_concurrency.lockutils [None req-37cb8674-bf68-4b1b-ae68-296c49ed5420 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Lock "6e8d9959-7c9d-48ee-81a5-bbdc6234248f" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 1.552s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2425.576389] env[63279]: DEBUG oslo_vmware.api [None req-4ae06451-8348-4fa0-89b9-c29961ec9216 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Task: {'id': task-2088214, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.667057} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2425.576664] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-4ae06451-8348-4fa0-89b9-c29961ec9216 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Deleted the datastore file {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2425.576864] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-4ae06451-8348-4fa0-89b9-c29961ec9216 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] [instance: 6f839780-be92-4d99-a96d-1fc14c819599] Deleted contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2425.577146] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-4ae06451-8348-4fa0-89b9-c29961ec9216 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] [instance: 6f839780-be92-4d99-a96d-1fc14c819599] Instance destroyed {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2425.577332] env[63279]: INFO nova.compute.manager [None req-4ae06451-8348-4fa0-89b9-c29961ec9216 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] [instance: 6f839780-be92-4d99-a96d-1fc14c819599] Took 2.26 seconds to destroy the instance on the hypervisor. [ 2425.577587] env[63279]: DEBUG oslo.service.loopingcall [None req-4ae06451-8348-4fa0-89b9-c29961ec9216 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2425.578502] env[63279]: DEBUG nova.compute.manager [-] [instance: 6f839780-be92-4d99-a96d-1fc14c819599] Deallocating network for instance {{(pid=63279) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2425.578638] env[63279]: DEBUG nova.network.neutron [-] [instance: 6f839780-be92-4d99-a96d-1fc14c819599] deallocate_for_instance() {{(pid=63279) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2425.601916] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] [instance: f375b54b-f9de-4529-b752-52c240aed532] Instance has had 0 of 5 cleanup attempts {{(pid=63279) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11760}} [ 2425.603876] env[63279]: DEBUG oslo_vmware.api [None req-a0b37831-7bbd-440a-8f01-2fbcf0166010 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Task: {'id': task-2088215, 'name': ReconfigVM_Task, 'duration_secs': 0.148807} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2425.604388] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-a0b37831-7bbd-440a-8f01-2fbcf0166010 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 246b5346-93b1-4a84-921c-d028f3554d3d] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-427799', 'volume_id': '30fc3415-0254-4b9f-a6bb-e4447562cfab', 'name': 'volume-30fc3415-0254-4b9f-a6bb-e4447562cfab', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '246b5346-93b1-4a84-921c-d028f3554d3d', 'attached_at': '', 'detached_at': '', 'volume_id': '30fc3415-0254-4b9f-a6bb-e4447562cfab', 'serial': '30fc3415-0254-4b9f-a6bb-e4447562cfab'} {{(pid=63279) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 2425.915148] env[63279]: DEBUG nova.compute.manager [req-a3bc2ecb-f564-483c-8224-92999cf1236d req-8ba7f313-3787-485a-b307-6bee121fd16f service nova] [instance: 6f839780-be92-4d99-a96d-1fc14c819599] Received event network-vif-deleted-d0192def-50a0-40c0-9921-fbdf13e63ffb {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2425.915373] env[63279]: INFO nova.compute.manager [req-a3bc2ecb-f564-483c-8224-92999cf1236d req-8ba7f313-3787-485a-b307-6bee121fd16f service nova] [instance: 6f839780-be92-4d99-a96d-1fc14c819599] Neutron deleted interface d0192def-50a0-40c0-9921-fbdf13e63ffb; detaching it from the instance and deleting it from the info cache [ 2425.915373] env[63279]: DEBUG nova.network.neutron [req-a3bc2ecb-f564-483c-8224-92999cf1236d req-8ba7f313-3787-485a-b307-6bee121fd16f service nova] [instance: 6f839780-be92-4d99-a96d-1fc14c819599] Updating instance_info_cache with network_info: [] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2425.969440] env[63279]: DEBUG oslo_vmware.api [None req-fbd37efa-dbe8-4925-9302-efee8e5b5950 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Task: {'id': task-2088216, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.035709} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2425.969627] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-fbd37efa-dbe8-4925-9302-efee8e5b5950 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Deleted the datastore file {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2425.969773] env[63279]: DEBUG oslo_concurrency.lockutils [None req-fbd37efa-dbe8-4925-9302-efee8e5b5950 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Releasing lock "[datastore1] devstack-image-cache_base/4faf9f78-8c3c-4094-a5ab-792d6ef75b92/4faf9f78-8c3c-4094-a5ab-792d6ef75b92.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2425.970032] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-fbd37efa-dbe8-4925-9302-efee8e5b5950 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/4faf9f78-8c3c-4094-a5ab-792d6ef75b92/4faf9f78-8c3c-4094-a5ab-792d6ef75b92.vmdk to [datastore1] efddec10-b496-446e-a05a-72c9f2d86ed9/efddec10-b496-446e-a05a-72c9f2d86ed9.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2425.970289] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-465857b5-e1a1-4419-a92b-751ea8f55a7f {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2425.977263] env[63279]: DEBUG oslo_vmware.api [None req-fbd37efa-dbe8-4925-9302-efee8e5b5950 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Waiting for the task: (returnval){ [ 2425.977263] env[63279]: value = "task-2088217" [ 2425.977263] env[63279]: _type = "Task" [ 2425.977263] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2425.985386] env[63279]: DEBUG oslo_vmware.api [None req-fbd37efa-dbe8-4925-9302-efee8e5b5950 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Task: {'id': task-2088217, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2426.020506] env[63279]: DEBUG oslo_concurrency.lockutils [None req-37cb8674-bf68-4b1b-ae68-296c49ed5420 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2426.021346] env[63279]: DEBUG oslo_concurrency.lockutils [None req-37cb8674-bf68-4b1b-ae68-296c49ed5420 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2426.021346] env[63279]: DEBUG nova.objects.instance [None req-37cb8674-bf68-4b1b-ae68-296c49ed5420 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: 6e8d9959-7c9d-48ee-81a5-bbdc6234248f] Trying to apply a migration context that does not seem to be set for this instance {{(pid=63279) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 2426.105952] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] [instance: c1ac4af5-b01e-4175-844f-7a67b2ef7526] Instance has had 0 of 5 cleanup attempts {{(pid=63279) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11760}} [ 2426.262649] env[63279]: DEBUG oslo_concurrency.lockutils [None req-9c89442d-9f03-49f5-a3d1-fda65e70c8ce tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Acquiring lock "3832508d-5d12-42a2-93d8-61775907b2d2" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2426.262899] env[63279]: DEBUG oslo_concurrency.lockutils [None req-9c89442d-9f03-49f5-a3d1-fda65e70c8ce tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Lock "3832508d-5d12-42a2-93d8-61775907b2d2" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2426.389072] env[63279]: DEBUG nova.network.neutron [-] [instance: 6f839780-be92-4d99-a96d-1fc14c819599] Updating instance_info_cache with network_info: [] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2426.418715] env[63279]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-86e52ff6-ed6e-4974-a522-c0f7fc198d6d {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2426.429923] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd049cc5-a001-406c-831f-22789c8e750e {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2426.459925] env[63279]: DEBUG nova.compute.manager [req-a3bc2ecb-f564-483c-8224-92999cf1236d req-8ba7f313-3787-485a-b307-6bee121fd16f service nova] [instance: 
6f839780-be92-4d99-a96d-1fc14c819599] Detach interface failed, port_id=d0192def-50a0-40c0-9921-fbdf13e63ffb, reason: Instance 6f839780-be92-4d99-a96d-1fc14c819599 could not be found. {{(pid=63279) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11484}} [ 2426.488303] env[63279]: DEBUG oslo_vmware.api [None req-fbd37efa-dbe8-4925-9302-efee8e5b5950 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Task: {'id': task-2088217, 'name': CopyVirtualDisk_Task} progress is 21%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2426.585309] env[63279]: DEBUG oslo_concurrency.lockutils [None req-1e10cf47-4de8-4d9e-aba8-d0af4ee072cb tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Acquiring lock "6e8d9959-7c9d-48ee-81a5-bbdc6234248f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2426.585641] env[63279]: DEBUG oslo_concurrency.lockutils [None req-1e10cf47-4de8-4d9e-aba8-d0af4ee072cb tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Lock "6e8d9959-7c9d-48ee-81a5-bbdc6234248f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2426.585884] env[63279]: DEBUG oslo_concurrency.lockutils [None req-1e10cf47-4de8-4d9e-aba8-d0af4ee072cb tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Acquiring lock "6e8d9959-7c9d-48ee-81a5-bbdc6234248f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2426.586128] env[63279]: DEBUG oslo_concurrency.lockutils [None req-1e10cf47-4de8-4d9e-aba8-d0af4ee072cb tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Lock "6e8d9959-7c9d-48ee-81a5-bbdc6234248f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2426.586342] env[63279]: DEBUG oslo_concurrency.lockutils [None req-1e10cf47-4de8-4d9e-aba8-d0af4ee072cb tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Lock "6e8d9959-7c9d-48ee-81a5-bbdc6234248f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2426.588768] env[63279]: INFO nova.compute.manager [None req-1e10cf47-4de8-4d9e-aba8-d0af4ee072cb tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: 6e8d9959-7c9d-48ee-81a5-bbdc6234248f] Terminating instance [ 2426.612237] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] [instance: 2f5e22f6-ba70-4848-965b-eb1553115323] Instance has had 0 of 5 cleanup attempts {{(pid=63279) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11760}} [ 2426.645881] 
env[63279]: DEBUG nova.objects.instance [None req-a0b37831-7bbd-440a-8f01-2fbcf0166010 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Lazy-loading 'flavor' on Instance uuid 246b5346-93b1-4a84-921c-d028f3554d3d {{(pid=63279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2426.765903] env[63279]: DEBUG nova.compute.manager [None req-9c89442d-9f03-49f5-a3d1-fda65e70c8ce tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] [instance: 3832508d-5d12-42a2-93d8-61775907b2d2] Starting instance... {{(pid=63279) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 2426.891868] env[63279]: INFO nova.compute.manager [-] [instance: 6f839780-be92-4d99-a96d-1fc14c819599] Took 1.31 seconds to deallocate network for instance. [ 2426.991495] env[63279]: DEBUG oslo_vmware.api [None req-fbd37efa-dbe8-4925-9302-efee8e5b5950 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Task: {'id': task-2088217, 'name': CopyVirtualDisk_Task} progress is 43%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2427.029482] env[63279]: DEBUG oslo_concurrency.lockutils [None req-37cb8674-bf68-4b1b-ae68-296c49ed5420 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.009s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2427.093764] env[63279]: DEBUG nova.compute.manager [None req-1e10cf47-4de8-4d9e-aba8-d0af4ee072cb tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: 6e8d9959-7c9d-48ee-81a5-bbdc6234248f] Start destroying the instance on the hypervisor. 
{{(pid=63279) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2427.094019] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-1e10cf47-4de8-4d9e-aba8-d0af4ee072cb tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: 6e8d9959-7c9d-48ee-81a5-bbdc6234248f] Destroying instance {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2427.094946] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-278026f2-119d-475b-a818-5e8c024894a4 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2427.105362] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-1e10cf47-4de8-4d9e-aba8-d0af4ee072cb tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: 6e8d9959-7c9d-48ee-81a5-bbdc6234248f] Unregistering the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2427.105707] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-3a0e9324-8a3b-4e91-b961-99ceae4b8165 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2427.115971] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] [instance: 32dbef6d-d314-4fa6-972a-e7b1f22eb11d] Instance has had 0 of 5 cleanup attempts {{(pid=63279) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11760}} [ 2427.151116] env[63279]: DEBUG oslo_concurrency.lockutils [None req-a0b37831-7bbd-440a-8f01-2fbcf0166010 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Lock "246b5346-93b1-4a84-921c-d028f3554d3d" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.272s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2427.284697] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-1e10cf47-4de8-4d9e-aba8-d0af4ee072cb tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: 6e8d9959-7c9d-48ee-81a5-bbdc6234248f] Unregistered the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2427.284989] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-1e10cf47-4de8-4d9e-aba8-d0af4ee072cb tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: 6e8d9959-7c9d-48ee-81a5-bbdc6234248f] Deleting contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2427.285299] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-1e10cf47-4de8-4d9e-aba8-d0af4ee072cb tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Deleting the datastore file [datastore1] 6e8d9959-7c9d-48ee-81a5-bbdc6234248f {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2427.285609] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1ef3c01e-fa26-4363-8574-4441ffd91b93 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2427.288903] env[63279]: DEBUG oslo_concurrency.lockutils [None 
req-9c89442d-9f03-49f5-a3d1-fda65e70c8ce tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2427.289302] env[63279]: DEBUG oslo_concurrency.lockutils [None req-9c89442d-9f03-49f5-a3d1-fda65e70c8ce tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2427.291463] env[63279]: INFO nova.compute.claims [None req-9c89442d-9f03-49f5-a3d1-fda65e70c8ce tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] [instance: 3832508d-5d12-42a2-93d8-61775907b2d2] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2427.300965] env[63279]: DEBUG oslo_vmware.api [None req-1e10cf47-4de8-4d9e-aba8-d0af4ee072cb tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Waiting for the task: (returnval){ [ 2427.300965] env[63279]: value = "task-2088219" [ 2427.300965] env[63279]: _type = "Task" [ 2427.300965] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2427.311338] env[63279]: DEBUG oslo_vmware.api [None req-1e10cf47-4de8-4d9e-aba8-d0af4ee072cb tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Task: {'id': task-2088219, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2427.399840] env[63279]: DEBUG oslo_concurrency.lockutils [None req-4ae06451-8348-4fa0-89b9-c29961ec9216 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2427.490864] env[63279]: DEBUG oslo_vmware.api [None req-fbd37efa-dbe8-4925-9302-efee8e5b5950 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Task: {'id': task-2088217, 'name': CopyVirtualDisk_Task} progress is 66%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2427.621673] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] [instance: 4a9088e0-2992-4b18-8be9-6bc70633369b] Instance has had 0 of 5 cleanup attempts {{(pid=63279) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11760}} [ 2427.814155] env[63279]: DEBUG oslo_vmware.api [None req-1e10cf47-4de8-4d9e-aba8-d0af4ee072cb tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Task: {'id': task-2088219, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2427.990262] env[63279]: DEBUG oslo_vmware.api [None req-fbd37efa-dbe8-4925-9302-efee8e5b5950 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Task: {'id': task-2088217, 'name': CopyVirtualDisk_Task} progress is 88%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2428.040787] env[63279]: DEBUG nova.compute.manager [None req-7341b9eb-5a1e-4e6c-8f09-562c50495fbf tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 246b5346-93b1-4a84-921c-d028f3554d3d] Stashing vm_state: active {{(pid=63279) _prep_resize /opt/stack/nova/nova/compute/manager.py:6136}} [ 2428.125509] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] [instance: 5656c853-ac83-47be-83c4-979a9e87ab91] Instance has had 0 of 5 cleanup attempts {{(pid=63279) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11760}} [ 2428.312347] env[63279]: DEBUG oslo_vmware.api [None req-1e10cf47-4de8-4d9e-aba8-d0af4ee072cb tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Task: {'id': task-2088219, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.964792} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2428.312631] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-1e10cf47-4de8-4d9e-aba8-d0af4ee072cb tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Deleted the datastore file {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2428.312823] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-1e10cf47-4de8-4d9e-aba8-d0af4ee072cb tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: 6e8d9959-7c9d-48ee-81a5-bbdc6234248f] Deleted contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2428.312996] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-1e10cf47-4de8-4d9e-aba8-d0af4ee072cb tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: 6e8d9959-7c9d-48ee-81a5-bbdc6234248f] Instance destroyed {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2428.313197] env[63279]: INFO nova.compute.manager [None req-1e10cf47-4de8-4d9e-aba8-d0af4ee072cb tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: 6e8d9959-7c9d-48ee-81a5-bbdc6234248f] Took 1.22 seconds to destroy the instance on the hypervisor. [ 2428.313452] env[63279]: DEBUG oslo.service.loopingcall [None req-1e10cf47-4de8-4d9e-aba8-d0af4ee072cb tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2428.313624] env[63279]: DEBUG nova.compute.manager [-] [instance: 6e8d9959-7c9d-48ee-81a5-bbdc6234248f] Deallocating network for instance {{(pid=63279) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2428.313721] env[63279]: DEBUG nova.network.neutron [-] [instance: 6e8d9959-7c9d-48ee-81a5-bbdc6234248f] deallocate_for_instance() {{(pid=63279) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2428.401020] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f506b1c8-2a45-45e7-8593-f548b785556a {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2428.411411] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7fc181d-5d50-43dd-ae2f-863a3dfe332f {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2428.444068] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ae47bbf-79c0-40f7-beba-dbe70d442441 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2428.451149] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a469675-64d1-4e27-b46d-aafa78922d6b {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2428.464031] env[63279]: DEBUG nova.compute.provider_tree [None req-9c89442d-9f03-49f5-a3d1-fda65e70c8ce tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Inventory has not changed in ProviderTree for provider: 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2428.488943] env[63279]: DEBUG oslo_vmware.api [None req-fbd37efa-dbe8-4925-9302-efee8e5b5950 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Task: {'id': task-2088217, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.240634} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2428.489243] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-fbd37efa-dbe8-4925-9302-efee8e5b5950 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/4faf9f78-8c3c-4094-a5ab-792d6ef75b92/4faf9f78-8c3c-4094-a5ab-792d6ef75b92.vmdk to [datastore1] efddec10-b496-446e-a05a-72c9f2d86ed9/efddec10-b496-446e-a05a-72c9f2d86ed9.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2428.490036] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d1cbbbc-a632-4a5a-87a6-39679321460d {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2428.511530] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-fbd37efa-dbe8-4925-9302-efee8e5b5950 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] [instance: efddec10-b496-446e-a05a-72c9f2d86ed9] Reconfiguring VM instance instance-00000068 to attach disk [datastore1] efddec10-b496-446e-a05a-72c9f2d86ed9/efddec10-b496-446e-a05a-72c9f2d86ed9.vmdk or device None with type streamOptimized {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2428.512106] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-347977d9-04ca-4a28-9862-12bd8e7169d3 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2428.530871] env[63279]: DEBUG oslo_vmware.api [None req-fbd37efa-dbe8-4925-9302-efee8e5b5950 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Waiting for the task: (returnval){ [ 2428.530871] env[63279]: value = "task-2088220" [ 2428.530871] env[63279]: _type = "Task" [ 2428.530871] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2428.541066] env[63279]: DEBUG oslo_vmware.api [None req-fbd37efa-dbe8-4925-9302-efee8e5b5950 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Task: {'id': task-2088220, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2428.560900] env[63279]: DEBUG oslo_concurrency.lockutils [None req-7341b9eb-5a1e-4e6c-8f09-562c50495fbf tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2428.584580] env[63279]: DEBUG nova.compute.manager [req-4a4f2d60-9953-4ae3-a76f-b147fa33d8b2 req-26db76b6-3f43-4293-84b6-a5c281e1907d service nova] [instance: 6e8d9959-7c9d-48ee-81a5-bbdc6234248f] Received event network-vif-deleted-c72218a8-035f-4947-b145-503e00034664 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2428.585076] env[63279]: INFO nova.compute.manager [req-4a4f2d60-9953-4ae3-a76f-b147fa33d8b2 req-26db76b6-3f43-4293-84b6-a5c281e1907d service nova] [instance: 6e8d9959-7c9d-48ee-81a5-bbdc6234248f] Neutron deleted interface c72218a8-035f-4947-b145-503e00034664; detaching it from the instance and deleting it from the info cache [ 2428.585076] env[63279]: DEBUG nova.network.neutron [req-4a4f2d60-9953-4ae3-a76f-b147fa33d8b2 req-26db76b6-3f43-4293-84b6-a5c281e1907d service nova] [instance: 6e8d9959-7c9d-48ee-81a5-bbdc6234248f] Updating instance_info_cache with network_info: [] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2428.629449] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] [instance: 6388f912-ae70-4e8f-b8e4-ceb02e0f8a51] Instance has had 0 of 5 cleanup attempts {{(pid=63279) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11760}} [ 2428.970615] env[63279]: DEBUG nova.scheduler.client.report [None req-9c89442d-9f03-49f5-a3d1-fda65e70c8ce tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Inventory has not changed for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2429.041290] env[63279]: DEBUG oslo_vmware.api [None req-fbd37efa-dbe8-4925-9302-efee8e5b5950 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Task: {'id': task-2088220, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2429.055039] env[63279]: DEBUG nova.network.neutron [-] [instance: 6e8d9959-7c9d-48ee-81a5-bbdc6234248f] Updating instance_info_cache with network_info: [] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2429.088243] env[63279]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-41adf0dd-d65e-4624-95b2-1ca94bda87e1 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2429.097294] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cdabbbf2-d95e-4478-b05c-db114476144a {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2429.123246] env[63279]: DEBUG nova.compute.manager [req-4a4f2d60-9953-4ae3-a76f-b147fa33d8b2 req-26db76b6-3f43-4293-84b6-a5c281e1907d service nova] [instance: 6e8d9959-7c9d-48ee-81a5-bbdc6234248f] Detach interface failed, port_id=c72218a8-035f-4947-b145-503e00034664, reason: Instance 6e8d9959-7c9d-48ee-81a5-bbdc6234248f could not be found. {{(pid=63279) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11484}} [ 2429.131933] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2429.132211] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Cleaning up deleted instances with incomplete migration {{(pid=63279) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11785}} [ 2429.475813] env[63279]: DEBUG oslo_concurrency.lockutils [None req-9c89442d-9f03-49f5-a3d1-fda65e70c8ce tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.186s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2429.476547] env[63279]: DEBUG nova.compute.manager [None req-9c89442d-9f03-49f5-a3d1-fda65e70c8ce tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] [instance: 3832508d-5d12-42a2-93d8-61775907b2d2] Start building networks asynchronously for instance. 
{{(pid=63279) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 2429.479321] env[63279]: DEBUG oslo_concurrency.lockutils [None req-4ae06451-8348-4fa0-89b9-c29961ec9216 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 2.080s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2429.479585] env[63279]: DEBUG nova.objects.instance [None req-4ae06451-8348-4fa0-89b9-c29961ec9216 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Lazy-loading 'resources' on Instance uuid 6f839780-be92-4d99-a96d-1fc14c819599 {{(pid=63279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2429.541878] env[63279]: DEBUG oslo_vmware.api [None req-fbd37efa-dbe8-4925-9302-efee8e5b5950 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Task: {'id': task-2088220, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2429.557977] env[63279]: INFO nova.compute.manager [-] [instance: 6e8d9959-7c9d-48ee-81a5-bbdc6234248f] Took 1.24 seconds to deallocate network for instance. [ 2429.636259] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2429.983027] env[63279]: DEBUG nova.compute.utils [None req-9c89442d-9f03-49f5-a3d1-fda65e70c8ce tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Using /dev/sd instead of None {{(pid=63279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2429.987585] env[63279]: DEBUG nova.compute.manager [None req-9c89442d-9f03-49f5-a3d1-fda65e70c8ce tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] [instance: 3832508d-5d12-42a2-93d8-61775907b2d2] Allocating IP information in the background. 
{{(pid=63279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 2429.987585] env[63279]: DEBUG nova.network.neutron [None req-9c89442d-9f03-49f5-a3d1-fda65e70c8ce tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] [instance: 3832508d-5d12-42a2-93d8-61775907b2d2] allocate_for_instance() {{(pid=63279) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2430.031428] env[63279]: DEBUG nova.policy [None req-9c89442d-9f03-49f5-a3d1-fda65e70c8ce tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '99f3a4d8a93c4bb98ea3c4bf110c066b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7674483744fe490b8cbe75532dfad77c', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63279) authorize /opt/stack/nova/nova/policy.py:192}} [ 2430.044379] env[63279]: DEBUG oslo_vmware.api [None req-fbd37efa-dbe8-4925-9302-efee8e5b5950 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Task: {'id': task-2088220, 'name': ReconfigVM_Task, 'duration_secs': 1.323691} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2430.044659] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-fbd37efa-dbe8-4925-9302-efee8e5b5950 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] [instance: efddec10-b496-446e-a05a-72c9f2d86ed9] Reconfigured VM instance instance-00000068 to attach disk [datastore1] efddec10-b496-446e-a05a-72c9f2d86ed9/efddec10-b496-446e-a05a-72c9f2d86ed9.vmdk or device None with type streamOptimized {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2430.045306] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-edeae9bf-2757-49a2-a6b0-02c3a17433b7 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2430.052044] env[63279]: DEBUG oslo_vmware.api [None req-fbd37efa-dbe8-4925-9302-efee8e5b5950 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Waiting for the task: (returnval){ [ 2430.052044] env[63279]: value = "task-2088221" [ 2430.052044] env[63279]: _type = "Task" [ 2430.052044] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2430.059691] env[63279]: DEBUG oslo_vmware.api [None req-fbd37efa-dbe8-4925-9302-efee8e5b5950 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Task: {'id': task-2088221, 'name': Rename_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2430.063739] env[63279]: DEBUG oslo_concurrency.lockutils [None req-1e10cf47-4de8-4d9e-aba8-d0af4ee072cb tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2430.105131] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4c6631b-3dfa-4f71-bb78-2da928fecff6 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2430.112270] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79180cc8-3b3d-4a6e-a70f-cde4f31e531d {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2430.142839] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-667b7834-7b5d-4d83-b431-c109db135bd4 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2430.150096] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c055c0b-fcca-4208-b12d-99f4fc620b31 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2430.163453] env[63279]: DEBUG nova.compute.provider_tree [None req-4ae06451-8348-4fa0-89b9-c29961ec9216 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Inventory has not changed in ProviderTree for provider: 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2430.334105] env[63279]: DEBUG nova.network.neutron [None req-9c89442d-9f03-49f5-a3d1-fda65e70c8ce tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] [instance: 3832508d-5d12-42a2-93d8-61775907b2d2] Successfully created port: 07a9377a-5b9b-41f4-9f94-eb47c9b38bc4 {{(pid=63279) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2430.487957] env[63279]: DEBUG nova.compute.manager [None req-9c89442d-9f03-49f5-a3d1-fda65e70c8ce tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] [instance: 3832508d-5d12-42a2-93d8-61775907b2d2] Start building block device mappings for instance. {{(pid=63279) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 2430.564103] env[63279]: DEBUG oslo_vmware.api [None req-fbd37efa-dbe8-4925-9302-efee8e5b5950 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Task: {'id': task-2088221, 'name': Rename_Task, 'duration_secs': 0.273505} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2430.564316] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-fbd37efa-dbe8-4925-9302-efee8e5b5950 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] [instance: efddec10-b496-446e-a05a-72c9f2d86ed9] Powering on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2430.564587] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c37d07b8-9fb7-4bc1-928c-2b4a05185de5 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2430.571337] env[63279]: DEBUG oslo_vmware.api [None req-fbd37efa-dbe8-4925-9302-efee8e5b5950 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Waiting for the task: (returnval){ [ 2430.571337] env[63279]: value = "task-2088222" [ 2430.571337] env[63279]: _type = "Task" [ 2430.571337] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2430.579359] env[63279]: DEBUG oslo_vmware.api [None req-fbd37efa-dbe8-4925-9302-efee8e5b5950 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Task: {'id': task-2088222, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2430.666842] env[63279]: DEBUG nova.scheduler.client.report [None req-4ae06451-8348-4fa0-89b9-c29961ec9216 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Inventory has not changed for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2431.081781] env[63279]: DEBUG oslo_vmware.api [None req-fbd37efa-dbe8-4925-9302-efee8e5b5950 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Task: {'id': task-2088222, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2431.171899] env[63279]: DEBUG oslo_concurrency.lockutils [None req-4ae06451-8348-4fa0-89b9-c29961ec9216 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.692s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2431.174629] env[63279]: DEBUG oslo_concurrency.lockutils [None req-7341b9eb-5a1e-4e6c-8f09-562c50495fbf tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 2.614s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2431.201954] env[63279]: INFO nova.scheduler.client.report [None req-4ae06451-8348-4fa0-89b9-c29961ec9216 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Deleted allocations for instance 6f839780-be92-4d99-a96d-1fc14c819599 [ 2431.498838] env[63279]: DEBUG nova.compute.manager [None req-9c89442d-9f03-49f5-a3d1-fda65e70c8ce tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] [instance: 3832508d-5d12-42a2-93d8-61775907b2d2] Start spawning the instance on the hypervisor. {{(pid=63279) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 2431.529421] env[63279]: DEBUG nova.virt.hardware [None req-9c89442d-9f03-49f5-a3d1-fda65e70c8ce tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-13T17:30:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-13T17:30:01Z,direct_url=,disk_format='vmdk',id=30887889-e45b-4f67-8b3c-16216e594a90,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a935e86eee0a4c38adfe0367d2097a61',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-13T17:30:02Z,virtual_size=,visibility=), allow threads: False {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2431.529699] env[63279]: DEBUG nova.virt.hardware [None req-9c89442d-9f03-49f5-a3d1-fda65e70c8ce tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Flavor limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2431.529874] env[63279]: DEBUG nova.virt.hardware [None req-9c89442d-9f03-49f5-a3d1-fda65e70c8ce tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Image limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2431.530100] env[63279]: DEBUG nova.virt.hardware [None req-9c89442d-9f03-49f5-a3d1-fda65e70c8ce tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Flavor pref 0:0:0 {{(pid=63279) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2431.530273] env[63279]: DEBUG nova.virt.hardware [None req-9c89442d-9f03-49f5-a3d1-fda65e70c8ce tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Image pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2431.530436] env[63279]: DEBUG nova.virt.hardware [None req-9c89442d-9f03-49f5-a3d1-fda65e70c8ce tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2431.530659] env[63279]: DEBUG nova.virt.hardware [None req-9c89442d-9f03-49f5-a3d1-fda65e70c8ce tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 2431.530838] env[63279]: DEBUG nova.virt.hardware [None req-9c89442d-9f03-49f5-a3d1-fda65e70c8ce tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 2431.531031] env[63279]: DEBUG nova.virt.hardware [None req-9c89442d-9f03-49f5-a3d1-fda65e70c8ce tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Got 1 possible topologies {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2431.531214] env[63279]: DEBUG nova.virt.hardware [None req-9c89442d-9f03-49f5-a3d1-fda65e70c8ce tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2431.531427] env[63279]: DEBUG nova.virt.hardware [None req-9c89442d-9f03-49f5-a3d1-fda65e70c8ce tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2431.532353] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61a21b86-2ab7-4845-84b5-6fb639cfee2e {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2431.540154] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-edadda23-44d3-41b0-8cf1-1a991c955847 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2431.580992] env[63279]: DEBUG oslo_vmware.api [None req-fbd37efa-dbe8-4925-9302-efee8e5b5950 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Task: {'id': task-2088222, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2431.680204] env[63279]: INFO nova.compute.claims [None req-7341b9eb-5a1e-4e6c-8f09-562c50495fbf tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 246b5346-93b1-4a84-921c-d028f3554d3d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2431.709497] env[63279]: DEBUG oslo_concurrency.lockutils [None req-4ae06451-8348-4fa0-89b9-c29961ec9216 tempest-AttachInterfacesTestJSON-1494945392 tempest-AttachInterfacesTestJSON-1494945392-project-member] Lock "6f839780-be92-4d99-a96d-1fc14c819599" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 8.904s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2432.082517] env[63279]: DEBUG oslo_vmware.api [None req-fbd37efa-dbe8-4925-9302-efee8e5b5950 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Task: {'id': task-2088222, 'name': PowerOnVM_Task, 'duration_secs': 1.183465} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2432.082806] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-fbd37efa-dbe8-4925-9302-efee8e5b5950 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] [instance: efddec10-b496-446e-a05a-72c9f2d86ed9] Powered on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2432.172246] env[63279]: DEBUG nova.compute.manager [None req-fbd37efa-dbe8-4925-9302-efee8e5b5950 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] [instance: efddec10-b496-446e-a05a-72c9f2d86ed9] Checking state {{(pid=63279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2432.173160] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe7d8182-15e6-47d1-8c9d-d1f153499737 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2432.186045] env[63279]: INFO nova.compute.resource_tracker [None req-7341b9eb-5a1e-4e6c-8f09-562c50495fbf tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 246b5346-93b1-4a84-921c-d028f3554d3d] Updating resource usage from migration 5f8a398a-1225-4b02-a73e-9bf01aba7b68 [ 2432.277208] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90966163-8fcb-452f-9767-6c7d4df06695 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2432.284956] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a87e60f-c58a-4272-b10c-d627349aa0b6 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2432.315701] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8e8f21c-93ed-494f-b97f-114fb528e32c {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2432.322595] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-2969f446-4b47-4ac2-ae66-7d2e33d3f435 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2432.335296] env[63279]: DEBUG nova.compute.provider_tree [None req-7341b9eb-5a1e-4e6c-8f09-562c50495fbf tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Inventory has not changed in ProviderTree for provider: 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2432.691380] env[63279]: DEBUG oslo_concurrency.lockutils [None req-fbd37efa-dbe8-4925-9302-efee8e5b5950 tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Lock "efddec10-b496-446e-a05a-72c9f2d86ed9" "released" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: held 33.039s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2432.838246] env[63279]: DEBUG nova.scheduler.client.report [None req-7341b9eb-5a1e-4e6c-8f09-562c50495fbf tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Inventory has not changed for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2433.346015] env[63279]: DEBUG oslo_concurrency.lockutils [None req-7341b9eb-5a1e-4e6c-8f09-562c50495fbf tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.169s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2433.346015] env[63279]: INFO nova.compute.manager [None req-7341b9eb-5a1e-4e6c-8f09-562c50495fbf tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 246b5346-93b1-4a84-921c-d028f3554d3d] Migrating [ 2433.350879] env[63279]: DEBUG oslo_concurrency.lockutils [None req-1e10cf47-4de8-4d9e-aba8-d0af4ee072cb tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 3.287s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2433.351682] env[63279]: DEBUG nova.objects.instance [None req-1e10cf47-4de8-4d9e-aba8-d0af4ee072cb tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Lazy-loading 'resources' on Instance uuid 6e8d9959-7c9d-48ee-81a5-bbdc6234248f {{(pid=63279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2433.862704] env[63279]: DEBUG oslo_concurrency.lockutils [None req-7341b9eb-5a1e-4e6c-8f09-562c50495fbf tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Acquiring lock "refresh_cache-246b5346-93b1-4a84-921c-d028f3554d3d" {{(pid=63279) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2433.862888] env[63279]: DEBUG oslo_concurrency.lockutils [None req-7341b9eb-5a1e-4e6c-8f09-562c50495fbf tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Acquired lock "refresh_cache-246b5346-93b1-4a84-921c-d028f3554d3d" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2433.863162] env[63279]: DEBUG nova.network.neutron [None req-7341b9eb-5a1e-4e6c-8f09-562c50495fbf tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 246b5346-93b1-4a84-921c-d028f3554d3d] Building network info cache for instance {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2433.956013] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8760d34-6cce-415e-8af5-a351e5dd5fdf {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2433.963577] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5cb706d6-0296-44c6-a46a-a78553974fd5 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2433.992018] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa5a748d-d35a-4d92-a8ab-498da00c2b0d {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2433.998890] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ca2e225-dfe4-40a6-b742-8004185e847d {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2434.012492] env[63279]: DEBUG nova.compute.provider_tree [None req-1e10cf47-4de8-4d9e-aba8-d0af4ee072cb tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Inventory has not changed in ProviderTree for provider: 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2434.515882] env[63279]: DEBUG nova.scheduler.client.report [None req-1e10cf47-4de8-4d9e-aba8-d0af4ee072cb tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Inventory has not changed for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2434.637955] env[63279]: DEBUG nova.network.neutron [None req-7341b9eb-5a1e-4e6c-8f09-562c50495fbf tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 246b5346-93b1-4a84-921c-d028f3554d3d] Updating instance_info_cache with network_info: [{"id": "be3c9abe-2016-4ca7-9982-16776729e694", "address": "fa:16:3e:95:a8:19", "network": {"id": "1237aa69-f92f-4996-893d-4007ba590d1d", "bridge": "br-int", "label": 
"tempest-ServerActionsTestOtherB-544783943-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.237", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "996f8d6e14a14ac39f207eced547ef33", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d88b750a-0e7d-4f16-8bd5-8e6d5743b720", "external-id": "nsx-vlan-transportzone-715", "segmentation_id": 715, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbe3c9abe-20", "ovs_interfaceid": "be3c9abe-2016-4ca7-9982-16776729e694", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2435.021276] env[63279]: DEBUG oslo_concurrency.lockutils [None req-1e10cf47-4de8-4d9e-aba8-d0af4ee072cb tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.670s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2435.043494] env[63279]: INFO nova.scheduler.client.report [None req-1e10cf47-4de8-4d9e-aba8-d0af4ee072cb tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Deleted allocations for instance 6e8d9959-7c9d-48ee-81a5-bbdc6234248f [ 2435.140673] env[63279]: DEBUG oslo_concurrency.lockutils [None req-7341b9eb-5a1e-4e6c-8f09-562c50495fbf tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Releasing lock "refresh_cache-246b5346-93b1-4a84-921c-d028f3554d3d" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2435.552990] env[63279]: DEBUG oslo_concurrency.lockutils [None req-1e10cf47-4de8-4d9e-aba8-d0af4ee072cb tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Lock "6e8d9959-7c9d-48ee-81a5-bbdc6234248f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 8.967s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2436.135825] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2436.643967] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._cleanup_running_deleted_instances {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2436.643967] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Getting list of instances from cluster (obj){ [ 2436.643967] env[63279]: value = 
"domain-c8" [ 2436.643967] env[63279]: _type = "ClusterComputeResource" [ 2436.643967] env[63279]: } {{(pid=63279) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 2436.650046] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f0044df-ae11-465f-9f43-51af03eea6b2 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2436.656288] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5752d103-c9e6-4b37-ba38-44506f1244c8 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2436.665595] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Got total of 4 instances {{(pid=63279) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 2436.687060] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-7341b9eb-5a1e-4e6c-8f09-562c50495fbf tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 246b5346-93b1-4a84-921c-d028f3554d3d] Updating instance '246b5346-93b1-4a84-921c-d028f3554d3d' progress to 0 {{(pid=63279) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2437.195108] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-7341b9eb-5a1e-4e6c-8f09-562c50495fbf tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 246b5346-93b1-4a84-921c-d028f3554d3d] Powering off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2437.195108] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d2bb3ea2-f9f2-429c-bf0e-84bf3ec793f1 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2437.203196] env[63279]: DEBUG oslo_vmware.api [None req-7341b9eb-5a1e-4e6c-8f09-562c50495fbf tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Waiting for the task: (returnval){ [ 2437.203196] env[63279]: value = "task-2088223" [ 2437.203196] env[63279]: _type = "Task" [ 2437.203196] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2437.214244] env[63279]: DEBUG oslo_vmware.api [None req-7341b9eb-5a1e-4e6c-8f09-562c50495fbf tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Task: {'id': task-2088223, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2437.382739] env[63279]: DEBUG nova.compute.manager [req-b57c3dd9-68b0-423e-8dbd-04203a344a1c req-527333c2-515b-4afd-8240-38cc45e1d12b service nova] [instance: 3832508d-5d12-42a2-93d8-61775907b2d2] Received event network-vif-plugged-07a9377a-5b9b-41f4-9f94-eb47c9b38bc4 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2437.383158] env[63279]: DEBUG oslo_concurrency.lockutils [req-b57c3dd9-68b0-423e-8dbd-04203a344a1c req-527333c2-515b-4afd-8240-38cc45e1d12b service nova] Acquiring lock "3832508d-5d12-42a2-93d8-61775907b2d2-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2437.383373] env[63279]: DEBUG oslo_concurrency.lockutils [req-b57c3dd9-68b0-423e-8dbd-04203a344a1c req-527333c2-515b-4afd-8240-38cc45e1d12b service nova] Lock "3832508d-5d12-42a2-93d8-61775907b2d2-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2437.383373] env[63279]: DEBUG oslo_concurrency.lockutils [req-b57c3dd9-68b0-423e-8dbd-04203a344a1c req-527333c2-515b-4afd-8240-38cc45e1d12b service nova] Lock "3832508d-5d12-42a2-93d8-61775907b2d2-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2437.383598] env[63279]: DEBUG nova.compute.manager [req-b57c3dd9-68b0-423e-8dbd-04203a344a1c req-527333c2-515b-4afd-8240-38cc45e1d12b service nova] [instance: 3832508d-5d12-42a2-93d8-61775907b2d2] No waiting events found dispatching network-vif-plugged-07a9377a-5b9b-41f4-9f94-eb47c9b38bc4 {{(pid=63279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 2437.383689] env[63279]: WARNING nova.compute.manager [req-b57c3dd9-68b0-423e-8dbd-04203a344a1c req-527333c2-515b-4afd-8240-38cc45e1d12b service nova] [instance: 3832508d-5d12-42a2-93d8-61775907b2d2] Received unexpected event network-vif-plugged-07a9377a-5b9b-41f4-9f94-eb47c9b38bc4 for instance with vm_state building and task_state spawning. [ 2437.480399] env[63279]: DEBUG nova.network.neutron [None req-9c89442d-9f03-49f5-a3d1-fda65e70c8ce tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] [instance: 3832508d-5d12-42a2-93d8-61775907b2d2] Successfully updated port: 07a9377a-5b9b-41f4-9f94-eb47c9b38bc4 {{(pid=63279) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2437.713600] env[63279]: DEBUG oslo_vmware.api [None req-7341b9eb-5a1e-4e6c-8f09-562c50495fbf tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Task: {'id': task-2088223, 'name': PowerOffVM_Task, 'duration_secs': 0.213555} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2437.714050] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-7341b9eb-5a1e-4e6c-8f09-562c50495fbf tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 246b5346-93b1-4a84-921c-d028f3554d3d] Powered off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2437.714050] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-7341b9eb-5a1e-4e6c-8f09-562c50495fbf tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 246b5346-93b1-4a84-921c-d028f3554d3d] Updating instance '246b5346-93b1-4a84-921c-d028f3554d3d' progress to 17 {{(pid=63279) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2437.879018] env[63279]: DEBUG oslo_concurrency.lockutils [None req-6342cb35-e807-4fde-a464-f2623c7405da tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Acquiring lock "1520999b-e3e9-41b3-82e4-91bb556e96c4" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2437.879018] env[63279]: DEBUG oslo_concurrency.lockutils [None req-6342cb35-e807-4fde-a464-f2623c7405da tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Lock "1520999b-e3e9-41b3-82e4-91bb556e96c4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2437.962663] env[63279]: DEBUG oslo_concurrency.lockutils [None req-27cdefc2-b38d-4179-a14e-4c849b15f7d9 tempest-ServerGroupTestJSON-1647649028 tempest-ServerGroupTestJSON-1647649028-project-member] Acquiring lock "c24509d7-0ef2-4a5e-843d-c843888118cc" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2437.962897] env[63279]: DEBUG oslo_concurrency.lockutils [None req-27cdefc2-b38d-4179-a14e-4c849b15f7d9 tempest-ServerGroupTestJSON-1647649028 tempest-ServerGroupTestJSON-1647649028-project-member] Lock "c24509d7-0ef2-4a5e-843d-c843888118cc" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2437.983468] env[63279]: DEBUG oslo_concurrency.lockutils [None req-9c89442d-9f03-49f5-a3d1-fda65e70c8ce tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Acquiring lock "refresh_cache-3832508d-5d12-42a2-93d8-61775907b2d2" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2437.983579] env[63279]: DEBUG oslo_concurrency.lockutils [None req-9c89442d-9f03-49f5-a3d1-fda65e70c8ce tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Acquired lock "refresh_cache-3832508d-5d12-42a2-93d8-61775907b2d2" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2437.983738] env[63279]: DEBUG 
nova.network.neutron [None req-9c89442d-9f03-49f5-a3d1-fda65e70c8ce tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] [instance: 3832508d-5d12-42a2-93d8-61775907b2d2] Building network info cache for instance {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2438.220659] env[63279]: DEBUG nova.virt.hardware [None req-7341b9eb-5a1e-4e6c-8f09-562c50495fbf tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-13T17:30:21Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=30887889-e45b-4f67-8b3c-16216e594a90,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2438.220923] env[63279]: DEBUG nova.virt.hardware [None req-7341b9eb-5a1e-4e6c-8f09-562c50495fbf tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Flavor limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2438.221096] env[63279]: DEBUG nova.virt.hardware [None req-7341b9eb-5a1e-4e6c-8f09-562c50495fbf tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Image limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2438.221341] env[63279]: DEBUG nova.virt.hardware [None req-7341b9eb-5a1e-4e6c-8f09-562c50495fbf tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Flavor pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2438.221540] env[63279]: DEBUG nova.virt.hardware [None req-7341b9eb-5a1e-4e6c-8f09-562c50495fbf tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Image pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2438.221702] env[63279]: DEBUG nova.virt.hardware [None req-7341b9eb-5a1e-4e6c-8f09-562c50495fbf tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2438.221913] env[63279]: DEBUG nova.virt.hardware [None req-7341b9eb-5a1e-4e6c-8f09-562c50495fbf tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 2438.222094] env[63279]: DEBUG nova.virt.hardware [None req-7341b9eb-5a1e-4e6c-8f09-562c50495fbf tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63279) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:475}} [ 2438.222270] env[63279]: DEBUG nova.virt.hardware [None req-7341b9eb-5a1e-4e6c-8f09-562c50495fbf tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Got 1 possible topologies {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2438.222451] env[63279]: DEBUG nova.virt.hardware [None req-7341b9eb-5a1e-4e6c-8f09-562c50495fbf tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2438.222627] env[63279]: DEBUG nova.virt.hardware [None req-7341b9eb-5a1e-4e6c-8f09-562c50495fbf tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2438.228634] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-14eab9f8-df8a-4be5-89b4-ba42d0c8436b {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2438.244123] env[63279]: DEBUG oslo_vmware.api [None req-7341b9eb-5a1e-4e6c-8f09-562c50495fbf tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Waiting for the task: (returnval){ [ 2438.244123] env[63279]: value = "task-2088224" [ 2438.244123] env[63279]: _type = "Task" [ 2438.244123] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2438.252997] env[63279]: DEBUG oslo_vmware.api [None req-7341b9eb-5a1e-4e6c-8f09-562c50495fbf tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Task: {'id': task-2088224, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2438.382990] env[63279]: DEBUG nova.compute.manager [None req-6342cb35-e807-4fde-a464-f2623c7405da tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: 1520999b-e3e9-41b3-82e4-91bb556e96c4] Starting instance... {{(pid=63279) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 2438.465712] env[63279]: DEBUG nova.compute.manager [None req-27cdefc2-b38d-4179-a14e-4c849b15f7d9 tempest-ServerGroupTestJSON-1647649028 tempest-ServerGroupTestJSON-1647649028-project-member] [instance: c24509d7-0ef2-4a5e-843d-c843888118cc] Starting instance... {{(pid=63279) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 2438.531424] env[63279]: DEBUG nova.network.neutron [None req-9c89442d-9f03-49f5-a3d1-fda65e70c8ce tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] [instance: 3832508d-5d12-42a2-93d8-61775907b2d2] Instance cache missing network info. 
{{(pid=63279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2438.686445] env[63279]: DEBUG nova.network.neutron [None req-9c89442d-9f03-49f5-a3d1-fda65e70c8ce tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] [instance: 3832508d-5d12-42a2-93d8-61775907b2d2] Updating instance_info_cache with network_info: [{"id": "07a9377a-5b9b-41f4-9f94-eb47c9b38bc4", "address": "fa:16:3e:4b:19:18", "network": {"id": "f7000655-b20b-461d-9d08-f4cb8a85522e", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-686033866-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7674483744fe490b8cbe75532dfad77c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cbe1725d-6711-4e92-9a4e-d4802651e7d0", "external-id": "nsx-vlan-transportzone-679", "segmentation_id": 679, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap07a9377a-5b", "ovs_interfaceid": "07a9377a-5b9b-41f4-9f94-eb47c9b38bc4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2438.756323] env[63279]: DEBUG oslo_vmware.api [None req-7341b9eb-5a1e-4e6c-8f09-562c50495fbf tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Task: {'id': task-2088224, 'name': ReconfigVM_Task, 'duration_secs': 0.206862} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2438.756700] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-7341b9eb-5a1e-4e6c-8f09-562c50495fbf tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 246b5346-93b1-4a84-921c-d028f3554d3d] Updating instance '246b5346-93b1-4a84-921c-d028f3554d3d' progress to 33 {{(pid=63279) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2438.907029] env[63279]: DEBUG oslo_concurrency.lockutils [None req-6342cb35-e807-4fde-a464-f2623c7405da tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2438.907029] env[63279]: DEBUG oslo_concurrency.lockutils [None req-6342cb35-e807-4fde-a464-f2623c7405da tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2438.908620] env[63279]: INFO nova.compute.claims [None req-6342cb35-e807-4fde-a464-f2623c7405da tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: 1520999b-e3e9-41b3-82e4-91bb556e96c4] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2438.987883] env[63279]: DEBUG oslo_concurrency.lockutils [None req-27cdefc2-b38d-4179-a14e-4c849b15f7d9 tempest-ServerGroupTestJSON-1647649028 tempest-ServerGroupTestJSON-1647649028-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2439.189364] env[63279]: DEBUG oslo_concurrency.lockutils [None req-9c89442d-9f03-49f5-a3d1-fda65e70c8ce tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Releasing lock "refresh_cache-3832508d-5d12-42a2-93d8-61775907b2d2" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2439.189714] env[63279]: DEBUG nova.compute.manager [None req-9c89442d-9f03-49f5-a3d1-fda65e70c8ce tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] [instance: 3832508d-5d12-42a2-93d8-61775907b2d2] Instance network_info: |[{"id": "07a9377a-5b9b-41f4-9f94-eb47c9b38bc4", "address": "fa:16:3e:4b:19:18", "network": {"id": "f7000655-b20b-461d-9d08-f4cb8a85522e", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-686033866-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7674483744fe490b8cbe75532dfad77c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": 
"cbe1725d-6711-4e92-9a4e-d4802651e7d0", "external-id": "nsx-vlan-transportzone-679", "segmentation_id": 679, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap07a9377a-5b", "ovs_interfaceid": "07a9377a-5b9b-41f4-9f94-eb47c9b38bc4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 2439.190188] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-9c89442d-9f03-49f5-a3d1-fda65e70c8ce tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] [instance: 3832508d-5d12-42a2-93d8-61775907b2d2] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:4b:19:18', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'cbe1725d-6711-4e92-9a4e-d4802651e7d0', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '07a9377a-5b9b-41f4-9f94-eb47c9b38bc4', 'vif_model': 'vmxnet3'}] {{(pid=63279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2439.197539] env[63279]: DEBUG oslo.service.loopingcall [None req-9c89442d-9f03-49f5-a3d1-fda65e70c8ce tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2439.197761] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3832508d-5d12-42a2-93d8-61775907b2d2] Creating VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2439.197988] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7240bfc7-980b-475a-8125-664a5a1ce3d8 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2439.217778] env[63279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2439.217778] env[63279]: value = "task-2088225" [ 2439.217778] env[63279]: _type = "Task" [ 2439.217778] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2439.225230] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2088225, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2439.263670] env[63279]: DEBUG nova.virt.hardware [None req-7341b9eb-5a1e-4e6c-8f09-562c50495fbf tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-13T17:30:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=30887889-e45b-4f67-8b3c-16216e594a90,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2439.263876] env[63279]: DEBUG nova.virt.hardware [None req-7341b9eb-5a1e-4e6c-8f09-562c50495fbf tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Flavor limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2439.264046] env[63279]: DEBUG nova.virt.hardware [None req-7341b9eb-5a1e-4e6c-8f09-562c50495fbf tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Image limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2439.264240] env[63279]: DEBUG nova.virt.hardware [None req-7341b9eb-5a1e-4e6c-8f09-562c50495fbf tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Flavor pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2439.264392] env[63279]: DEBUG nova.virt.hardware [None req-7341b9eb-5a1e-4e6c-8f09-562c50495fbf tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Image pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2439.264546] env[63279]: DEBUG nova.virt.hardware [None req-7341b9eb-5a1e-4e6c-8f09-562c50495fbf tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2439.264756] env[63279]: DEBUG nova.virt.hardware [None req-7341b9eb-5a1e-4e6c-8f09-562c50495fbf tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 2439.264918] env[63279]: DEBUG nova.virt.hardware [None req-7341b9eb-5a1e-4e6c-8f09-562c50495fbf tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 2439.265097] env[63279]: DEBUG nova.virt.hardware [None req-7341b9eb-5a1e-4e6c-8f09-562c50495fbf tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] 
Got 1 possible topologies {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2439.265266] env[63279]: DEBUG nova.virt.hardware [None req-7341b9eb-5a1e-4e6c-8f09-562c50495fbf tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2439.265441] env[63279]: DEBUG nova.virt.hardware [None req-7341b9eb-5a1e-4e6c-8f09-562c50495fbf tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2439.270715] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-7341b9eb-5a1e-4e6c-8f09-562c50495fbf tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 246b5346-93b1-4a84-921c-d028f3554d3d] Reconfiguring VM instance instance-0000006f to detach disk 2000 {{(pid=63279) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 2439.271058] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-85386bf0-8f3c-42e8-aac6-67a54d395459 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2439.290805] env[63279]: DEBUG oslo_vmware.api [None req-7341b9eb-5a1e-4e6c-8f09-562c50495fbf tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Waiting for the task: (returnval){ [ 2439.290805] env[63279]: value = "task-2088226" [ 2439.290805] env[63279]: _type = "Task" [ 2439.290805] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2439.299122] env[63279]: DEBUG oslo_vmware.api [None req-7341b9eb-5a1e-4e6c-8f09-562c50495fbf tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Task: {'id': task-2088226, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2439.411030] env[63279]: DEBUG nova.compute.manager [req-5e1fa2cb-f9a9-495c-9afd-019931a7ac89 req-6124b318-9dbf-44b0-bd35-8f5eb02bde75 service nova] [instance: 3832508d-5d12-42a2-93d8-61775907b2d2] Received event network-changed-07a9377a-5b9b-41f4-9f94-eb47c9b38bc4 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2439.411280] env[63279]: DEBUG nova.compute.manager [req-5e1fa2cb-f9a9-495c-9afd-019931a7ac89 req-6124b318-9dbf-44b0-bd35-8f5eb02bde75 service nova] [instance: 3832508d-5d12-42a2-93d8-61775907b2d2] Refreshing instance network info cache due to event network-changed-07a9377a-5b9b-41f4-9f94-eb47c9b38bc4. 
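The CPU-topology lines above boil down to one rule: with no explicit limits on the flavor or image, sockets, cores and threads each default to a maximum of 65536, and every combination whose product equals the flavor's vCPU count is a possible topology (threads are capped at 1 when "allow threads: False" is reported). A simplified illustration of that rule, not Nova's exact code, which reproduces the single [1:1:1] result for the one-vCPU m1.nano flavor:

    import itertools

    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536,
                            max_threads=65536, allow_threads=False):
        if not allow_threads:
            max_threads = 1
        divisors = [d for d in range(1, vcpus + 1) if vcpus % d == 0]
        for sockets, cores, threads in itertools.product(divisors, repeat=3):
            if (sockets * cores * threads == vcpus
                    and sockets <= max_sockets
                    and cores <= max_cores
                    and threads <= max_threads):
                yield (sockets, cores, threads)

    print(list(possible_topologies(1)))   # [(1, 1, 1)], as logged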
{{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11655}} [ 2439.411559] env[63279]: DEBUG oslo_concurrency.lockutils [req-5e1fa2cb-f9a9-495c-9afd-019931a7ac89 req-6124b318-9dbf-44b0-bd35-8f5eb02bde75 service nova] Acquiring lock "refresh_cache-3832508d-5d12-42a2-93d8-61775907b2d2" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2439.411732] env[63279]: DEBUG oslo_concurrency.lockutils [req-5e1fa2cb-f9a9-495c-9afd-019931a7ac89 req-6124b318-9dbf-44b0-bd35-8f5eb02bde75 service nova] Acquired lock "refresh_cache-3832508d-5d12-42a2-93d8-61775907b2d2" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2439.411928] env[63279]: DEBUG nova.network.neutron [req-5e1fa2cb-f9a9-495c-9afd-019931a7ac89 req-6124b318-9dbf-44b0-bd35-8f5eb02bde75 service nova] [instance: 3832508d-5d12-42a2-93d8-61775907b2d2] Refreshing network info cache for port 07a9377a-5b9b-41f4-9f94-eb47c9b38bc4 {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2439.727360] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2088225, 'name': CreateVM_Task, 'duration_secs': 0.475372} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2439.727508] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3832508d-5d12-42a2-93d8-61775907b2d2] Created VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2439.728280] env[63279]: DEBUG oslo_concurrency.lockutils [None req-9c89442d-9f03-49f5-a3d1-fda65e70c8ce tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2439.728491] env[63279]: DEBUG oslo_concurrency.lockutils [None req-9c89442d-9f03-49f5-a3d1-fda65e70c8ce tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2439.728842] env[63279]: DEBUG oslo_concurrency.lockutils [None req-9c89442d-9f03-49f5-a3d1-fda65e70c8ce tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2439.729142] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bed276ad-80ca-4ef8-8089-cb27fe301b67 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2439.734019] env[63279]: DEBUG oslo_vmware.api [None req-9c89442d-9f03-49f5-a3d1-fda65e70c8ce tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Waiting for the task: (returnval){ [ 2439.734019] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52bb274d-7e59-b3b8-12ad-119fe6861999" [ 2439.734019] env[63279]: _type = "Task" [ 2439.734019] env[63279]: } to complete. 
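The Acquiring/Acquired/Releasing lock "refresh_cache-<uuid>" messages around the network-info refresh come from oslo.concurrency's in-process lock; a minimal sketch of the same primitive, with the lock name mirroring the log and the body left as a placeholder:

    from oslo_concurrency import lockutils

    def refresh_instance_cache(instance_uuid):
        # Emits "Acquiring lock ..." / "Acquired lock ..." / "Releasing lock ..."
        # debug messages like the ones above when lockutils logging is enabled.
        with lockutils.lock('refresh_cache-%s' % instance_uuid):
            # Only one worker in this process rebuilds this instance's
            # network info cache at a time.
            pass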
{{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2439.741366] env[63279]: DEBUG oslo_vmware.api [None req-9c89442d-9f03-49f5-a3d1-fda65e70c8ce tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52bb274d-7e59-b3b8-12ad-119fe6861999, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2439.799773] env[63279]: DEBUG oslo_vmware.api [None req-7341b9eb-5a1e-4e6c-8f09-562c50495fbf tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Task: {'id': task-2088226, 'name': ReconfigVM_Task, 'duration_secs': 0.183472} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2439.800190] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-7341b9eb-5a1e-4e6c-8f09-562c50495fbf tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 246b5346-93b1-4a84-921c-d028f3554d3d] Reconfigured VM instance instance-0000006f to detach disk 2000 {{(pid=63279) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 2439.800845] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab031397-dcd0-414e-82b7-52fdf2ba51cc {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2439.825027] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-7341b9eb-5a1e-4e6c-8f09-562c50495fbf tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 246b5346-93b1-4a84-921c-d028f3554d3d] Reconfiguring VM instance instance-0000006f to attach disk [datastore1] 246b5346-93b1-4a84-921c-d028f3554d3d/246b5346-93b1-4a84-921c-d028f3554d3d.vmdk or device None with type thin {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2439.825262] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-264b435a-5a5f-44b7-9aec-4690700cbdaa {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2439.842298] env[63279]: DEBUG oslo_vmware.api [None req-7341b9eb-5a1e-4e6c-8f09-562c50495fbf tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Waiting for the task: (returnval){ [ 2439.842298] env[63279]: value = "task-2088227" [ 2439.842298] env[63279]: _type = "Task" [ 2439.842298] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2439.849154] env[63279]: DEBUG oslo_vmware.api [None req-7341b9eb-5a1e-4e6c-8f09-562c50495fbf tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Task: {'id': task-2088227, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2439.966748] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2439.966935] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=63279) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11066}} [ 2440.116287] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45a0a58f-6d43-424d-9457-963f96d09921 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2440.120405] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39783da2-6fad-4a10-9811-2f3bb9746e5e {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2440.151237] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3bc929da-59ef-4f74-b10e-731f8e85ed6c {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2440.158587] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10d5cbba-9845-4221-a608-fedb6a5f5a5f {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2440.171691] env[63279]: DEBUG nova.compute.provider_tree [None req-6342cb35-e807-4fde-a464-f2623c7405da tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Inventory has not changed in ProviderTree for provider: 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2440.245955] env[63279]: DEBUG oslo_vmware.api [None req-9c89442d-9f03-49f5-a3d1-fda65e70c8ce tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52bb274d-7e59-b3b8-12ad-119fe6861999, 'name': SearchDatastore_Task, 'duration_secs': 0.011118} completed successfully. 
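The "Running periodic task ComputeManager._reclaim_queued_deletes" / "CONF.reclaim_instance_interval <= 0, skipping..." pair is oslo.service's periodic-task machinery at work: the task fires on its timer and returns immediately because no positive reclaim interval is configured. A minimal sketch of how such a task is declared; the class name and the 600-second spacing are assumptions:

    from oslo_config import cfg
    from oslo_service import periodic_task

    CONF = cfg.CONF
    CONF.register_opts([cfg.IntOpt('reclaim_instance_interval', default=0)])

    class ComputeManagerSketch(periodic_task.PeriodicTasks):
        def __init__(self):
            super().__init__(CONF)

        @periodic_task.periodic_task(spacing=600)
        def _reclaim_queued_deletes(self, context):
            # Mirrors the "skipping..." message above: soft-deleted instances
            # are only reclaimed when the operator sets a positive interval.
            if CONF.reclaim_instance_interval <= 0:
                return
            # ... look up SOFT_DELETED instances and delete them here

    # The service loop periodically calls run_periodic_tasks(context) on the
    # manager, producing the "Running periodic task ..." debug lines.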
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2440.246294] env[63279]: DEBUG oslo_concurrency.lockutils [None req-9c89442d-9f03-49f5-a3d1-fda65e70c8ce tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2440.246528] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-9c89442d-9f03-49f5-a3d1-fda65e70c8ce tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] [instance: 3832508d-5d12-42a2-93d8-61775907b2d2] Processing image 30887889-e45b-4f67-8b3c-16216e594a90 {{(pid=63279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2440.246765] env[63279]: DEBUG oslo_concurrency.lockutils [None req-9c89442d-9f03-49f5-a3d1-fda65e70c8ce tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2440.246913] env[63279]: DEBUG oslo_concurrency.lockutils [None req-9c89442d-9f03-49f5-a3d1-fda65e70c8ce tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2440.247107] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-9c89442d-9f03-49f5-a3d1-fda65e70c8ce tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2440.247363] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e810715a-7977-4bb2-bcf6-9d07fb457048 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2440.255425] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-9c89442d-9f03-49f5-a3d1-fda65e70c8ce tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2440.255598] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-9c89442d-9f03-49f5-a3d1-fda65e70c8ce tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2440.256297] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3684fb74-9140-4e03-b0cb-f5dc58ac6cde {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2440.261154] env[63279]: DEBUG oslo_vmware.api [None req-9c89442d-9f03-49f5-a3d1-fda65e70c8ce tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Waiting for the task: (returnval){ [ 2440.261154] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52a450b0-c104-b6e7-99ab-5bca9a3f026b" [ 2440.261154] env[63279]: _type = "Task" [ 2440.261154] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2440.270132] env[63279]: DEBUG oslo_vmware.api [None req-9c89442d-9f03-49f5-a3d1-fda65e70c8ce tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52a450b0-c104-b6e7-99ab-5bca9a3f026b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2440.328414] env[63279]: DEBUG nova.network.neutron [req-5e1fa2cb-f9a9-495c-9afd-019931a7ac89 req-6124b318-9dbf-44b0-bd35-8f5eb02bde75 service nova] [instance: 3832508d-5d12-42a2-93d8-61775907b2d2] Updated VIF entry in instance network info cache for port 07a9377a-5b9b-41f4-9f94-eb47c9b38bc4. {{(pid=63279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2440.328761] env[63279]: DEBUG nova.network.neutron [req-5e1fa2cb-f9a9-495c-9afd-019931a7ac89 req-6124b318-9dbf-44b0-bd35-8f5eb02bde75 service nova] [instance: 3832508d-5d12-42a2-93d8-61775907b2d2] Updating instance_info_cache with network_info: [{"id": "07a9377a-5b9b-41f4-9f94-eb47c9b38bc4", "address": "fa:16:3e:4b:19:18", "network": {"id": "f7000655-b20b-461d-9d08-f4cb8a85522e", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-686033866-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7674483744fe490b8cbe75532dfad77c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cbe1725d-6711-4e92-9a4e-d4802651e7d0", "external-id": "nsx-vlan-transportzone-679", "segmentation_id": 679, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap07a9377a-5b", "ovs_interfaceid": "07a9377a-5b9b-41f4-9f94-eb47c9b38bc4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2440.352187] env[63279]: DEBUG oslo_vmware.api [None req-7341b9eb-5a1e-4e6c-8f09-562c50495fbf tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Task: {'id': task-2088227, 'name': ReconfigVM_Task, 'duration_secs': 0.281906} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2440.352454] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-7341b9eb-5a1e-4e6c-8f09-562c50495fbf tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 246b5346-93b1-4a84-921c-d028f3554d3d] Reconfigured VM instance instance-0000006f to attach disk [datastore1] 246b5346-93b1-4a84-921c-d028f3554d3d/246b5346-93b1-4a84-921c-d028f3554d3d.vmdk or device None with type thin {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2440.352732] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-7341b9eb-5a1e-4e6c-8f09-562c50495fbf tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 246b5346-93b1-4a84-921c-d028f3554d3d] Updating instance '246b5346-93b1-4a84-921c-d028f3554d3d' progress to 50 {{(pid=63279) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2440.675308] env[63279]: DEBUG nova.scheduler.client.report [None req-6342cb35-e807-4fde-a464-f2623c7405da tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Inventory has not changed for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2440.772092] env[63279]: DEBUG oslo_vmware.api [None req-9c89442d-9f03-49f5-a3d1-fda65e70c8ce tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52a450b0-c104-b6e7-99ab-5bca9a3f026b, 'name': SearchDatastore_Task, 'duration_secs': 0.009374} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2440.772918] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-29f81e22-f7af-4bec-8cae-70681d42a8ba {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2440.777920] env[63279]: DEBUG oslo_vmware.api [None req-9c89442d-9f03-49f5-a3d1-fda65e70c8ce tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Waiting for the task: (returnval){ [ 2440.777920] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]523e1b45-63bd-5614-f908-1dba7bfa2d73" [ 2440.777920] env[63279]: _type = "Task" [ 2440.777920] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2440.785714] env[63279]: DEBUG oslo_vmware.api [None req-9c89442d-9f03-49f5-a3d1-fda65e70c8ce tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]523e1b45-63bd-5614-f908-1dba7bfa2d73, 'name': SearchDatastore_Task} progress is 0%. 
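For reference, the inventory reported to placement above yields schedulable capacity of (total - reserved) * allocation_ratio per resource class; checking the logged numbers:

    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
    }

    capacity = {rc: int((inv['total'] - inv['reserved']) * inv['allocation_ratio'])
                for rc, inv in inventory.items()}
    print(capacity)   # {'VCPU': 192, 'MEMORY_MB': 196078, 'DISK_GB': 400}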
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2440.830934] env[63279]: DEBUG oslo_concurrency.lockutils [req-5e1fa2cb-f9a9-495c-9afd-019931a7ac89 req-6124b318-9dbf-44b0-bd35-8f5eb02bde75 service nova] Releasing lock "refresh_cache-3832508d-5d12-42a2-93d8-61775907b2d2" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2440.858441] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c62efbf-6962-4ef3-8ee2-ec3475e3cda3 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2440.880580] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e78c312-ecfd-4b6d-a21b-5de7677ef364 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2440.901720] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-7341b9eb-5a1e-4e6c-8f09-562c50495fbf tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 246b5346-93b1-4a84-921c-d028f3554d3d] Updating instance '246b5346-93b1-4a84-921c-d028f3554d3d' progress to 67 {{(pid=63279) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2440.978051] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._sync_power_states {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2441.180881] env[63279]: DEBUG oslo_concurrency.lockutils [None req-6342cb35-e807-4fde-a464-f2623c7405da tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.273s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2441.181072] env[63279]: DEBUG nova.compute.manager [None req-6342cb35-e807-4fde-a464-f2623c7405da tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: 1520999b-e3e9-41b3-82e4-91bb556e96c4] Start building networks asynchronously for instance. 
{{(pid=63279) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 2441.183942] env[63279]: DEBUG oslo_concurrency.lockutils [None req-27cdefc2-b38d-4179-a14e-4c849b15f7d9 tempest-ServerGroupTestJSON-1647649028 tempest-ServerGroupTestJSON-1647649028-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 2.196s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2441.185379] env[63279]: INFO nova.compute.claims [None req-27cdefc2-b38d-4179-a14e-4c849b15f7d9 tempest-ServerGroupTestJSON-1647649028 tempest-ServerGroupTestJSON-1647649028-project-member] [instance: c24509d7-0ef2-4a5e-843d-c843888118cc] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2441.288458] env[63279]: DEBUG oslo_vmware.api [None req-9c89442d-9f03-49f5-a3d1-fda65e70c8ce tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]523e1b45-63bd-5614-f908-1dba7bfa2d73, 'name': SearchDatastore_Task, 'duration_secs': 0.010144} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2441.288732] env[63279]: DEBUG oslo_concurrency.lockutils [None req-9c89442d-9f03-49f5-a3d1-fda65e70c8ce tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2441.288998] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-9c89442d-9f03-49f5-a3d1-fda65e70c8ce tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] 3832508d-5d12-42a2-93d8-61775907b2d2/3832508d-5d12-42a2-93d8-61775907b2d2.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2441.289282] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-09328876-7bd5-44b5-8687-4c136abb71e8 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2441.296526] env[63279]: DEBUG oslo_vmware.api [None req-9c89442d-9f03-49f5-a3d1-fda65e70c8ce tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Waiting for the task: (returnval){ [ 2441.296526] env[63279]: value = "task-2088228" [ 2441.296526] env[63279]: _type = "Task" [ 2441.296526] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2441.304236] env[63279]: DEBUG oslo_vmware.api [None req-9c89442d-9f03-49f5-a3d1-fda65e70c8ce tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Task: {'id': task-2088228, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2441.482428] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Getting list of instances from cluster (obj){ [ 2441.482428] env[63279]: value = "domain-c8" [ 2441.482428] env[63279]: _type = "ClusterComputeResource" [ 2441.482428] env[63279]: } {{(pid=63279) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 2441.483892] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1fed204-e44f-4ca3-9db1-98fb8ea183a5 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2441.499961] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Got total of 5 instances {{(pid=63279) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 2441.500195] env[63279]: WARNING nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] While synchronizing instance power states, found 6 instances in the database and 5 instances on the hypervisor. [ 2441.500348] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Triggering sync for uuid efddec10-b496-446e-a05a-72c9f2d86ed9 {{(pid=63279) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10857}} [ 2441.500552] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Triggering sync for uuid befc985f-68e2-4a04-8de0-9ca9bb3fa504 {{(pid=63279) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10857}} [ 2441.500730] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Triggering sync for uuid 246b5346-93b1-4a84-921c-d028f3554d3d {{(pid=63279) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10857}} [ 2441.500890] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Triggering sync for uuid 6c4d6006-656b-4d30-a595-8985d7f6b1e7 {{(pid=63279) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10857}} [ 2441.501058] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Triggering sync for uuid 3832508d-5d12-42a2-93d8-61775907b2d2 {{(pid=63279) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10857}} [ 2441.501216] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Triggering sync for uuid 1520999b-e3e9-41b3-82e4-91bb556e96c4 {{(pid=63279) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10857}} [ 2441.501622] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Acquiring lock "efddec10-b496-446e-a05a-72c9f2d86ed9" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2441.501851] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Lock "efddec10-b496-446e-a05a-72c9f2d86ed9" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2441.502145] env[63279]: DEBUG 
oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Acquiring lock "befc985f-68e2-4a04-8de0-9ca9bb3fa504" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2441.502381] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Lock "befc985f-68e2-4a04-8de0-9ca9bb3fa504" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2441.502663] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Acquiring lock "246b5346-93b1-4a84-921c-d028f3554d3d" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2441.502860] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Lock "246b5346-93b1-4a84-921c-d028f3554d3d" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2441.503056] env[63279]: INFO nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] [instance: 246b5346-93b1-4a84-921c-d028f3554d3d] During sync_power_state the instance has a pending task (resize_migrating). Skip. [ 2441.503229] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Lock "246b5346-93b1-4a84-921c-d028f3554d3d" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2441.503450] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Acquiring lock "6c4d6006-656b-4d30-a595-8985d7f6b1e7" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2441.503684] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Lock "6c4d6006-656b-4d30-a595-8985d7f6b1e7" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2441.503932] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Acquiring lock "3832508d-5d12-42a2-93d8-61775907b2d2" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2441.504156] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Acquiring lock "1520999b-e3e9-41b3-82e4-91bb556e96c4" by 
"nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2441.505054] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67cc7c2b-a5fa-48c2-81a8-86971a428e45 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2441.508367] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1891a732-1061-4b04-b731-01cb39245436 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2441.511612] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88778225-8e6e-4abf-9d8e-e9cd95c3d35b {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2441.690621] env[63279]: DEBUG nova.compute.utils [None req-6342cb35-e807-4fde-a464-f2623c7405da tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Using /dev/sd instead of None {{(pid=63279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2441.695231] env[63279]: DEBUG nova.compute.manager [None req-6342cb35-e807-4fde-a464-f2623c7405da tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: 1520999b-e3e9-41b3-82e4-91bb556e96c4] Allocating IP information in the background. {{(pid=63279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 2441.695505] env[63279]: DEBUG nova.network.neutron [None req-6342cb35-e807-4fde-a464-f2623c7405da tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: 1520999b-e3e9-41b3-82e4-91bb556e96c4] allocate_for_instance() {{(pid=63279) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2441.749940] env[63279]: DEBUG nova.policy [None req-6342cb35-e807-4fde-a464-f2623c7405da tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c7cd64afda9f4eb488bdf76a36f2fee6', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f9427c264e8e41998f579af352cb48cf', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63279) authorize /opt/stack/nova/nova/policy.py:192}} [ 2441.806314] env[63279]: DEBUG oslo_vmware.api [None req-9c89442d-9f03-49f5-a3d1-fda65e70c8ce tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Task: {'id': task-2088228, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.461475} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2441.806798] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-9c89442d-9f03-49f5-a3d1-fda65e70c8ce tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] 3832508d-5d12-42a2-93d8-61775907b2d2/3832508d-5d12-42a2-93d8-61775907b2d2.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2441.806798] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-9c89442d-9f03-49f5-a3d1-fda65e70c8ce tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] [instance: 3832508d-5d12-42a2-93d8-61775907b2d2] Extending root virtual disk to 1048576 {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2441.806965] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-1138cfbf-eb1f-4dbb-8251-80ceb443547b {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2441.813343] env[63279]: DEBUG oslo_vmware.api [None req-9c89442d-9f03-49f5-a3d1-fda65e70c8ce tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Waiting for the task: (returnval){ [ 2441.813343] env[63279]: value = "task-2088229" [ 2441.813343] env[63279]: _type = "Task" [ 2441.813343] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2441.821471] env[63279]: DEBUG oslo_vmware.api [None req-9c89442d-9f03-49f5-a3d1-fda65e70c8ce tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Task: {'id': task-2088229, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2442.026333] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Lock "6c4d6006-656b-4d30-a595-8985d7f6b1e7" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.522s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2442.027102] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Lock "efddec10-b496-446e-a05a-72c9f2d86ed9" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.525s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2442.027490] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Lock "befc985f-68e2-4a04-8de0-9ca9bb3fa504" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.525s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2442.069328] env[63279]: DEBUG nova.network.neutron [None req-6342cb35-e807-4fde-a464-f2623c7405da tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: 1520999b-e3e9-41b3-82e4-91bb556e96c4] Successfully created port: c82cf29b-2996-4e7e-a280-4c7943a6865a {{(pid=63279) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2442.195663] env[63279]: DEBUG nova.compute.manager [None req-6342cb35-e807-4fde-a464-f2623c7405da tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: 1520999b-e3e9-41b3-82e4-91bb556e96c4] Start building block device mappings for instance. {{(pid=63279) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 2442.315663] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b80febc-02a9-4227-ac49-5b6b438855f6 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2442.328386] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aec271e4-03a9-4854-b096-91c0ee3f70b4 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2442.331845] env[63279]: DEBUG oslo_vmware.api [None req-9c89442d-9f03-49f5-a3d1-fda65e70c8ce tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Task: {'id': task-2088229, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.090653} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2442.331845] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-9c89442d-9f03-49f5-a3d1-fda65e70c8ce tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] [instance: 3832508d-5d12-42a2-93d8-61775907b2d2] Extended root virtual disk {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2442.332893] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8d6711b-c956-49e3-a706-21378f6110e1 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2442.359319] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9736a6df-4a1e-4136-9e5f-cee834087c0a {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2442.378106] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-9c89442d-9f03-49f5-a3d1-fda65e70c8ce tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] [instance: 3832508d-5d12-42a2-93d8-61775907b2d2] Reconfiguring VM instance instance-00000073 to attach disk [datastore1] 3832508d-5d12-42a2-93d8-61775907b2d2/3832508d-5d12-42a2-93d8-61775907b2d2.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2442.378895] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-33bf931f-87d2-4293-bacd-db294c159916 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2442.396642] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-270c21fe-216a-400e-a824-d453c5eeb03b {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2442.401098] env[63279]: DEBUG oslo_vmware.api [None req-9c89442d-9f03-49f5-a3d1-fda65e70c8ce tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Waiting for the task: (returnval){ [ 2442.401098] env[63279]: value = "task-2088230" [ 2442.401098] env[63279]: _type = "Task" [ 2442.401098] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2442.408460] env[63279]: DEBUG oslo_vmware.api [None req-9c89442d-9f03-49f5-a3d1-fda65e70c8ce tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Task: {'id': task-2088230, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2442.415973] env[63279]: DEBUG nova.compute.provider_tree [None req-27cdefc2-b38d-4179-a14e-4c849b15f7d9 tempest-ServerGroupTestJSON-1647649028 tempest-ServerGroupTestJSON-1647649028-project-member] Inventory has not changed in ProviderTree for provider: 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2442.546893] env[63279]: DEBUG nova.network.neutron [None req-7341b9eb-5a1e-4e6c-8f09-562c50495fbf tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 246b5346-93b1-4a84-921c-d028f3554d3d] Port be3c9abe-2016-4ca7-9982-16776729e694 binding to destination host cpu-1 is already ACTIVE {{(pid=63279) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 2442.913115] env[63279]: DEBUG oslo_vmware.api [None req-9c89442d-9f03-49f5-a3d1-fda65e70c8ce tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Task: {'id': task-2088230, 'name': ReconfigVM_Task, 'duration_secs': 0.270465} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2442.913429] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-9c89442d-9f03-49f5-a3d1-fda65e70c8ce tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] [instance: 3832508d-5d12-42a2-93d8-61775907b2d2] Reconfigured VM instance instance-00000073 to attach disk [datastore1] 3832508d-5d12-42a2-93d8-61775907b2d2/3832508d-5d12-42a2-93d8-61775907b2d2.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2442.914065] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-2e3fec37-a27d-4c84-8324-cbf63898ffc6 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2442.918633] env[63279]: DEBUG nova.scheduler.client.report [None req-27cdefc2-b38d-4179-a14e-4c849b15f7d9 tempest-ServerGroupTestJSON-1647649028 tempest-ServerGroupTestJSON-1647649028-project-member] Inventory has not changed for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2442.922921] env[63279]: DEBUG oslo_vmware.api [None req-9c89442d-9f03-49f5-a3d1-fda65e70c8ce tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Waiting for the task: (returnval){ [ 2442.922921] env[63279]: value = "task-2088231" [ 2442.922921] env[63279]: _type = "Task" [ 2442.922921] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2442.931668] env[63279]: DEBUG oslo_vmware.api [None req-9c89442d-9f03-49f5-a3d1-fda65e70c8ce tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Task: {'id': task-2088231, 'name': Rename_Task} progress is 5%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2443.208763] env[63279]: DEBUG nova.compute.manager [None req-6342cb35-e807-4fde-a464-f2623c7405da tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: 1520999b-e3e9-41b3-82e4-91bb556e96c4] Start spawning the instance on the hypervisor. {{(pid=63279) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 2443.234317] env[63279]: DEBUG nova.virt.hardware [None req-6342cb35-e807-4fde-a464-f2623c7405da tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-13T17:30:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-13T17:30:01Z,direct_url=,disk_format='vmdk',id=30887889-e45b-4f67-8b3c-16216e594a90,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a935e86eee0a4c38adfe0367d2097a61',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-13T17:30:02Z,virtual_size=,visibility=), allow threads: False {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2443.234605] env[63279]: DEBUG nova.virt.hardware [None req-6342cb35-e807-4fde-a464-f2623c7405da tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Flavor limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2443.234788] env[63279]: DEBUG nova.virt.hardware [None req-6342cb35-e807-4fde-a464-f2623c7405da tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Image limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2443.234976] env[63279]: DEBUG nova.virt.hardware [None req-6342cb35-e807-4fde-a464-f2623c7405da tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Flavor pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2443.235144] env[63279]: DEBUG nova.virt.hardware [None req-6342cb35-e807-4fde-a464-f2623c7405da tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Image pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2443.235297] env[63279]: DEBUG nova.virt.hardware [None req-6342cb35-e807-4fde-a464-f2623c7405da tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2443.235508] env[63279]: DEBUG nova.virt.hardware [None req-6342cb35-e807-4fde-a464-f2623c7405da tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63279) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:573}} [ 2443.235673] env[63279]: DEBUG nova.virt.hardware [None req-6342cb35-e807-4fde-a464-f2623c7405da tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 2443.235841] env[63279]: DEBUG nova.virt.hardware [None req-6342cb35-e807-4fde-a464-f2623c7405da tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Got 1 possible topologies {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2443.236010] env[63279]: DEBUG nova.virt.hardware [None req-6342cb35-e807-4fde-a464-f2623c7405da tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2443.236195] env[63279]: DEBUG nova.virt.hardware [None req-6342cb35-e807-4fde-a464-f2623c7405da tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2443.237029] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f9fd9c4-9a59-4c38-961f-cd9724fdd775 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2443.244968] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97364f1d-8791-42df-84fe-bdea74e6fe7c {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2443.427932] env[63279]: DEBUG oslo_concurrency.lockutils [None req-27cdefc2-b38d-4179-a14e-4c849b15f7d9 tempest-ServerGroupTestJSON-1647649028 tempest-ServerGroupTestJSON-1647649028-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.244s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2443.436321] env[63279]: DEBUG oslo_vmware.api [None req-9c89442d-9f03-49f5-a3d1-fda65e70c8ce tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Task: {'id': task-2088231, 'name': Rename_Task, 'duration_secs': 0.177952} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2443.436581] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-9c89442d-9f03-49f5-a3d1-fda65e70c8ce tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] [instance: 3832508d-5d12-42a2-93d8-61775907b2d2] Powering on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2443.436822] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f35fab9a-6eb0-44f6-b1ac-7430ec7a8285 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2443.447826] env[63279]: DEBUG oslo_vmware.api [None req-9c89442d-9f03-49f5-a3d1-fda65e70c8ce tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Waiting for the task: (returnval){ [ 2443.447826] env[63279]: value = "task-2088232" [ 2443.447826] env[63279]: _type = "Task" [ 2443.447826] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2443.459815] env[63279]: DEBUG oslo_vmware.api [None req-9c89442d-9f03-49f5-a3d1-fda65e70c8ce tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Task: {'id': task-2088232, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2443.526059] env[63279]: DEBUG nova.compute.manager [req-fe578988-add9-4d0a-a3d5-7a6192cc3c7f req-0e926e79-5d2c-4e8c-8c9c-a77e47bd0e6a service nova] [instance: 1520999b-e3e9-41b3-82e4-91bb556e96c4] Received event network-vif-plugged-c82cf29b-2996-4e7e-a280-4c7943a6865a {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2443.526487] env[63279]: DEBUG oslo_concurrency.lockutils [req-fe578988-add9-4d0a-a3d5-7a6192cc3c7f req-0e926e79-5d2c-4e8c-8c9c-a77e47bd0e6a service nova] Acquiring lock "1520999b-e3e9-41b3-82e4-91bb556e96c4-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2443.526487] env[63279]: DEBUG oslo_concurrency.lockutils [req-fe578988-add9-4d0a-a3d5-7a6192cc3c7f req-0e926e79-5d2c-4e8c-8c9c-a77e47bd0e6a service nova] Lock "1520999b-e3e9-41b3-82e4-91bb556e96c4-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2443.526761] env[63279]: DEBUG oslo_concurrency.lockutils [req-fe578988-add9-4d0a-a3d5-7a6192cc3c7f req-0e926e79-5d2c-4e8c-8c9c-a77e47bd0e6a service nova] Lock "1520999b-e3e9-41b3-82e4-91bb556e96c4-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2443.526844] env[63279]: DEBUG nova.compute.manager [req-fe578988-add9-4d0a-a3d5-7a6192cc3c7f req-0e926e79-5d2c-4e8c-8c9c-a77e47bd0e6a service nova] [instance: 1520999b-e3e9-41b3-82e4-91bb556e96c4] No waiting events found dispatching network-vif-plugged-c82cf29b-2996-4e7e-a280-4c7943a6865a {{(pid=63279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 2443.526993] env[63279]: WARNING nova.compute.manager 
[req-fe578988-add9-4d0a-a3d5-7a6192cc3c7f req-0e926e79-5d2c-4e8c-8c9c-a77e47bd0e6a service nova] [instance: 1520999b-e3e9-41b3-82e4-91bb556e96c4] Received unexpected event network-vif-plugged-c82cf29b-2996-4e7e-a280-4c7943a6865a for instance with vm_state building and task_state spawning. [ 2443.568074] env[63279]: DEBUG oslo_concurrency.lockutils [None req-7341b9eb-5a1e-4e6c-8f09-562c50495fbf tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Acquiring lock "246b5346-93b1-4a84-921c-d028f3554d3d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2443.568316] env[63279]: DEBUG oslo_concurrency.lockutils [None req-7341b9eb-5a1e-4e6c-8f09-562c50495fbf tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Lock "246b5346-93b1-4a84-921c-d028f3554d3d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2443.568492] env[63279]: DEBUG oslo_concurrency.lockutils [None req-7341b9eb-5a1e-4e6c-8f09-562c50495fbf tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Lock "246b5346-93b1-4a84-921c-d028f3554d3d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2443.618978] env[63279]: DEBUG nova.network.neutron [None req-6342cb35-e807-4fde-a464-f2623c7405da tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: 1520999b-e3e9-41b3-82e4-91bb556e96c4] Successfully updated port: c82cf29b-2996-4e7e-a280-4c7943a6865a {{(pid=63279) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2443.930827] env[63279]: DEBUG oslo_concurrency.lockutils [None req-27cdefc2-b38d-4179-a14e-4c849b15f7d9 tempest-ServerGroupTestJSON-1647649028 tempest-ServerGroupTestJSON-1647649028-project-member] Acquiring lock "e500a4f5-dc93-4333-971d-243265be1514" by "nova.compute.manager.ComputeManager._validate_instance_group_policy.._do_validation" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2443.932089] env[63279]: DEBUG oslo_concurrency.lockutils [None req-27cdefc2-b38d-4179-a14e-4c849b15f7d9 tempest-ServerGroupTestJSON-1647649028 tempest-ServerGroupTestJSON-1647649028-project-member] Lock "e500a4f5-dc93-4333-971d-243265be1514" acquired by "nova.compute.manager.ComputeManager._validate_instance_group_policy.._do_validation" :: waited 0.001s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2443.959385] env[63279]: DEBUG oslo_vmware.api [None req-9c89442d-9f03-49f5-a3d1-fda65e70c8ce tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Task: {'id': task-2088232, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2444.121291] env[63279]: DEBUG oslo_concurrency.lockutils [None req-6342cb35-e807-4fde-a464-f2623c7405da tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Acquiring lock "refresh_cache-1520999b-e3e9-41b3-82e4-91bb556e96c4" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2444.121503] env[63279]: DEBUG oslo_concurrency.lockutils [None req-6342cb35-e807-4fde-a464-f2623c7405da tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Acquired lock "refresh_cache-1520999b-e3e9-41b3-82e4-91bb556e96c4" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2444.121665] env[63279]: DEBUG nova.network.neutron [None req-6342cb35-e807-4fde-a464-f2623c7405da tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: 1520999b-e3e9-41b3-82e4-91bb556e96c4] Building network info cache for instance {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2444.434311] env[63279]: DEBUG oslo_concurrency.lockutils [None req-27cdefc2-b38d-4179-a14e-4c849b15f7d9 tempest-ServerGroupTestJSON-1647649028 tempest-ServerGroupTestJSON-1647649028-project-member] Lock "e500a4f5-dc93-4333-971d-243265be1514" "released" by "nova.compute.manager.ComputeManager._validate_instance_group_policy.._do_validation" :: held 0.503s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2444.434639] env[63279]: DEBUG nova.compute.manager [None req-27cdefc2-b38d-4179-a14e-4c849b15f7d9 tempest-ServerGroupTestJSON-1647649028 tempest-ServerGroupTestJSON-1647649028-project-member] [instance: c24509d7-0ef2-4a5e-843d-c843888118cc] Start building networks asynchronously for instance. {{(pid=63279) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 2444.458647] env[63279]: DEBUG oslo_vmware.api [None req-9c89442d-9f03-49f5-a3d1-fda65e70c8ce tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Task: {'id': task-2088232, 'name': PowerOnVM_Task, 'duration_secs': 0.628792} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2444.458980] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-9c89442d-9f03-49f5-a3d1-fda65e70c8ce tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] [instance: 3832508d-5d12-42a2-93d8-61775907b2d2] Powered on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2444.459206] env[63279]: INFO nova.compute.manager [None req-9c89442d-9f03-49f5-a3d1-fda65e70c8ce tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] [instance: 3832508d-5d12-42a2-93d8-61775907b2d2] Took 12.96 seconds to spawn the instance on the hypervisor. 
[ 2444.459392] env[63279]: DEBUG nova.compute.manager [None req-9c89442d-9f03-49f5-a3d1-fda65e70c8ce tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] [instance: 3832508d-5d12-42a2-93d8-61775907b2d2] Checking state {{(pid=63279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2444.460177] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08a17aab-c81d-4529-9185-c398f331318d {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2444.602913] env[63279]: DEBUG oslo_concurrency.lockutils [None req-7341b9eb-5a1e-4e6c-8f09-562c50495fbf tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Acquiring lock "refresh_cache-246b5346-93b1-4a84-921c-d028f3554d3d" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2444.603128] env[63279]: DEBUG oslo_concurrency.lockutils [None req-7341b9eb-5a1e-4e6c-8f09-562c50495fbf tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Acquired lock "refresh_cache-246b5346-93b1-4a84-921c-d028f3554d3d" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2444.603315] env[63279]: DEBUG nova.network.neutron [None req-7341b9eb-5a1e-4e6c-8f09-562c50495fbf tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 246b5346-93b1-4a84-921c-d028f3554d3d] Building network info cache for instance {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2444.666545] env[63279]: DEBUG nova.network.neutron [None req-6342cb35-e807-4fde-a464-f2623c7405da tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: 1520999b-e3e9-41b3-82e4-91bb556e96c4] Instance cache missing network info. 
{{(pid=63279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2444.821325] env[63279]: DEBUG nova.network.neutron [None req-6342cb35-e807-4fde-a464-f2623c7405da tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: 1520999b-e3e9-41b3-82e4-91bb556e96c4] Updating instance_info_cache with network_info: [{"id": "c82cf29b-2996-4e7e-a280-4c7943a6865a", "address": "fa:16:3e:8a:ea:54", "network": {"id": "1e35690a-f100-41b7-a461-7334966ef784", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1516696428-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f9427c264e8e41998f579af352cb48cf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "15922696-dc08-44ef-97be-0b09a9dfeae8", "external-id": "nsx-vlan-transportzone-791", "segmentation_id": 791, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc82cf29b-29", "ovs_interfaceid": "c82cf29b-2996-4e7e-a280-4c7943a6865a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2444.939588] env[63279]: DEBUG nova.compute.utils [None req-27cdefc2-b38d-4179-a14e-4c849b15f7d9 tempest-ServerGroupTestJSON-1647649028 tempest-ServerGroupTestJSON-1647649028-project-member] Using /dev/sd instead of None {{(pid=63279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2444.942496] env[63279]: DEBUG nova.compute.manager [None req-27cdefc2-b38d-4179-a14e-4c849b15f7d9 tempest-ServerGroupTestJSON-1647649028 tempest-ServerGroupTestJSON-1647649028-project-member] [instance: c24509d7-0ef2-4a5e-843d-c843888118cc] Allocating IP information in the background. 
{{(pid=63279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 2444.942496] env[63279]: DEBUG nova.network.neutron [None req-27cdefc2-b38d-4179-a14e-4c849b15f7d9 tempest-ServerGroupTestJSON-1647649028 tempest-ServerGroupTestJSON-1647649028-project-member] [instance: c24509d7-0ef2-4a5e-843d-c843888118cc] allocate_for_instance() {{(pid=63279) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2444.962577] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2444.962831] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2444.979855] env[63279]: INFO nova.compute.manager [None req-9c89442d-9f03-49f5-a3d1-fda65e70c8ce tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] [instance: 3832508d-5d12-42a2-93d8-61775907b2d2] Took 17.71 seconds to build instance. [ 2445.013145] env[63279]: DEBUG nova.policy [None req-27cdefc2-b38d-4179-a14e-4c849b15f7d9 tempest-ServerGroupTestJSON-1647649028 tempest-ServerGroupTestJSON-1647649028-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0a1ffc10d1304e9d8cdc2960f2dd1a27', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e288b2cbdfec44a7a3728fc459eebe6c', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63279) authorize /opt/stack/nova/nova/policy.py:192}} [ 2445.151682] env[63279]: DEBUG nova.compute.manager [req-026c9e04-41be-45a7-a5dd-1a2881473450 req-210de9ad-c71b-4217-98f2-381ce269136d service nova] [instance: 3832508d-5d12-42a2-93d8-61775907b2d2] Received event network-changed-07a9377a-5b9b-41f4-9f94-eb47c9b38bc4 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2445.152149] env[63279]: DEBUG nova.compute.manager [req-026c9e04-41be-45a7-a5dd-1a2881473450 req-210de9ad-c71b-4217-98f2-381ce269136d service nova] [instance: 3832508d-5d12-42a2-93d8-61775907b2d2] Refreshing instance network info cache due to event network-changed-07a9377a-5b9b-41f4-9f94-eb47c9b38bc4. 
{{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11655}} [ 2445.153880] env[63279]: DEBUG oslo_concurrency.lockutils [req-026c9e04-41be-45a7-a5dd-1a2881473450 req-210de9ad-c71b-4217-98f2-381ce269136d service nova] Acquiring lock "refresh_cache-3832508d-5d12-42a2-93d8-61775907b2d2" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2445.154241] env[63279]: DEBUG oslo_concurrency.lockutils [req-026c9e04-41be-45a7-a5dd-1a2881473450 req-210de9ad-c71b-4217-98f2-381ce269136d service nova] Acquired lock "refresh_cache-3832508d-5d12-42a2-93d8-61775907b2d2" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2445.154574] env[63279]: DEBUG nova.network.neutron [req-026c9e04-41be-45a7-a5dd-1a2881473450 req-210de9ad-c71b-4217-98f2-381ce269136d service nova] [instance: 3832508d-5d12-42a2-93d8-61775907b2d2] Refreshing network info cache for port 07a9377a-5b9b-41f4-9f94-eb47c9b38bc4 {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2445.324752] env[63279]: DEBUG oslo_concurrency.lockutils [None req-6342cb35-e807-4fde-a464-f2623c7405da tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Releasing lock "refresh_cache-1520999b-e3e9-41b3-82e4-91bb556e96c4" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2445.325217] env[63279]: DEBUG nova.compute.manager [None req-6342cb35-e807-4fde-a464-f2623c7405da tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: 1520999b-e3e9-41b3-82e4-91bb556e96c4] Instance network_info: |[{"id": "c82cf29b-2996-4e7e-a280-4c7943a6865a", "address": "fa:16:3e:8a:ea:54", "network": {"id": "1e35690a-f100-41b7-a461-7334966ef784", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1516696428-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f9427c264e8e41998f579af352cb48cf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "15922696-dc08-44ef-97be-0b09a9dfeae8", "external-id": "nsx-vlan-transportzone-791", "segmentation_id": 791, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc82cf29b-29", "ovs_interfaceid": "c82cf29b-2996-4e7e-a280-4c7943a6865a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 2445.325780] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-6342cb35-e807-4fde-a464-f2623c7405da tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: 1520999b-e3e9-41b3-82e4-91bb556e96c4] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:8a:ea:54', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '15922696-dc08-44ef-97be-0b09a9dfeae8', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 
'c82cf29b-2996-4e7e-a280-4c7943a6865a', 'vif_model': 'vmxnet3'}] {{(pid=63279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2445.339278] env[63279]: DEBUG oslo.service.loopingcall [None req-6342cb35-e807-4fde-a464-f2623c7405da tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2445.342839] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1520999b-e3e9-41b3-82e4-91bb556e96c4] Creating VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2445.343182] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-084552be-bbad-4f72-8261-2d7561f8dabe {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2445.373915] env[63279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2445.373915] env[63279]: value = "task-2088233" [ 2445.373915] env[63279]: _type = "Task" [ 2445.373915] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2445.387019] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2088233, 'name': CreateVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2445.444312] env[63279]: DEBUG nova.compute.manager [None req-27cdefc2-b38d-4179-a14e-4c849b15f7d9 tempest-ServerGroupTestJSON-1647649028 tempest-ServerGroupTestJSON-1647649028-project-member] [instance: c24509d7-0ef2-4a5e-843d-c843888118cc] Start building block device mappings for instance. {{(pid=63279) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 2445.473823] env[63279]: DEBUG nova.network.neutron [None req-27cdefc2-b38d-4179-a14e-4c849b15f7d9 tempest-ServerGroupTestJSON-1647649028 tempest-ServerGroupTestJSON-1647649028-project-member] [instance: c24509d7-0ef2-4a5e-843d-c843888118cc] Successfully created port: b6904016-bb41-4c01-9e4b-dd2802955058 {{(pid=63279) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2445.481976] env[63279]: DEBUG oslo_concurrency.lockutils [None req-9c89442d-9f03-49f5-a3d1-fda65e70c8ce tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Lock "3832508d-5d12-42a2-93d8-61775907b2d2" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 19.219s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2445.482244] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Lock "3832508d-5d12-42a2-93d8-61775907b2d2" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 3.978s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2445.482491] env[63279]: INFO nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] [instance: 3832508d-5d12-42a2-93d8-61775907b2d2] During sync_power_state the instance has a pending task (spawning). Skip. 
[ 2445.482651] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Lock "3832508d-5d12-42a2-93d8-61775907b2d2" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2445.541907] env[63279]: DEBUG nova.network.neutron [None req-7341b9eb-5a1e-4e6c-8f09-562c50495fbf tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 246b5346-93b1-4a84-921c-d028f3554d3d] Updating instance_info_cache with network_info: [{"id": "be3c9abe-2016-4ca7-9982-16776729e694", "address": "fa:16:3e:95:a8:19", "network": {"id": "1237aa69-f92f-4996-893d-4007ba590d1d", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-544783943-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.237", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "996f8d6e14a14ac39f207eced547ef33", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d88b750a-0e7d-4f16-8bd5-8e6d5743b720", "external-id": "nsx-vlan-transportzone-715", "segmentation_id": 715, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbe3c9abe-20", "ovs_interfaceid": "be3c9abe-2016-4ca7-9982-16776729e694", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2445.551331] env[63279]: DEBUG nova.compute.manager [req-a989282a-f85b-4ba7-a68a-c188fc8d5a6a req-14765e71-9cbd-47e7-9a18-2141eaca6993 service nova] [instance: 1520999b-e3e9-41b3-82e4-91bb556e96c4] Received event network-changed-c82cf29b-2996-4e7e-a280-4c7943a6865a {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2445.551331] env[63279]: DEBUG nova.compute.manager [req-a989282a-f85b-4ba7-a68a-c188fc8d5a6a req-14765e71-9cbd-47e7-9a18-2141eaca6993 service nova] [instance: 1520999b-e3e9-41b3-82e4-91bb556e96c4] Refreshing instance network info cache due to event network-changed-c82cf29b-2996-4e7e-a280-4c7943a6865a. 
{{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11655}} [ 2445.551513] env[63279]: DEBUG oslo_concurrency.lockutils [req-a989282a-f85b-4ba7-a68a-c188fc8d5a6a req-14765e71-9cbd-47e7-9a18-2141eaca6993 service nova] Acquiring lock "refresh_cache-1520999b-e3e9-41b3-82e4-91bb556e96c4" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2445.551830] env[63279]: DEBUG oslo_concurrency.lockutils [req-a989282a-f85b-4ba7-a68a-c188fc8d5a6a req-14765e71-9cbd-47e7-9a18-2141eaca6993 service nova] Acquired lock "refresh_cache-1520999b-e3e9-41b3-82e4-91bb556e96c4" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2445.551830] env[63279]: DEBUG nova.network.neutron [req-a989282a-f85b-4ba7-a68a-c188fc8d5a6a req-14765e71-9cbd-47e7-9a18-2141eaca6993 service nova] [instance: 1520999b-e3e9-41b3-82e4-91bb556e96c4] Refreshing network info cache for port c82cf29b-2996-4e7e-a280-4c7943a6865a {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2445.883910] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2088233, 'name': CreateVM_Task} progress is 99%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2445.939349] env[63279]: DEBUG nova.network.neutron [req-026c9e04-41be-45a7-a5dd-1a2881473450 req-210de9ad-c71b-4217-98f2-381ce269136d service nova] [instance: 3832508d-5d12-42a2-93d8-61775907b2d2] Updated VIF entry in instance network info cache for port 07a9377a-5b9b-41f4-9f94-eb47c9b38bc4. {{(pid=63279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2445.939876] env[63279]: DEBUG nova.network.neutron [req-026c9e04-41be-45a7-a5dd-1a2881473450 req-210de9ad-c71b-4217-98f2-381ce269136d service nova] [instance: 3832508d-5d12-42a2-93d8-61775907b2d2] Updating instance_info_cache with network_info: [{"id": "07a9377a-5b9b-41f4-9f94-eb47c9b38bc4", "address": "fa:16:3e:4b:19:18", "network": {"id": "f7000655-b20b-461d-9d08-f4cb8a85522e", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-686033866-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.210", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7674483744fe490b8cbe75532dfad77c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cbe1725d-6711-4e92-9a4e-d4802651e7d0", "external-id": "nsx-vlan-transportzone-679", "segmentation_id": 679, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap07a9377a-5b", "ovs_interfaceid": "07a9377a-5b9b-41f4-9f94-eb47c9b38bc4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2446.044346] env[63279]: DEBUG oslo_concurrency.lockutils [None req-7341b9eb-5a1e-4e6c-8f09-562c50495fbf tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Releasing lock 
"refresh_cache-246b5346-93b1-4a84-921c-d028f3554d3d" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2446.242498] env[63279]: DEBUG nova.network.neutron [req-a989282a-f85b-4ba7-a68a-c188fc8d5a6a req-14765e71-9cbd-47e7-9a18-2141eaca6993 service nova] [instance: 1520999b-e3e9-41b3-82e4-91bb556e96c4] Updated VIF entry in instance network info cache for port c82cf29b-2996-4e7e-a280-4c7943a6865a. {{(pid=63279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2446.242713] env[63279]: DEBUG nova.network.neutron [req-a989282a-f85b-4ba7-a68a-c188fc8d5a6a req-14765e71-9cbd-47e7-9a18-2141eaca6993 service nova] [instance: 1520999b-e3e9-41b3-82e4-91bb556e96c4] Updating instance_info_cache with network_info: [{"id": "c82cf29b-2996-4e7e-a280-4c7943a6865a", "address": "fa:16:3e:8a:ea:54", "network": {"id": "1e35690a-f100-41b7-a461-7334966ef784", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1516696428-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f9427c264e8e41998f579af352cb48cf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "15922696-dc08-44ef-97be-0b09a9dfeae8", "external-id": "nsx-vlan-transportzone-791", "segmentation_id": 791, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc82cf29b-29", "ovs_interfaceid": "c82cf29b-2996-4e7e-a280-4c7943a6865a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2446.385964] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2088233, 'name': CreateVM_Task, 'duration_secs': 0.516687} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2446.386233] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1520999b-e3e9-41b3-82e4-91bb556e96c4] Created VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2446.387069] env[63279]: DEBUG oslo_concurrency.lockutils [None req-6342cb35-e807-4fde-a464-f2623c7405da tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2446.387328] env[63279]: DEBUG oslo_concurrency.lockutils [None req-6342cb35-e807-4fde-a464-f2623c7405da tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2446.387725] env[63279]: DEBUG oslo_concurrency.lockutils [None req-6342cb35-e807-4fde-a464-f2623c7405da tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2446.388081] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-80308ec1-a2d2-4e59-bdba-3d164005e5d9 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2446.393475] env[63279]: DEBUG oslo_vmware.api [None req-6342cb35-e807-4fde-a464-f2623c7405da tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Waiting for the task: (returnval){ [ 2446.393475] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52ae7069-9df7-29ff-0f6a-2bfb4738fdf7" [ 2446.393475] env[63279]: _type = "Task" [ 2446.393475] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2446.401797] env[63279]: DEBUG oslo_vmware.api [None req-6342cb35-e807-4fde-a464-f2623c7405da tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52ae7069-9df7-29ff-0f6a-2bfb4738fdf7, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2446.440550] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2446.440811] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Starting heal instance info cache {{(pid=63279) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10447}} [ 2446.442909] env[63279]: DEBUG oslo_concurrency.lockutils [req-026c9e04-41be-45a7-a5dd-1a2881473450 req-210de9ad-c71b-4217-98f2-381ce269136d service nova] Releasing lock "refresh_cache-3832508d-5d12-42a2-93d8-61775907b2d2" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2446.454129] env[63279]: DEBUG nova.compute.manager [None req-27cdefc2-b38d-4179-a14e-4c849b15f7d9 tempest-ServerGroupTestJSON-1647649028 tempest-ServerGroupTestJSON-1647649028-project-member] [instance: c24509d7-0ef2-4a5e-843d-c843888118cc] Start spawning the instance on the hypervisor. {{(pid=63279) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 2446.482565] env[63279]: DEBUG nova.virt.hardware [None req-27cdefc2-b38d-4179-a14e-4c849b15f7d9 tempest-ServerGroupTestJSON-1647649028 tempest-ServerGroupTestJSON-1647649028-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-13T17:30:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-13T17:30:01Z,direct_url=,disk_format='vmdk',id=30887889-e45b-4f67-8b3c-16216e594a90,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a935e86eee0a4c38adfe0367d2097a61',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-13T17:30:02Z,virtual_size=,visibility=), allow threads: False {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2446.482814] env[63279]: DEBUG nova.virt.hardware [None req-27cdefc2-b38d-4179-a14e-4c849b15f7d9 tempest-ServerGroupTestJSON-1647649028 tempest-ServerGroupTestJSON-1647649028-project-member] Flavor limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2446.482979] env[63279]: DEBUG nova.virt.hardware [None req-27cdefc2-b38d-4179-a14e-4c849b15f7d9 tempest-ServerGroupTestJSON-1647649028 tempest-ServerGroupTestJSON-1647649028-project-member] Image limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2446.483179] env[63279]: DEBUG nova.virt.hardware [None req-27cdefc2-b38d-4179-a14e-4c849b15f7d9 tempest-ServerGroupTestJSON-1647649028 tempest-ServerGroupTestJSON-1647649028-project-member] Flavor pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2446.483333] env[63279]: DEBUG nova.virt.hardware [None req-27cdefc2-b38d-4179-a14e-4c849b15f7d9 tempest-ServerGroupTestJSON-1647649028 tempest-ServerGroupTestJSON-1647649028-project-member] Image pref 0:0:0 {{(pid=63279) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2446.483484] env[63279]: DEBUG nova.virt.hardware [None req-27cdefc2-b38d-4179-a14e-4c849b15f7d9 tempest-ServerGroupTestJSON-1647649028 tempest-ServerGroupTestJSON-1647649028-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2446.483702] env[63279]: DEBUG nova.virt.hardware [None req-27cdefc2-b38d-4179-a14e-4c849b15f7d9 tempest-ServerGroupTestJSON-1647649028 tempest-ServerGroupTestJSON-1647649028-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 2446.483865] env[63279]: DEBUG nova.virt.hardware [None req-27cdefc2-b38d-4179-a14e-4c849b15f7d9 tempest-ServerGroupTestJSON-1647649028 tempest-ServerGroupTestJSON-1647649028-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 2446.484051] env[63279]: DEBUG nova.virt.hardware [None req-27cdefc2-b38d-4179-a14e-4c849b15f7d9 tempest-ServerGroupTestJSON-1647649028 tempest-ServerGroupTestJSON-1647649028-project-member] Got 1 possible topologies {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2446.484223] env[63279]: DEBUG nova.virt.hardware [None req-27cdefc2-b38d-4179-a14e-4c849b15f7d9 tempest-ServerGroupTestJSON-1647649028 tempest-ServerGroupTestJSON-1647649028-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2446.484400] env[63279]: DEBUG nova.virt.hardware [None req-27cdefc2-b38d-4179-a14e-4c849b15f7d9 tempest-ServerGroupTestJSON-1647649028 tempest-ServerGroupTestJSON-1647649028-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2446.485266] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5184763b-a2b3-4cc8-84c5-96b16249022b {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2446.493589] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e6ff16f-95ee-4ccd-8979-04c365b863dd {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2446.553555] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bde50089-1ce2-4249-897f-a5f78dc6ccdc {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2446.560536] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57811fcc-8a8a-497f-85c7-cd89b78f6234 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2446.745151] env[63279]: DEBUG oslo_concurrency.lockutils [req-a989282a-f85b-4ba7-a68a-c188fc8d5a6a req-14765e71-9cbd-47e7-9a18-2141eaca6993 service nova] Releasing lock "refresh_cache-1520999b-e3e9-41b3-82e4-91bb556e96c4" {{(pid=63279) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2446.905913] env[63279]: DEBUG oslo_vmware.api [None req-6342cb35-e807-4fde-a464-f2623c7405da tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52ae7069-9df7-29ff-0f6a-2bfb4738fdf7, 'name': SearchDatastore_Task, 'duration_secs': 0.00959} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2446.906234] env[63279]: DEBUG oslo_concurrency.lockutils [None req-6342cb35-e807-4fde-a464-f2623c7405da tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2446.906465] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-6342cb35-e807-4fde-a464-f2623c7405da tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: 1520999b-e3e9-41b3-82e4-91bb556e96c4] Processing image 30887889-e45b-4f67-8b3c-16216e594a90 {{(pid=63279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2446.906701] env[63279]: DEBUG oslo_concurrency.lockutils [None req-6342cb35-e807-4fde-a464-f2623c7405da tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2446.906849] env[63279]: DEBUG oslo_concurrency.lockutils [None req-6342cb35-e807-4fde-a464-f2623c7405da tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2446.907038] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-6342cb35-e807-4fde-a464-f2623c7405da tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2446.907299] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-979b36d7-f695-4708-9e26-5574fcfaa542 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2446.915744] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-6342cb35-e807-4fde-a464-f2623c7405da tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2446.915922] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-6342cb35-e807-4fde-a464-f2623c7405da tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2446.916624] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ba6abbd8-0376-49f0-8de2-ea4b160673f1 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2446.921824] env[63279]: DEBUG oslo_vmware.api [None req-6342cb35-e807-4fde-a464-f2623c7405da tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Waiting for the task: (returnval){ [ 2446.921824] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52fab9f2-f66c-c346-65f6-2a5833001af3" [ 2446.921824] env[63279]: _type = "Task" [ 2446.921824] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2446.929474] env[63279]: DEBUG oslo_vmware.api [None req-6342cb35-e807-4fde-a464-f2623c7405da tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52fab9f2-f66c-c346-65f6-2a5833001af3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2447.050303] env[63279]: DEBUG nova.network.neutron [None req-27cdefc2-b38d-4179-a14e-4c849b15f7d9 tempest-ServerGroupTestJSON-1647649028 tempest-ServerGroupTestJSON-1647649028-project-member] [instance: c24509d7-0ef2-4a5e-843d-c843888118cc] Successfully updated port: b6904016-bb41-4c01-9e4b-dd2802955058 {{(pid=63279) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2447.432241] env[63279]: DEBUG oslo_vmware.api [None req-6342cb35-e807-4fde-a464-f2623c7405da tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52fab9f2-f66c-c346-65f6-2a5833001af3, 'name': SearchDatastore_Task, 'duration_secs': 0.008425} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2447.433047] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0f775723-f79b-4257-97fa-5be82e75caa6 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2447.438438] env[63279]: DEBUG oslo_vmware.api [None req-6342cb35-e807-4fde-a464-f2623c7405da tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Waiting for the task: (returnval){ [ 2447.438438] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52861340-6ea9-599d-a073-08d51dec11a8" [ 2447.438438] env[63279]: _type = "Task" [ 2447.438438] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2447.445741] env[63279]: DEBUG oslo_vmware.api [None req-6342cb35-e807-4fde-a464-f2623c7405da tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52861340-6ea9-599d-a073-08d51dec11a8, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2447.553552] env[63279]: DEBUG oslo_concurrency.lockutils [None req-27cdefc2-b38d-4179-a14e-4c849b15f7d9 tempest-ServerGroupTestJSON-1647649028 tempest-ServerGroupTestJSON-1647649028-project-member] Acquiring lock "refresh_cache-c24509d7-0ef2-4a5e-843d-c843888118cc" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2447.553835] env[63279]: DEBUG oslo_concurrency.lockutils [None req-27cdefc2-b38d-4179-a14e-4c849b15f7d9 tempest-ServerGroupTestJSON-1647649028 tempest-ServerGroupTestJSON-1647649028-project-member] Acquired lock "refresh_cache-c24509d7-0ef2-4a5e-843d-c843888118cc" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2447.554016] env[63279]: DEBUG nova.network.neutron [None req-27cdefc2-b38d-4179-a14e-4c849b15f7d9 tempest-ServerGroupTestJSON-1647649028 tempest-ServerGroupTestJSON-1647649028-project-member] [instance: c24509d7-0ef2-4a5e-843d-c843888118cc] Building network info cache for instance {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2447.579180] env[63279]: DEBUG nova.compute.manager [req-d5a1a8f8-d749-4da9-9569-1530db5164b0 req-a0abd090-10f8-4f85-9612-47f6bb49449f service nova] [instance: c24509d7-0ef2-4a5e-843d-c843888118cc] Received event network-vif-plugged-b6904016-bb41-4c01-9e4b-dd2802955058 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2447.579477] env[63279]: DEBUG oslo_concurrency.lockutils [req-d5a1a8f8-d749-4da9-9569-1530db5164b0 req-a0abd090-10f8-4f85-9612-47f6bb49449f service nova] Acquiring lock "c24509d7-0ef2-4a5e-843d-c843888118cc-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2447.579627] env[63279]: DEBUG oslo_concurrency.lockutils [req-d5a1a8f8-d749-4da9-9569-1530db5164b0 req-a0abd090-10f8-4f85-9612-47f6bb49449f service nova] Lock "c24509d7-0ef2-4a5e-843d-c843888118cc-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2447.579787] env[63279]: DEBUG oslo_concurrency.lockutils [req-d5a1a8f8-d749-4da9-9569-1530db5164b0 req-a0abd090-10f8-4f85-9612-47f6bb49449f service nova] Lock "c24509d7-0ef2-4a5e-843d-c843888118cc-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2447.579961] env[63279]: DEBUG nova.compute.manager [req-d5a1a8f8-d749-4da9-9569-1530db5164b0 req-a0abd090-10f8-4f85-9612-47f6bb49449f service nova] [instance: c24509d7-0ef2-4a5e-843d-c843888118cc] No waiting events found dispatching network-vif-plugged-b6904016-bb41-4c01-9e4b-dd2802955058 {{(pid=63279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 2447.580313] env[63279]: WARNING nova.compute.manager [req-d5a1a8f8-d749-4da9-9569-1530db5164b0 req-a0abd090-10f8-4f85-9612-47f6bb49449f service nova] [instance: c24509d7-0ef2-4a5e-843d-c843888118cc] Received unexpected event network-vif-plugged-b6904016-bb41-4c01-9e4b-dd2802955058 for instance with vm_state building and task_state spawning. 
[ 2447.580501] env[63279]: DEBUG nova.compute.manager [req-d5a1a8f8-d749-4da9-9569-1530db5164b0 req-a0abd090-10f8-4f85-9612-47f6bb49449f service nova] [instance: c24509d7-0ef2-4a5e-843d-c843888118cc] Received event network-changed-b6904016-bb41-4c01-9e4b-dd2802955058 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2447.580666] env[63279]: DEBUG nova.compute.manager [req-d5a1a8f8-d749-4da9-9569-1530db5164b0 req-a0abd090-10f8-4f85-9612-47f6bb49449f service nova] [instance: c24509d7-0ef2-4a5e-843d-c843888118cc] Refreshing instance network info cache due to event network-changed-b6904016-bb41-4c01-9e4b-dd2802955058. {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11655}} [ 2447.580839] env[63279]: DEBUG oslo_concurrency.lockutils [req-d5a1a8f8-d749-4da9-9569-1530db5164b0 req-a0abd090-10f8-4f85-9612-47f6bb49449f service nova] Acquiring lock "refresh_cache-c24509d7-0ef2-4a5e-843d-c843888118cc" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2447.663875] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3bcbc385-c811-497b-81e7-49d46b20601a {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2447.685047] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c46a43e-d82b-48ca-8dc9-bed7f3bc90e5 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2447.692482] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-7341b9eb-5a1e-4e6c-8f09-562c50495fbf tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 246b5346-93b1-4a84-921c-d028f3554d3d] Updating instance '246b5346-93b1-4a84-921c-d028f3554d3d' progress to 83 {{(pid=63279) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2447.950276] env[63279]: DEBUG oslo_vmware.api [None req-6342cb35-e807-4fde-a464-f2623c7405da tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52861340-6ea9-599d-a073-08d51dec11a8, 'name': SearchDatastore_Task, 'duration_secs': 0.00934} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2447.950806] env[63279]: DEBUG oslo_concurrency.lockutils [None req-6342cb35-e807-4fde-a464-f2623c7405da tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2447.951139] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-6342cb35-e807-4fde-a464-f2623c7405da tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] 1520999b-e3e9-41b3-82e4-91bb556e96c4/1520999b-e3e9-41b3-82e4-91bb556e96c4.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2447.951492] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3e2c27c4-995b-4313-bbe5-38bba20719b8 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2447.958271] env[63279]: DEBUG oslo_vmware.api [None req-6342cb35-e807-4fde-a464-f2623c7405da tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Waiting for the task: (returnval){ [ 2447.958271] env[63279]: value = "task-2088234" [ 2447.958271] env[63279]: _type = "Task" [ 2447.958271] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2447.968139] env[63279]: DEBUG oslo_vmware.api [None req-6342cb35-e807-4fde-a464-f2623c7405da tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Task: {'id': task-2088234, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2448.086364] env[63279]: DEBUG nova.network.neutron [None req-27cdefc2-b38d-4179-a14e-4c849b15f7d9 tempest-ServerGroupTestJSON-1647649028 tempest-ServerGroupTestJSON-1647649028-project-member] [instance: c24509d7-0ef2-4a5e-843d-c843888118cc] Instance cache missing network info. {{(pid=63279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2448.199553] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-7341b9eb-5a1e-4e6c-8f09-562c50495fbf tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 246b5346-93b1-4a84-921c-d028f3554d3d] Powering on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2448.199885] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-469dbd42-2e9b-46f9-aa31-a83869b55e43 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2448.208502] env[63279]: DEBUG oslo_vmware.api [None req-7341b9eb-5a1e-4e6c-8f09-562c50495fbf tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Waiting for the task: (returnval){ [ 2448.208502] env[63279]: value = "task-2088235" [ 2448.208502] env[63279]: _type = "Task" [ 2448.208502] env[63279]: } to complete. 
{{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2448.217686] env[63279]: DEBUG oslo_vmware.api [None req-7341b9eb-5a1e-4e6c-8f09-562c50495fbf tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Task: {'id': task-2088235, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2448.268806] env[63279]: DEBUG nova.network.neutron [None req-27cdefc2-b38d-4179-a14e-4c849b15f7d9 tempest-ServerGroupTestJSON-1647649028 tempest-ServerGroupTestJSON-1647649028-project-member] [instance: c24509d7-0ef2-4a5e-843d-c843888118cc] Updating instance_info_cache with network_info: [{"id": "b6904016-bb41-4c01-9e4b-dd2802955058", "address": "fa:16:3e:36:73:0c", "network": {"id": "b8a8fdfb-8586-4905-8b4a-06f48f09cfca", "bridge": "br-int", "label": "tempest-ServerGroupTestJSON-187080690-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e288b2cbdfec44a7a3728fc459eebe6c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7edb7c08-2fae-4df5-9ec6-5ccf06d7e337", "external-id": "nsx-vlan-transportzone-309", "segmentation_id": 309, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb6904016-bb", "ovs_interfaceid": "b6904016-bb41-4c01-9e4b-dd2802955058", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2448.469076] env[63279]: DEBUG oslo_vmware.api [None req-6342cb35-e807-4fde-a464-f2623c7405da tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Task: {'id': task-2088234, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.458077} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2448.469318] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-6342cb35-e807-4fde-a464-f2623c7405da tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] 1520999b-e3e9-41b3-82e4-91bb556e96c4/1520999b-e3e9-41b3-82e4-91bb556e96c4.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2448.469533] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-6342cb35-e807-4fde-a464-f2623c7405da tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: 1520999b-e3e9-41b3-82e4-91bb556e96c4] Extending root virtual disk to 1048576 {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2448.469791] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-6adc360e-d2b3-4213-922c-0ea46aae6d07 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2448.476030] env[63279]: DEBUG oslo_vmware.api [None req-6342cb35-e807-4fde-a464-f2623c7405da tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Waiting for the task: (returnval){ [ 2448.476030] env[63279]: value = "task-2088236" [ 2448.476030] env[63279]: _type = "Task" [ 2448.476030] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2448.483495] env[63279]: DEBUG oslo_vmware.api [None req-6342cb35-e807-4fde-a464-f2623c7405da tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Task: {'id': task-2088236, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2448.718288] env[63279]: DEBUG oslo_vmware.api [None req-7341b9eb-5a1e-4e6c-8f09-562c50495fbf tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Task: {'id': task-2088235, 'name': PowerOnVM_Task, 'duration_secs': 0.420113} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2448.718654] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-7341b9eb-5a1e-4e6c-8f09-562c50495fbf tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 246b5346-93b1-4a84-921c-d028f3554d3d] Powered on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2448.718784] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-7341b9eb-5a1e-4e6c-8f09-562c50495fbf tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 246b5346-93b1-4a84-921c-d028f3554d3d] Updating instance '246b5346-93b1-4a84-921c-d028f3554d3d' progress to 100 {{(pid=63279) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2448.771468] env[63279]: DEBUG oslo_concurrency.lockutils [None req-27cdefc2-b38d-4179-a14e-4c849b15f7d9 tempest-ServerGroupTestJSON-1647649028 tempest-ServerGroupTestJSON-1647649028-project-member] Releasing lock "refresh_cache-c24509d7-0ef2-4a5e-843d-c843888118cc" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2448.771772] env[63279]: DEBUG nova.compute.manager [None req-27cdefc2-b38d-4179-a14e-4c849b15f7d9 tempest-ServerGroupTestJSON-1647649028 tempest-ServerGroupTestJSON-1647649028-project-member] [instance: c24509d7-0ef2-4a5e-843d-c843888118cc] Instance network_info: |[{"id": "b6904016-bb41-4c01-9e4b-dd2802955058", "address": "fa:16:3e:36:73:0c", "network": {"id": "b8a8fdfb-8586-4905-8b4a-06f48f09cfca", "bridge": "br-int", "label": "tempest-ServerGroupTestJSON-187080690-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e288b2cbdfec44a7a3728fc459eebe6c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7edb7c08-2fae-4df5-9ec6-5ccf06d7e337", "external-id": "nsx-vlan-transportzone-309", "segmentation_id": 309, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb6904016-bb", "ovs_interfaceid": "b6904016-bb41-4c01-9e4b-dd2802955058", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 2448.772082] env[63279]: DEBUG oslo_concurrency.lockutils [req-d5a1a8f8-d749-4da9-9569-1530db5164b0 req-a0abd090-10f8-4f85-9612-47f6bb49449f service nova] Acquired lock "refresh_cache-c24509d7-0ef2-4a5e-843d-c843888118cc" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2448.772268] env[63279]: DEBUG nova.network.neutron [req-d5a1a8f8-d749-4da9-9569-1530db5164b0 req-a0abd090-10f8-4f85-9612-47f6bb49449f service nova] [instance: c24509d7-0ef2-4a5e-843d-c843888118cc] Refreshing network info cache for port b6904016-bb41-4c01-9e4b-dd2802955058 {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2448.773366] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None 
req-27cdefc2-b38d-4179-a14e-4c849b15f7d9 tempest-ServerGroupTestJSON-1647649028 tempest-ServerGroupTestJSON-1647649028-project-member] [instance: c24509d7-0ef2-4a5e-843d-c843888118cc] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:36:73:0c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '7edb7c08-2fae-4df5-9ec6-5ccf06d7e337', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b6904016-bb41-4c01-9e4b-dd2802955058', 'vif_model': 'vmxnet3'}] {{(pid=63279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2448.780577] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-27cdefc2-b38d-4179-a14e-4c849b15f7d9 tempest-ServerGroupTestJSON-1647649028 tempest-ServerGroupTestJSON-1647649028-project-member] Creating folder: Project (e288b2cbdfec44a7a3728fc459eebe6c). Parent ref: group-v427491. {{(pid=63279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 2448.783478] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-eaf87320-19aa-48c3-95b5-f2e8137d8006 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2448.793886] env[63279]: INFO nova.virt.vmwareapi.vm_util [None req-27cdefc2-b38d-4179-a14e-4c849b15f7d9 tempest-ServerGroupTestJSON-1647649028 tempest-ServerGroupTestJSON-1647649028-project-member] Created folder: Project (e288b2cbdfec44a7a3728fc459eebe6c) in parent group-v427491. [ 2448.794081] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-27cdefc2-b38d-4179-a14e-4c849b15f7d9 tempest-ServerGroupTestJSON-1647649028 tempest-ServerGroupTestJSON-1647649028-project-member] Creating folder: Instances. Parent ref: group-v427802. {{(pid=63279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 2448.794297] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6d464233-6a5c-4709-a65d-4f51a35a027f {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2448.802771] env[63279]: INFO nova.virt.vmwareapi.vm_util [None req-27cdefc2-b38d-4179-a14e-4c849b15f7d9 tempest-ServerGroupTestJSON-1647649028 tempest-ServerGroupTestJSON-1647649028-project-member] Created folder: Instances in parent group-v427802. [ 2448.802990] env[63279]: DEBUG oslo.service.loopingcall [None req-27cdefc2-b38d-4179-a14e-4c849b15f7d9 tempest-ServerGroupTestJSON-1647649028 tempest-ServerGroupTestJSON-1647649028-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2448.803189] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c24509d7-0ef2-4a5e-843d-c843888118cc] Creating VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2448.803382] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-37ac7e34-b6b4-4d46-9bff-7d85930afba9 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2448.824050] env[63279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2448.824050] env[63279]: value = "task-2088239" [ 2448.824050] env[63279]: _type = "Task" [ 2448.824050] env[63279]: } to complete. 
{{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2448.832283] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2088239, 'name': CreateVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2448.953790] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Didn't find any instances for network info cache update. {{(pid=63279) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10533}} [ 2448.954030] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2448.954222] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager.update_available_resource {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2448.981579] env[63279]: DEBUG nova.network.neutron [req-d5a1a8f8-d749-4da9-9569-1530db5164b0 req-a0abd090-10f8-4f85-9612-47f6bb49449f service nova] [instance: c24509d7-0ef2-4a5e-843d-c843888118cc] Updated VIF entry in instance network info cache for port b6904016-bb41-4c01-9e4b-dd2802955058. {{(pid=63279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2448.981976] env[63279]: DEBUG nova.network.neutron [req-d5a1a8f8-d749-4da9-9569-1530db5164b0 req-a0abd090-10f8-4f85-9612-47f6bb49449f service nova] [instance: c24509d7-0ef2-4a5e-843d-c843888118cc] Updating instance_info_cache with network_info: [{"id": "b6904016-bb41-4c01-9e4b-dd2802955058", "address": "fa:16:3e:36:73:0c", "network": {"id": "b8a8fdfb-8586-4905-8b4a-06f48f09cfca", "bridge": "br-int", "label": "tempest-ServerGroupTestJSON-187080690-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e288b2cbdfec44a7a3728fc459eebe6c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7edb7c08-2fae-4df5-9ec6-5ccf06d7e337", "external-id": "nsx-vlan-transportzone-309", "segmentation_id": 309, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb6904016-bb", "ovs_interfaceid": "b6904016-bb41-4c01-9e4b-dd2802955058", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2448.986302] env[63279]: DEBUG oslo_vmware.api [None req-6342cb35-e807-4fde-a464-f2623c7405da tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Task: {'id': task-2088236, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.061436} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2448.986740] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-6342cb35-e807-4fde-a464-f2623c7405da tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: 1520999b-e3e9-41b3-82e4-91bb556e96c4] Extended root virtual disk {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2448.987508] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-afbf9f04-2cfb-4f61-ade0-90dbba7dd6fb {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2449.009520] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-6342cb35-e807-4fde-a464-f2623c7405da tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: 1520999b-e3e9-41b3-82e4-91bb556e96c4] Reconfiguring VM instance instance-00000074 to attach disk [datastore1] 1520999b-e3e9-41b3-82e4-91bb556e96c4/1520999b-e3e9-41b3-82e4-91bb556e96c4.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2449.010016] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8cefb7c9-4f18-4102-9234-fc936c4b424c {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2449.030665] env[63279]: DEBUG oslo_vmware.api [None req-6342cb35-e807-4fde-a464-f2623c7405da tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Waiting for the task: (returnval){ [ 2449.030665] env[63279]: value = "task-2088240" [ 2449.030665] env[63279]: _type = "Task" [ 2449.030665] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2449.038526] env[63279]: DEBUG oslo_vmware.api [None req-6342cb35-e807-4fde-a464-f2623c7405da tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Task: {'id': task-2088240, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2449.333793] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2088239, 'name': CreateVM_Task, 'duration_secs': 0.385135} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2449.333979] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c24509d7-0ef2-4a5e-843d-c843888118cc] Created VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2449.334663] env[63279]: DEBUG oslo_concurrency.lockutils [None req-27cdefc2-b38d-4179-a14e-4c849b15f7d9 tempest-ServerGroupTestJSON-1647649028 tempest-ServerGroupTestJSON-1647649028-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2449.334838] env[63279]: DEBUG oslo_concurrency.lockutils [None req-27cdefc2-b38d-4179-a14e-4c849b15f7d9 tempest-ServerGroupTestJSON-1647649028 tempest-ServerGroupTestJSON-1647649028-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2449.335551] env[63279]: DEBUG oslo_concurrency.lockutils [None req-27cdefc2-b38d-4179-a14e-4c849b15f7d9 tempest-ServerGroupTestJSON-1647649028 tempest-ServerGroupTestJSON-1647649028-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2449.335551] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c0cbb624-73bd-4224-aa8b-5d32cb1ebbbd {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2449.341009] env[63279]: DEBUG oslo_vmware.api [None req-27cdefc2-b38d-4179-a14e-4c849b15f7d9 tempest-ServerGroupTestJSON-1647649028 tempest-ServerGroupTestJSON-1647649028-project-member] Waiting for the task: (returnval){ [ 2449.341009] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]520bf0cc-15a9-03b0-d94e-9386a4d19e9d" [ 2449.341009] env[63279]: _type = "Task" [ 2449.341009] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2449.348627] env[63279]: DEBUG oslo_vmware.api [None req-27cdefc2-b38d-4179-a14e-4c849b15f7d9 tempest-ServerGroupTestJSON-1647649028 tempest-ServerGroupTestJSON-1647649028-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]520bf0cc-15a9-03b0-d94e-9386a4d19e9d, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2449.457518] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2449.457775] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2449.457951] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2449.458124] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=63279) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2449.459007] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f9690b5-7371-4761-ac84-b473d4d119ff {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2449.467235] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12f07d9f-519b-4115-9997-b9c3e1f3945e {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2449.480520] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1298b591-b659-4ef5-8d36-1ce1896a6170 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2449.486942] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98c7706b-8a39-4f5c-85ef-245b15016010 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2449.490122] env[63279]: DEBUG oslo_concurrency.lockutils [req-d5a1a8f8-d749-4da9-9569-1530db5164b0 req-a0abd090-10f8-4f85-9612-47f6bb49449f service nova] Releasing lock "refresh_cache-c24509d7-0ef2-4a5e-843d-c843888118cc" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2449.515313] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180298MB free_disk=170GB free_vcpus=48 pci_devices=None {{(pid=63279) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2449.515469] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=63279) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2449.515677] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2449.538753] env[63279]: DEBUG oslo_vmware.api [None req-6342cb35-e807-4fde-a464-f2623c7405da tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Task: {'id': task-2088240, 'name': ReconfigVM_Task, 'duration_secs': 0.331565} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2449.538992] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-6342cb35-e807-4fde-a464-f2623c7405da tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: 1520999b-e3e9-41b3-82e4-91bb556e96c4] Reconfigured VM instance instance-00000074 to attach disk [datastore1] 1520999b-e3e9-41b3-82e4-91bb556e96c4/1520999b-e3e9-41b3-82e4-91bb556e96c4.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2449.539663] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-5e82c330-f1f7-423d-be88-15ee00e7dd61 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2449.545744] env[63279]: DEBUG oslo_vmware.api [None req-6342cb35-e807-4fde-a464-f2623c7405da tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Waiting for the task: (returnval){ [ 2449.545744] env[63279]: value = "task-2088241" [ 2449.545744] env[63279]: _type = "Task" [ 2449.545744] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2449.553472] env[63279]: DEBUG oslo_vmware.api [None req-6342cb35-e807-4fde-a464-f2623c7405da tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Task: {'id': task-2088241, 'name': Rename_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2449.851300] env[63279]: DEBUG oslo_vmware.api [None req-27cdefc2-b38d-4179-a14e-4c849b15f7d9 tempest-ServerGroupTestJSON-1647649028 tempest-ServerGroupTestJSON-1647649028-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]520bf0cc-15a9-03b0-d94e-9386a4d19e9d, 'name': SearchDatastore_Task, 'duration_secs': 0.010127} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2449.851641] env[63279]: DEBUG oslo_concurrency.lockutils [None req-27cdefc2-b38d-4179-a14e-4c849b15f7d9 tempest-ServerGroupTestJSON-1647649028 tempest-ServerGroupTestJSON-1647649028-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2449.851868] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-27cdefc2-b38d-4179-a14e-4c849b15f7d9 tempest-ServerGroupTestJSON-1647649028 tempest-ServerGroupTestJSON-1647649028-project-member] [instance: c24509d7-0ef2-4a5e-843d-c843888118cc] Processing image 30887889-e45b-4f67-8b3c-16216e594a90 {{(pid=63279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2449.852120] env[63279]: DEBUG oslo_concurrency.lockutils [None req-27cdefc2-b38d-4179-a14e-4c849b15f7d9 tempest-ServerGroupTestJSON-1647649028 tempest-ServerGroupTestJSON-1647649028-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2449.852272] env[63279]: DEBUG oslo_concurrency.lockutils [None req-27cdefc2-b38d-4179-a14e-4c849b15f7d9 tempest-ServerGroupTestJSON-1647649028 tempest-ServerGroupTestJSON-1647649028-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2449.852479] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-27cdefc2-b38d-4179-a14e-4c849b15f7d9 tempest-ServerGroupTestJSON-1647649028 tempest-ServerGroupTestJSON-1647649028-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2449.852754] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7bbe04a9-a250-4e68-93af-8b8e05910873 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2449.860602] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-27cdefc2-b38d-4179-a14e-4c849b15f7d9 tempest-ServerGroupTestJSON-1647649028 tempest-ServerGroupTestJSON-1647649028-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2449.860788] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-27cdefc2-b38d-4179-a14e-4c849b15f7d9 tempest-ServerGroupTestJSON-1647649028 tempest-ServerGroupTestJSON-1647649028-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2449.861514] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ae483569-7f40-4f01-82be-9a13135d5b7a {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2449.866462] env[63279]: DEBUG oslo_vmware.api [None req-27cdefc2-b38d-4179-a14e-4c849b15f7d9 tempest-ServerGroupTestJSON-1647649028 tempest-ServerGroupTestJSON-1647649028-project-member] Waiting for the task: (returnval){ [ 2449.866462] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52e627ff-9576-545b-5e17-dada31e71642" [ 2449.866462] env[63279]: _type = "Task" [ 2449.866462] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2449.873955] env[63279]: DEBUG oslo_vmware.api [None req-27cdefc2-b38d-4179-a14e-4c849b15f7d9 tempest-ServerGroupTestJSON-1647649028 tempest-ServerGroupTestJSON-1647649028-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52e627ff-9576-545b-5e17-dada31e71642, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2450.055319] env[63279]: DEBUG oslo_vmware.api [None req-6342cb35-e807-4fde-a464-f2623c7405da tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Task: {'id': task-2088241, 'name': Rename_Task, 'duration_secs': 0.139021} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2450.055594] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-6342cb35-e807-4fde-a464-f2623c7405da tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: 1520999b-e3e9-41b3-82e4-91bb556e96c4] Powering on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2450.055828] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f2c89b67-8ae6-47fc-b306-83a7dde75930 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2450.061366] env[63279]: DEBUG oslo_vmware.api [None req-6342cb35-e807-4fde-a464-f2623c7405da tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Waiting for the task: (returnval){ [ 2450.061366] env[63279]: value = "task-2088242" [ 2450.061366] env[63279]: _type = "Task" [ 2450.061366] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2450.068399] env[63279]: DEBUG oslo_vmware.api [None req-6342cb35-e807-4fde-a464-f2623c7405da tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Task: {'id': task-2088242, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2450.376390] env[63279]: DEBUG oslo_vmware.api [None req-27cdefc2-b38d-4179-a14e-4c849b15f7d9 tempest-ServerGroupTestJSON-1647649028 tempest-ServerGroupTestJSON-1647649028-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52e627ff-9576-545b-5e17-dada31e71642, 'name': SearchDatastore_Task, 'duration_secs': 0.007897} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2450.377463] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-eda8e89a-1363-4c2b-8359-8bab7e919d59 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2450.382501] env[63279]: DEBUG oslo_vmware.api [None req-27cdefc2-b38d-4179-a14e-4c849b15f7d9 tempest-ServerGroupTestJSON-1647649028 tempest-ServerGroupTestJSON-1647649028-project-member] Waiting for the task: (returnval){ [ 2450.382501] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]5295c5b6-7a56-6369-bf1b-eb2e638b7a42" [ 2450.382501] env[63279]: _type = "Task" [ 2450.382501] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2450.390242] env[63279]: DEBUG oslo_vmware.api [None req-27cdefc2-b38d-4179-a14e-4c849b15f7d9 tempest-ServerGroupTestJSON-1647649028 tempest-ServerGroupTestJSON-1647649028-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]5295c5b6-7a56-6369-bf1b-eb2e638b7a42, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2450.524297] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Applying migration context for instance 246b5346-93b1-4a84-921c-d028f3554d3d as it has an incoming, in-progress migration 5f8a398a-1225-4b02-a73e-9bf01aba7b68. Migration status is finished {{(pid=63279) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1016}} [ 2450.525429] env[63279]: INFO nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] [instance: 246b5346-93b1-4a84-921c-d028f3554d3d] Updating resource usage from migration 5f8a398a-1225-4b02-a73e-9bf01aba7b68 [ 2450.548463] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Instance befc985f-68e2-4a04-8de0-9ca9bb3fa504 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2450.548668] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Instance efddec10-b496-446e-a05a-72c9f2d86ed9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2450.548794] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Instance 6c4d6006-656b-4d30-a595-8985d7f6b1e7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2450.548916] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Instance 3832508d-5d12-42a2-93d8-61775907b2d2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2450.549049] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Migration 5f8a398a-1225-4b02-a73e-9bf01aba7b68 is active on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1712}} [ 2450.549203] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Instance 246b5346-93b1-4a84-921c-d028f3554d3d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=63279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2450.549285] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Instance 1520999b-e3e9-41b3-82e4-91bb556e96c4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2450.549388] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Instance c24509d7-0ef2-4a5e-843d-c843888118cc actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2450.549585] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Total usable vcpus: 48, total allocated vcpus: 8 {{(pid=63279) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2450.549723] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=2112MB phys_disk=200GB used_disk=8GB total_vcpus=48 used_vcpus=8 pci_stats=[] {{(pid=63279) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2450.572908] env[63279]: DEBUG oslo_vmware.api [None req-6342cb35-e807-4fde-a464-f2623c7405da tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Task: {'id': task-2088242, 'name': PowerOnVM_Task, 'duration_secs': 0.472561} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2450.573034] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-6342cb35-e807-4fde-a464-f2623c7405da tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: 1520999b-e3e9-41b3-82e4-91bb556e96c4] Powered on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2450.573250] env[63279]: INFO nova.compute.manager [None req-6342cb35-e807-4fde-a464-f2623c7405da tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: 1520999b-e3e9-41b3-82e4-91bb556e96c4] Took 7.36 seconds to spawn the instance on the hypervisor. 
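The spawn sequence traced above (CopyVirtualDisk_Task, ExtendVirtualDisk_Task, ReconfigVM_Task, Rename_Task, PowerOnVM_Task) is driven through oslo.vmware's session API: each vSphere call returns a Task managed object which the driver then polls, and that polling is what produces the repeated "progress is N%", "duration_secs", and "completed successfully" lines. A minimal sketch of the pattern follows; the endpoint, credentials, and the helper name are placeholders, not values from this log.

```python
from oslo_vmware import api

# Placeholder vCenter endpoint and credentials (not taken from this log).
session = api.VMwareAPISession(
    'vcenter.example.test', 'svc-nova', 'secret',
    10,    # api_retry_count
    0.5)   # task_poll_interval in seconds; each poll emits a progress line

def power_on(vm_ref):
    """Power on a VM given its managed object reference.

    vm_ref is assumed to have been looked up elsewhere (e.g. via a
    PropertyCollector query); this helper only illustrates the call/poll
    pattern visible in the PowerOnVM_Task entries above.
    """
    # The SOAP call returns a Task reference immediately rather than
    # blocking until the power-on finishes...
    task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
    # ...and wait_for_task() polls that task until it reaches the
    # "success" state, raising if it ends in error, which is what the
    # "'name': PowerOnVM_Task ... completed successfully" records reflect.
    return session.wait_for_task(task)
```

The same invoke-then-wait shape applies to the disk copy, extend, reconfigure, and rename steps; only the method name and the managed object passed as the first argument change.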
[ 2450.573468] env[63279]: DEBUG nova.compute.manager [None req-6342cb35-e807-4fde-a464-f2623c7405da tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: 1520999b-e3e9-41b3-82e4-91bb556e96c4] Checking state {{(pid=63279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2450.574275] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-199d1c66-c5eb-47b6-843d-63c95e9b8e1a {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2450.652274] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-990ec0ea-e15e-45df-9195-61c7d73d95fe {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2450.659775] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af4eb857-78ca-4111-aa51-e0e68e63cbd5 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2450.691607] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93a4d9a9-0a78-459c-a9ba-995ae9c85c73 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2450.698630] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c348c26-53d0-4832-b673-db5ca6ef4f04 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2450.711796] env[63279]: DEBUG nova.compute.provider_tree [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Inventory has not changed in ProviderTree for provider: 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2450.892757] env[63279]: DEBUG oslo_vmware.api [None req-27cdefc2-b38d-4179-a14e-4c849b15f7d9 tempest-ServerGroupTestJSON-1647649028 tempest-ServerGroupTestJSON-1647649028-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]5295c5b6-7a56-6369-bf1b-eb2e638b7a42, 'name': SearchDatastore_Task, 'duration_secs': 0.009908} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2450.893059] env[63279]: DEBUG oslo_concurrency.lockutils [None req-27cdefc2-b38d-4179-a14e-4c849b15f7d9 tempest-ServerGroupTestJSON-1647649028 tempest-ServerGroupTestJSON-1647649028-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2450.893301] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-27cdefc2-b38d-4179-a14e-4c849b15f7d9 tempest-ServerGroupTestJSON-1647649028 tempest-ServerGroupTestJSON-1647649028-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] c24509d7-0ef2-4a5e-843d-c843888118cc/c24509d7-0ef2-4a5e-843d-c843888118cc.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2450.893561] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-82e8ed48-d8c4-4104-9bb5-913c1a328ccd {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2450.900922] env[63279]: DEBUG oslo_vmware.api [None req-27cdefc2-b38d-4179-a14e-4c849b15f7d9 tempest-ServerGroupTestJSON-1647649028 tempest-ServerGroupTestJSON-1647649028-project-member] Waiting for the task: (returnval){ [ 2450.900922] env[63279]: value = "task-2088243" [ 2450.900922] env[63279]: _type = "Task" [ 2450.900922] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2450.908235] env[63279]: DEBUG oslo_vmware.api [None req-27cdefc2-b38d-4179-a14e-4c849b15f7d9 tempest-ServerGroupTestJSON-1647649028 tempest-ServerGroupTestJSON-1647649028-project-member] Task: {'id': task-2088243, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2451.095866] env[63279]: INFO nova.compute.manager [None req-6342cb35-e807-4fde-a464-f2623c7405da tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: 1520999b-e3e9-41b3-82e4-91bb556e96c4] Took 12.21 seconds to build instance. [ 2451.215027] env[63279]: DEBUG nova.scheduler.client.report [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Inventory has not changed for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2451.411497] env[63279]: DEBUG oslo_vmware.api [None req-27cdefc2-b38d-4179-a14e-4c849b15f7d9 tempest-ServerGroupTestJSON-1647649028 tempest-ServerGroupTestJSON-1647649028-project-member] Task: {'id': task-2088243, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2451.599957] env[63279]: DEBUG oslo_concurrency.lockutils [None req-6342cb35-e807-4fde-a464-f2623c7405da tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Lock "1520999b-e3e9-41b3-82e4-91bb556e96c4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 13.721s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2451.602714] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Lock "1520999b-e3e9-41b3-82e4-91bb556e96c4" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 10.098s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2451.603688] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b6d5316-5ea8-48c5-8e26-ae05d3af458b {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2451.670022] env[63279]: DEBUG nova.network.neutron [None req-62d0e9e4-5a2c-4038-ae6d-5e47089198ff tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 246b5346-93b1-4a84-921c-d028f3554d3d] Port be3c9abe-2016-4ca7-9982-16776729e694 binding to destination host cpu-1 is already ACTIVE {{(pid=63279) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 2451.670333] env[63279]: DEBUG oslo_concurrency.lockutils [None req-62d0e9e4-5a2c-4038-ae6d-5e47089198ff tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Acquiring lock "refresh_cache-246b5346-93b1-4a84-921c-d028f3554d3d" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2451.670498] env[63279]: DEBUG oslo_concurrency.lockutils [None req-62d0e9e4-5a2c-4038-ae6d-5e47089198ff tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Acquired lock "refresh_cache-246b5346-93b1-4a84-921c-d028f3554d3d" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2451.670691] env[63279]: DEBUG nova.network.neutron [None req-62d0e9e4-5a2c-4038-ae6d-5e47089198ff tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 246b5346-93b1-4a84-921c-d028f3554d3d] Building network info cache for instance {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2451.719944] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=63279) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2451.720158] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.204s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2451.912235] env[63279]: DEBUG oslo_vmware.api [None 
req-27cdefc2-b38d-4179-a14e-4c849b15f7d9 tempest-ServerGroupTestJSON-1647649028 tempest-ServerGroupTestJSON-1647649028-project-member] Task: {'id': task-2088243, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2452.114187] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Lock "1520999b-e3e9-41b3-82e4-91bb556e96c4" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.511s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2452.207176] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2452.207404] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2452.295837] env[63279]: DEBUG nova.compute.manager [req-c1914700-555c-4995-9c4e-01c955518bb2 req-f46866a0-7b63-4cdd-b981-1d8603de87d0 service nova] [instance: 1520999b-e3e9-41b3-82e4-91bb556e96c4] Received event network-changed-c82cf29b-2996-4e7e-a280-4c7943a6865a {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2452.296057] env[63279]: DEBUG nova.compute.manager [req-c1914700-555c-4995-9c4e-01c955518bb2 req-f46866a0-7b63-4cdd-b981-1d8603de87d0 service nova] [instance: 1520999b-e3e9-41b3-82e4-91bb556e96c4] Refreshing instance network info cache due to event network-changed-c82cf29b-2996-4e7e-a280-4c7943a6865a. {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11655}} [ 2452.296279] env[63279]: DEBUG oslo_concurrency.lockutils [req-c1914700-555c-4995-9c4e-01c955518bb2 req-f46866a0-7b63-4cdd-b981-1d8603de87d0 service nova] Acquiring lock "refresh_cache-1520999b-e3e9-41b3-82e4-91bb556e96c4" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2452.296425] env[63279]: DEBUG oslo_concurrency.lockutils [req-c1914700-555c-4995-9c4e-01c955518bb2 req-f46866a0-7b63-4cdd-b981-1d8603de87d0 service nova] Acquired lock "refresh_cache-1520999b-e3e9-41b3-82e4-91bb556e96c4" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2452.296590] env[63279]: DEBUG nova.network.neutron [req-c1914700-555c-4995-9c4e-01c955518bb2 req-f46866a0-7b63-4cdd-b981-1d8603de87d0 service nova] [instance: 1520999b-e3e9-41b3-82e4-91bb556e96c4] Refreshing network info cache for port c82cf29b-2996-4e7e-a280-4c7943a6865a {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2452.413750] env[63279]: DEBUG oslo_vmware.api [None req-27cdefc2-b38d-4179-a14e-4c849b15f7d9 tempest-ServerGroupTestJSON-1647649028 tempest-ServerGroupTestJSON-1647649028-project-member] Task: {'id': task-2088243, 'name': CopyVirtualDisk_Task} progress is 100%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2452.440568] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2452.458980] env[63279]: DEBUG nova.network.neutron [None req-62d0e9e4-5a2c-4038-ae6d-5e47089198ff tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 246b5346-93b1-4a84-921c-d028f3554d3d] Updating instance_info_cache with network_info: [{"id": "be3c9abe-2016-4ca7-9982-16776729e694", "address": "fa:16:3e:95:a8:19", "network": {"id": "1237aa69-f92f-4996-893d-4007ba590d1d", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-544783943-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.237", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "996f8d6e14a14ac39f207eced547ef33", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d88b750a-0e7d-4f16-8bd5-8e6d5743b720", "external-id": "nsx-vlan-transportzone-715", "segmentation_id": 715, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbe3c9abe-20", "ovs_interfaceid": "be3c9abe-2016-4ca7-9982-16776729e694", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2452.913206] env[63279]: DEBUG oslo_vmware.api [None req-27cdefc2-b38d-4179-a14e-4c849b15f7d9 tempest-ServerGroupTestJSON-1647649028 tempest-ServerGroupTestJSON-1647649028-project-member] Task: {'id': task-2088243, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.562684} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2452.913547] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-27cdefc2-b38d-4179-a14e-4c849b15f7d9 tempest-ServerGroupTestJSON-1647649028 tempest-ServerGroupTestJSON-1647649028-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] c24509d7-0ef2-4a5e-843d-c843888118cc/c24509d7-0ef2-4a5e-843d-c843888118cc.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2452.913693] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-27cdefc2-b38d-4179-a14e-4c849b15f7d9 tempest-ServerGroupTestJSON-1647649028 tempest-ServerGroupTestJSON-1647649028-project-member] [instance: c24509d7-0ef2-4a5e-843d-c843888118cc] Extending root virtual disk to 1048576 {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2452.913939] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-749dafdd-bf2a-42d8-9a1f-b9d6d048bb4d {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2452.922663] env[63279]: DEBUG oslo_vmware.api [None req-27cdefc2-b38d-4179-a14e-4c849b15f7d9 tempest-ServerGroupTestJSON-1647649028 tempest-ServerGroupTestJSON-1647649028-project-member] Waiting for the task: (returnval){ [ 2452.922663] env[63279]: value = "task-2088244" [ 2452.922663] env[63279]: _type = "Task" [ 2452.922663] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2452.929831] env[63279]: DEBUG oslo_vmware.api [None req-27cdefc2-b38d-4179-a14e-4c849b15f7d9 tempest-ServerGroupTestJSON-1647649028 tempest-ServerGroupTestJSON-1647649028-project-member] Task: {'id': task-2088244, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2452.962060] env[63279]: DEBUG oslo_concurrency.lockutils [None req-62d0e9e4-5a2c-4038-ae6d-5e47089198ff tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Releasing lock "refresh_cache-246b5346-93b1-4a84-921c-d028f3554d3d" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2453.012566] env[63279]: DEBUG nova.network.neutron [req-c1914700-555c-4995-9c4e-01c955518bb2 req-f46866a0-7b63-4cdd-b981-1d8603de87d0 service nova] [instance: 1520999b-e3e9-41b3-82e4-91bb556e96c4] Updated VIF entry in instance network info cache for port c82cf29b-2996-4e7e-a280-4c7943a6865a. 
{{(pid=63279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2453.012939] env[63279]: DEBUG nova.network.neutron [req-c1914700-555c-4995-9c4e-01c955518bb2 req-f46866a0-7b63-4cdd-b981-1d8603de87d0 service nova] [instance: 1520999b-e3e9-41b3-82e4-91bb556e96c4] Updating instance_info_cache with network_info: [{"id": "c82cf29b-2996-4e7e-a280-4c7943a6865a", "address": "fa:16:3e:8a:ea:54", "network": {"id": "1e35690a-f100-41b7-a461-7334966ef784", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1516696428-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.198", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f9427c264e8e41998f579af352cb48cf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "15922696-dc08-44ef-97be-0b09a9dfeae8", "external-id": "nsx-vlan-transportzone-791", "segmentation_id": 791, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc82cf29b-29", "ovs_interfaceid": "c82cf29b-2996-4e7e-a280-4c7943a6865a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2453.432942] env[63279]: DEBUG oslo_vmware.api [None req-27cdefc2-b38d-4179-a14e-4c849b15f7d9 tempest-ServerGroupTestJSON-1647649028 tempest-ServerGroupTestJSON-1647649028-project-member] Task: {'id': task-2088244, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.130623} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2453.434028] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-27cdefc2-b38d-4179-a14e-4c849b15f7d9 tempest-ServerGroupTestJSON-1647649028 tempest-ServerGroupTestJSON-1647649028-project-member] [instance: c24509d7-0ef2-4a5e-843d-c843888118cc] Extended root virtual disk {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2453.434028] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa67f006-3d56-412c-bc89-96d32e886852 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2453.455423] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-27cdefc2-b38d-4179-a14e-4c849b15f7d9 tempest-ServerGroupTestJSON-1647649028 tempest-ServerGroupTestJSON-1647649028-project-member] [instance: c24509d7-0ef2-4a5e-843d-c843888118cc] Reconfiguring VM instance instance-00000075 to attach disk [datastore1] c24509d7-0ef2-4a5e-843d-c843888118cc/c24509d7-0ef2-4a5e-843d-c843888118cc.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2453.455725] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9c8f88d4-ff3e-4abf-80d2-53360ee55a3f {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2453.470669] env[63279]: DEBUG nova.compute.manager [None req-62d0e9e4-5a2c-4038-ae6d-5e47089198ff tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 246b5346-93b1-4a84-921c-d028f3554d3d] Hypervisor driver does not support instance shared storage check, assuming it's not on shared storage {{(pid=63279) _is_instance_storage_shared /opt/stack/nova/nova/compute/manager.py:901}} [ 2453.478903] env[63279]: DEBUG oslo_vmware.api [None req-27cdefc2-b38d-4179-a14e-4c849b15f7d9 tempest-ServerGroupTestJSON-1647649028 tempest-ServerGroupTestJSON-1647649028-project-member] Waiting for the task: (returnval){ [ 2453.478903] env[63279]: value = "task-2088245" [ 2453.478903] env[63279]: _type = "Task" [ 2453.478903] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2453.487125] env[63279]: DEBUG oslo_vmware.api [None req-27cdefc2-b38d-4179-a14e-4c849b15f7d9 tempest-ServerGroupTestJSON-1647649028 tempest-ServerGroupTestJSON-1647649028-project-member] Task: {'id': task-2088245, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2453.515912] env[63279]: DEBUG oslo_concurrency.lockutils [req-c1914700-555c-4995-9c4e-01c955518bb2 req-f46866a0-7b63-4cdd-b981-1d8603de87d0 service nova] Releasing lock "refresh_cache-1520999b-e3e9-41b3-82e4-91bb556e96c4" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2453.989628] env[63279]: DEBUG oslo_vmware.api [None req-27cdefc2-b38d-4179-a14e-4c849b15f7d9 tempest-ServerGroupTestJSON-1647649028 tempest-ServerGroupTestJSON-1647649028-project-member] Task: {'id': task-2088245, 'name': ReconfigVM_Task, 'duration_secs': 0.279325} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2453.989958] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-27cdefc2-b38d-4179-a14e-4c849b15f7d9 tempest-ServerGroupTestJSON-1647649028 tempest-ServerGroupTestJSON-1647649028-project-member] [instance: c24509d7-0ef2-4a5e-843d-c843888118cc] Reconfigured VM instance instance-00000075 to attach disk [datastore1] c24509d7-0ef2-4a5e-843d-c843888118cc/c24509d7-0ef2-4a5e-843d-c843888118cc.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2453.990502] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-6337fffd-866f-4418-848d-8fc5cc6c46c2 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2453.996318] env[63279]: DEBUG oslo_vmware.api [None req-27cdefc2-b38d-4179-a14e-4c849b15f7d9 tempest-ServerGroupTestJSON-1647649028 tempest-ServerGroupTestJSON-1647649028-project-member] Waiting for the task: (returnval){ [ 2453.996318] env[63279]: value = "task-2088246" [ 2453.996318] env[63279]: _type = "Task" [ 2453.996318] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2454.003809] env[63279]: DEBUG oslo_vmware.api [None req-27cdefc2-b38d-4179-a14e-4c849b15f7d9 tempest-ServerGroupTestJSON-1647649028 tempest-ServerGroupTestJSON-1647649028-project-member] Task: {'id': task-2088246, 'name': Rename_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2454.506269] env[63279]: DEBUG oslo_vmware.api [None req-27cdefc2-b38d-4179-a14e-4c849b15f7d9 tempest-ServerGroupTestJSON-1647649028 tempest-ServerGroupTestJSON-1647649028-project-member] Task: {'id': task-2088246, 'name': Rename_Task} progress is 14%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2454.568055] env[63279]: DEBUG oslo_concurrency.lockutils [None req-62d0e9e4-5a2c-4038-ae6d-5e47089198ff tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2454.568308] env[63279]: DEBUG oslo_concurrency.lockutils [None req-62d0e9e4-5a2c-4038-ae6d-5e47089198ff tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2455.006872] env[63279]: DEBUG oslo_vmware.api [None req-27cdefc2-b38d-4179-a14e-4c849b15f7d9 tempest-ServerGroupTestJSON-1647649028 tempest-ServerGroupTestJSON-1647649028-project-member] Task: {'id': task-2088246, 'name': Rename_Task, 'duration_secs': 0.581213} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2455.007203] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-27cdefc2-b38d-4179-a14e-4c849b15f7d9 tempest-ServerGroupTestJSON-1647649028 tempest-ServerGroupTestJSON-1647649028-project-member] [instance: c24509d7-0ef2-4a5e-843d-c843888118cc] Powering on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2455.007407] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ea037ffc-ac40-4846-b8d7-10901e444e6b {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2455.013497] env[63279]: DEBUG oslo_vmware.api [None req-27cdefc2-b38d-4179-a14e-4c849b15f7d9 tempest-ServerGroupTestJSON-1647649028 tempest-ServerGroupTestJSON-1647649028-project-member] Waiting for the task: (returnval){ [ 2455.013497] env[63279]: value = "task-2088247" [ 2455.013497] env[63279]: _type = "Task" [ 2455.013497] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2455.020449] env[63279]: DEBUG oslo_vmware.api [None req-27cdefc2-b38d-4179-a14e-4c849b15f7d9 tempest-ServerGroupTestJSON-1647649028 tempest-ServerGroupTestJSON-1647649028-project-member] Task: {'id': task-2088247, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2455.071264] env[63279]: DEBUG nova.objects.instance [None req-62d0e9e4-5a2c-4038-ae6d-5e47089198ff tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Lazy-loading 'migration_context' on Instance uuid 246b5346-93b1-4a84-921c-d028f3554d3d {{(pid=63279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2455.523725] env[63279]: DEBUG oslo_vmware.api [None req-27cdefc2-b38d-4179-a14e-4c849b15f7d9 tempest-ServerGroupTestJSON-1647649028 tempest-ServerGroupTestJSON-1647649028-project-member] Task: {'id': task-2088247, 'name': PowerOnVM_Task, 'duration_secs': 0.497163} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2455.523958] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-27cdefc2-b38d-4179-a14e-4c849b15f7d9 tempest-ServerGroupTestJSON-1647649028 tempest-ServerGroupTestJSON-1647649028-project-member] [instance: c24509d7-0ef2-4a5e-843d-c843888118cc] Powered on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2455.524202] env[63279]: INFO nova.compute.manager [None req-27cdefc2-b38d-4179-a14e-4c849b15f7d9 tempest-ServerGroupTestJSON-1647649028 tempest-ServerGroupTestJSON-1647649028-project-member] [instance: c24509d7-0ef2-4a5e-843d-c843888118cc] Took 9.07 seconds to spawn the instance on the hypervisor. 
[ 2455.524388] env[63279]: DEBUG nova.compute.manager [None req-27cdefc2-b38d-4179-a14e-4c849b15f7d9 tempest-ServerGroupTestJSON-1647649028 tempest-ServerGroupTestJSON-1647649028-project-member] [instance: c24509d7-0ef2-4a5e-843d-c843888118cc] Checking state {{(pid=63279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2455.525158] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-903132fc-3a67-456a-a280-bc3c02e5754d {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2455.678145] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75dca224-e571-4da7-960e-455954f15d78 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2455.685722] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-147f5d36-5a6e-4e59-9249-cfbb2be35b2d {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2455.715752] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9341f148-0869-4f38-b280-b0a107142832 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2455.722642] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b43451af-0607-4740-87d4-62045b53731f {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2455.736317] env[63279]: DEBUG nova.compute.provider_tree [None req-62d0e9e4-5a2c-4038-ae6d-5e47089198ff tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Inventory has not changed in ProviderTree for provider: 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2456.040722] env[63279]: INFO nova.compute.manager [None req-27cdefc2-b38d-4179-a14e-4c849b15f7d9 tempest-ServerGroupTestJSON-1647649028 tempest-ServerGroupTestJSON-1647649028-project-member] [instance: c24509d7-0ef2-4a5e-843d-c843888118cc] Took 17.07 seconds to build instance. 
[ 2456.239481] env[63279]: DEBUG nova.scheduler.client.report [None req-62d0e9e4-5a2c-4038-ae6d-5e47089198ff tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Inventory has not changed for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2456.543302] env[63279]: DEBUG oslo_concurrency.lockutils [None req-27cdefc2-b38d-4179-a14e-4c849b15f7d9 tempest-ServerGroupTestJSON-1647649028 tempest-ServerGroupTestJSON-1647649028-project-member] Lock "c24509d7-0ef2-4a5e-843d-c843888118cc" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 18.580s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2456.846780] env[63279]: DEBUG oslo_concurrency.lockutils [None req-92a5a236-2fa2-47e3-8801-71472e1121bd tempest-ServerGroupTestJSON-1647649028 tempest-ServerGroupTestJSON-1647649028-project-member] Acquiring lock "c24509d7-0ef2-4a5e-843d-c843888118cc" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2456.846780] env[63279]: DEBUG oslo_concurrency.lockutils [None req-92a5a236-2fa2-47e3-8801-71472e1121bd tempest-ServerGroupTestJSON-1647649028 tempest-ServerGroupTestJSON-1647649028-project-member] Lock "c24509d7-0ef2-4a5e-843d-c843888118cc" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2456.846780] env[63279]: DEBUG oslo_concurrency.lockutils [None req-92a5a236-2fa2-47e3-8801-71472e1121bd tempest-ServerGroupTestJSON-1647649028 tempest-ServerGroupTestJSON-1647649028-project-member] Acquiring lock "c24509d7-0ef2-4a5e-843d-c843888118cc-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2456.847029] env[63279]: DEBUG oslo_concurrency.lockutils [None req-92a5a236-2fa2-47e3-8801-71472e1121bd tempest-ServerGroupTestJSON-1647649028 tempest-ServerGroupTestJSON-1647649028-project-member] Lock "c24509d7-0ef2-4a5e-843d-c843888118cc-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2456.847130] env[63279]: DEBUG oslo_concurrency.lockutils [None req-92a5a236-2fa2-47e3-8801-71472e1121bd tempest-ServerGroupTestJSON-1647649028 tempest-ServerGroupTestJSON-1647649028-project-member] Lock "c24509d7-0ef2-4a5e-843d-c843888118cc-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2456.849555] env[63279]: INFO 
nova.compute.manager [None req-92a5a236-2fa2-47e3-8801-71472e1121bd tempest-ServerGroupTestJSON-1647649028 tempest-ServerGroupTestJSON-1647649028-project-member] [instance: c24509d7-0ef2-4a5e-843d-c843888118cc] Terminating instance [ 2457.250419] env[63279]: DEBUG oslo_concurrency.lockutils [None req-62d0e9e4-5a2c-4038-ae6d-5e47089198ff tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: held 2.682s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2457.353429] env[63279]: DEBUG nova.compute.manager [None req-92a5a236-2fa2-47e3-8801-71472e1121bd tempest-ServerGroupTestJSON-1647649028 tempest-ServerGroupTestJSON-1647649028-project-member] [instance: c24509d7-0ef2-4a5e-843d-c843888118cc] Start destroying the instance on the hypervisor. {{(pid=63279) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2457.353637] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-92a5a236-2fa2-47e3-8801-71472e1121bd tempest-ServerGroupTestJSON-1647649028 tempest-ServerGroupTestJSON-1647649028-project-member] [instance: c24509d7-0ef2-4a5e-843d-c843888118cc] Destroying instance {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2457.354517] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d8b041c-4e2f-4397-bf7c-578268c300bc {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2457.362367] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-92a5a236-2fa2-47e3-8801-71472e1121bd tempest-ServerGroupTestJSON-1647649028 tempest-ServerGroupTestJSON-1647649028-project-member] [instance: c24509d7-0ef2-4a5e-843d-c843888118cc] Powering off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2457.362591] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-10f5a31b-61f7-4684-ad4e-51dd226eafd6 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2457.368784] env[63279]: DEBUG oslo_vmware.api [None req-92a5a236-2fa2-47e3-8801-71472e1121bd tempest-ServerGroupTestJSON-1647649028 tempest-ServerGroupTestJSON-1647649028-project-member] Waiting for the task: (returnval){ [ 2457.368784] env[63279]: value = "task-2088248" [ 2457.368784] env[63279]: _type = "Task" [ 2457.368784] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2457.376146] env[63279]: DEBUG oslo_vmware.api [None req-92a5a236-2fa2-47e3-8801-71472e1121bd tempest-ServerGroupTestJSON-1647649028 tempest-ServerGroupTestJSON-1647649028-project-member] Task: {'id': task-2088248, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2457.878430] env[63279]: DEBUG oslo_vmware.api [None req-92a5a236-2fa2-47e3-8801-71472e1121bd tempest-ServerGroupTestJSON-1647649028 tempest-ServerGroupTestJSON-1647649028-project-member] Task: {'id': task-2088248, 'name': PowerOffVM_Task, 'duration_secs': 0.20003} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2457.878782] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-92a5a236-2fa2-47e3-8801-71472e1121bd tempest-ServerGroupTestJSON-1647649028 tempest-ServerGroupTestJSON-1647649028-project-member] [instance: c24509d7-0ef2-4a5e-843d-c843888118cc] Powered off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2457.879014] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-92a5a236-2fa2-47e3-8801-71472e1121bd tempest-ServerGroupTestJSON-1647649028 tempest-ServerGroupTestJSON-1647649028-project-member] [instance: c24509d7-0ef2-4a5e-843d-c843888118cc] Unregistering the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2457.879311] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-fbcafb9b-3f99-4da4-9795-d5bc9e2c3c6e {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2457.991026] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-92a5a236-2fa2-47e3-8801-71472e1121bd tempest-ServerGroupTestJSON-1647649028 tempest-ServerGroupTestJSON-1647649028-project-member] [instance: c24509d7-0ef2-4a5e-843d-c843888118cc] Unregistered the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2457.991334] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-92a5a236-2fa2-47e3-8801-71472e1121bd tempest-ServerGroupTestJSON-1647649028 tempest-ServerGroupTestJSON-1647649028-project-member] [instance: c24509d7-0ef2-4a5e-843d-c843888118cc] Deleting contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2457.991611] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-92a5a236-2fa2-47e3-8801-71472e1121bd tempest-ServerGroupTestJSON-1647649028 tempest-ServerGroupTestJSON-1647649028-project-member] Deleting the datastore file [datastore1] c24509d7-0ef2-4a5e-843d-c843888118cc {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2457.991915] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-de1a6777-2b6b-47cd-8367-749c628e6677 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2457.999809] env[63279]: DEBUG oslo_vmware.api [None req-92a5a236-2fa2-47e3-8801-71472e1121bd tempest-ServerGroupTestJSON-1647649028 tempest-ServerGroupTestJSON-1647649028-project-member] Waiting for the task: (returnval){ [ 2457.999809] env[63279]: value = "task-2088250" [ 2457.999809] env[63279]: _type = "Task" [ 2457.999809] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2458.007487] env[63279]: DEBUG oslo_vmware.api [None req-92a5a236-2fa2-47e3-8801-71472e1121bd tempest-ServerGroupTestJSON-1647649028 tempest-ServerGroupTestJSON-1647649028-project-member] Task: {'id': task-2088250, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2458.509836] env[63279]: DEBUG oslo_vmware.api [None req-92a5a236-2fa2-47e3-8801-71472e1121bd tempest-ServerGroupTestJSON-1647649028 tempest-ServerGroupTestJSON-1647649028-project-member] Task: {'id': task-2088250, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.174948} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2458.510269] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-92a5a236-2fa2-47e3-8801-71472e1121bd tempest-ServerGroupTestJSON-1647649028 tempest-ServerGroupTestJSON-1647649028-project-member] Deleted the datastore file {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2458.510476] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-92a5a236-2fa2-47e3-8801-71472e1121bd tempest-ServerGroupTestJSON-1647649028 tempest-ServerGroupTestJSON-1647649028-project-member] [instance: c24509d7-0ef2-4a5e-843d-c843888118cc] Deleted contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2458.510752] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-92a5a236-2fa2-47e3-8801-71472e1121bd tempest-ServerGroupTestJSON-1647649028 tempest-ServerGroupTestJSON-1647649028-project-member] [instance: c24509d7-0ef2-4a5e-843d-c843888118cc] Instance destroyed {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2458.511055] env[63279]: INFO nova.compute.manager [None req-92a5a236-2fa2-47e3-8801-71472e1121bd tempest-ServerGroupTestJSON-1647649028 tempest-ServerGroupTestJSON-1647649028-project-member] [instance: c24509d7-0ef2-4a5e-843d-c843888118cc] Took 1.16 seconds to destroy the instance on the hypervisor. [ 2458.511431] env[63279]: DEBUG oslo.service.loopingcall [None req-92a5a236-2fa2-47e3-8801-71472e1121bd tempest-ServerGroupTestJSON-1647649028 tempest-ServerGroupTestJSON-1647649028-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2458.511720] env[63279]: DEBUG nova.compute.manager [-] [instance: c24509d7-0ef2-4a5e-843d-c843888118cc] Deallocating network for instance {{(pid=63279) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2458.511854] env[63279]: DEBUG nova.network.neutron [-] [instance: c24509d7-0ef2-4a5e-843d-c843888118cc] deallocate_for_instance() {{(pid=63279) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2458.758965] env[63279]: DEBUG nova.compute.manager [req-08d8d936-6da2-4440-abb5-dc427a62beb6 req-c7cbe223-43f3-4321-b28c-3f7c24ebc9b9 service nova] [instance: c24509d7-0ef2-4a5e-843d-c843888118cc] Received event network-vif-deleted-b6904016-bb41-4c01-9e4b-dd2802955058 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2458.758965] env[63279]: INFO nova.compute.manager [req-08d8d936-6da2-4440-abb5-dc427a62beb6 req-c7cbe223-43f3-4321-b28c-3f7c24ebc9b9 service nova] [instance: c24509d7-0ef2-4a5e-843d-c843888118cc] Neutron deleted interface b6904016-bb41-4c01-9e4b-dd2802955058; detaching it from the instance and deleting it from the info cache [ 2458.758965] env[63279]: DEBUG nova.network.neutron [req-08d8d936-6da2-4440-abb5-dc427a62beb6 req-c7cbe223-43f3-4321-b28c-3f7c24ebc9b9 service nova] [instance: c24509d7-0ef2-4a5e-843d-c843888118cc] Updating instance_info_cache with network_info: [] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2458.786826] env[63279]: INFO nova.compute.manager [None req-62d0e9e4-5a2c-4038-ae6d-5e47089198ff tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 246b5346-93b1-4a84-921c-d028f3554d3d] Swapping old allocation on dict_keys(['0ba7c625-a0fc-4d3c-b804-196d00f00137']) held by migration 5f8a398a-1225-4b02-a73e-9bf01aba7b68 for instance [ 2458.808125] env[63279]: DEBUG nova.scheduler.client.report [None req-62d0e9e4-5a2c-4038-ae6d-5e47089198ff tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Overwriting current allocation {'allocations': {'0ba7c625-a0fc-4d3c-b804-196d00f00137': {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}, 'generation': 174}}, 'project_id': '996f8d6e14a14ac39f207eced547ef33', 'user_id': 'efd8e2a39ad843b79762189fcc8aa8ad', 'consumer_generation': 1} on consumer 246b5346-93b1-4a84-921c-d028f3554d3d {{(pid=63279) move_allocations /opt/stack/nova/nova/scheduler/client/report.py:2036}} [ 2458.882817] env[63279]: DEBUG oslo_concurrency.lockutils [None req-62d0e9e4-5a2c-4038-ae6d-5e47089198ff tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Acquiring lock "refresh_cache-246b5346-93b1-4a84-921c-d028f3554d3d" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2458.883061] env[63279]: DEBUG oslo_concurrency.lockutils [None req-62d0e9e4-5a2c-4038-ae6d-5e47089198ff tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Acquired lock "refresh_cache-246b5346-93b1-4a84-921c-d028f3554d3d" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2458.883250] env[63279]: DEBUG nova.network.neutron [None req-62d0e9e4-5a2c-4038-ae6d-5e47089198ff tempest-ServerActionsTestOtherB-605016737 
tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 246b5346-93b1-4a84-921c-d028f3554d3d] Building network info cache for instance {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2459.238808] env[63279]: DEBUG nova.network.neutron [-] [instance: c24509d7-0ef2-4a5e-843d-c843888118cc] Updating instance_info_cache with network_info: [] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2459.263036] env[63279]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-12e84cde-17a3-439d-a3a5-58081ad17b44 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2459.273226] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b091efd-0195-4c82-8976-fbea3ca01c6c {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2459.301053] env[63279]: DEBUG nova.compute.manager [req-08d8d936-6da2-4440-abb5-dc427a62beb6 req-c7cbe223-43f3-4321-b28c-3f7c24ebc9b9 service nova] [instance: c24509d7-0ef2-4a5e-843d-c843888118cc] Detach interface failed, port_id=b6904016-bb41-4c01-9e4b-dd2802955058, reason: Instance c24509d7-0ef2-4a5e-843d-c843888118cc could not be found. {{(pid=63279) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11484}} [ 2459.611899] env[63279]: DEBUG nova.network.neutron [None req-62d0e9e4-5a2c-4038-ae6d-5e47089198ff tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 246b5346-93b1-4a84-921c-d028f3554d3d] Updating instance_info_cache with network_info: [{"id": "be3c9abe-2016-4ca7-9982-16776729e694", "address": "fa:16:3e:95:a8:19", "network": {"id": "1237aa69-f92f-4996-893d-4007ba590d1d", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-544783943-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.237", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "996f8d6e14a14ac39f207eced547ef33", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d88b750a-0e7d-4f16-8bd5-8e6d5743b720", "external-id": "nsx-vlan-transportzone-715", "segmentation_id": 715, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbe3c9abe-20", "ovs_interfaceid": "be3c9abe-2016-4ca7-9982-16776729e694", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2459.741543] env[63279]: INFO nova.compute.manager [-] [instance: c24509d7-0ef2-4a5e-843d-c843888118cc] Took 1.23 seconds to deallocate network for instance. 
[ 2460.115028] env[63279]: DEBUG oslo_concurrency.lockutils [None req-62d0e9e4-5a2c-4038-ae6d-5e47089198ff tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Releasing lock "refresh_cache-246b5346-93b1-4a84-921c-d028f3554d3d" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2460.115482] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b4f0e2a-fca3-4432-bbf1-2a1d910307a4 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2460.122550] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd60ddd5-4094-45c6-9794-073f84cd67c1 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2460.248225] env[63279]: DEBUG oslo_concurrency.lockutils [None req-92a5a236-2fa2-47e3-8801-71472e1121bd tempest-ServerGroupTestJSON-1647649028 tempest-ServerGroupTestJSON-1647649028-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2460.248485] env[63279]: DEBUG oslo_concurrency.lockutils [None req-92a5a236-2fa2-47e3-8801-71472e1121bd tempest-ServerGroupTestJSON-1647649028 tempest-ServerGroupTestJSON-1647649028-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2460.248707] env[63279]: DEBUG nova.objects.instance [None req-92a5a236-2fa2-47e3-8801-71472e1121bd tempest-ServerGroupTestJSON-1647649028 tempest-ServerGroupTestJSON-1647649028-project-member] Lazy-loading 'resources' on Instance uuid c24509d7-0ef2-4a5e-843d-c843888118cc {{(pid=63279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2460.880924] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc5671ee-a8cb-43ef-b065-e4b5ec5a22d9 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2460.891284] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8dfedc7-2c88-49f9-9fb4-61ba62c1887b {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2460.922042] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06bfc7dd-26be-41a6-b774-090ab377bcde {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2460.929806] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2cbb63d-0d9d-4747-ade1-cf2d463f32d1 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2460.943281] env[63279]: DEBUG nova.compute.provider_tree [None req-92a5a236-2fa2-47e3-8801-71472e1121bd tempest-ServerGroupTestJSON-1647649028 tempest-ServerGroupTestJSON-1647649028-project-member] Inventory has not changed in ProviderTree for provider: 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) update_inventory 
/opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2461.202951] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-62d0e9e4-5a2c-4038-ae6d-5e47089198ff tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 246b5346-93b1-4a84-921c-d028f3554d3d] Powering off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2461.203280] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-38d6fdf1-8651-47f0-9157-6f23dac74808 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2461.211588] env[63279]: DEBUG oslo_vmware.api [None req-62d0e9e4-5a2c-4038-ae6d-5e47089198ff tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Waiting for the task: (returnval){ [ 2461.211588] env[63279]: value = "task-2088251" [ 2461.211588] env[63279]: _type = "Task" [ 2461.211588] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2461.219996] env[63279]: DEBUG oslo_vmware.api [None req-62d0e9e4-5a2c-4038-ae6d-5e47089198ff tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Task: {'id': task-2088251, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2461.446720] env[63279]: DEBUG nova.scheduler.client.report [None req-92a5a236-2fa2-47e3-8801-71472e1121bd tempest-ServerGroupTestJSON-1647649028 tempest-ServerGroupTestJSON-1647649028-project-member] Inventory has not changed for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2461.721309] env[63279]: DEBUG oslo_vmware.api [None req-62d0e9e4-5a2c-4038-ae6d-5e47089198ff tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Task: {'id': task-2088251, 'name': PowerOffVM_Task, 'duration_secs': 0.263981} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2461.721573] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-62d0e9e4-5a2c-4038-ae6d-5e47089198ff tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 246b5346-93b1-4a84-921c-d028f3554d3d] Powered off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2461.722245] env[63279]: DEBUG nova.virt.hardware [None req-62d0e9e4-5a2c-4038-ae6d-5e47089198ff tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-13T17:30:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=30887889-e45b-4f67-8b3c-16216e594a90,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2461.722467] env[63279]: DEBUG nova.virt.hardware [None req-62d0e9e4-5a2c-4038-ae6d-5e47089198ff tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Flavor limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2461.722629] env[63279]: DEBUG nova.virt.hardware [None req-62d0e9e4-5a2c-4038-ae6d-5e47089198ff tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Image limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2461.722824] env[63279]: DEBUG nova.virt.hardware [None req-62d0e9e4-5a2c-4038-ae6d-5e47089198ff tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Flavor pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2461.722968] env[63279]: DEBUG nova.virt.hardware [None req-62d0e9e4-5a2c-4038-ae6d-5e47089198ff tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Image pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2461.723137] env[63279]: DEBUG nova.virt.hardware [None req-62d0e9e4-5a2c-4038-ae6d-5e47089198ff tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2461.723349] env[63279]: DEBUG nova.virt.hardware [None req-62d0e9e4-5a2c-4038-ae6d-5e47089198ff tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 2461.723510] env[63279]: DEBUG nova.virt.hardware [None req-62d0e9e4-5a2c-4038-ae6d-5e47089198ff tempest-ServerActionsTestOtherB-605016737 
tempest-ServerActionsTestOtherB-605016737-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 2461.723684] env[63279]: DEBUG nova.virt.hardware [None req-62d0e9e4-5a2c-4038-ae6d-5e47089198ff tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Got 1 possible topologies {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2461.723849] env[63279]: DEBUG nova.virt.hardware [None req-62d0e9e4-5a2c-4038-ae6d-5e47089198ff tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2461.724033] env[63279]: DEBUG nova.virt.hardware [None req-62d0e9e4-5a2c-4038-ae6d-5e47089198ff tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2461.728810] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e6ff0f18-c3fa-4df4-9482-b58fd1b99fa1 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2461.743608] env[63279]: DEBUG oslo_vmware.api [None req-62d0e9e4-5a2c-4038-ae6d-5e47089198ff tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Waiting for the task: (returnval){ [ 2461.743608] env[63279]: value = "task-2088252" [ 2461.743608] env[63279]: _type = "Task" [ 2461.743608] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2461.750845] env[63279]: DEBUG oslo_vmware.api [None req-62d0e9e4-5a2c-4038-ae6d-5e47089198ff tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Task: {'id': task-2088252, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2461.951969] env[63279]: DEBUG oslo_concurrency.lockutils [None req-92a5a236-2fa2-47e3-8801-71472e1121bd tempest-ServerGroupTestJSON-1647649028 tempest-ServerGroupTestJSON-1647649028-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.703s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2461.971725] env[63279]: INFO nova.scheduler.client.report [None req-92a5a236-2fa2-47e3-8801-71472e1121bd tempest-ServerGroupTestJSON-1647649028 tempest-ServerGroupTestJSON-1647649028-project-member] Deleted allocations for instance c24509d7-0ef2-4a5e-843d-c843888118cc [ 2462.066448] env[63279]: DEBUG nova.compute.manager [None req-954b17c5-8056-486f-8f88-f4f4e9a1d734 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: 6c4d6006-656b-4d30-a595-8985d7f6b1e7] Stashing vm_state: active {{(pid=63279) _prep_resize /opt/stack/nova/nova/compute/manager.py:6136}} [ 2462.254100] env[63279]: DEBUG oslo_vmware.api [None req-62d0e9e4-5a2c-4038-ae6d-5e47089198ff tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Task: {'id': task-2088252, 'name': ReconfigVM_Task, 'duration_secs': 0.222699} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2462.254917] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01b486f9-5e21-4542-bf50-6399f37244d0 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2462.275487] env[63279]: DEBUG nova.virt.hardware [None req-62d0e9e4-5a2c-4038-ae6d-5e47089198ff tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-13T17:30:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=30887889-e45b-4f67-8b3c-16216e594a90,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2462.275715] env[63279]: DEBUG nova.virt.hardware [None req-62d0e9e4-5a2c-4038-ae6d-5e47089198ff tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Flavor limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2462.275881] env[63279]: DEBUG nova.virt.hardware [None req-62d0e9e4-5a2c-4038-ae6d-5e47089198ff tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Image limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2462.276084] env[63279]: DEBUG nova.virt.hardware [None req-62d0e9e4-5a2c-4038-ae6d-5e47089198ff tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Flavor pref 
0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2462.276244] env[63279]: DEBUG nova.virt.hardware [None req-62d0e9e4-5a2c-4038-ae6d-5e47089198ff tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Image pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2462.276398] env[63279]: DEBUG nova.virt.hardware [None req-62d0e9e4-5a2c-4038-ae6d-5e47089198ff tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2462.276608] env[63279]: DEBUG nova.virt.hardware [None req-62d0e9e4-5a2c-4038-ae6d-5e47089198ff tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 2462.276777] env[63279]: DEBUG nova.virt.hardware [None req-62d0e9e4-5a2c-4038-ae6d-5e47089198ff tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 2462.276948] env[63279]: DEBUG nova.virt.hardware [None req-62d0e9e4-5a2c-4038-ae6d-5e47089198ff tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Got 1 possible topologies {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2462.277130] env[63279]: DEBUG nova.virt.hardware [None req-62d0e9e4-5a2c-4038-ae6d-5e47089198ff tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2462.277313] env[63279]: DEBUG nova.virt.hardware [None req-62d0e9e4-5a2c-4038-ae6d-5e47089198ff tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2462.278084] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e69b182b-9ab5-49bf-afb9-a5f90fdf8e93 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2462.283528] env[63279]: DEBUG oslo_vmware.api [None req-62d0e9e4-5a2c-4038-ae6d-5e47089198ff tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Waiting for the task: (returnval){ [ 2462.283528] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]527949cd-75f6-c2b1-7d02-662640997aa5" [ 2462.283528] env[63279]: _type = "Task" [ 2462.283528] env[63279]: } to complete. 
{{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2462.291037] env[63279]: DEBUG oslo_vmware.api [None req-62d0e9e4-5a2c-4038-ae6d-5e47089198ff tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]527949cd-75f6-c2b1-7d02-662640997aa5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2462.479070] env[63279]: DEBUG oslo_concurrency.lockutils [None req-92a5a236-2fa2-47e3-8801-71472e1121bd tempest-ServerGroupTestJSON-1647649028 tempest-ServerGroupTestJSON-1647649028-project-member] Lock "c24509d7-0ef2-4a5e-843d-c843888118cc" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.632s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2462.582810] env[63279]: DEBUG oslo_concurrency.lockutils [None req-954b17c5-8056-486f-8f88-f4f4e9a1d734 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2462.583099] env[63279]: DEBUG oslo_concurrency.lockutils [None req-954b17c5-8056-486f-8f88-f4f4e9a1d734 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2462.793733] env[63279]: DEBUG oslo_vmware.api [None req-62d0e9e4-5a2c-4038-ae6d-5e47089198ff tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]527949cd-75f6-c2b1-7d02-662640997aa5, 'name': SearchDatastore_Task, 'duration_secs': 0.008959} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2462.798940] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-62d0e9e4-5a2c-4038-ae6d-5e47089198ff tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 246b5346-93b1-4a84-921c-d028f3554d3d] Reconfiguring VM instance instance-0000006f to detach disk 2000 {{(pid=63279) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 2462.799220] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8c77fd68-412c-4d2f-9c65-80e26594c731 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2462.815973] env[63279]: DEBUG oslo_vmware.api [None req-62d0e9e4-5a2c-4038-ae6d-5e47089198ff tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Waiting for the task: (returnval){ [ 2462.815973] env[63279]: value = "task-2088253" [ 2462.815973] env[63279]: _type = "Task" [ 2462.815973] env[63279]: } to complete. 
{{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2462.823125] env[63279]: DEBUG oslo_vmware.api [None req-62d0e9e4-5a2c-4038-ae6d-5e47089198ff tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Task: {'id': task-2088253, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2463.088395] env[63279]: INFO nova.compute.claims [None req-954b17c5-8056-486f-8f88-f4f4e9a1d734 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: 6c4d6006-656b-4d30-a595-8985d7f6b1e7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2463.325344] env[63279]: DEBUG oslo_vmware.api [None req-62d0e9e4-5a2c-4038-ae6d-5e47089198ff tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Task: {'id': task-2088253, 'name': ReconfigVM_Task, 'duration_secs': 0.304046} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2463.325657] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-62d0e9e4-5a2c-4038-ae6d-5e47089198ff tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 246b5346-93b1-4a84-921c-d028f3554d3d] Reconfigured VM instance instance-0000006f to detach disk 2000 {{(pid=63279) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 2463.326429] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-557cfbf0-c7c0-4c7f-83a8-7fbd58d388e7 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2463.351711] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-62d0e9e4-5a2c-4038-ae6d-5e47089198ff tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 246b5346-93b1-4a84-921c-d028f3554d3d] Reconfiguring VM instance instance-0000006f to attach disk [datastore1] 246b5346-93b1-4a84-921c-d028f3554d3d/246b5346-93b1-4a84-921c-d028f3554d3d.vmdk or device None with type thin {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2463.351989] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-755e9529-bd06-4ce9-a518-5524850442a5 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2463.369916] env[63279]: DEBUG oslo_vmware.api [None req-62d0e9e4-5a2c-4038-ae6d-5e47089198ff tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Waiting for the task: (returnval){ [ 2463.369916] env[63279]: value = "task-2088254" [ 2463.369916] env[63279]: _type = "Task" [ 2463.369916] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2463.378069] env[63279]: DEBUG oslo_vmware.api [None req-62d0e9e4-5a2c-4038-ae6d-5e47089198ff tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Task: {'id': task-2088254, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2463.594941] env[63279]: INFO nova.compute.resource_tracker [None req-954b17c5-8056-486f-8f88-f4f4e9a1d734 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: 6c4d6006-656b-4d30-a595-8985d7f6b1e7] Updating resource usage from migration 334d7c87-2c28-45ff-a326-a7187552859b [ 2463.693515] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38687021-3db8-4763-9a5c-8a66b8c1e92c {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2463.701818] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2dc6453b-d694-4645-8bac-7ef89827d542 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2463.731745] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2476706a-5682-4e74-9927-42e825e6fd83 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2463.740029] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-058983cb-bd26-48b0-8a5b-6886b3c1c7d0 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2463.755590] env[63279]: DEBUG nova.compute.provider_tree [None req-954b17c5-8056-486f-8f88-f4f4e9a1d734 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Inventory has not changed in ProviderTree for provider: 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2463.881750] env[63279]: DEBUG oslo_vmware.api [None req-62d0e9e4-5a2c-4038-ae6d-5e47089198ff tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Task: {'id': task-2088254, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2464.258362] env[63279]: DEBUG nova.scheduler.client.report [None req-954b17c5-8056-486f-8f88-f4f4e9a1d734 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Inventory has not changed for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2464.380809] env[63279]: DEBUG oslo_vmware.api [None req-62d0e9e4-5a2c-4038-ae6d-5e47089198ff tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Task: {'id': task-2088254, 'name': ReconfigVM_Task, 'duration_secs': 0.619801} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2464.381037] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-62d0e9e4-5a2c-4038-ae6d-5e47089198ff tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 246b5346-93b1-4a84-921c-d028f3554d3d] Reconfigured VM instance instance-0000006f to attach disk [datastore1] 246b5346-93b1-4a84-921c-d028f3554d3d/246b5346-93b1-4a84-921c-d028f3554d3d.vmdk or device None with type thin {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2464.382244] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d0d1995-d91a-47df-94bf-6b46015f11c6 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2464.403179] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e0a7118-818b-47a3-8bd0-a51eec94e389 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2464.424625] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16d4ffd7-4c55-4fcb-953c-335549794e7b {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2464.447518] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea3772f2-3842-4eb9-91b7-6ad6a524c09e {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2464.455167] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-62d0e9e4-5a2c-4038-ae6d-5e47089198ff tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 246b5346-93b1-4a84-921c-d028f3554d3d] Powering on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2464.455417] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-76a5b5b2-a01c-4feb-8c0f-a315ebb3e45e {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2464.461686] env[63279]: DEBUG oslo_vmware.api [None req-62d0e9e4-5a2c-4038-ae6d-5e47089198ff tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Waiting for the task: (returnval){ [ 2464.461686] env[63279]: value = "task-2088255" [ 2464.461686] env[63279]: _type = "Task" [ 2464.461686] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2464.469472] env[63279]: DEBUG oslo_vmware.api [None req-62d0e9e4-5a2c-4038-ae6d-5e47089198ff tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Task: {'id': task-2088255, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2464.763729] env[63279]: DEBUG oslo_concurrency.lockutils [None req-954b17c5-8056-486f-8f88-f4f4e9a1d734 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.180s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2464.765710] env[63279]: INFO nova.compute.manager [None req-954b17c5-8056-486f-8f88-f4f4e9a1d734 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: 6c4d6006-656b-4d30-a595-8985d7f6b1e7] Migrating [ 2464.973554] env[63279]: DEBUG oslo_vmware.api [None req-62d0e9e4-5a2c-4038-ae6d-5e47089198ff tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Task: {'id': task-2088255, 'name': PowerOnVM_Task, 'duration_secs': 0.472575} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2464.973803] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-62d0e9e4-5a2c-4038-ae6d-5e47089198ff tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 246b5346-93b1-4a84-921c-d028f3554d3d] Powered on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2465.279170] env[63279]: DEBUG oslo_concurrency.lockutils [None req-954b17c5-8056-486f-8f88-f4f4e9a1d734 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Acquiring lock "refresh_cache-6c4d6006-656b-4d30-a595-8985d7f6b1e7" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2465.279560] env[63279]: DEBUG oslo_concurrency.lockutils [None req-954b17c5-8056-486f-8f88-f4f4e9a1d734 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Acquired lock "refresh_cache-6c4d6006-656b-4d30-a595-8985d7f6b1e7" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2465.279560] env[63279]: DEBUG nova.network.neutron [None req-954b17c5-8056-486f-8f88-f4f4e9a1d734 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: 6c4d6006-656b-4d30-a595-8985d7f6b1e7] Building network info cache for instance {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2466.024455] env[63279]: DEBUG nova.network.neutron [None req-954b17c5-8056-486f-8f88-f4f4e9a1d734 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: 6c4d6006-656b-4d30-a595-8985d7f6b1e7] Updating instance_info_cache with network_info: [{"id": "548d4882-d674-412a-9b89-4691e9eda165", "address": "fa:16:3e:50:5f:f2", "network": {"id": "82ca068b-acea-4f60-b0b3-68ea1e08aebe", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-571699353-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.165", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, 
"dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f39174e9ff5649e0ade4391da383dfb2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "57d4be17-536f-4a81-bea9-6547bd50f4a3", "external-id": "nsx-vlan-transportzone-163", "segmentation_id": 163, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap548d4882-d6", "ovs_interfaceid": "548d4882-d674-412a-9b89-4691e9eda165", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2466.029427] env[63279]: INFO nova.compute.manager [None req-62d0e9e4-5a2c-4038-ae6d-5e47089198ff tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 246b5346-93b1-4a84-921c-d028f3554d3d] Updating instance to original state: 'active' [ 2466.527791] env[63279]: DEBUG oslo_concurrency.lockutils [None req-954b17c5-8056-486f-8f88-f4f4e9a1d734 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Releasing lock "refresh_cache-6c4d6006-656b-4d30-a595-8985d7f6b1e7" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2467.575942] env[63279]: DEBUG oslo_concurrency.lockutils [None req-0e0e044b-4820-4388-ad14-064b60a71dd4 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Acquiring lock "246b5346-93b1-4a84-921c-d028f3554d3d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2467.576447] env[63279]: DEBUG oslo_concurrency.lockutils [None req-0e0e044b-4820-4388-ad14-064b60a71dd4 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Lock "246b5346-93b1-4a84-921c-d028f3554d3d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2467.576532] env[63279]: DEBUG oslo_concurrency.lockutils [None req-0e0e044b-4820-4388-ad14-064b60a71dd4 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Acquiring lock "246b5346-93b1-4a84-921c-d028f3554d3d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2467.576695] env[63279]: DEBUG oslo_concurrency.lockutils [None req-0e0e044b-4820-4388-ad14-064b60a71dd4 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Lock "246b5346-93b1-4a84-921c-d028f3554d3d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2467.576888] env[63279]: DEBUG oslo_concurrency.lockutils [None req-0e0e044b-4820-4388-ad14-064b60a71dd4 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Lock "246b5346-93b1-4a84-921c-d028f3554d3d-events" "released" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2467.578813] env[63279]: INFO nova.compute.manager [None req-0e0e044b-4820-4388-ad14-064b60a71dd4 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 246b5346-93b1-4a84-921c-d028f3554d3d] Terminating instance [ 2468.042732] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-663f333a-b9ac-47bb-ac30-3dbbf9d3baed {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2468.064029] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-954b17c5-8056-486f-8f88-f4f4e9a1d734 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: 6c4d6006-656b-4d30-a595-8985d7f6b1e7] Updating instance '6c4d6006-656b-4d30-a595-8985d7f6b1e7' progress to 0 {{(pid=63279) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2468.082659] env[63279]: DEBUG nova.compute.manager [None req-0e0e044b-4820-4388-ad14-064b60a71dd4 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 246b5346-93b1-4a84-921c-d028f3554d3d] Start destroying the instance on the hypervisor. {{(pid=63279) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2468.082893] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-0e0e044b-4820-4388-ad14-064b60a71dd4 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 246b5346-93b1-4a84-921c-d028f3554d3d] Powering off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2468.083184] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1dac7726-ff72-4f1f-bc3d-3bc597c01282 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2468.092918] env[63279]: DEBUG oslo_vmware.api [None req-0e0e044b-4820-4388-ad14-064b60a71dd4 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Waiting for the task: (returnval){ [ 2468.092918] env[63279]: value = "task-2088256" [ 2468.092918] env[63279]: _type = "Task" [ 2468.092918] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2468.101461] env[63279]: DEBUG oslo_vmware.api [None req-0e0e044b-4820-4388-ad14-064b60a71dd4 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Task: {'id': task-2088256, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2468.568623] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-954b17c5-8056-486f-8f88-f4f4e9a1d734 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: 6c4d6006-656b-4d30-a595-8985d7f6b1e7] Powering off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2468.568919] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9ae36350-3632-4081-b486-c98aea420d5a {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2468.576633] env[63279]: DEBUG oslo_vmware.api [None req-954b17c5-8056-486f-8f88-f4f4e9a1d734 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Waiting for the task: (returnval){ [ 2468.576633] env[63279]: value = "task-2088257" [ 2468.576633] env[63279]: _type = "Task" [ 2468.576633] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2468.584658] env[63279]: DEBUG oslo_vmware.api [None req-954b17c5-8056-486f-8f88-f4f4e9a1d734 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Task: {'id': task-2088257, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2468.602032] env[63279]: DEBUG oslo_vmware.api [None req-0e0e044b-4820-4388-ad14-064b60a71dd4 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Task: {'id': task-2088256, 'name': PowerOffVM_Task, 'duration_secs': 0.216907} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2468.602336] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-0e0e044b-4820-4388-ad14-064b60a71dd4 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 246b5346-93b1-4a84-921c-d028f3554d3d] Powered off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2468.602547] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-0e0e044b-4820-4388-ad14-064b60a71dd4 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 246b5346-93b1-4a84-921c-d028f3554d3d] Volume detach. 
Driver type: vmdk {{(pid=63279) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 2468.602749] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-0e0e044b-4820-4388-ad14-064b60a71dd4 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 246b5346-93b1-4a84-921c-d028f3554d3d] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-427799', 'volume_id': '30fc3415-0254-4b9f-a6bb-e4447562cfab', 'name': 'volume-30fc3415-0254-4b9f-a6bb-e4447562cfab', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'attaching', 'instance': '246b5346-93b1-4a84-921c-d028f3554d3d', 'attached_at': '2025-01-13T18:02:50.000000', 'detached_at': '', 'volume_id': '30fc3415-0254-4b9f-a6bb-e4447562cfab', 'serial': '30fc3415-0254-4b9f-a6bb-e4447562cfab'} {{(pid=63279) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 2468.603553] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d5a5b86-c5a7-4edc-8b14-3cdfaa2de874 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2468.624827] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d3541d7-7030-4077-b665-5c3b487f25dc {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2468.631108] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba63cfc2-4660-489e-9e33-a37008df7222 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2468.651230] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-edff3197-adb0-41ac-87d3-301139e3e69e {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2468.667473] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-0e0e044b-4820-4388-ad14-064b60a71dd4 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] The volume has not been displaced from its original location: [datastore1] volume-30fc3415-0254-4b9f-a6bb-e4447562cfab/volume-30fc3415-0254-4b9f-a6bb-e4447562cfab.vmdk. No consolidation needed. 
{{(pid=63279) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 2468.672898] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-0e0e044b-4820-4388-ad14-064b60a71dd4 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 246b5346-93b1-4a84-921c-d028f3554d3d] Reconfiguring VM instance instance-0000006f to detach disk 2001 {{(pid=63279) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 2468.673260] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-263a1348-837f-4640-8a2b-7b146fd3bafb {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2468.690732] env[63279]: DEBUG oslo_vmware.api [None req-0e0e044b-4820-4388-ad14-064b60a71dd4 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Waiting for the task: (returnval){ [ 2468.690732] env[63279]: value = "task-2088258" [ 2468.690732] env[63279]: _type = "Task" [ 2468.690732] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2468.698719] env[63279]: DEBUG oslo_vmware.api [None req-0e0e044b-4820-4388-ad14-064b60a71dd4 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Task: {'id': task-2088258, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2468.883707] env[63279]: DEBUG oslo_concurrency.lockutils [None req-abb9b3c3-77e7-47d4-92fa-e62897ccab6d tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Acquiring lock "efddec10-b496-446e-a05a-72c9f2d86ed9" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2468.884033] env[63279]: DEBUG oslo_concurrency.lockutils [None req-abb9b3c3-77e7-47d4-92fa-e62897ccab6d tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Lock "efddec10-b496-446e-a05a-72c9f2d86ed9" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2468.884276] env[63279]: DEBUG oslo_concurrency.lockutils [None req-abb9b3c3-77e7-47d4-92fa-e62897ccab6d tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Acquiring lock "efddec10-b496-446e-a05a-72c9f2d86ed9-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2468.884472] env[63279]: DEBUG oslo_concurrency.lockutils [None req-abb9b3c3-77e7-47d4-92fa-e62897ccab6d tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Lock "efddec10-b496-446e-a05a-72c9f2d86ed9-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2468.884651] env[63279]: DEBUG oslo_concurrency.lockutils [None 
req-abb9b3c3-77e7-47d4-92fa-e62897ccab6d tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Lock "efddec10-b496-446e-a05a-72c9f2d86ed9-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2468.886810] env[63279]: INFO nova.compute.manager [None req-abb9b3c3-77e7-47d4-92fa-e62897ccab6d tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] [instance: efddec10-b496-446e-a05a-72c9f2d86ed9] Terminating instance [ 2469.086687] env[63279]: DEBUG oslo_vmware.api [None req-954b17c5-8056-486f-8f88-f4f4e9a1d734 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Task: {'id': task-2088257, 'name': PowerOffVM_Task, 'duration_secs': 0.21975} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2469.086928] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-954b17c5-8056-486f-8f88-f4f4e9a1d734 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: 6c4d6006-656b-4d30-a595-8985d7f6b1e7] Powered off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2469.087137] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-954b17c5-8056-486f-8f88-f4f4e9a1d734 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: 6c4d6006-656b-4d30-a595-8985d7f6b1e7] Updating instance '6c4d6006-656b-4d30-a595-8985d7f6b1e7' progress to 17 {{(pid=63279) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2469.200674] env[63279]: DEBUG oslo_vmware.api [None req-0e0e044b-4820-4388-ad14-064b60a71dd4 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Task: {'id': task-2088258, 'name': ReconfigVM_Task, 'duration_secs': 0.197785} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2469.200920] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-0e0e044b-4820-4388-ad14-064b60a71dd4 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 246b5346-93b1-4a84-921c-d028f3554d3d] Reconfigured VM instance instance-0000006f to detach disk 2001 {{(pid=63279) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 2469.205556] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-49d788d3-1235-4978-8c92-da712ed6d346 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2469.220008] env[63279]: DEBUG oslo_vmware.api [None req-0e0e044b-4820-4388-ad14-064b60a71dd4 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Waiting for the task: (returnval){ [ 2469.220008] env[63279]: value = "task-2088259" [ 2469.220008] env[63279]: _type = "Task" [ 2469.220008] env[63279]: } to complete. 
{{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2469.227270] env[63279]: DEBUG oslo_vmware.api [None req-0e0e044b-4820-4388-ad14-064b60a71dd4 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Task: {'id': task-2088259, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2469.390228] env[63279]: DEBUG nova.compute.manager [None req-abb9b3c3-77e7-47d4-92fa-e62897ccab6d tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] [instance: efddec10-b496-446e-a05a-72c9f2d86ed9] Start destroying the instance on the hypervisor. {{(pid=63279) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2469.390460] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-abb9b3c3-77e7-47d4-92fa-e62897ccab6d tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] [instance: efddec10-b496-446e-a05a-72c9f2d86ed9] Destroying instance {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2469.391366] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01b14d6f-c1cf-4650-b9be-953bd88bb19d {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2469.398563] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-abb9b3c3-77e7-47d4-92fa-e62897ccab6d tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] [instance: efddec10-b496-446e-a05a-72c9f2d86ed9] Powering off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2469.398801] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-969a92e9-67be-4094-af8a-18784edfa7b3 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2469.404578] env[63279]: DEBUG oslo_vmware.api [None req-abb9b3c3-77e7-47d4-92fa-e62897ccab6d tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Waiting for the task: (returnval){ [ 2469.404578] env[63279]: value = "task-2088260" [ 2469.404578] env[63279]: _type = "Task" [ 2469.404578] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2469.411950] env[63279]: DEBUG oslo_vmware.api [None req-abb9b3c3-77e7-47d4-92fa-e62897ccab6d tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Task: {'id': task-2088260, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2469.593224] env[63279]: DEBUG nova.virt.hardware [None req-954b17c5-8056-486f-8f88-f4f4e9a1d734 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-13T17:30:21Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=<?>,container_format='bare',created_at=<?>,direct_url=<?>,disk_format='vmdk',id=30887889-e45b-4f67-8b3c-16216e594a90,min_disk=1,min_ram=0,name=<?>,owner=<?>,properties=ImageMetaProps,protected=<?>,size=<?>,status=<?>,tags=<?>,updated_at=<?>,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2469.593636] env[63279]: DEBUG nova.virt.hardware [None req-954b17c5-8056-486f-8f88-f4f4e9a1d734 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Flavor limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2469.593636] env[63279]: DEBUG nova.virt.hardware [None req-954b17c5-8056-486f-8f88-f4f4e9a1d734 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Image limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2469.593810] env[63279]: DEBUG nova.virt.hardware [None req-954b17c5-8056-486f-8f88-f4f4e9a1d734 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Flavor pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2469.593925] env[63279]: DEBUG nova.virt.hardware [None req-954b17c5-8056-486f-8f88-f4f4e9a1d734 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Image pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2469.594166] env[63279]: DEBUG nova.virt.hardware [None req-954b17c5-8056-486f-8f88-f4f4e9a1d734 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2469.594395] env[63279]: DEBUG nova.virt.hardware [None req-954b17c5-8056-486f-8f88-f4f4e9a1d734 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 2469.594567] env[63279]: DEBUG nova.virt.hardware [None req-954b17c5-8056-486f-8f88-f4f4e9a1d734 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 2469.594752] env[63279]: DEBUG nova.virt.hardware [None req-954b17c5-8056-486f-8f88-f4f4e9a1d734 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Got 1 possible topologies 
{{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2469.594937] env[63279]: DEBUG nova.virt.hardware [None req-954b17c5-8056-486f-8f88-f4f4e9a1d734 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2469.595136] env[63279]: DEBUG nova.virt.hardware [None req-954b17c5-8056-486f-8f88-f4f4e9a1d734 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2469.600276] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f8dd8bc2-2fdb-46fa-a294-35484d7f5154 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2469.615746] env[63279]: DEBUG oslo_vmware.api [None req-954b17c5-8056-486f-8f88-f4f4e9a1d734 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Waiting for the task: (returnval){ [ 2469.615746] env[63279]: value = "task-2088261" [ 2469.615746] env[63279]: _type = "Task" [ 2469.615746] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2469.623694] env[63279]: DEBUG oslo_vmware.api [None req-954b17c5-8056-486f-8f88-f4f4e9a1d734 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Task: {'id': task-2088261, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2469.729011] env[63279]: DEBUG oslo_vmware.api [None req-0e0e044b-4820-4388-ad14-064b60a71dd4 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Task: {'id': task-2088259, 'name': ReconfigVM_Task, 'duration_secs': 0.13371} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2469.729329] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-0e0e044b-4820-4388-ad14-064b60a71dd4 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 246b5346-93b1-4a84-921c-d028f3554d3d] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-427799', 'volume_id': '30fc3415-0254-4b9f-a6bb-e4447562cfab', 'name': 'volume-30fc3415-0254-4b9f-a6bb-e4447562cfab', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'attaching', 'instance': '246b5346-93b1-4a84-921c-d028f3554d3d', 'attached_at': '2025-01-13T18:02:50.000000', 'detached_at': '', 'volume_id': '30fc3415-0254-4b9f-a6bb-e4447562cfab', 'serial': '30fc3415-0254-4b9f-a6bb-e4447562cfab'} {{(pid=63279) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 2469.729708] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-0e0e044b-4820-4388-ad14-064b60a71dd4 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 246b5346-93b1-4a84-921c-d028f3554d3d] Destroying instance {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2469.730531] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-889897be-463d-4679-ba7c-51fdf0b0709b {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2469.736689] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-0e0e044b-4820-4388-ad14-064b60a71dd4 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 246b5346-93b1-4a84-921c-d028f3554d3d] Unregistering the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2469.736915] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8fe9496a-65b3-46e5-8b1e-9f8e844741b7 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2469.914994] env[63279]: DEBUG oslo_vmware.api [None req-abb9b3c3-77e7-47d4-92fa-e62897ccab6d tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Task: {'id': task-2088260, 'name': PowerOffVM_Task, 'duration_secs': 0.177497} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2469.915305] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-abb9b3c3-77e7-47d4-92fa-e62897ccab6d tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] [instance: efddec10-b496-446e-a05a-72c9f2d86ed9] Powered off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2469.915519] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-abb9b3c3-77e7-47d4-92fa-e62897ccab6d tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] [instance: efddec10-b496-446e-a05a-72c9f2d86ed9] Unregistering the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2469.915724] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-82dd4dfe-0db4-430f-99af-59cb55e072e4 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2470.126072] env[63279]: DEBUG oslo_vmware.api [None req-954b17c5-8056-486f-8f88-f4f4e9a1d734 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Task: {'id': task-2088261, 'name': ReconfigVM_Task, 'duration_secs': 0.176888} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2470.126513] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-954b17c5-8056-486f-8f88-f4f4e9a1d734 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: 6c4d6006-656b-4d30-a595-8985d7f6b1e7] Updating instance '6c4d6006-656b-4d30-a595-8985d7f6b1e7' progress to 33 {{(pid=63279) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2470.633343] env[63279]: DEBUG nova.virt.hardware [None req-954b17c5-8056-486f-8f88-f4f4e9a1d734 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-13T17:30:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=<?>,container_format='bare',created_at=<?>,direct_url=<?>,disk_format='vmdk',id=30887889-e45b-4f67-8b3c-16216e594a90,min_disk=1,min_ram=0,name=<?>,owner=<?>,properties=ImageMetaProps,protected=<?>,size=<?>,status=<?>,tags=<?>,updated_at=<?>,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2470.633822] env[63279]: DEBUG nova.virt.hardware [None req-954b17c5-8056-486f-8f88-f4f4e9a1d734 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Flavor limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2470.633894] env[63279]: DEBUG nova.virt.hardware [None req-954b17c5-8056-486f-8f88-f4f4e9a1d734 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Image limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2470.634074] env[63279]: DEBUG nova.virt.hardware [None req-954b17c5-8056-486f-8f88-f4f4e9a1d734 
tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Flavor pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2470.634234] env[63279]: DEBUG nova.virt.hardware [None req-954b17c5-8056-486f-8f88-f4f4e9a1d734 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Image pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2470.634387] env[63279]: DEBUG nova.virt.hardware [None req-954b17c5-8056-486f-8f88-f4f4e9a1d734 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2470.634592] env[63279]: DEBUG nova.virt.hardware [None req-954b17c5-8056-486f-8f88-f4f4e9a1d734 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 2470.634785] env[63279]: DEBUG nova.virt.hardware [None req-954b17c5-8056-486f-8f88-f4f4e9a1d734 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 2470.634988] env[63279]: DEBUG nova.virt.hardware [None req-954b17c5-8056-486f-8f88-f4f4e9a1d734 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Got 1 possible topologies {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2470.635202] env[63279]: DEBUG nova.virt.hardware [None req-954b17c5-8056-486f-8f88-f4f4e9a1d734 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2470.635385] env[63279]: DEBUG nova.virt.hardware [None req-954b17c5-8056-486f-8f88-f4f4e9a1d734 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2470.640608] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-954b17c5-8056-486f-8f88-f4f4e9a1d734 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: 6c4d6006-656b-4d30-a595-8985d7f6b1e7] Reconfiguring VM instance instance-00000072 to detach disk 2000 {{(pid=63279) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 2470.640867] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-cb0fe62e-2f99-43c0-af6b-ea7b52644547 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2470.659164] env[63279]: DEBUG oslo_vmware.api [None req-954b17c5-8056-486f-8f88-f4f4e9a1d734 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Waiting for the task: (returnval){ [ 2470.659164] env[63279]: value = 
"task-2088264" [ 2470.659164] env[63279]: _type = "Task" [ 2470.659164] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2470.667472] env[63279]: DEBUG oslo_vmware.api [None req-954b17c5-8056-486f-8f88-f4f4e9a1d734 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Task: {'id': task-2088264, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2471.169868] env[63279]: DEBUG oslo_vmware.api [None req-954b17c5-8056-486f-8f88-f4f4e9a1d734 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Task: {'id': task-2088264, 'name': ReconfigVM_Task, 'duration_secs': 0.149456} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2471.170147] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-954b17c5-8056-486f-8f88-f4f4e9a1d734 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: 6c4d6006-656b-4d30-a595-8985d7f6b1e7] Reconfigured VM instance instance-00000072 to detach disk 2000 {{(pid=63279) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 2471.170858] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c949d240-2516-40de-8c33-e8cec8fd0d6f {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2471.191837] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-954b17c5-8056-486f-8f88-f4f4e9a1d734 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: 6c4d6006-656b-4d30-a595-8985d7f6b1e7] Reconfiguring VM instance instance-00000072 to attach disk [datastore1] 6c4d6006-656b-4d30-a595-8985d7f6b1e7/6c4d6006-656b-4d30-a595-8985d7f6b1e7.vmdk or device None with type thin {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2471.192079] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-53e2f253-60e7-4663-8151-b58986674f52 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2471.209789] env[63279]: DEBUG oslo_vmware.api [None req-954b17c5-8056-486f-8f88-f4f4e9a1d734 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Waiting for the task: (returnval){ [ 2471.209789] env[63279]: value = "task-2088265" [ 2471.209789] env[63279]: _type = "Task" [ 2471.209789] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2471.216943] env[63279]: DEBUG oslo_vmware.api [None req-954b17c5-8056-486f-8f88-f4f4e9a1d734 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Task: {'id': task-2088265, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2471.719700] env[63279]: DEBUG oslo_vmware.api [None req-954b17c5-8056-486f-8f88-f4f4e9a1d734 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Task: {'id': task-2088265, 'name': ReconfigVM_Task, 'duration_secs': 0.29759} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2471.720188] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-954b17c5-8056-486f-8f88-f4f4e9a1d734 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: 6c4d6006-656b-4d30-a595-8985d7f6b1e7] Reconfigured VM instance instance-00000072 to attach disk [datastore1] 6c4d6006-656b-4d30-a595-8985d7f6b1e7/6c4d6006-656b-4d30-a595-8985d7f6b1e7.vmdk or device None with type thin {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2471.720306] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-954b17c5-8056-486f-8f88-f4f4e9a1d734 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: 6c4d6006-656b-4d30-a595-8985d7f6b1e7] Updating instance '6c4d6006-656b-4d30-a595-8985d7f6b1e7' progress to 50 {{(pid=63279) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2472.227221] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be836864-a7ea-4811-ad69-b2ebcfdd4b49 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2472.245473] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3062d48a-ecc4-4d14-adbb-d1b96cefa629 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2472.261983] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-954b17c5-8056-486f-8f88-f4f4e9a1d734 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: 6c4d6006-656b-4d30-a595-8985d7f6b1e7] Updating instance '6c4d6006-656b-4d30-a595-8985d7f6b1e7' progress to 67 {{(pid=63279) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2472.800421] env[63279]: DEBUG nova.network.neutron [None req-954b17c5-8056-486f-8f88-f4f4e9a1d734 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: 6c4d6006-656b-4d30-a595-8985d7f6b1e7] Port 548d4882-d674-412a-9b89-4691e9eda165 binding to destination host cpu-1 is already ACTIVE {{(pid=63279) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 2473.822178] env[63279]: DEBUG oslo_concurrency.lockutils [None req-954b17c5-8056-486f-8f88-f4f4e9a1d734 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Acquiring lock "6c4d6006-656b-4d30-a595-8985d7f6b1e7-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2473.822539] env[63279]: DEBUG oslo_concurrency.lockutils [None req-954b17c5-8056-486f-8f88-f4f4e9a1d734 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Lock "6c4d6006-656b-4d30-a595-8985d7f6b1e7-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2473.822586] env[63279]: DEBUG oslo_concurrency.lockutils [None req-954b17c5-8056-486f-8f88-f4f4e9a1d734 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Lock 
"6c4d6006-656b-4d30-a595-8985d7f6b1e7-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2474.858436] env[63279]: DEBUG oslo_concurrency.lockutils [None req-954b17c5-8056-486f-8f88-f4f4e9a1d734 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Acquiring lock "refresh_cache-6c4d6006-656b-4d30-a595-8985d7f6b1e7" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2474.858772] env[63279]: DEBUG oslo_concurrency.lockutils [None req-954b17c5-8056-486f-8f88-f4f4e9a1d734 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Acquired lock "refresh_cache-6c4d6006-656b-4d30-a595-8985d7f6b1e7" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2474.858816] env[63279]: DEBUG nova.network.neutron [None req-954b17c5-8056-486f-8f88-f4f4e9a1d734 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: 6c4d6006-656b-4d30-a595-8985d7f6b1e7] Building network info cache for instance {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2475.629985] env[63279]: DEBUG nova.network.neutron [None req-954b17c5-8056-486f-8f88-f4f4e9a1d734 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: 6c4d6006-656b-4d30-a595-8985d7f6b1e7] Updating instance_info_cache with network_info: [{"id": "548d4882-d674-412a-9b89-4691e9eda165", "address": "fa:16:3e:50:5f:f2", "network": {"id": "82ca068b-acea-4f60-b0b3-68ea1e08aebe", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-571699353-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.165", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f39174e9ff5649e0ade4391da383dfb2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "57d4be17-536f-4a81-bea9-6547bd50f4a3", "external-id": "nsx-vlan-transportzone-163", "segmentation_id": 163, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap548d4882-d6", "ovs_interfaceid": "548d4882-d674-412a-9b89-4691e9eda165", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2475.884343] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-0e0e044b-4820-4388-ad14-064b60a71dd4 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 246b5346-93b1-4a84-921c-d028f3554d3d] Unregistered the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2475.884770] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-0e0e044b-4820-4388-ad14-064b60a71dd4 tempest-ServerActionsTestOtherB-605016737 
tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 246b5346-93b1-4a84-921c-d028f3554d3d] Deleting contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2475.884985] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-0e0e044b-4820-4388-ad14-064b60a71dd4 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Deleting the datastore file [datastore1] 246b5346-93b1-4a84-921c-d028f3554d3d {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2475.885976] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b44abc9f-c5d9-4a3d-b1d3-d8b8e08cc36a {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2475.887871] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-abb9b3c3-77e7-47d4-92fa-e62897ccab6d tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] [instance: efddec10-b496-446e-a05a-72c9f2d86ed9] Unregistered the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2475.888077] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-abb9b3c3-77e7-47d4-92fa-e62897ccab6d tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] [instance: efddec10-b496-446e-a05a-72c9f2d86ed9] Deleting contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2475.888254] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-abb9b3c3-77e7-47d4-92fa-e62897ccab6d tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Deleting the datastore file [datastore1] efddec10-b496-446e-a05a-72c9f2d86ed9 {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2475.888489] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6095579d-edbf-44fc-a133-d3ec717eb8fb {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2475.895573] env[63279]: DEBUG oslo_vmware.api [None req-0e0e044b-4820-4388-ad14-064b60a71dd4 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Waiting for the task: (returnval){ [ 2475.895573] env[63279]: value = "task-2088266" [ 2475.895573] env[63279]: _type = "Task" [ 2475.895573] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2475.896752] env[63279]: DEBUG oslo_vmware.api [None req-abb9b3c3-77e7-47d4-92fa-e62897ccab6d tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Waiting for the task: (returnval){ [ 2475.896752] env[63279]: value = "task-2088267" [ 2475.896752] env[63279]: _type = "Task" [ 2475.896752] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2475.907373] env[63279]: DEBUG oslo_vmware.api [None req-abb9b3c3-77e7-47d4-92fa-e62897ccab6d tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Task: {'id': task-2088267, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2475.910169] env[63279]: DEBUG oslo_vmware.api [None req-0e0e044b-4820-4388-ad14-064b60a71dd4 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Task: {'id': task-2088266, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2476.133212] env[63279]: DEBUG oslo_concurrency.lockutils [None req-954b17c5-8056-486f-8f88-f4f4e9a1d734 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Releasing lock "refresh_cache-6c4d6006-656b-4d30-a595-8985d7f6b1e7" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2476.409150] env[63279]: DEBUG oslo_vmware.api [None req-abb9b3c3-77e7-47d4-92fa-e62897ccab6d tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Task: {'id': task-2088267, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.139193} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2476.411923] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-abb9b3c3-77e7-47d4-92fa-e62897ccab6d tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Deleted the datastore file {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2476.412138] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-abb9b3c3-77e7-47d4-92fa-e62897ccab6d tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] [instance: efddec10-b496-446e-a05a-72c9f2d86ed9] Deleted contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2476.412319] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-abb9b3c3-77e7-47d4-92fa-e62897ccab6d tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] [instance: efddec10-b496-446e-a05a-72c9f2d86ed9] Instance destroyed {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2476.412495] env[63279]: INFO nova.compute.manager [None req-abb9b3c3-77e7-47d4-92fa-e62897ccab6d tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] [instance: efddec10-b496-446e-a05a-72c9f2d86ed9] Took 7.02 seconds to destroy the instance on the hypervisor. [ 2476.412747] env[63279]: DEBUG oslo.service.loopingcall [None req-abb9b3c3-77e7-47d4-92fa-e62897ccab6d tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2476.412951] env[63279]: DEBUG oslo_vmware.api [None req-0e0e044b-4820-4388-ad14-064b60a71dd4 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Task: {'id': task-2088266, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.146644} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2476.413154] env[63279]: DEBUG nova.compute.manager [-] [instance: efddec10-b496-446e-a05a-72c9f2d86ed9] Deallocating network for instance {{(pid=63279) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2476.413248] env[63279]: DEBUG nova.network.neutron [-] [instance: efddec10-b496-446e-a05a-72c9f2d86ed9] deallocate_for_instance() {{(pid=63279) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2476.414772] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-0e0e044b-4820-4388-ad14-064b60a71dd4 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Deleted the datastore file {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2476.414958] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-0e0e044b-4820-4388-ad14-064b60a71dd4 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 246b5346-93b1-4a84-921c-d028f3554d3d] Deleted contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2476.415150] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-0e0e044b-4820-4388-ad14-064b60a71dd4 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 246b5346-93b1-4a84-921c-d028f3554d3d] Instance destroyed {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2476.415319] env[63279]: INFO nova.compute.manager [None req-0e0e044b-4820-4388-ad14-064b60a71dd4 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 246b5346-93b1-4a84-921c-d028f3554d3d] Took 8.33 seconds to destroy the instance on the hypervisor. [ 2476.415533] env[63279]: DEBUG oslo.service.loopingcall [None req-0e0e044b-4820-4388-ad14-064b60a71dd4 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2476.415713] env[63279]: DEBUG nova.compute.manager [-] [instance: 246b5346-93b1-4a84-921c-d028f3554d3d] Deallocating network for instance {{(pid=63279) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2476.415803] env[63279]: DEBUG nova.network.neutron [-] [instance: 246b5346-93b1-4a84-921c-d028f3554d3d] deallocate_for_instance() {{(pid=63279) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2476.658837] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bbded081-53d3-48fc-a32c-be44d193d09b {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2476.677784] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4f60d0e-e4b3-4981-ba90-15d642fbdf09 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2476.684489] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-954b17c5-8056-486f-8f88-f4f4e9a1d734 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: 6c4d6006-656b-4d30-a595-8985d7f6b1e7] Updating instance '6c4d6006-656b-4d30-a595-8985d7f6b1e7' progress to 83 {{(pid=63279) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2477.140152] env[63279]: DEBUG nova.compute.manager [req-aba4ec96-257f-4820-97ea-0b057c24930e req-33861929-c623-4729-a2b9-78a122ae5432 service nova] [instance: 246b5346-93b1-4a84-921c-d028f3554d3d] Received event network-vif-deleted-be3c9abe-2016-4ca7-9982-16776729e694 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2477.140413] env[63279]: INFO nova.compute.manager [req-aba4ec96-257f-4820-97ea-0b057c24930e req-33861929-c623-4729-a2b9-78a122ae5432 service nova] [instance: 246b5346-93b1-4a84-921c-d028f3554d3d] Neutron deleted interface be3c9abe-2016-4ca7-9982-16776729e694; detaching it from the instance and deleting it from the info cache [ 2477.140549] env[63279]: DEBUG nova.network.neutron [req-aba4ec96-257f-4820-97ea-0b057c24930e req-33861929-c623-4729-a2b9-78a122ae5432 service nova] [instance: 246b5346-93b1-4a84-921c-d028f3554d3d] Updating instance_info_cache with network_info: [] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2477.190107] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-954b17c5-8056-486f-8f88-f4f4e9a1d734 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: 6c4d6006-656b-4d30-a595-8985d7f6b1e7] Powering on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2477.190368] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5c2e51f7-67bf-4fcd-9e23-4f568728364e {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2477.198714] env[63279]: DEBUG oslo_vmware.api [None req-954b17c5-8056-486f-8f88-f4f4e9a1d734 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Waiting for the task: (returnval){ [ 2477.198714] env[63279]: value = "task-2088268" [ 2477.198714] env[63279]: _type = "Task" [ 2477.198714] env[63279]: } to complete. 
{{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2477.206520] env[63279]: DEBUG oslo_vmware.api [None req-954b17c5-8056-486f-8f88-f4f4e9a1d734 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Task: {'id': task-2088268, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2477.619636] env[63279]: DEBUG nova.network.neutron [-] [instance: 246b5346-93b1-4a84-921c-d028f3554d3d] Updating instance_info_cache with network_info: [] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2477.645395] env[63279]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-57e563e4-06d6-49a1-9951-202cc50bea08 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2477.655124] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be642ef9-7cd9-48cd-9f60-47b33d4d7ee3 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2477.665545] env[63279]: DEBUG nova.network.neutron [-] [instance: efddec10-b496-446e-a05a-72c9f2d86ed9] Updating instance_info_cache with network_info: [] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2477.684785] env[63279]: DEBUG nova.compute.manager [req-aba4ec96-257f-4820-97ea-0b057c24930e req-33861929-c623-4729-a2b9-78a122ae5432 service nova] [instance: 246b5346-93b1-4a84-921c-d028f3554d3d] Detach interface failed, port_id=be3c9abe-2016-4ca7-9982-16776729e694, reason: Instance 246b5346-93b1-4a84-921c-d028f3554d3d could not be found. {{(pid=63279) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11484}} [ 2477.709872] env[63279]: DEBUG oslo_vmware.api [None req-954b17c5-8056-486f-8f88-f4f4e9a1d734 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Task: {'id': task-2088268, 'name': PowerOnVM_Task, 'duration_secs': 0.398829} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2477.710176] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-954b17c5-8056-486f-8f88-f4f4e9a1d734 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: 6c4d6006-656b-4d30-a595-8985d7f6b1e7] Powered on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2477.710367] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-954b17c5-8056-486f-8f88-f4f4e9a1d734 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: 6c4d6006-656b-4d30-a595-8985d7f6b1e7] Updating instance '6c4d6006-656b-4d30-a595-8985d7f6b1e7' progress to 100 {{(pid=63279) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2478.122733] env[63279]: INFO nova.compute.manager [-] [instance: 246b5346-93b1-4a84-921c-d028f3554d3d] Took 1.71 seconds to deallocate network for instance. [ 2478.168639] env[63279]: INFO nova.compute.manager [-] [instance: efddec10-b496-446e-a05a-72c9f2d86ed9] Took 1.76 seconds to deallocate network for instance. 
[ 2478.675232] env[63279]: DEBUG oslo_concurrency.lockutils [None req-abb9b3c3-77e7-47d4-92fa-e62897ccab6d tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2478.675503] env[63279]: DEBUG oslo_concurrency.lockutils [None req-abb9b3c3-77e7-47d4-92fa-e62897ccab6d tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2478.675723] env[63279]: DEBUG nova.objects.instance [None req-abb9b3c3-77e7-47d4-92fa-e62897ccab6d tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Lazy-loading 'resources' on Instance uuid efddec10-b496-446e-a05a-72c9f2d86ed9 {{(pid=63279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2478.678861] env[63279]: INFO nova.compute.manager [None req-0e0e044b-4820-4388-ad14-064b60a71dd4 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 246b5346-93b1-4a84-921c-d028f3554d3d] Took 0.56 seconds to detach 1 volumes for instance. [ 2479.168708] env[63279]: DEBUG nova.compute.manager [req-70f270de-f9d7-4c86-babc-656d505f681f req-782a7240-0b5c-451a-982e-9e591f5ad34a service nova] [instance: efddec10-b496-446e-a05a-72c9f2d86ed9] Received event network-vif-deleted-cfd0d031-3ccd-4ad0-9c99-6436a05c108d {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2479.186527] env[63279]: DEBUG oslo_concurrency.lockutils [None req-0e0e044b-4820-4388-ad14-064b60a71dd4 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2479.270864] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0718d46-d17f-4bfa-af87-61f127779aeb {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2479.278642] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-190d8168-3d40-463f-bef0-380f19ad521f {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2479.308487] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ede6c217-a59c-459f-8a15-9f089e0dc95b {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2479.315709] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fefdea7c-5a84-4eff-95a4-c9fef47b844f {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2479.328361] env[63279]: DEBUG nova.compute.provider_tree [None req-abb9b3c3-77e7-47d4-92fa-e62897ccab6d tempest-AttachVolumeShelveTestJSON-2048214312 
tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Inventory has not changed in ProviderTree for provider: 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2479.547261] env[63279]: DEBUG nova.network.neutron [None req-656580aa-159e-4098-a70f-2d71791426ae tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: 6c4d6006-656b-4d30-a595-8985d7f6b1e7] Port 548d4882-d674-412a-9b89-4691e9eda165 binding to destination host cpu-1 is already ACTIVE {{(pid=63279) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 2479.547534] env[63279]: DEBUG oslo_concurrency.lockutils [None req-656580aa-159e-4098-a70f-2d71791426ae tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Acquiring lock "refresh_cache-6c4d6006-656b-4d30-a595-8985d7f6b1e7" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2479.547693] env[63279]: DEBUG oslo_concurrency.lockutils [None req-656580aa-159e-4098-a70f-2d71791426ae tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Acquired lock "refresh_cache-6c4d6006-656b-4d30-a595-8985d7f6b1e7" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2479.547858] env[63279]: DEBUG nova.network.neutron [None req-656580aa-159e-4098-a70f-2d71791426ae tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: 6c4d6006-656b-4d30-a595-8985d7f6b1e7] Building network info cache for instance {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2479.831508] env[63279]: DEBUG nova.scheduler.client.report [None req-abb9b3c3-77e7-47d4-92fa-e62897ccab6d tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Inventory has not changed for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2480.306266] env[63279]: DEBUG nova.network.neutron [None req-656580aa-159e-4098-a70f-2d71791426ae tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: 6c4d6006-656b-4d30-a595-8985d7f6b1e7] Updating instance_info_cache with network_info: [{"id": "548d4882-d674-412a-9b89-4691e9eda165", "address": "fa:16:3e:50:5f:f2", "network": {"id": "82ca068b-acea-4f60-b0b3-68ea1e08aebe", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-571699353-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.165", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f39174e9ff5649e0ade4391da383dfb2", "mtu": 
8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "57d4be17-536f-4a81-bea9-6547bd50f4a3", "external-id": "nsx-vlan-transportzone-163", "segmentation_id": 163, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap548d4882-d6", "ovs_interfaceid": "548d4882-d674-412a-9b89-4691e9eda165", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2480.338528] env[63279]: DEBUG oslo_concurrency.lockutils [None req-abb9b3c3-77e7-47d4-92fa-e62897ccab6d tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.663s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2480.340754] env[63279]: DEBUG oslo_concurrency.lockutils [None req-0e0e044b-4820-4388-ad14-064b60a71dd4 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.154s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2480.340965] env[63279]: DEBUG oslo_concurrency.lockutils [None req-0e0e044b-4820-4388-ad14-064b60a71dd4 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2480.358324] env[63279]: INFO nova.scheduler.client.report [None req-abb9b3c3-77e7-47d4-92fa-e62897ccab6d tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Deleted allocations for instance efddec10-b496-446e-a05a-72c9f2d86ed9 [ 2480.363030] env[63279]: INFO nova.scheduler.client.report [None req-0e0e044b-4820-4388-ad14-064b60a71dd4 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Deleted allocations for instance 246b5346-93b1-4a84-921c-d028f3554d3d [ 2480.809401] env[63279]: DEBUG oslo_concurrency.lockutils [None req-656580aa-159e-4098-a70f-2d71791426ae tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Releasing lock "refresh_cache-6c4d6006-656b-4d30-a595-8985d7f6b1e7" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2480.865759] env[63279]: DEBUG oslo_concurrency.lockutils [None req-abb9b3c3-77e7-47d4-92fa-e62897ccab6d tempest-AttachVolumeShelveTestJSON-2048214312 tempest-AttachVolumeShelveTestJSON-2048214312-project-member] Lock "efddec10-b496-446e-a05a-72c9f2d86ed9" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 11.982s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2480.870723] env[63279]: DEBUG oslo_concurrency.lockutils [None req-0e0e044b-4820-4388-ad14-064b60a71dd4 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] 
Lock "246b5346-93b1-4a84-921c-d028f3554d3d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 13.294s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2481.313969] env[63279]: DEBUG nova.compute.manager [None req-656580aa-159e-4098-a70f-2d71791426ae tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: 6c4d6006-656b-4d30-a595-8985d7f6b1e7] Hypervisor driver does not support instance shared storage check, assuming it's not on shared storage {{(pid=63279) _is_instance_storage_shared /opt/stack/nova/nova/compute/manager.py:901}} [ 2481.313969] env[63279]: DEBUG oslo_concurrency.lockutils [None req-656580aa-159e-4098-a70f-2d71791426ae tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2481.314281] env[63279]: DEBUG oslo_concurrency.lockutils [None req-656580aa-159e-4098-a70f-2d71791426ae tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2481.817037] env[63279]: DEBUG nova.objects.instance [None req-656580aa-159e-4098-a70f-2d71791426ae tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Lazy-loading 'migration_context' on Instance uuid 6c4d6006-656b-4d30-a595-8985d7f6b1e7 {{(pid=63279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2482.389321] env[63279]: DEBUG oslo_concurrency.lockutils [None req-e64b440e-b5de-46b1-b388-dd738bacea8f tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Acquiring lock "011af8fa-0f76-44a8-8b24-b3d65f5e841e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2482.389586] env[63279]: DEBUG oslo_concurrency.lockutils [None req-e64b440e-b5de-46b1-b388-dd738bacea8f tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Lock "011af8fa-0f76-44a8-8b24-b3d65f5e841e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2482.401582] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3aba68bd-f32c-4c14-a37f-039aa8d111b9 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2482.410019] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e93bbbb-d4b4-405f-9b34-2676a0701d9d {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2482.441331] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-f89ea9c1-7c2f-49bd-98c8-5b129f07923b {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2482.448486] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35ec61c8-6d05-4748-897f-7ba44b79bef6 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2482.461360] env[63279]: DEBUG nova.compute.provider_tree [None req-656580aa-159e-4098-a70f-2d71791426ae tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Inventory has not changed in ProviderTree for provider: 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2482.891687] env[63279]: DEBUG nova.compute.manager [None req-e64b440e-b5de-46b1-b388-dd738bacea8f tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 011af8fa-0f76-44a8-8b24-b3d65f5e841e] Starting instance... {{(pid=63279) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 2482.964621] env[63279]: DEBUG nova.scheduler.client.report [None req-656580aa-159e-4098-a70f-2d71791426ae tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Inventory has not changed for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2483.413433] env[63279]: DEBUG oslo_concurrency.lockutils [None req-e64b440e-b5de-46b1-b388-dd738bacea8f tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2483.832768] env[63279]: DEBUG oslo_concurrency.lockutils [None req-bc1d1c80-963d-492f-ac12-627734a2f9de tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Acquiring lock "3832508d-5d12-42a2-93d8-61775907b2d2" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2483.833046] env[63279]: DEBUG oslo_concurrency.lockutils [None req-bc1d1c80-963d-492f-ac12-627734a2f9de tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Lock "3832508d-5d12-42a2-93d8-61775907b2d2" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2483.975390] env[63279]: DEBUG oslo_concurrency.lockutils [None req-656580aa-159e-4098-a70f-2d71791426ae tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" 
:: held 2.661s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2483.980948] env[63279]: DEBUG oslo_concurrency.lockutils [None req-e64b440e-b5de-46b1-b388-dd738bacea8f tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.568s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2483.982434] env[63279]: INFO nova.compute.claims [None req-e64b440e-b5de-46b1-b388-dd738bacea8f tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 011af8fa-0f76-44a8-8b24-b3d65f5e841e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2484.336164] env[63279]: DEBUG nova.compute.utils [None req-bc1d1c80-963d-492f-ac12-627734a2f9de tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Using /dev/sd instead of None {{(pid=63279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2484.838740] env[63279]: DEBUG oslo_concurrency.lockutils [None req-bc1d1c80-963d-492f-ac12-627734a2f9de tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Lock "3832508d-5d12-42a2-93d8-61775907b2d2" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.005s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2485.074759] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0b52c84-6787-4300-bb8e-afc2773672d2 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2485.082578] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3684d5f-94f6-4f68-b863-dd429a642cfa {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2485.111852] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3ae7658-9a87-470e-bf08-985e00eaef8e {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2485.118972] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-673b05b5-5b70-4931-b0fb-063099428449 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2485.132451] env[63279]: DEBUG nova.compute.provider_tree [None req-e64b440e-b5de-46b1-b388-dd738bacea8f tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Inventory has not changed in ProviderTree for provider: 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2485.516380] env[63279]: INFO nova.compute.manager [None req-656580aa-159e-4098-a70f-2d71791426ae tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: 6c4d6006-656b-4d30-a595-8985d7f6b1e7] Swapping old allocation on dict_keys(['0ba7c625-a0fc-4d3c-b804-196d00f00137']) held by migration 334d7c87-2c28-45ff-a326-a7187552859b for instance [ 2485.538157] env[63279]: DEBUG 
nova.scheduler.client.report [None req-656580aa-159e-4098-a70f-2d71791426ae tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Overwriting current allocation {'allocations': {'0ba7c625-a0fc-4d3c-b804-196d00f00137': {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}, 'generation': 178}}, 'project_id': 'f39174e9ff5649e0ade4391da383dfb2', 'user_id': 'ae6ae670025f406e8bd0362124749c43', 'consumer_generation': 1} on consumer 6c4d6006-656b-4d30-a595-8985d7f6b1e7 {{(pid=63279) move_allocations /opt/stack/nova/nova/scheduler/client/report.py:2036}} [ 2485.636327] env[63279]: DEBUG nova.scheduler.client.report [None req-e64b440e-b5de-46b1-b388-dd738bacea8f tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Inventory has not changed for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2485.645796] env[63279]: DEBUG oslo_concurrency.lockutils [None req-656580aa-159e-4098-a70f-2d71791426ae tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Acquiring lock "refresh_cache-6c4d6006-656b-4d30-a595-8985d7f6b1e7" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2485.645981] env[63279]: DEBUG oslo_concurrency.lockutils [None req-656580aa-159e-4098-a70f-2d71791426ae tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Acquired lock "refresh_cache-6c4d6006-656b-4d30-a595-8985d7f6b1e7" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2485.646176] env[63279]: DEBUG nova.network.neutron [None req-656580aa-159e-4098-a70f-2d71791426ae tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: 6c4d6006-656b-4d30-a595-8985d7f6b1e7] Building network info cache for instance {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2485.901808] env[63279]: DEBUG oslo_concurrency.lockutils [None req-bc1d1c80-963d-492f-ac12-627734a2f9de tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Acquiring lock "3832508d-5d12-42a2-93d8-61775907b2d2" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2485.902127] env[63279]: DEBUG oslo_concurrency.lockutils [None req-bc1d1c80-963d-492f-ac12-627734a2f9de tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Lock "3832508d-5d12-42a2-93d8-61775907b2d2" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2485.902365] env[63279]: INFO nova.compute.manager [None req-bc1d1c80-963d-492f-ac12-627734a2f9de tempest-AttachVolumeTestJSON-1872027629 
tempest-AttachVolumeTestJSON-1872027629-project-member] [instance: 3832508d-5d12-42a2-93d8-61775907b2d2] Attaching volume 72669fdc-2a0d-482f-bba0-84d7930e2cb9 to /dev/sdb [ 2485.936874] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3355231-af80-4b15-9ef8-ee97cb290bae {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2485.944160] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a155284b-c352-41a1-9b5d-60800538fdd0 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2485.957904] env[63279]: DEBUG nova.virt.block_device [None req-bc1d1c80-963d-492f-ac12-627734a2f9de tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] [instance: 3832508d-5d12-42a2-93d8-61775907b2d2] Updating existing volume attachment record: 86444e4b-cedc-4eb6-bfa7-fe269fead042 {{(pid=63279) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 2486.149268] env[63279]: DEBUG oslo_concurrency.lockutils [None req-e64b440e-b5de-46b1-b388-dd738bacea8f tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.168s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2486.149834] env[63279]: DEBUG nova.compute.manager [None req-e64b440e-b5de-46b1-b388-dd738bacea8f tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 011af8fa-0f76-44a8-8b24-b3d65f5e841e] Start building networks asynchronously for instance. 
{{(pid=63279) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 2486.388174] env[63279]: DEBUG nova.network.neutron [None req-656580aa-159e-4098-a70f-2d71791426ae tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: 6c4d6006-656b-4d30-a595-8985d7f6b1e7] Updating instance_info_cache with network_info: [{"id": "548d4882-d674-412a-9b89-4691e9eda165", "address": "fa:16:3e:50:5f:f2", "network": {"id": "82ca068b-acea-4f60-b0b3-68ea1e08aebe", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-571699353-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.165", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f39174e9ff5649e0ade4391da383dfb2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "57d4be17-536f-4a81-bea9-6547bd50f4a3", "external-id": "nsx-vlan-transportzone-163", "segmentation_id": 163, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap548d4882-d6", "ovs_interfaceid": "548d4882-d674-412a-9b89-4691e9eda165", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2486.657740] env[63279]: DEBUG nova.compute.utils [None req-e64b440e-b5de-46b1-b388-dd738bacea8f tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Using /dev/sd instead of None {{(pid=63279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2486.659345] env[63279]: DEBUG nova.compute.manager [None req-e64b440e-b5de-46b1-b388-dd738bacea8f tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 011af8fa-0f76-44a8-8b24-b3d65f5e841e] Allocating IP information in the background. 
{{(pid=63279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 2486.659529] env[63279]: DEBUG nova.network.neutron [None req-e64b440e-b5de-46b1-b388-dd738bacea8f tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 011af8fa-0f76-44a8-8b24-b3d65f5e841e] allocate_for_instance() {{(pid=63279) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2486.708468] env[63279]: DEBUG nova.policy [None req-e64b440e-b5de-46b1-b388-dd738bacea8f tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'efd8e2a39ad843b79762189fcc8aa8ad', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '996f8d6e14a14ac39f207eced547ef33', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63279) authorize /opt/stack/nova/nova/policy.py:192}} [ 2486.891250] env[63279]: DEBUG oslo_concurrency.lockutils [None req-656580aa-159e-4098-a70f-2d71791426ae tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Releasing lock "refresh_cache-6c4d6006-656b-4d30-a595-8985d7f6b1e7" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2486.891724] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-656580aa-159e-4098-a70f-2d71791426ae tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: 6c4d6006-656b-4d30-a595-8985d7f6b1e7] Powering off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2486.892065] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1235584d-6592-4177-91da-e997edd77790 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2486.899203] env[63279]: DEBUG oslo_vmware.api [None req-656580aa-159e-4098-a70f-2d71791426ae tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Waiting for the task: (returnval){ [ 2486.899203] env[63279]: value = "task-2088271" [ 2486.899203] env[63279]: _type = "Task" [ 2486.899203] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2486.907846] env[63279]: DEBUG oslo_vmware.api [None req-656580aa-159e-4098-a70f-2d71791426ae tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Task: {'id': task-2088271, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2486.968153] env[63279]: DEBUG nova.network.neutron [None req-e64b440e-b5de-46b1-b388-dd738bacea8f tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 011af8fa-0f76-44a8-8b24-b3d65f5e841e] Successfully created port: 5fc4a7ed-d188-4387-a1a2-7dc3778475f8 {{(pid=63279) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2487.163645] env[63279]: DEBUG nova.compute.manager [None req-e64b440e-b5de-46b1-b388-dd738bacea8f tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 011af8fa-0f76-44a8-8b24-b3d65f5e841e] Start building block device mappings for instance. {{(pid=63279) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 2487.409503] env[63279]: DEBUG oslo_vmware.api [None req-656580aa-159e-4098-a70f-2d71791426ae tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Task: {'id': task-2088271, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2487.910088] env[63279]: DEBUG oslo_vmware.api [None req-656580aa-159e-4098-a70f-2d71791426ae tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Task: {'id': task-2088271, 'name': PowerOffVM_Task, 'duration_secs': 0.841673} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2487.910461] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-656580aa-159e-4098-a70f-2d71791426ae tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: 6c4d6006-656b-4d30-a595-8985d7f6b1e7] Powered off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2487.911021] env[63279]: DEBUG nova.virt.hardware [None req-656580aa-159e-4098-a70f-2d71791426ae tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-13T17:30:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=30887889-e45b-4f67-8b3c-16216e594a90,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2487.911249] env[63279]: DEBUG nova.virt.hardware [None req-656580aa-159e-4098-a70f-2d71791426ae tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Flavor limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2487.911409] env[63279]: DEBUG nova.virt.hardware [None req-656580aa-159e-4098-a70f-2d71791426ae tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Image limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2487.911598] env[63279]: DEBUG 
nova.virt.hardware [None req-656580aa-159e-4098-a70f-2d71791426ae tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Flavor pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2487.911752] env[63279]: DEBUG nova.virt.hardware [None req-656580aa-159e-4098-a70f-2d71791426ae tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Image pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2487.911948] env[63279]: DEBUG nova.virt.hardware [None req-656580aa-159e-4098-a70f-2d71791426ae tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2487.912199] env[63279]: DEBUG nova.virt.hardware [None req-656580aa-159e-4098-a70f-2d71791426ae tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 2487.912370] env[63279]: DEBUG nova.virt.hardware [None req-656580aa-159e-4098-a70f-2d71791426ae tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 2487.912544] env[63279]: DEBUG nova.virt.hardware [None req-656580aa-159e-4098-a70f-2d71791426ae tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Got 1 possible topologies {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2487.912709] env[63279]: DEBUG nova.virt.hardware [None req-656580aa-159e-4098-a70f-2d71791426ae tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2487.912917] env[63279]: DEBUG nova.virt.hardware [None req-656580aa-159e-4098-a70f-2d71791426ae tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2487.917828] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-60bbc719-5d2f-4793-b3bc-029ec6c69268 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2487.934848] env[63279]: DEBUG oslo_vmware.api [None req-656580aa-159e-4098-a70f-2d71791426ae tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Waiting for the task: (returnval){ [ 2487.934848] env[63279]: value = "task-2088272" [ 2487.934848] env[63279]: _type = "Task" [ 2487.934848] env[63279]: } to complete. 
{{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2487.942668] env[63279]: DEBUG oslo_vmware.api [None req-656580aa-159e-4098-a70f-2d71791426ae tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Task: {'id': task-2088272, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2488.021902] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b4d5be3f-553b-45d9-97b4-019205427623 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Acquiring lock "1520999b-e3e9-41b3-82e4-91bb556e96c4" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2488.021902] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b4d5be3f-553b-45d9-97b4-019205427623 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Lock "1520999b-e3e9-41b3-82e4-91bb556e96c4" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2488.174736] env[63279]: DEBUG nova.compute.manager [None req-e64b440e-b5de-46b1-b388-dd738bacea8f tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 011af8fa-0f76-44a8-8b24-b3d65f5e841e] Start spawning the instance on the hypervisor. {{(pid=63279) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 2488.202648] env[63279]: DEBUG nova.virt.hardware [None req-e64b440e-b5de-46b1-b388-dd738bacea8f tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-13T17:30:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-13T17:30:01Z,direct_url=,disk_format='vmdk',id=30887889-e45b-4f67-8b3c-16216e594a90,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a935e86eee0a4c38adfe0367d2097a61',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-13T17:30:02Z,virtual_size=,visibility=), allow threads: False {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2488.202925] env[63279]: DEBUG nova.virt.hardware [None req-e64b440e-b5de-46b1-b388-dd738bacea8f tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Flavor limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2488.203113] env[63279]: DEBUG nova.virt.hardware [None req-e64b440e-b5de-46b1-b388-dd738bacea8f tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Image limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2488.203310] env[63279]: DEBUG nova.virt.hardware [None req-e64b440e-b5de-46b1-b388-dd738bacea8f 
tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Flavor pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2488.203465] env[63279]: DEBUG nova.virt.hardware [None req-e64b440e-b5de-46b1-b388-dd738bacea8f tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Image pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2488.203616] env[63279]: DEBUG nova.virt.hardware [None req-e64b440e-b5de-46b1-b388-dd738bacea8f tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2488.203842] env[63279]: DEBUG nova.virt.hardware [None req-e64b440e-b5de-46b1-b388-dd738bacea8f tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 2488.204050] env[63279]: DEBUG nova.virt.hardware [None req-e64b440e-b5de-46b1-b388-dd738bacea8f tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 2488.204237] env[63279]: DEBUG nova.virt.hardware [None req-e64b440e-b5de-46b1-b388-dd738bacea8f tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Got 1 possible topologies {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2488.204410] env[63279]: DEBUG nova.virt.hardware [None req-e64b440e-b5de-46b1-b388-dd738bacea8f tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2488.204587] env[63279]: DEBUG nova.virt.hardware [None req-e64b440e-b5de-46b1-b388-dd738bacea8f tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2488.205463] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5aa7cae-8978-465f-8b0e-744029ab6084 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2488.212958] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14cb995a-1a68-4185-b245-3a26918ff2aa {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2488.444949] env[63279]: DEBUG oslo_vmware.api [None req-656580aa-159e-4098-a70f-2d71791426ae tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Task: {'id': task-2088272, 'name': ReconfigVM_Task, 'duration_secs': 0.155896} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2488.445741] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10c7ed9c-2094-4cb0-ac9b-64681072e521 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2488.462653] env[63279]: DEBUG nova.virt.hardware [None req-656580aa-159e-4098-a70f-2d71791426ae tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-13T17:30:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=30887889-e45b-4f67-8b3c-16216e594a90,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2488.462897] env[63279]: DEBUG nova.virt.hardware [None req-656580aa-159e-4098-a70f-2d71791426ae tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Flavor limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2488.463039] env[63279]: DEBUG nova.virt.hardware [None req-656580aa-159e-4098-a70f-2d71791426ae tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Image limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2488.463232] env[63279]: DEBUG nova.virt.hardware [None req-656580aa-159e-4098-a70f-2d71791426ae tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Flavor pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2488.463383] env[63279]: DEBUG nova.virt.hardware [None req-656580aa-159e-4098-a70f-2d71791426ae tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Image pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2488.463533] env[63279]: DEBUG nova.virt.hardware [None req-656580aa-159e-4098-a70f-2d71791426ae tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2488.463736] env[63279]: DEBUG nova.virt.hardware [None req-656580aa-159e-4098-a70f-2d71791426ae tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 2488.463903] env[63279]: DEBUG nova.virt.hardware [None req-656580aa-159e-4098-a70f-2d71791426ae tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63279) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:475}} [ 2488.464090] env[63279]: DEBUG nova.virt.hardware [None req-656580aa-159e-4098-a70f-2d71791426ae tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Got 1 possible topologies {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2488.464260] env[63279]: DEBUG nova.virt.hardware [None req-656580aa-159e-4098-a70f-2d71791426ae tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2488.464436] env[63279]: DEBUG nova.virt.hardware [None req-656580aa-159e-4098-a70f-2d71791426ae tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2488.465161] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ba0c7518-d624-44f8-9e48-d9becfe0af34 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2488.469730] env[63279]: DEBUG oslo_vmware.api [None req-656580aa-159e-4098-a70f-2d71791426ae tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Waiting for the task: (returnval){ [ 2488.469730] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52d7c84c-0fc8-8fde-43bf-f58cf15f31e4" [ 2488.469730] env[63279]: _type = "Task" [ 2488.469730] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2488.477097] env[63279]: DEBUG oslo_vmware.api [None req-656580aa-159e-4098-a70f-2d71791426ae tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52d7c84c-0fc8-8fde-43bf-f58cf15f31e4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2488.525172] env[63279]: DEBUG nova.compute.utils [None req-b4d5be3f-553b-45d9-97b4-019205427623 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Using /dev/sd instead of None {{(pid=63279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2488.980269] env[63279]: DEBUG oslo_vmware.api [None req-656580aa-159e-4098-a70f-2d71791426ae tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52d7c84c-0fc8-8fde-43bf-f58cf15f31e4, 'name': SearchDatastore_Task, 'duration_secs': 0.007236} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2488.985628] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-656580aa-159e-4098-a70f-2d71791426ae tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: 6c4d6006-656b-4d30-a595-8985d7f6b1e7] Reconfiguring VM instance instance-00000072 to detach disk 2000 {{(pid=63279) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 2488.985908] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6f570273-3b9b-41b4-9ce3-022ad3bdb504 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2489.004165] env[63279]: DEBUG oslo_vmware.api [None req-656580aa-159e-4098-a70f-2d71791426ae tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Waiting for the task: (returnval){ [ 2489.004165] env[63279]: value = "task-2088274" [ 2489.004165] env[63279]: _type = "Task" [ 2489.004165] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2489.011292] env[63279]: DEBUG oslo_vmware.api [None req-656580aa-159e-4098-a70f-2d71791426ae tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Task: {'id': task-2088274, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2489.028071] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b4d5be3f-553b-45d9-97b4-019205427623 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Lock "1520999b-e3e9-41b3-82e4-91bb556e96c4" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.006s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2489.514230] env[63279]: DEBUG oslo_vmware.api [None req-656580aa-159e-4098-a70f-2d71791426ae tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Task: {'id': task-2088274, 'name': ReconfigVM_Task, 'duration_secs': 0.194934} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2489.514515] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-656580aa-159e-4098-a70f-2d71791426ae tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: 6c4d6006-656b-4d30-a595-8985d7f6b1e7] Reconfigured VM instance instance-00000072 to detach disk 2000 {{(pid=63279) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 2489.515293] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-162f36d0-4be2-409a-9aff-0b236b8e7073 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2489.538244] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-656580aa-159e-4098-a70f-2d71791426ae tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: 6c4d6006-656b-4d30-a595-8985d7f6b1e7] Reconfiguring VM instance instance-00000072 to attach disk [datastore1] 6c4d6006-656b-4d30-a595-8985d7f6b1e7/6c4d6006-656b-4d30-a595-8985d7f6b1e7.vmdk or device None with type thin {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2489.538538] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6a00c64f-dc25-4542-a660-bf42e8dbb592 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2489.557082] env[63279]: DEBUG oslo_vmware.api [None req-656580aa-159e-4098-a70f-2d71791426ae tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Waiting for the task: (returnval){ [ 2489.557082] env[63279]: value = "task-2088275" [ 2489.557082] env[63279]: _type = "Task" [ 2489.557082] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2489.567324] env[63279]: DEBUG oslo_vmware.api [None req-656580aa-159e-4098-a70f-2d71791426ae tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Task: {'id': task-2088275, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2490.068479] env[63279]: DEBUG oslo_vmware.api [None req-656580aa-159e-4098-a70f-2d71791426ae tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Task: {'id': task-2088275, 'name': ReconfigVM_Task, 'duration_secs': 0.25928} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2490.069824] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-656580aa-159e-4098-a70f-2d71791426ae tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: 6c4d6006-656b-4d30-a595-8985d7f6b1e7] Reconfigured VM instance instance-00000072 to attach disk [datastore1] 6c4d6006-656b-4d30-a595-8985d7f6b1e7/6c4d6006-656b-4d30-a595-8985d7f6b1e7.vmdk or device None with type thin {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2490.070111] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1df3b50-6d09-4390-a5cd-0598df775535 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2490.087363] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2456b0d-19a9-4fdb-9243-edec0ec8cbb4 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2490.104751] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b4d5be3f-553b-45d9-97b4-019205427623 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Acquiring lock "1520999b-e3e9-41b3-82e4-91bb556e96c4" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2490.104980] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b4d5be3f-553b-45d9-97b4-019205427623 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Lock "1520999b-e3e9-41b3-82e4-91bb556e96c4" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2490.105224] env[63279]: INFO nova.compute.manager [None req-b4d5be3f-553b-45d9-97b4-019205427623 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: 1520999b-e3e9-41b3-82e4-91bb556e96c4] Attaching volume 38f45d1c-4e6e-4faa-8d8d-499e9e5fdd61 to /dev/sdb [ 2490.107421] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7fadb761-34a9-4d7e-b696-f549111b305a {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2490.126936] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8d06b70-b3f7-431c-af20-949467238041 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2490.133225] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-656580aa-159e-4098-a70f-2d71791426ae tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: 6c4d6006-656b-4d30-a595-8985d7f6b1e7] Powering on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2490.134152] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ea4132a8-6e40-4316-80c9-c930e5a50206 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
2490.135911] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d617f06f-fd96-47fa-8f2e-76cb27846ced {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2490.141980] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6982b378-b34e-4521-bd50-96f9cdae8877 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2490.144974] env[63279]: DEBUG oslo_vmware.api [None req-656580aa-159e-4098-a70f-2d71791426ae tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Waiting for the task: (returnval){ [ 2490.144974] env[63279]: value = "task-2088276" [ 2490.144974] env[63279]: _type = "Task" [ 2490.144974] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2490.152177] env[63279]: DEBUG oslo_vmware.api [None req-656580aa-159e-4098-a70f-2d71791426ae tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Task: {'id': task-2088276, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2490.155441] env[63279]: DEBUG nova.virt.block_device [None req-b4d5be3f-553b-45d9-97b4-019205427623 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: 1520999b-e3e9-41b3-82e4-91bb556e96c4] Updating existing volume attachment record: 30b795d6-57b6-4860-b5d4-5fb6375f3321 {{(pid=63279) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 2490.503337] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-bc1d1c80-963d-492f-ac12-627734a2f9de tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] [instance: 3832508d-5d12-42a2-93d8-61775907b2d2] Volume attach. 
Driver type: vmdk {{(pid=63279) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 2490.503337] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-bc1d1c80-963d-492f-ac12-627734a2f9de tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] [instance: 3832508d-5d12-42a2-93d8-61775907b2d2] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-427805', 'volume_id': '72669fdc-2a0d-482f-bba0-84d7930e2cb9', 'name': 'volume-72669fdc-2a0d-482f-bba0-84d7930e2cb9', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '3832508d-5d12-42a2-93d8-61775907b2d2', 'attached_at': '', 'detached_at': '', 'volume_id': '72669fdc-2a0d-482f-bba0-84d7930e2cb9', 'serial': '72669fdc-2a0d-482f-bba0-84d7930e2cb9'} {{(pid=63279) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 2490.504276] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-082b5af5-191a-4779-bc2e-98fac42bb1cb {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2490.522036] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36ec8207-85df-4ddf-ac0c-2cdd6e2a4a5f {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2490.547025] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-bc1d1c80-963d-492f-ac12-627734a2f9de tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] [instance: 3832508d-5d12-42a2-93d8-61775907b2d2] Reconfiguring VM instance instance-00000073 to attach disk [datastore1] volume-72669fdc-2a0d-482f-bba0-84d7930e2cb9/volume-72669fdc-2a0d-482f-bba0-84d7930e2cb9.vmdk or device None with type thin {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2490.547025] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-119e5563-f671-41a1-b34b-812eeabba165 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2490.564642] env[63279]: DEBUG oslo_vmware.api [None req-bc1d1c80-963d-492f-ac12-627734a2f9de tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Waiting for the task: (returnval){ [ 2490.564642] env[63279]: value = "task-2088280" [ 2490.564642] env[63279]: _type = "Task" [ 2490.564642] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2490.573535] env[63279]: DEBUG oslo_vmware.api [None req-bc1d1c80-963d-492f-ac12-627734a2f9de tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Task: {'id': task-2088280, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2490.655127] env[63279]: DEBUG oslo_vmware.api [None req-656580aa-159e-4098-a70f-2d71791426ae tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Task: {'id': task-2088276, 'name': PowerOnVM_Task, 'duration_secs': 0.363008} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2490.655490] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-656580aa-159e-4098-a70f-2d71791426ae tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: 6c4d6006-656b-4d30-a595-8985d7f6b1e7] Powered on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2491.075455] env[63279]: DEBUG oslo_vmware.api [None req-bc1d1c80-963d-492f-ac12-627734a2f9de tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Task: {'id': task-2088280, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2491.574991] env[63279]: DEBUG oslo_vmware.api [None req-bc1d1c80-963d-492f-ac12-627734a2f9de tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Task: {'id': task-2088280, 'name': ReconfigVM_Task, 'duration_secs': 0.831135} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2491.575303] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-bc1d1c80-963d-492f-ac12-627734a2f9de tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] [instance: 3832508d-5d12-42a2-93d8-61775907b2d2] Reconfigured VM instance instance-00000073 to attach disk [datastore1] volume-72669fdc-2a0d-482f-bba0-84d7930e2cb9/volume-72669fdc-2a0d-482f-bba0-84d7930e2cb9.vmdk or device None with type thin {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2491.579968] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-203149b8-58e6-4517-b81e-236eb66c9e8a {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2491.594809] env[63279]: DEBUG oslo_vmware.api [None req-bc1d1c80-963d-492f-ac12-627734a2f9de tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Waiting for the task: (returnval){ [ 2491.594809] env[63279]: value = "task-2088281" [ 2491.594809] env[63279]: _type = "Task" [ 2491.594809] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2491.602560] env[63279]: DEBUG oslo_vmware.api [None req-bc1d1c80-963d-492f-ac12-627734a2f9de tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Task: {'id': task-2088281, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2491.666747] env[63279]: INFO nova.compute.manager [None req-656580aa-159e-4098-a70f-2d71791426ae tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: 6c4d6006-656b-4d30-a595-8985d7f6b1e7] Updating instance to original state: 'active' [ 2492.104229] env[63279]: DEBUG oslo_vmware.api [None req-bc1d1c80-963d-492f-ac12-627734a2f9de tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Task: {'id': task-2088281, 'name': ReconfigVM_Task, 'duration_secs': 0.142277} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2492.104600] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-bc1d1c80-963d-492f-ac12-627734a2f9de tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] [instance: 3832508d-5d12-42a2-93d8-61775907b2d2] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-427805', 'volume_id': '72669fdc-2a0d-482f-bba0-84d7930e2cb9', 'name': 'volume-72669fdc-2a0d-482f-bba0-84d7930e2cb9', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '3832508d-5d12-42a2-93d8-61775907b2d2', 'attached_at': '', 'detached_at': '', 'volume_id': '72669fdc-2a0d-482f-bba0-84d7930e2cb9', 'serial': '72669fdc-2a0d-482f-bba0-84d7930e2cb9'} {{(pid=63279) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 2493.140684] env[63279]: DEBUG nova.objects.instance [None req-bc1d1c80-963d-492f-ac12-627734a2f9de tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Lazy-loading 'flavor' on Instance uuid 3832508d-5d12-42a2-93d8-61775907b2d2 {{(pid=63279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2493.473285] env[63279]: DEBUG oslo_concurrency.lockutils [None req-51d688de-eb17-4617-b0e6-1eddf76cff18 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Acquiring lock "6c4d6006-656b-4d30-a595-8985d7f6b1e7" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2493.474068] env[63279]: DEBUG oslo_concurrency.lockutils [None req-51d688de-eb17-4617-b0e6-1eddf76cff18 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Lock "6c4d6006-656b-4d30-a595-8985d7f6b1e7" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2493.474298] env[63279]: DEBUG oslo_concurrency.lockutils [None req-51d688de-eb17-4617-b0e6-1eddf76cff18 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Acquiring lock "6c4d6006-656b-4d30-a595-8985d7f6b1e7-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2493.474492] env[63279]: DEBUG oslo_concurrency.lockutils [None req-51d688de-eb17-4617-b0e6-1eddf76cff18 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Lock "6c4d6006-656b-4d30-a595-8985d7f6b1e7-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2493.474667] env[63279]: DEBUG oslo_concurrency.lockutils [None req-51d688de-eb17-4617-b0e6-1eddf76cff18 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Lock "6c4d6006-656b-4d30-a595-8985d7f6b1e7-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63279) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2493.476692] env[63279]: INFO nova.compute.manager [None req-51d688de-eb17-4617-b0e6-1eddf76cff18 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: 6c4d6006-656b-4d30-a595-8985d7f6b1e7] Terminating instance [ 2493.645758] env[63279]: DEBUG oslo_concurrency.lockutils [None req-bc1d1c80-963d-492f-ac12-627734a2f9de tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Lock "3832508d-5d12-42a2-93d8-61775907b2d2" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.744s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2493.980416] env[63279]: DEBUG nova.compute.manager [None req-51d688de-eb17-4617-b0e6-1eddf76cff18 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: 6c4d6006-656b-4d30-a595-8985d7f6b1e7] Start destroying the instance on the hypervisor. {{(pid=63279) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2493.980600] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-51d688de-eb17-4617-b0e6-1eddf76cff18 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: 6c4d6006-656b-4d30-a595-8985d7f6b1e7] Destroying instance {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2493.981755] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55769e5a-56af-4b8a-8851-1a03d6a85836 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2493.989647] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-51d688de-eb17-4617-b0e6-1eddf76cff18 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: 6c4d6006-656b-4d30-a595-8985d7f6b1e7] Powering off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2493.989909] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5cd10fb5-eb5e-4c1f-ba37-f59a014a0dbc {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2493.995828] env[63279]: DEBUG oslo_vmware.api [None req-51d688de-eb17-4617-b0e6-1eddf76cff18 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Waiting for the task: (returnval){ [ 2493.995828] env[63279]: value = "task-2088283" [ 2493.995828] env[63279]: _type = "Task" [ 2493.995828] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2494.003733] env[63279]: DEBUG oslo_vmware.api [None req-51d688de-eb17-4617-b0e6-1eddf76cff18 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Task: {'id': task-2088283, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2494.086352] env[63279]: DEBUG oslo_concurrency.lockutils [None req-01605c6c-81c3-4e55-92d0-8c85ed3f4960 tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Acquiring lock "3832508d-5d12-42a2-93d8-61775907b2d2" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2494.086590] env[63279]: DEBUG oslo_concurrency.lockutils [None req-01605c6c-81c3-4e55-92d0-8c85ed3f4960 tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Lock "3832508d-5d12-42a2-93d8-61775907b2d2" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2494.505479] env[63279]: DEBUG oslo_vmware.api [None req-51d688de-eb17-4617-b0e6-1eddf76cff18 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Task: {'id': task-2088283, 'name': PowerOffVM_Task, 'duration_secs': 0.21803} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2494.505782] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-51d688de-eb17-4617-b0e6-1eddf76cff18 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: 6c4d6006-656b-4d30-a595-8985d7f6b1e7] Powered off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2494.505920] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-51d688de-eb17-4617-b0e6-1eddf76cff18 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: 6c4d6006-656b-4d30-a595-8985d7f6b1e7] Unregistering the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2494.506197] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8fc13e0c-5ee0-4500-ae67-d8c4a117f8de {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2494.589366] env[63279]: DEBUG nova.compute.utils [None req-01605c6c-81c3-4e55-92d0-8c85ed3f4960 tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Using /dev/sd instead of None {{(pid=63279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2494.708312] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-b4d5be3f-553b-45d9-97b4-019205427623 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: 1520999b-e3e9-41b3-82e4-91bb556e96c4] Volume attach. 
Driver type: vmdk {{(pid=63279) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 2494.708563] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-b4d5be3f-553b-45d9-97b4-019205427623 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: 1520999b-e3e9-41b3-82e4-91bb556e96c4] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-427807', 'volume_id': '38f45d1c-4e6e-4faa-8d8d-499e9e5fdd61', 'name': 'volume-38f45d1c-4e6e-4faa-8d8d-499e9e5fdd61', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '1520999b-e3e9-41b3-82e4-91bb556e96c4', 'attached_at': '', 'detached_at': '', 'volume_id': '38f45d1c-4e6e-4faa-8d8d-499e9e5fdd61', 'serial': '38f45d1c-4e6e-4faa-8d8d-499e9e5fdd61'} {{(pid=63279) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 2494.709451] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17a32bdb-93ea-49c7-b167-f34662df4d53 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2494.725535] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1bba1aba-0ae0-4962-b557-83c94cc4e790 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2494.749907] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-b4d5be3f-553b-45d9-97b4-019205427623 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: 1520999b-e3e9-41b3-82e4-91bb556e96c4] Reconfiguring VM instance instance-00000074 to attach disk [datastore1] volume-38f45d1c-4e6e-4faa-8d8d-499e9e5fdd61/volume-38f45d1c-4e6e-4faa-8d8d-499e9e5fdd61.vmdk or device None with type thin {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2494.750159] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-af5db663-20fc-4a10-a382-d9671b4990b0 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2494.767458] env[63279]: DEBUG oslo_vmware.api [None req-b4d5be3f-553b-45d9-97b4-019205427623 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Waiting for the task: (returnval){ [ 2494.767458] env[63279]: value = "task-2088285" [ 2494.767458] env[63279]: _type = "Task" [ 2494.767458] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2494.775144] env[63279]: DEBUG oslo_vmware.api [None req-b4d5be3f-553b-45d9-97b4-019205427623 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Task: {'id': task-2088285, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2495.093076] env[63279]: DEBUG oslo_concurrency.lockutils [None req-01605c6c-81c3-4e55-92d0-8c85ed3f4960 tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Lock "3832508d-5d12-42a2-93d8-61775907b2d2" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.005s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2495.192488] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-51d688de-eb17-4617-b0e6-1eddf76cff18 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: 6c4d6006-656b-4d30-a595-8985d7f6b1e7] Unregistered the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2495.192654] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-51d688de-eb17-4617-b0e6-1eddf76cff18 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: 6c4d6006-656b-4d30-a595-8985d7f6b1e7] Deleting contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2495.192823] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-51d688de-eb17-4617-b0e6-1eddf76cff18 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Deleting the datastore file [datastore1] 6c4d6006-656b-4d30-a595-8985d7f6b1e7 {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2495.193104] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-837d6caf-6d2e-4335-a59a-c0d514fc59b1 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2495.199960] env[63279]: DEBUG oslo_vmware.api [None req-51d688de-eb17-4617-b0e6-1eddf76cff18 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Waiting for the task: (returnval){ [ 2495.199960] env[63279]: value = "task-2088286" [ 2495.199960] env[63279]: _type = "Task" [ 2495.199960] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2495.208123] env[63279]: DEBUG oslo_vmware.api [None req-51d688de-eb17-4617-b0e6-1eddf76cff18 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Task: {'id': task-2088286, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2495.277515] env[63279]: DEBUG oslo_vmware.api [None req-b4d5be3f-553b-45d9-97b4-019205427623 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Task: {'id': task-2088285, 'name': ReconfigVM_Task, 'duration_secs': 0.319566} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2495.277797] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-b4d5be3f-553b-45d9-97b4-019205427623 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: 1520999b-e3e9-41b3-82e4-91bb556e96c4] Reconfigured VM instance instance-00000074 to attach disk [datastore1] volume-38f45d1c-4e6e-4faa-8d8d-499e9e5fdd61/volume-38f45d1c-4e6e-4faa-8d8d-499e9e5fdd61.vmdk or device None with type thin {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2495.282455] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b716c762-4c29-4c27-8fd2-2af890b004b9 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2495.297011] env[63279]: DEBUG oslo_vmware.api [None req-b4d5be3f-553b-45d9-97b4-019205427623 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Waiting for the task: (returnval){ [ 2495.297011] env[63279]: value = "task-2088287" [ 2495.297011] env[63279]: _type = "Task" [ 2495.297011] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2495.305080] env[63279]: DEBUG oslo_vmware.api [None req-b4d5be3f-553b-45d9-97b4-019205427623 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Task: {'id': task-2088287, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2495.709911] env[63279]: DEBUG oslo_vmware.api [None req-51d688de-eb17-4617-b0e6-1eddf76cff18 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Task: {'id': task-2088286, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.12837} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2495.710392] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-51d688de-eb17-4617-b0e6-1eddf76cff18 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Deleted the datastore file {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2495.710581] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-51d688de-eb17-4617-b0e6-1eddf76cff18 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: 6c4d6006-656b-4d30-a595-8985d7f6b1e7] Deleted contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2495.710759] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-51d688de-eb17-4617-b0e6-1eddf76cff18 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: 6c4d6006-656b-4d30-a595-8985d7f6b1e7] Instance destroyed {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2495.710937] env[63279]: INFO nova.compute.manager [None req-51d688de-eb17-4617-b0e6-1eddf76cff18 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: 6c4d6006-656b-4d30-a595-8985d7f6b1e7] Took 1.73 seconds to destroy the instance on the hypervisor. 
[ 2495.711218] env[63279]: DEBUG oslo.service.loopingcall [None req-51d688de-eb17-4617-b0e6-1eddf76cff18 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2495.711407] env[63279]: DEBUG nova.compute.manager [-] [instance: 6c4d6006-656b-4d30-a595-8985d7f6b1e7] Deallocating network for instance {{(pid=63279) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2495.711500] env[63279]: DEBUG nova.network.neutron [-] [instance: 6c4d6006-656b-4d30-a595-8985d7f6b1e7] deallocate_for_instance() {{(pid=63279) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2495.806490] env[63279]: DEBUG oslo_vmware.api [None req-b4d5be3f-553b-45d9-97b4-019205427623 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Task: {'id': task-2088287, 'name': ReconfigVM_Task, 'duration_secs': 0.144139} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2495.806801] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-b4d5be3f-553b-45d9-97b4-019205427623 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: 1520999b-e3e9-41b3-82e4-91bb556e96c4] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-427807', 'volume_id': '38f45d1c-4e6e-4faa-8d8d-499e9e5fdd61', 'name': 'volume-38f45d1c-4e6e-4faa-8d8d-499e9e5fdd61', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '1520999b-e3e9-41b3-82e4-91bb556e96c4', 'attached_at': '', 'detached_at': '', 'volume_id': '38f45d1c-4e6e-4faa-8d8d-499e9e5fdd61', 'serial': '38f45d1c-4e6e-4faa-8d8d-499e9e5fdd61'} {{(pid=63279) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 2496.146683] env[63279]: DEBUG oslo_concurrency.lockutils [None req-01605c6c-81c3-4e55-92d0-8c85ed3f4960 tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Acquiring lock "3832508d-5d12-42a2-93d8-61775907b2d2" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2496.146942] env[63279]: DEBUG oslo_concurrency.lockutils [None req-01605c6c-81c3-4e55-92d0-8c85ed3f4960 tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Lock "3832508d-5d12-42a2-93d8-61775907b2d2" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2496.147193] env[63279]: INFO nova.compute.manager [None req-01605c6c-81c3-4e55-92d0-8c85ed3f4960 tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] [instance: 3832508d-5d12-42a2-93d8-61775907b2d2] Attaching volume 3322ea87-20ed-45ff-ac22-c43c5e5d03ff to /dev/sdc [ 2496.153747] env[63279]: DEBUG nova.compute.manager [req-e1f99f87-e894-44ca-aada-700069d18883 req-38d7b6fa-8aed-40ec-b3d9-c68f62ea3198 service nova] [instance: 6c4d6006-656b-4d30-a595-8985d7f6b1e7] Received 
event network-vif-deleted-548d4882-d674-412a-9b89-4691e9eda165 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2496.153747] env[63279]: INFO nova.compute.manager [req-e1f99f87-e894-44ca-aada-700069d18883 req-38d7b6fa-8aed-40ec-b3d9-c68f62ea3198 service nova] [instance: 6c4d6006-656b-4d30-a595-8985d7f6b1e7] Neutron deleted interface 548d4882-d674-412a-9b89-4691e9eda165; detaching it from the instance and deleting it from the info cache [ 2496.154383] env[63279]: DEBUG nova.network.neutron [req-e1f99f87-e894-44ca-aada-700069d18883 req-38d7b6fa-8aed-40ec-b3d9-c68f62ea3198 service nova] [instance: 6c4d6006-656b-4d30-a595-8985d7f6b1e7] Updating instance_info_cache with network_info: [] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2496.180906] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68aa9ff2-1fe9-46c6-a6b8-611549da0258 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2496.187776] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-190f9ac2-4137-4790-8f65-7c4b1de5dbd5 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2496.201884] env[63279]: DEBUG nova.virt.block_device [None req-01605c6c-81c3-4e55-92d0-8c85ed3f4960 tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] [instance: 3832508d-5d12-42a2-93d8-61775907b2d2] Updating existing volume attachment record: 22fdb1d5-2038-4c2b-afc6-3bc298297b1f {{(pid=63279) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 2496.511494] env[63279]: DEBUG nova.network.neutron [None req-e64b440e-b5de-46b1-b388-dd738bacea8f tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 011af8fa-0f76-44a8-8b24-b3d65f5e841e] Successfully updated port: 5fc4a7ed-d188-4387-a1a2-7dc3778475f8 {{(pid=63279) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2496.632260] env[63279]: DEBUG nova.network.neutron [-] [instance: 6c4d6006-656b-4d30-a595-8985d7f6b1e7] Updating instance_info_cache with network_info: [] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2496.658635] env[63279]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-9d87f8ec-8af6-4b13-ac3b-d9d542eb33a3 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2496.668086] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1721f47-ab20-472b-82ac-b133c4ee30a4 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2496.694899] env[63279]: DEBUG nova.compute.manager [req-e1f99f87-e894-44ca-aada-700069d18883 req-38d7b6fa-8aed-40ec-b3d9-c68f62ea3198 service nova] [instance: 6c4d6006-656b-4d30-a595-8985d7f6b1e7] Detach interface failed, port_id=548d4882-d674-412a-9b89-4691e9eda165, reason: Instance 6c4d6006-656b-4d30-a595-8985d7f6b1e7 could not be found. 
{{(pid=63279) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11484}} [ 2496.852261] env[63279]: DEBUG nova.objects.instance [None req-b4d5be3f-553b-45d9-97b4-019205427623 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Lazy-loading 'flavor' on Instance uuid 1520999b-e3e9-41b3-82e4-91bb556e96c4 {{(pid=63279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2497.014058] env[63279]: DEBUG oslo_concurrency.lockutils [None req-e64b440e-b5de-46b1-b388-dd738bacea8f tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Acquiring lock "refresh_cache-011af8fa-0f76-44a8-8b24-b3d65f5e841e" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2497.014547] env[63279]: DEBUG oslo_concurrency.lockutils [None req-e64b440e-b5de-46b1-b388-dd738bacea8f tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Acquired lock "refresh_cache-011af8fa-0f76-44a8-8b24-b3d65f5e841e" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2497.014671] env[63279]: DEBUG nova.network.neutron [None req-e64b440e-b5de-46b1-b388-dd738bacea8f tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 011af8fa-0f76-44a8-8b24-b3d65f5e841e] Building network info cache for instance {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2497.135197] env[63279]: INFO nova.compute.manager [-] [instance: 6c4d6006-656b-4d30-a595-8985d7f6b1e7] Took 1.42 seconds to deallocate network for instance. [ 2497.357500] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b4d5be3f-553b-45d9-97b4-019205427623 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Lock "1520999b-e3e9-41b3-82e4-91bb556e96c4" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.252s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2497.544976] env[63279]: DEBUG nova.network.neutron [None req-e64b440e-b5de-46b1-b388-dd738bacea8f tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 011af8fa-0f76-44a8-8b24-b3d65f5e841e] Instance cache missing network info. 
{{(pid=63279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2497.641788] env[63279]: DEBUG oslo_concurrency.lockutils [None req-51d688de-eb17-4617-b0e6-1eddf76cff18 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2497.642148] env[63279]: DEBUG oslo_concurrency.lockutils [None req-51d688de-eb17-4617-b0e6-1eddf76cff18 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2497.642350] env[63279]: DEBUG oslo_concurrency.lockutils [None req-51d688de-eb17-4617-b0e6-1eddf76cff18 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2497.663848] env[63279]: INFO nova.scheduler.client.report [None req-51d688de-eb17-4617-b0e6-1eddf76cff18 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Deleted allocations for instance 6c4d6006-656b-4d30-a595-8985d7f6b1e7 [ 2497.687299] env[63279]: DEBUG nova.network.neutron [None req-e64b440e-b5de-46b1-b388-dd738bacea8f tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 011af8fa-0f76-44a8-8b24-b3d65f5e841e] Updating instance_info_cache with network_info: [{"id": "5fc4a7ed-d188-4387-a1a2-7dc3778475f8", "address": "fa:16:3e:a5:22:54", "network": {"id": "1237aa69-f92f-4996-893d-4007ba590d1d", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-544783943-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "996f8d6e14a14ac39f207eced547ef33", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d88b750a-0e7d-4f16-8bd5-8e6d5743b720", "external-id": "nsx-vlan-transportzone-715", "segmentation_id": 715, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5fc4a7ed-d1", "ovs_interfaceid": "5fc4a7ed-d188-4387-a1a2-7dc3778475f8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2497.719750] env[63279]: INFO nova.compute.manager [None req-a17de1bf-e154-41f4-a0a9-e3169eea4542 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: 1520999b-e3e9-41b3-82e4-91bb556e96c4] Rebuilding instance [ 2497.762211] env[63279]: DEBUG nova.compute.manager [None 
req-a17de1bf-e154-41f4-a0a9-e3169eea4542 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: 1520999b-e3e9-41b3-82e4-91bb556e96c4] Checking state {{(pid=63279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2497.763128] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12ef679f-ac70-4c5e-aae6-6681404ee009 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2498.176009] env[63279]: DEBUG oslo_concurrency.lockutils [None req-51d688de-eb17-4617-b0e6-1eddf76cff18 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Lock "6c4d6006-656b-4d30-a595-8985d7f6b1e7" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 4.702s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2498.180582] env[63279]: DEBUG nova.compute.manager [req-45883e47-1bca-4d7b-946e-5f5e69e3aa6f req-c25034c2-3f74-44bf-98b1-f263a64f2775 service nova] [instance: 011af8fa-0f76-44a8-8b24-b3d65f5e841e] Received event network-vif-plugged-5fc4a7ed-d188-4387-a1a2-7dc3778475f8 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2498.180838] env[63279]: DEBUG oslo_concurrency.lockutils [req-45883e47-1bca-4d7b-946e-5f5e69e3aa6f req-c25034c2-3f74-44bf-98b1-f263a64f2775 service nova] Acquiring lock "011af8fa-0f76-44a8-8b24-b3d65f5e841e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2498.181109] env[63279]: DEBUG oslo_concurrency.lockutils [req-45883e47-1bca-4d7b-946e-5f5e69e3aa6f req-c25034c2-3f74-44bf-98b1-f263a64f2775 service nova] Lock "011af8fa-0f76-44a8-8b24-b3d65f5e841e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2498.181327] env[63279]: DEBUG oslo_concurrency.lockutils [req-45883e47-1bca-4d7b-946e-5f5e69e3aa6f req-c25034c2-3f74-44bf-98b1-f263a64f2775 service nova] Lock "011af8fa-0f76-44a8-8b24-b3d65f5e841e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2498.181536] env[63279]: DEBUG nova.compute.manager [req-45883e47-1bca-4d7b-946e-5f5e69e3aa6f req-c25034c2-3f74-44bf-98b1-f263a64f2775 service nova] [instance: 011af8fa-0f76-44a8-8b24-b3d65f5e841e] No waiting events found dispatching network-vif-plugged-5fc4a7ed-d188-4387-a1a2-7dc3778475f8 {{(pid=63279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 2498.181752] env[63279]: WARNING nova.compute.manager [req-45883e47-1bca-4d7b-946e-5f5e69e3aa6f req-c25034c2-3f74-44bf-98b1-f263a64f2775 service nova] [instance: 011af8fa-0f76-44a8-8b24-b3d65f5e841e] Received unexpected event network-vif-plugged-5fc4a7ed-d188-4387-a1a2-7dc3778475f8 for instance with vm_state building and task_state spawning. 
[ 2498.181974] env[63279]: DEBUG nova.compute.manager [req-45883e47-1bca-4d7b-946e-5f5e69e3aa6f req-c25034c2-3f74-44bf-98b1-f263a64f2775 service nova] [instance: 011af8fa-0f76-44a8-8b24-b3d65f5e841e] Received event network-changed-5fc4a7ed-d188-4387-a1a2-7dc3778475f8 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2498.182168] env[63279]: DEBUG nova.compute.manager [req-45883e47-1bca-4d7b-946e-5f5e69e3aa6f req-c25034c2-3f74-44bf-98b1-f263a64f2775 service nova] [instance: 011af8fa-0f76-44a8-8b24-b3d65f5e841e] Refreshing instance network info cache due to event network-changed-5fc4a7ed-d188-4387-a1a2-7dc3778475f8. {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11655}} [ 2498.182346] env[63279]: DEBUG oslo_concurrency.lockutils [req-45883e47-1bca-4d7b-946e-5f5e69e3aa6f req-c25034c2-3f74-44bf-98b1-f263a64f2775 service nova] Acquiring lock "refresh_cache-011af8fa-0f76-44a8-8b24-b3d65f5e841e" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2498.189371] env[63279]: DEBUG oslo_concurrency.lockutils [None req-e64b440e-b5de-46b1-b388-dd738bacea8f tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Releasing lock "refresh_cache-011af8fa-0f76-44a8-8b24-b3d65f5e841e" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2498.189654] env[63279]: DEBUG nova.compute.manager [None req-e64b440e-b5de-46b1-b388-dd738bacea8f tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 011af8fa-0f76-44a8-8b24-b3d65f5e841e] Instance network_info: |[{"id": "5fc4a7ed-d188-4387-a1a2-7dc3778475f8", "address": "fa:16:3e:a5:22:54", "network": {"id": "1237aa69-f92f-4996-893d-4007ba590d1d", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-544783943-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "996f8d6e14a14ac39f207eced547ef33", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d88b750a-0e7d-4f16-8bd5-8e6d5743b720", "external-id": "nsx-vlan-transportzone-715", "segmentation_id": 715, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5fc4a7ed-d1", "ovs_interfaceid": "5fc4a7ed-d188-4387-a1a2-7dc3778475f8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 2498.190594] env[63279]: DEBUG oslo_concurrency.lockutils [req-45883e47-1bca-4d7b-946e-5f5e69e3aa6f req-c25034c2-3f74-44bf-98b1-f263a64f2775 service nova] Acquired lock "refresh_cache-011af8fa-0f76-44a8-8b24-b3d65f5e841e" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2498.190778] env[63279]: DEBUG nova.network.neutron [req-45883e47-1bca-4d7b-946e-5f5e69e3aa6f req-c25034c2-3f74-44bf-98b1-f263a64f2775 service nova] [instance: 011af8fa-0f76-44a8-8b24-b3d65f5e841e] Refreshing network info 
cache for port 5fc4a7ed-d188-4387-a1a2-7dc3778475f8 {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2498.191866] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-e64b440e-b5de-46b1-b388-dd738bacea8f tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 011af8fa-0f76-44a8-8b24-b3d65f5e841e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a5:22:54', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'd88b750a-0e7d-4f16-8bd5-8e6d5743b720', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '5fc4a7ed-d188-4387-a1a2-7dc3778475f8', 'vif_model': 'vmxnet3'}] {{(pid=63279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2498.199063] env[63279]: DEBUG oslo.service.loopingcall [None req-e64b440e-b5de-46b1-b388-dd738bacea8f tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2498.200033] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 011af8fa-0f76-44a8-8b24-b3d65f5e841e] Creating VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2498.200277] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-82ffb59c-5adb-4df9-b8cb-c18aa008d34b {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2498.220351] env[63279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2498.220351] env[63279]: value = "task-2088289" [ 2498.220351] env[63279]: _type = "Task" [ 2498.220351] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2498.228055] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2088289, 'name': CreateVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2498.730140] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2088289, 'name': CreateVM_Task, 'duration_secs': 0.317348} completed successfully. 
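Editor's note: the CreateVM_Task sequence above ("Invoking Folder.CreateVM_Task", "Waiting for the task", "_poll_task ... progress is 0%", "completed successfully") is the generic oslo.vmware request/poll cycle. A rough sketch of that cycle against an arbitrary task-returning vSphere method, assuming oslo.vmware's public session API; the endpoint, credentials, poll interval and vm_ref are placeholders, not values from this deployment.

```python
# Sketch of the session/task pattern behind the "Invoking <Object>.<Method>_Task",
# "Waiting for the task" and "_poll_task ... progress is N%" lines above.
from oslo_vmware import api

# Creating the session logs in to vCenter (placeholder endpoint and credentials).
session = api.VMwareAPISession(
    "vcenter.example.org", "user", "secret",
    api_retry_count=10, task_poll_interval=0.5)

vm_ref = ...  # placeholder: a VirtualMachine moref obtained elsewhere

# invoke_api() issues the SOAP call (the "Invoking ..." debug line);
# wait_for_task() polls TaskInfo until 'success' or raises on 'error'
# (the "progress is N%" / "completed successfully" lines).
task = session.invoke_api(session.vim, "PowerOffVM_Task", vm_ref)
task_info = session.wait_for_task(task)
print(task_info.state)  # e.g. 'success'
```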
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2498.730354] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 011af8fa-0f76-44a8-8b24-b3d65f5e841e] Created VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2498.731081] env[63279]: DEBUG oslo_concurrency.lockutils [None req-e64b440e-b5de-46b1-b388-dd738bacea8f tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2498.731334] env[63279]: DEBUG oslo_concurrency.lockutils [None req-e64b440e-b5de-46b1-b388-dd738bacea8f tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2498.731806] env[63279]: DEBUG oslo_concurrency.lockutils [None req-e64b440e-b5de-46b1-b388-dd738bacea8f tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2498.732173] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c9e3157d-4d81-4d2f-86a7-b2ff1eeb1e8b {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2498.737071] env[63279]: DEBUG oslo_vmware.api [None req-e64b440e-b5de-46b1-b388-dd738bacea8f tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Waiting for the task: (returnval){ [ 2498.737071] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52577095-a425-fcfb-2107-77655fdce6c3" [ 2498.737071] env[63279]: _type = "Task" [ 2498.737071] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2498.746541] env[63279]: DEBUG oslo_vmware.api [None req-e64b440e-b5de-46b1-b388-dd738bacea8f tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52577095-a425-fcfb-2107-77655fdce6c3, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2498.778503] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-a17de1bf-e154-41f4-a0a9-e3169eea4542 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: 1520999b-e3e9-41b3-82e4-91bb556e96c4] Powering off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2498.778816] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1753d959-40c7-43b2-9679-2f8a0a376336 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2498.785157] env[63279]: DEBUG oslo_vmware.api [None req-a17de1bf-e154-41f4-a0a9-e3169eea4542 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Waiting for the task: (returnval){ [ 2498.785157] env[63279]: value = "task-2088291" [ 2498.785157] env[63279]: _type = "Task" [ 2498.785157] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2498.793354] env[63279]: DEBUG oslo_vmware.api [None req-a17de1bf-e154-41f4-a0a9-e3169eea4542 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Task: {'id': task-2088291, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2498.957965] env[63279]: DEBUG nova.network.neutron [req-45883e47-1bca-4d7b-946e-5f5e69e3aa6f req-c25034c2-3f74-44bf-98b1-f263a64f2775 service nova] [instance: 011af8fa-0f76-44a8-8b24-b3d65f5e841e] Updated VIF entry in instance network info cache for port 5fc4a7ed-d188-4387-a1a2-7dc3778475f8. 
{{(pid=63279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2498.958522] env[63279]: DEBUG nova.network.neutron [req-45883e47-1bca-4d7b-946e-5f5e69e3aa6f req-c25034c2-3f74-44bf-98b1-f263a64f2775 service nova] [instance: 011af8fa-0f76-44a8-8b24-b3d65f5e841e] Updating instance_info_cache with network_info: [{"id": "5fc4a7ed-d188-4387-a1a2-7dc3778475f8", "address": "fa:16:3e:a5:22:54", "network": {"id": "1237aa69-f92f-4996-893d-4007ba590d1d", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-544783943-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "996f8d6e14a14ac39f207eced547ef33", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d88b750a-0e7d-4f16-8bd5-8e6d5743b720", "external-id": "nsx-vlan-transportzone-715", "segmentation_id": 715, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5fc4a7ed-d1", "ovs_interfaceid": "5fc4a7ed-d188-4387-a1a2-7dc3778475f8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2499.249373] env[63279]: DEBUG oslo_vmware.api [None req-e64b440e-b5de-46b1-b388-dd738bacea8f tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52577095-a425-fcfb-2107-77655fdce6c3, 'name': SearchDatastore_Task, 'duration_secs': 0.011709} completed successfully. 
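Editor's note: the instance_info_cache payload logged twice above is a plain list of VIF dicts. A small sketch of pulling the commonly needed fields out of it, using a copy truncated to the values that actually appear in the log.

```python
# Sketch of reading the cached network_info structure shown above
# (values copied from the log entry; the dict is truncated to the fields used).
vif = {
    "id": "5fc4a7ed-d188-4387-a1a2-7dc3778475f8",
    "address": "fa:16:3e:a5:22:54",
    "devname": "tap5fc4a7ed-d1",
    "network": {
        "subnets": [{"cidr": "192.168.128.0/28",
                     "ips": [{"address": "192.168.128.7", "type": "fixed"}]}],
        "meta": {"mtu": 8950},
    },
    "details": {"segmentation_id": 715},
}

fixed_ip = vif["network"]["subnets"][0]["ips"][0]["address"]
print(f'{vif["devname"]}: {vif["address"]} -> {fixed_ip} '
      f'(mtu {vif["network"]["meta"]["mtu"]}, seg {vif["details"]["segmentation_id"]})')
# tap5fc4a7ed-d1: fa:16:3e:a5:22:54 -> 192.168.128.7 (mtu 8950, seg 715)
```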
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2499.249758] env[63279]: DEBUG oslo_concurrency.lockutils [None req-e64b440e-b5de-46b1-b388-dd738bacea8f tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2499.249930] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-e64b440e-b5de-46b1-b388-dd738bacea8f tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 011af8fa-0f76-44a8-8b24-b3d65f5e841e] Processing image 30887889-e45b-4f67-8b3c-16216e594a90 {{(pid=63279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2499.250188] env[63279]: DEBUG oslo_concurrency.lockutils [None req-e64b440e-b5de-46b1-b388-dd738bacea8f tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2499.250342] env[63279]: DEBUG oslo_concurrency.lockutils [None req-e64b440e-b5de-46b1-b388-dd738bacea8f tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2499.250524] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-e64b440e-b5de-46b1-b388-dd738bacea8f tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2499.250776] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-edf7017b-ab22-45da-9052-4d470ea26dc5 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2499.258537] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-e64b440e-b5de-46b1-b388-dd738bacea8f tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2499.258706] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-e64b440e-b5de-46b1-b388-dd738bacea8f tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2499.259372] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d1ae3c15-e3d7-4039-ab4e-5c6f0822234f {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2499.264068] env[63279]: DEBUG oslo_vmware.api [None req-e64b440e-b5de-46b1-b388-dd738bacea8f tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Waiting for the task: (returnval){ [ 2499.264068] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52b07cdd-44b9-13e6-46bf-b26336bfeadc" [ 2499.264068] env[63279]: _type = "Task" [ 2499.264068] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2499.271047] env[63279]: DEBUG oslo_vmware.api [None req-e64b440e-b5de-46b1-b388-dd738bacea8f tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52b07cdd-44b9-13e6-46bf-b26336bfeadc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2499.293425] env[63279]: DEBUG oslo_vmware.api [None req-a17de1bf-e154-41f4-a0a9-e3169eea4542 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Task: {'id': task-2088291, 'name': PowerOffVM_Task, 'duration_secs': 0.213355} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2499.293665] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-a17de1bf-e154-41f4-a0a9-e3169eea4542 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: 1520999b-e3e9-41b3-82e4-91bb556e96c4] Powered off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2499.348824] env[63279]: INFO nova.compute.manager [None req-a17de1bf-e154-41f4-a0a9-e3169eea4542 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: 1520999b-e3e9-41b3-82e4-91bb556e96c4] Detaching volume 38f45d1c-4e6e-4faa-8d8d-499e9e5fdd61 [ 2499.376292] env[63279]: INFO nova.virt.block_device [None req-a17de1bf-e154-41f4-a0a9-e3169eea4542 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: 1520999b-e3e9-41b3-82e4-91bb556e96c4] Attempting to driver detach volume 38f45d1c-4e6e-4faa-8d8d-499e9e5fdd61 from mountpoint /dev/sdb [ 2499.376528] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-a17de1bf-e154-41f4-a0a9-e3169eea4542 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: 1520999b-e3e9-41b3-82e4-91bb556e96c4] Volume detach. 
Driver type: vmdk {{(pid=63279) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 2499.376715] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-a17de1bf-e154-41f4-a0a9-e3169eea4542 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: 1520999b-e3e9-41b3-82e4-91bb556e96c4] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-427807', 'volume_id': '38f45d1c-4e6e-4faa-8d8d-499e9e5fdd61', 'name': 'volume-38f45d1c-4e6e-4faa-8d8d-499e9e5fdd61', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '1520999b-e3e9-41b3-82e4-91bb556e96c4', 'attached_at': '', 'detached_at': '', 'volume_id': '38f45d1c-4e6e-4faa-8d8d-499e9e5fdd61', 'serial': '38f45d1c-4e6e-4faa-8d8d-499e9e5fdd61'} {{(pid=63279) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 2499.377604] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e611be9-5f5f-4544-bedb-851d85d0aba7 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2499.400498] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dbdd01d2-a2c4-4557-bab9-e6ec98538e99 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2499.406957] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d90e2770-485b-4bf8-9eae-abf0f33d962b {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2499.426962] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2307e842-0421-423b-8d6c-b27ec132716e {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2499.441238] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-a17de1bf-e154-41f4-a0a9-e3169eea4542 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] The volume has not been displaced from its original location: [datastore1] volume-38f45d1c-4e6e-4faa-8d8d-499e9e5fdd61/volume-38f45d1c-4e6e-4faa-8d8d-499e9e5fdd61.vmdk. No consolidation needed. 
{{(pid=63279) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 2499.446627] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-a17de1bf-e154-41f4-a0a9-e3169eea4542 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: 1520999b-e3e9-41b3-82e4-91bb556e96c4] Reconfiguring VM instance instance-00000074 to detach disk 2001 {{(pid=63279) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 2499.446917] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-12f3c659-deda-4110-8c6c-914aee3bfeb6 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2499.461740] env[63279]: DEBUG oslo_concurrency.lockutils [req-45883e47-1bca-4d7b-946e-5f5e69e3aa6f req-c25034c2-3f74-44bf-98b1-f263a64f2775 service nova] Releasing lock "refresh_cache-011af8fa-0f76-44a8-8b24-b3d65f5e841e" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2499.464859] env[63279]: DEBUG oslo_vmware.api [None req-a17de1bf-e154-41f4-a0a9-e3169eea4542 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Waiting for the task: (returnval){ [ 2499.464859] env[63279]: value = "task-2088292" [ 2499.464859] env[63279]: _type = "Task" [ 2499.464859] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2499.473057] env[63279]: DEBUG oslo_vmware.api [None req-a17de1bf-e154-41f4-a0a9-e3169eea4542 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Task: {'id': task-2088292, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2499.536420] env[63279]: DEBUG oslo_concurrency.lockutils [None req-a2a2252e-af31-4639-9cb5-4e4941de7a04 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Acquiring lock "c5251417-7bf6-4ed9-9c98-2fa851e4ac27" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2499.536738] env[63279]: DEBUG oslo_concurrency.lockutils [None req-a2a2252e-af31-4639-9cb5-4e4941de7a04 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Lock "c5251417-7bf6-4ed9-9c98-2fa851e4ac27" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2499.774277] env[63279]: DEBUG oslo_vmware.api [None req-e64b440e-b5de-46b1-b388-dd738bacea8f tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52b07cdd-44b9-13e6-46bf-b26336bfeadc, 'name': SearchDatastore_Task, 'duration_secs': 0.009247} completed successfully. 
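Editor's note: the "Reconfiguring VM instance instance-00000074 to detach disk 2001" step above is a ReconfigVM_Task whose spec removes one VirtualDisk device without touching its backing file, which is why the volume can be reattached later. A sketch of how such a spec can be assembled through the suds type factory that oslo.vmware exposes; this is an illustration, not Nova's exact helper, and session, vm_ref and disk_device are placeholders.

```python
# Sketch of the detach-disk reconfigure: remove one device, keep its .vmdk backing.
# Assumes an oslo.vmware session as in the earlier sketch; `disk_device` would be the
# VirtualDisk looked up by its key (2001 in the log) on the VM's device list.
factory = session.vim.client.factory            # suds type factory

device_change = factory.create("ns0:VirtualDeviceConfigSpec")
device_change.operation = "remove"              # detach only; no fileOperation => keep the file
device_change.device = disk_device

spec = factory.create("ns0:VirtualMachineConfigSpec")
spec.deviceChange = [device_change]

task = session.invoke_api(session.vim, "ReconfigVM_Task", vm_ref, spec=spec)
session.wait_for_task(task)
```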
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2499.775063] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2231b1da-f145-442f-afb0-e98287bb8bdd {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2499.779744] env[63279]: DEBUG oslo_vmware.api [None req-e64b440e-b5de-46b1-b388-dd738bacea8f tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Waiting for the task: (returnval){ [ 2499.779744] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]5234caa2-5103-a64a-186f-b7cc2f954c27" [ 2499.779744] env[63279]: _type = "Task" [ 2499.779744] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2499.787918] env[63279]: DEBUG oslo_vmware.api [None req-e64b440e-b5de-46b1-b388-dd738bacea8f tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]5234caa2-5103-a64a-186f-b7cc2f954c27, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2499.977829] env[63279]: DEBUG oslo_vmware.api [None req-a17de1bf-e154-41f4-a0a9-e3169eea4542 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Task: {'id': task-2088292, 'name': ReconfigVM_Task, 'duration_secs': 0.184812} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2499.977829] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-a17de1bf-e154-41f4-a0a9-e3169eea4542 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: 1520999b-e3e9-41b3-82e4-91bb556e96c4] Reconfigured VM instance instance-00000074 to detach disk 2001 {{(pid=63279) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 2499.985640] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-716ba5ce-fce8-447e-9ec4-aa1af85865fc {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2500.008844] env[63279]: DEBUG oslo_vmware.api [None req-a17de1bf-e154-41f4-a0a9-e3169eea4542 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Waiting for the task: (returnval){ [ 2500.008844] env[63279]: value = "task-2088293" [ 2500.008844] env[63279]: _type = "Task" [ 2500.008844] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2500.022447] env[63279]: DEBUG oslo_vmware.api [None req-a17de1bf-e154-41f4-a0a9-e3169eea4542 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Task: {'id': task-2088293, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2500.039283] env[63279]: DEBUG nova.compute.manager [None req-a2a2252e-af31-4639-9cb5-4e4941de7a04 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: c5251417-7bf6-4ed9-9c98-2fa851e4ac27] Starting instance... 
{{(pid=63279) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 2500.289306] env[63279]: DEBUG oslo_vmware.api [None req-e64b440e-b5de-46b1-b388-dd738bacea8f tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]5234caa2-5103-a64a-186f-b7cc2f954c27, 'name': SearchDatastore_Task, 'duration_secs': 0.009881} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2500.289686] env[63279]: DEBUG oslo_concurrency.lockutils [None req-e64b440e-b5de-46b1-b388-dd738bacea8f tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2500.289834] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-e64b440e-b5de-46b1-b388-dd738bacea8f tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] 011af8fa-0f76-44a8-8b24-b3d65f5e841e/011af8fa-0f76-44a8-8b24-b3d65f5e841e.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2500.290107] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-14527176-acfe-4e7d-9611-a43d2209c791 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2500.296528] env[63279]: DEBUG oslo_vmware.api [None req-e64b440e-b5de-46b1-b388-dd738bacea8f tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Waiting for the task: (returnval){ [ 2500.296528] env[63279]: value = "task-2088294" [ 2500.296528] env[63279]: _type = "Task" [ 2500.296528] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2500.303840] env[63279]: DEBUG oslo_vmware.api [None req-e64b440e-b5de-46b1-b388-dd738bacea8f tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Task: {'id': task-2088294, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2500.519713] env[63279]: DEBUG oslo_vmware.api [None req-a17de1bf-e154-41f4-a0a9-e3169eea4542 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Task: {'id': task-2088293, 'name': ReconfigVM_Task, 'duration_secs': 0.15309} completed successfully. 
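Editor's note: the "Copying Virtual Disk ..." entry above is the image-cache fast path: the cached vmdk under devstack-image-cache_base is copied into the new instance's datastore directory with VirtualDiskManager.CopyVirtualDisk_Task. A sketch of that call with the datastore paths taken from the log; session and dc_ref (the Datacenter moref) are placeholders carried over from the earlier sketches.

```python
# Sketch of the copy-on-first-boot step polled above (the ~0.43s CopyVirtualDisk_Task).
src = ("[datastore1] devstack-image-cache_base/"
       "30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk")
dst = ("[datastore1] 011af8fa-0f76-44a8-8b24-b3d65f5e841e/"
       "011af8fa-0f76-44a8-8b24-b3d65f5e841e.vmdk")

task = session.invoke_api(
    session.vim, "CopyVirtualDisk_Task",
    session.vim.service_content.virtualDiskManager,
    sourceName=src, sourceDatacenter=dc_ref,
    destName=dst, destDatacenter=dc_ref)
session.wait_for_task(task)
```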
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2500.520287] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-a17de1bf-e154-41f4-a0a9-e3169eea4542 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: 1520999b-e3e9-41b3-82e4-91bb556e96c4] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-427807', 'volume_id': '38f45d1c-4e6e-4faa-8d8d-499e9e5fdd61', 'name': 'volume-38f45d1c-4e6e-4faa-8d8d-499e9e5fdd61', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '1520999b-e3e9-41b3-82e4-91bb556e96c4', 'attached_at': '', 'detached_at': '', 'volume_id': '38f45d1c-4e6e-4faa-8d8d-499e9e5fdd61', 'serial': '38f45d1c-4e6e-4faa-8d8d-499e9e5fdd61'} {{(pid=63279) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 2500.566301] env[63279]: DEBUG oslo_concurrency.lockutils [None req-a2a2252e-af31-4639-9cb5-4e4941de7a04 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2500.566610] env[63279]: DEBUG oslo_concurrency.lockutils [None req-a2a2252e-af31-4639-9cb5-4e4941de7a04 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2500.568552] env[63279]: INFO nova.compute.claims [None req-a2a2252e-af31-4639-9cb5-4e4941de7a04 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: c5251417-7bf6-4ed9-9c98-2fa851e4ac27] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2500.746596] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-01605c6c-81c3-4e55-92d0-8c85ed3f4960 tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] [instance: 3832508d-5d12-42a2-93d8-61775907b2d2] Volume attach. 
Driver type: vmdk {{(pid=63279) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 2500.746845] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-01605c6c-81c3-4e55-92d0-8c85ed3f4960 tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] [instance: 3832508d-5d12-42a2-93d8-61775907b2d2] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-427808', 'volume_id': '3322ea87-20ed-45ff-ac22-c43c5e5d03ff', 'name': 'volume-3322ea87-20ed-45ff-ac22-c43c5e5d03ff', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '3832508d-5d12-42a2-93d8-61775907b2d2', 'attached_at': '', 'detached_at': '', 'volume_id': '3322ea87-20ed-45ff-ac22-c43c5e5d03ff', 'serial': '3322ea87-20ed-45ff-ac22-c43c5e5d03ff'} {{(pid=63279) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 2500.747762] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34906ffa-e5cb-4bba-8741-33c47cc6e00d {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2500.764024] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf9d7fca-62ba-4215-85d5-b44269519957 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2500.789974] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-01605c6c-81c3-4e55-92d0-8c85ed3f4960 tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] [instance: 3832508d-5d12-42a2-93d8-61775907b2d2] Reconfiguring VM instance instance-00000073 to attach disk [datastore1] volume-3322ea87-20ed-45ff-ac22-c43c5e5d03ff/volume-3322ea87-20ed-45ff-ac22-c43c5e5d03ff.vmdk or device None with type thin {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2500.790189] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b708b6f8-937f-435d-9f2c-b8b410ef8833 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2500.810528] env[63279]: DEBUG oslo_vmware.api [None req-e64b440e-b5de-46b1-b388-dd738bacea8f tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Task: {'id': task-2088294, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.426286} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2500.811621] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-e64b440e-b5de-46b1-b388-dd738bacea8f tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] 011af8fa-0f76-44a8-8b24-b3d65f5e841e/011af8fa-0f76-44a8-8b24-b3d65f5e841e.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2500.811837] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-e64b440e-b5de-46b1-b388-dd738bacea8f tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 011af8fa-0f76-44a8-8b24-b3d65f5e841e] Extending root virtual disk to 1048576 {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2500.812138] env[63279]: DEBUG oslo_vmware.api [None req-01605c6c-81c3-4e55-92d0-8c85ed3f4960 tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Waiting for the task: (returnval){ [ 2500.812138] env[63279]: value = "task-2088295" [ 2500.812138] env[63279]: _type = "Task" [ 2500.812138] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2500.812320] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-2bbed263-1b46-4df6-8b93-ecc378167c6b {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2500.821699] env[63279]: DEBUG oslo_vmware.api [None req-01605c6c-81c3-4e55-92d0-8c85ed3f4960 tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Task: {'id': task-2088295, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2500.822872] env[63279]: DEBUG oslo_vmware.api [None req-e64b440e-b5de-46b1-b388-dd738bacea8f tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Waiting for the task: (returnval){ [ 2500.822872] env[63279]: value = "task-2088296" [ 2500.822872] env[63279]: _type = "Task" [ 2500.822872] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2500.831255] env[63279]: DEBUG oslo_vmware.api [None req-e64b440e-b5de-46b1-b388-dd738bacea8f tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Task: {'id': task-2088296, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2501.324752] env[63279]: DEBUG oslo_vmware.api [None req-01605c6c-81c3-4e55-92d0-8c85ed3f4960 tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Task: {'id': task-2088295, 'name': ReconfigVM_Task, 'duration_secs': 0.381586} completed successfully. 
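Editor's note: the "Extending root virtual disk to 1048576" entry above is expressed in KiB, so the copied root disk is being grown to 1048576 KiB = 1 GiB to match the flavor. A sketch of the corresponding ExtendVirtualDisk_Task call, under the same placeholder assumptions (session, dc_ref) as the previous sketches.

```python
# The extend size in the log is KiB: 1 GiB flavor root disk -> 1048576 KiB.
root_gb = 1
new_capacity_kb = root_gb * 1024 * 1024        # 1048576, matching the log

task = session.invoke_api(
    session.vim, "ExtendVirtualDisk_Task",
    session.vim.service_content.virtualDiskManager,
    name=("[datastore1] 011af8fa-0f76-44a8-8b24-b3d65f5e841e/"
          "011af8fa-0f76-44a8-8b24-b3d65f5e841e.vmdk"),
    datacenter=dc_ref,
    newCapacityKb=new_capacity_kb, eagerZero=False)
session.wait_for_task(task)                     # the ExtendVirtualDisk_Task polled above
```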
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2501.330884] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-01605c6c-81c3-4e55-92d0-8c85ed3f4960 tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] [instance: 3832508d-5d12-42a2-93d8-61775907b2d2] Reconfigured VM instance instance-00000073 to attach disk [datastore1] volume-3322ea87-20ed-45ff-ac22-c43c5e5d03ff/volume-3322ea87-20ed-45ff-ac22-c43c5e5d03ff.vmdk or device None with type thin {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2501.332765] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ba35a88c-f9b5-4a73-ac10-edea4b5352b2 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2501.346960] env[63279]: DEBUG oslo_vmware.api [None req-e64b440e-b5de-46b1-b388-dd738bacea8f tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Task: {'id': task-2088296, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.062079} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2501.347623] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-e64b440e-b5de-46b1-b388-dd738bacea8f tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 011af8fa-0f76-44a8-8b24-b3d65f5e841e] Extended root virtual disk {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2501.348857] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48c8b243-fb42-4fc5-b509-eeb34a49662e {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2501.351088] env[63279]: DEBUG oslo_vmware.api [None req-01605c6c-81c3-4e55-92d0-8c85ed3f4960 tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Waiting for the task: (returnval){ [ 2501.351088] env[63279]: value = "task-2088297" [ 2501.351088] env[63279]: _type = "Task" [ 2501.351088] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2501.370572] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-e64b440e-b5de-46b1-b388-dd738bacea8f tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 011af8fa-0f76-44a8-8b24-b3d65f5e841e] Reconfiguring VM instance instance-00000076 to attach disk [datastore1] 011af8fa-0f76-44a8-8b24-b3d65f5e841e/011af8fa-0f76-44a8-8b24-b3d65f5e841e.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2501.371102] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9d4e0e99-53f6-40d2-b909-9ecb28fa1736 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2501.387569] env[63279]: DEBUG oslo_vmware.api [None req-01605c6c-81c3-4e55-92d0-8c85ed3f4960 tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Task: {'id': task-2088297, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2501.392189] env[63279]: DEBUG oslo_vmware.api [None req-e64b440e-b5de-46b1-b388-dd738bacea8f tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Waiting for the task: (returnval){ [ 2501.392189] env[63279]: value = "task-2088298" [ 2501.392189] env[63279]: _type = "Task" [ 2501.392189] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2501.399663] env[63279]: DEBUG oslo_vmware.api [None req-e64b440e-b5de-46b1-b388-dd738bacea8f tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Task: {'id': task-2088298, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2501.441285] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2501.441433] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=63279) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11066}} [ 2501.568604] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-a17de1bf-e154-41f4-a0a9-e3169eea4542 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: 1520999b-e3e9-41b3-82e4-91bb556e96c4] Powering off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2501.568919] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ca9fbc33-2a03-49d3-abbe-62bcc62061e9 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2501.575482] env[63279]: DEBUG oslo_vmware.api [None req-a17de1bf-e154-41f4-a0a9-e3169eea4542 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Waiting for the task: (returnval){ [ 2501.575482] env[63279]: value = "task-2088299" [ 2501.575482] env[63279]: _type = "Task" [ 2501.575482] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2501.586584] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-a17de1bf-e154-41f4-a0a9-e3169eea4542 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: 1520999b-e3e9-41b3-82e4-91bb556e96c4] VM already powered off {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 2501.586584] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-a17de1bf-e154-41f4-a0a9-e3169eea4542 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: 1520999b-e3e9-41b3-82e4-91bb556e96c4] Volume detach. 
Driver type: vmdk {{(pid=63279) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 2501.586584] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-a17de1bf-e154-41f4-a0a9-e3169eea4542 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: 1520999b-e3e9-41b3-82e4-91bb556e96c4] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-427807', 'volume_id': '38f45d1c-4e6e-4faa-8d8d-499e9e5fdd61', 'name': 'volume-38f45d1c-4e6e-4faa-8d8d-499e9e5fdd61', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '1520999b-e3e9-41b3-82e4-91bb556e96c4', 'attached_at': '', 'detached_at': '', 'volume_id': '38f45d1c-4e6e-4faa-8d8d-499e9e5fdd61', 'serial': '38f45d1c-4e6e-4faa-8d8d-499e9e5fdd61'} {{(pid=63279) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 2501.587231] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-376823e6-3fe1-4ab8-a83a-d71b60b1606e {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2501.608124] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0fe78fa8-8ed2-4e35-8d98-5068ae8a9963 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2501.615273] env[63279]: WARNING nova.virt.vmwareapi.driver [None req-a17de1bf-e154-41f4-a0a9-e3169eea4542 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: 1520999b-e3e9-41b3-82e4-91bb556e96c4] The volume None does not exist!: nova.exception.DiskNotFound: Unable to find volume [ 2501.615562] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-a17de1bf-e154-41f4-a0a9-e3169eea4542 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: 1520999b-e3e9-41b3-82e4-91bb556e96c4] Destroying instance {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2501.616291] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d671b43-c27d-40e5-876f-be2701ff472f {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2501.624088] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-a17de1bf-e154-41f4-a0a9-e3169eea4542 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: 1520999b-e3e9-41b3-82e4-91bb556e96c4] Unregistering the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2501.624339] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9423d9f7-098d-48a8-9660-17ef34cadb88 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2501.665947] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7a9d360-8089-4e0e-8889-12ff7918b37c {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2501.674026] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-672602fc-3322-465d-b576-7c72fc0629a7 
{{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2501.704815] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c24da03-f805-4b54-bb8c-de8c2d2eed23 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2501.712432] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e071ae3-9d02-4062-888d-521712d8e968 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2501.725926] env[63279]: DEBUG nova.compute.provider_tree [None req-a2a2252e-af31-4639-9cb5-4e4941de7a04 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Inventory has not changed in ProviderTree for provider: 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2501.729965] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-a17de1bf-e154-41f4-a0a9-e3169eea4542 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: 1520999b-e3e9-41b3-82e4-91bb556e96c4] Unregistered the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2501.730256] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-a17de1bf-e154-41f4-a0a9-e3169eea4542 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: 1520999b-e3e9-41b3-82e4-91bb556e96c4] Deleting contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2501.730439] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-a17de1bf-e154-41f4-a0a9-e3169eea4542 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Deleting the datastore file [datastore1] 1520999b-e3e9-41b3-82e4-91bb556e96c4 {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2501.730684] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1dcfd7e1-8f56-4dbb-a005-e7bce2f5b26c {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2501.736104] env[63279]: DEBUG oslo_vmware.api [None req-a17de1bf-e154-41f4-a0a9-e3169eea4542 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Waiting for the task: (returnval){ [ 2501.736104] env[63279]: value = "task-2088301" [ 2501.736104] env[63279]: _type = "Task" [ 2501.736104] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2501.743654] env[63279]: DEBUG oslo_vmware.api [None req-a17de1bf-e154-41f4-a0a9-e3169eea4542 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Task: {'id': task-2088301, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2501.860361] env[63279]: DEBUG oslo_vmware.api [None req-01605c6c-81c3-4e55-92d0-8c85ed3f4960 tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Task: {'id': task-2088297, 'name': ReconfigVM_Task, 'duration_secs': 0.134266} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2501.860661] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-01605c6c-81c3-4e55-92d0-8c85ed3f4960 tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] [instance: 3832508d-5d12-42a2-93d8-61775907b2d2] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-427808', 'volume_id': '3322ea87-20ed-45ff-ac22-c43c5e5d03ff', 'name': 'volume-3322ea87-20ed-45ff-ac22-c43c5e5d03ff', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '3832508d-5d12-42a2-93d8-61775907b2d2', 'attached_at': '', 'detached_at': '', 'volume_id': '3322ea87-20ed-45ff-ac22-c43c5e5d03ff', 'serial': '3322ea87-20ed-45ff-ac22-c43c5e5d03ff'} {{(pid=63279) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 2501.901838] env[63279]: DEBUG oslo_vmware.api [None req-e64b440e-b5de-46b1-b388-dd738bacea8f tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Task: {'id': task-2088298, 'name': ReconfigVM_Task, 'duration_secs': 0.278282} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2501.902140] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-e64b440e-b5de-46b1-b388-dd738bacea8f tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 011af8fa-0f76-44a8-8b24-b3d65f5e841e] Reconfigured VM instance instance-00000076 to attach disk [datastore1] 011af8fa-0f76-44a8-8b24-b3d65f5e841e/011af8fa-0f76-44a8-8b24-b3d65f5e841e.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2501.902756] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-746ab1e7-5a53-4539-8365-ac29dbe720a2 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2501.908229] env[63279]: DEBUG oslo_vmware.api [None req-e64b440e-b5de-46b1-b388-dd738bacea8f tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Waiting for the task: (returnval){ [ 2501.908229] env[63279]: value = "task-2088302" [ 2501.908229] env[63279]: _type = "Task" [ 2501.908229] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2501.915317] env[63279]: DEBUG oslo_vmware.api [None req-e64b440e-b5de-46b1-b388-dd738bacea8f tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Task: {'id': task-2088302, 'name': Rename_Task} progress is 0%. 
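Editor's note: the "Attached VMDK: {...}" entry above is the Cinder connection_info for a vmdk-backed volume; in this driver the data.volume field (vm-427808 here) appears to be the managed object reference of the volume's backing VM. A small sketch that extracts the fields shown in the log, using a copy truncated to those fields.

```python
# Sketch of reading the connection_info from the "Attached VMDK: {...}" entry above
# (values copied from the log; only the fields used are kept).
connection_info = {
    "driver_volume_type": "vmdk",
    "data": {
        "volume": "vm-427808",   # backing object moref value, per the vmdk connector
        "volume_id": "3322ea87-20ed-45ff-ac22-c43c5e5d03ff",
        "name": "volume-3322ea87-20ed-45ff-ac22-c43c5e5d03ff",
        "access_mode": "rw",
        "encrypted": False,
    },
}

data = connection_info["data"]
print(f'{connection_info["driver_volume_type"]} volume {data["volume_id"]} '
      f'backed by {data["volume"]} ({data["access_mode"]})')
```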
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2502.229107] env[63279]: DEBUG nova.scheduler.client.report [None req-a2a2252e-af31-4639-9cb5-4e4941de7a04 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Inventory has not changed for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2502.245940] env[63279]: DEBUG oslo_vmware.api [None req-a17de1bf-e154-41f4-a0a9-e3169eea4542 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Task: {'id': task-2088301, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.123943} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2502.246204] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-a17de1bf-e154-41f4-a0a9-e3169eea4542 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Deleted the datastore file {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2502.246390] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-a17de1bf-e154-41f4-a0a9-e3169eea4542 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: 1520999b-e3e9-41b3-82e4-91bb556e96c4] Deleted contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2502.246569] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-a17de1bf-e154-41f4-a0a9-e3169eea4542 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: 1520999b-e3e9-41b3-82e4-91bb556e96c4] Instance destroyed {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2502.417774] env[63279]: DEBUG oslo_vmware.api [None req-e64b440e-b5de-46b1-b388-dd738bacea8f tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Task: {'id': task-2088302, 'name': Rename_Task, 'duration_secs': 0.131457} completed successfully. 
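Editor's note: the inventory reported above determines how much capacity placement will schedule against; per resource class that capacity is (total - reserved) * allocation_ratio. A worked example with the exact numbers from the log entry.

```python
# Effective schedulable capacity from the provider inventory logged above.
inventory = {
    "VCPU":      {"total": 48,     "reserved": 0,   "allocation_ratio": 4.0},
    "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0},
    "DISK_GB":   {"total": 400,    "reserved": 0,   "allocation_ratio": 1.0},
}
for rc, inv in inventory.items():
    capacity = (inv["total"] - inv["reserved"]) * inv["allocation_ratio"]
    print(rc, capacity)
# VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0
```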
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2502.418141] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-e64b440e-b5de-46b1-b388-dd738bacea8f tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 011af8fa-0f76-44a8-8b24-b3d65f5e841e] Powering on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2502.418286] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-67e9e929-55fa-450b-af60-f5521e2a58be {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2502.423895] env[63279]: DEBUG oslo_vmware.api [None req-e64b440e-b5de-46b1-b388-dd738bacea8f tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Waiting for the task: (returnval){ [ 2502.423895] env[63279]: value = "task-2088303" [ 2502.423895] env[63279]: _type = "Task" [ 2502.423895] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2502.431029] env[63279]: DEBUG oslo_vmware.api [None req-e64b440e-b5de-46b1-b388-dd738bacea8f tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Task: {'id': task-2088303, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2502.734127] env[63279]: DEBUG oslo_concurrency.lockutils [None req-a2a2252e-af31-4639-9cb5-4e4941de7a04 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.167s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2502.734728] env[63279]: DEBUG nova.compute.manager [None req-a2a2252e-af31-4639-9cb5-4e4941de7a04 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: c5251417-7bf6-4ed9-9c98-2fa851e4ac27] Start building networks asynchronously for instance. 
{{(pid=63279) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 2502.751464] env[63279]: INFO nova.virt.block_device [None req-a17de1bf-e154-41f4-a0a9-e3169eea4542 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: 1520999b-e3e9-41b3-82e4-91bb556e96c4] Booting with volume 38f45d1c-4e6e-4faa-8d8d-499e9e5fdd61 at /dev/sdb [ 2502.782751] env[63279]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-94e154f0-bfa1-403b-92c7-9ce4f44516e0 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2502.793567] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ea29788-6d8a-4b8b-a21a-dbf0defab201 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2502.820894] env[63279]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d139be0e-158b-4008-ba9c-3605d50e9712 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2502.829336] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19e161e2-8843-4263-b01b-9599c8d33897 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2502.857951] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b59f0253-f6a8-4e71-8a6e-81f2d925f60c {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2502.865042] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac9fe492-286f-49bc-80c2-9f0088005762 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2502.878222] env[63279]: DEBUG nova.virt.block_device [None req-a17de1bf-e154-41f4-a0a9-e3169eea4542 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: 1520999b-e3e9-41b3-82e4-91bb556e96c4] Updating existing volume attachment record: f19c2414-8d84-4ec0-80ca-5d038db1d171 {{(pid=63279) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 2502.895174] env[63279]: DEBUG nova.objects.instance [None req-01605c6c-81c3-4e55-92d0-8c85ed3f4960 tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Lazy-loading 'flavor' on Instance uuid 3832508d-5d12-42a2-93d8-61775907b2d2 {{(pid=63279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2502.933999] env[63279]: DEBUG oslo_vmware.api [None req-e64b440e-b5de-46b1-b388-dd738bacea8f tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Task: {'id': task-2088303, 'name': PowerOnVM_Task, 'duration_secs': 0.422693} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2502.934158] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-e64b440e-b5de-46b1-b388-dd738bacea8f tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 011af8fa-0f76-44a8-8b24-b3d65f5e841e] Powered on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2502.934318] env[63279]: INFO nova.compute.manager [None req-e64b440e-b5de-46b1-b388-dd738bacea8f tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 011af8fa-0f76-44a8-8b24-b3d65f5e841e] Took 14.76 seconds to spawn the instance on the hypervisor. [ 2502.934503] env[63279]: DEBUG nova.compute.manager [None req-e64b440e-b5de-46b1-b388-dd738bacea8f tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 011af8fa-0f76-44a8-8b24-b3d65f5e841e] Checking state {{(pid=63279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2502.935322] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0df661c2-4749-4653-8229-fba9284b2d50 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2503.240145] env[63279]: DEBUG nova.compute.utils [None req-a2a2252e-af31-4639-9cb5-4e4941de7a04 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Using /dev/sd instead of None {{(pid=63279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2503.241503] env[63279]: DEBUG nova.compute.manager [None req-a2a2252e-af31-4639-9cb5-4e4941de7a04 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: c5251417-7bf6-4ed9-9c98-2fa851e4ac27] Allocating IP information in the background. 
{{(pid=63279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 2503.241681] env[63279]: DEBUG nova.network.neutron [None req-a2a2252e-af31-4639-9cb5-4e4941de7a04 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: c5251417-7bf6-4ed9-9c98-2fa851e4ac27] allocate_for_instance() {{(pid=63279) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2503.280865] env[63279]: DEBUG nova.policy [None req-a2a2252e-af31-4639-9cb5-4e4941de7a04 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ae6ae670025f406e8bd0362124749c43', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f39174e9ff5649e0ade4391da383dfb2', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63279) authorize /opt/stack/nova/nova/policy.py:192}} [ 2503.400530] env[63279]: DEBUG oslo_concurrency.lockutils [None req-01605c6c-81c3-4e55-92d0-8c85ed3f4960 tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Lock "3832508d-5d12-42a2-93d8-61775907b2d2" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.253s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2503.451455] env[63279]: INFO nova.compute.manager [None req-e64b440e-b5de-46b1-b388-dd738bacea8f tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 011af8fa-0f76-44a8-8b24-b3d65f5e841e] Took 20.05 seconds to build instance. [ 2503.538596] env[63279]: DEBUG nova.network.neutron [None req-a2a2252e-af31-4639-9cb5-4e4941de7a04 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: c5251417-7bf6-4ed9-9c98-2fa851e4ac27] Successfully created port: cb63f840-897f-4d96-aa93-bd770d4895d7 {{(pid=63279) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2503.710113] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b1ca3c89-f0d2-43ad-a74d-e35379800939 tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Acquiring lock "3832508d-5d12-42a2-93d8-61775907b2d2" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2503.710483] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b1ca3c89-f0d2-43ad-a74d-e35379800939 tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Lock "3832508d-5d12-42a2-93d8-61775907b2d2" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.001s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2503.745200] env[63279]: DEBUG nova.compute.manager [None req-a2a2252e-af31-4639-9cb5-4e4941de7a04 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: c5251417-7bf6-4ed9-9c98-2fa851e4ac27] Start building block device mappings for instance. 
{{(pid=63279) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 2503.953719] env[63279]: DEBUG oslo_concurrency.lockutils [None req-e64b440e-b5de-46b1-b388-dd738bacea8f tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Lock "011af8fa-0f76-44a8-8b24-b3d65f5e841e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 21.564s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2504.213915] env[63279]: INFO nova.compute.manager [None req-b1ca3c89-f0d2-43ad-a74d-e35379800939 tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] [instance: 3832508d-5d12-42a2-93d8-61775907b2d2] Detaching volume 72669fdc-2a0d-482f-bba0-84d7930e2cb9 [ 2504.246074] env[63279]: INFO nova.virt.block_device [None req-b1ca3c89-f0d2-43ad-a74d-e35379800939 tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] [instance: 3832508d-5d12-42a2-93d8-61775907b2d2] Attempting to driver detach volume 72669fdc-2a0d-482f-bba0-84d7930e2cb9 from mountpoint /dev/sdb [ 2504.246324] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-b1ca3c89-f0d2-43ad-a74d-e35379800939 tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] [instance: 3832508d-5d12-42a2-93d8-61775907b2d2] Volume detach. Driver type: vmdk {{(pid=63279) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 2504.246518] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-b1ca3c89-f0d2-43ad-a74d-e35379800939 tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] [instance: 3832508d-5d12-42a2-93d8-61775907b2d2] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-427805', 'volume_id': '72669fdc-2a0d-482f-bba0-84d7930e2cb9', 'name': 'volume-72669fdc-2a0d-482f-bba0-84d7930e2cb9', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '3832508d-5d12-42a2-93d8-61775907b2d2', 'attached_at': '', 'detached_at': '', 'volume_id': '72669fdc-2a0d-482f-bba0-84d7930e2cb9', 'serial': '72669fdc-2a0d-482f-bba0-84d7930e2cb9'} {{(pid=63279) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 2504.247434] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55482a84-f493-4a02-b446-91410f305d6c {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2504.275630] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a868980-aacc-4677-8568-713a474afc70 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2504.283515] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e81255c-37c6-4859-ab7e-cb1919f13e9b {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2504.306935] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5c9ca61-a271-4991-b7e3-83b8d24a1107 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
2504.324059] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-b1ca3c89-f0d2-43ad-a74d-e35379800939 tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] The volume has not been displaced from its original location: [datastore1] volume-72669fdc-2a0d-482f-bba0-84d7930e2cb9/volume-72669fdc-2a0d-482f-bba0-84d7930e2cb9.vmdk. No consolidation needed. {{(pid=63279) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 2504.329055] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-b1ca3c89-f0d2-43ad-a74d-e35379800939 tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] [instance: 3832508d-5d12-42a2-93d8-61775907b2d2] Reconfiguring VM instance instance-00000073 to detach disk 2001 {{(pid=63279) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 2504.329345] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-97a2029f-8681-4237-a92c-45f03ad5f0f1 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2504.347892] env[63279]: DEBUG oslo_vmware.api [None req-b1ca3c89-f0d2-43ad-a74d-e35379800939 tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Waiting for the task: (returnval){ [ 2504.347892] env[63279]: value = "task-2088304" [ 2504.347892] env[63279]: _type = "Task" [ 2504.347892] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2504.356008] env[63279]: DEBUG oslo_vmware.api [None req-b1ca3c89-f0d2-43ad-a74d-e35379800939 tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Task: {'id': task-2088304, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2504.441779] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2504.522018] env[63279]: DEBUG nova.compute.manager [req-4575738a-181b-4765-9a4a-39b1e024f9fc req-a935f776-3462-4b66-a261-375889a66f53 service nova] [instance: 011af8fa-0f76-44a8-8b24-b3d65f5e841e] Received event network-changed-5fc4a7ed-d188-4387-a1a2-7dc3778475f8 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2504.522273] env[63279]: DEBUG nova.compute.manager [req-4575738a-181b-4765-9a4a-39b1e024f9fc req-a935f776-3462-4b66-a261-375889a66f53 service nova] [instance: 011af8fa-0f76-44a8-8b24-b3d65f5e841e] Refreshing instance network info cache due to event network-changed-5fc4a7ed-d188-4387-a1a2-7dc3778475f8. 
{{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11655}} [ 2504.522435] env[63279]: DEBUG oslo_concurrency.lockutils [req-4575738a-181b-4765-9a4a-39b1e024f9fc req-a935f776-3462-4b66-a261-375889a66f53 service nova] Acquiring lock "refresh_cache-011af8fa-0f76-44a8-8b24-b3d65f5e841e" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2504.522545] env[63279]: DEBUG oslo_concurrency.lockutils [req-4575738a-181b-4765-9a4a-39b1e024f9fc req-a935f776-3462-4b66-a261-375889a66f53 service nova] Acquired lock "refresh_cache-011af8fa-0f76-44a8-8b24-b3d65f5e841e" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2504.522707] env[63279]: DEBUG nova.network.neutron [req-4575738a-181b-4765-9a4a-39b1e024f9fc req-a935f776-3462-4b66-a261-375889a66f53 service nova] [instance: 011af8fa-0f76-44a8-8b24-b3d65f5e841e] Refreshing network info cache for port 5fc4a7ed-d188-4387-a1a2-7dc3778475f8 {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2504.756074] env[63279]: DEBUG nova.compute.manager [None req-a2a2252e-af31-4639-9cb5-4e4941de7a04 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: c5251417-7bf6-4ed9-9c98-2fa851e4ac27] Start spawning the instance on the hypervisor. {{(pid=63279) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 2504.781728] env[63279]: DEBUG nova.virt.hardware [None req-a2a2252e-af31-4639-9cb5-4e4941de7a04 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-13T17:30:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-13T17:30:01Z,direct_url=,disk_format='vmdk',id=30887889-e45b-4f67-8b3c-16216e594a90,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a935e86eee0a4c38adfe0367d2097a61',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-13T17:30:02Z,virtual_size=,visibility=), allow threads: False {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2504.782011] env[63279]: DEBUG nova.virt.hardware [None req-a2a2252e-af31-4639-9cb5-4e4941de7a04 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Flavor limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2504.782227] env[63279]: DEBUG nova.virt.hardware [None req-a2a2252e-af31-4639-9cb5-4e4941de7a04 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Image limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2504.782442] env[63279]: DEBUG nova.virt.hardware [None req-a2a2252e-af31-4639-9cb5-4e4941de7a04 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Flavor pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2504.782598] env[63279]: DEBUG nova.virt.hardware [None 
req-a2a2252e-af31-4639-9cb5-4e4941de7a04 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Image pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2504.782776] env[63279]: DEBUG nova.virt.hardware [None req-a2a2252e-af31-4639-9cb5-4e4941de7a04 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2504.783023] env[63279]: DEBUG nova.virt.hardware [None req-a2a2252e-af31-4639-9cb5-4e4941de7a04 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 2504.783242] env[63279]: DEBUG nova.virt.hardware [None req-a2a2252e-af31-4639-9cb5-4e4941de7a04 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 2504.783454] env[63279]: DEBUG nova.virt.hardware [None req-a2a2252e-af31-4639-9cb5-4e4941de7a04 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Got 1 possible topologies {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2504.783646] env[63279]: DEBUG nova.virt.hardware [None req-a2a2252e-af31-4639-9cb5-4e4941de7a04 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2504.783868] env[63279]: DEBUG nova.virt.hardware [None req-a2a2252e-af31-4639-9cb5-4e4941de7a04 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2504.785040] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74bdba75-e0cc-47c4-8483-c07676a3ec99 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2504.793494] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0dcffbe8-68b1-4b7a-a67c-d01f29fc5805 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2504.856660] env[63279]: DEBUG oslo_vmware.api [None req-b1ca3c89-f0d2-43ad-a74d-e35379800939 tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Task: {'id': task-2088304, 'name': ReconfigVM_Task, 'duration_secs': 0.219138} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2504.856947] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-b1ca3c89-f0d2-43ad-a74d-e35379800939 tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] [instance: 3832508d-5d12-42a2-93d8-61775907b2d2] Reconfigured VM instance instance-00000073 to detach disk 2001 {{(pid=63279) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 2504.861666] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-99c29f5d-ee40-405e-8e86-b2dbea58ea37 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2504.875825] env[63279]: DEBUG oslo_vmware.api [None req-b1ca3c89-f0d2-43ad-a74d-e35379800939 tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Waiting for the task: (returnval){ [ 2504.875825] env[63279]: value = "task-2088305" [ 2504.875825] env[63279]: _type = "Task" [ 2504.875825] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2504.883163] env[63279]: DEBUG oslo_vmware.api [None req-b1ca3c89-f0d2-43ad-a74d-e35379800939 tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Task: {'id': task-2088305, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2504.934143] env[63279]: DEBUG nova.compute.manager [req-3aa528b5-7da4-4d73-8f5c-f8767357833d req-8b93b25e-f7bf-41c6-8d1a-b15dd12c8cce service nova] [instance: c5251417-7bf6-4ed9-9c98-2fa851e4ac27] Received event network-vif-plugged-cb63f840-897f-4d96-aa93-bd770d4895d7 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2504.934363] env[63279]: DEBUG oslo_concurrency.lockutils [req-3aa528b5-7da4-4d73-8f5c-f8767357833d req-8b93b25e-f7bf-41c6-8d1a-b15dd12c8cce service nova] Acquiring lock "c5251417-7bf6-4ed9-9c98-2fa851e4ac27-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2504.934566] env[63279]: DEBUG oslo_concurrency.lockutils [req-3aa528b5-7da4-4d73-8f5c-f8767357833d req-8b93b25e-f7bf-41c6-8d1a-b15dd12c8cce service nova] Lock "c5251417-7bf6-4ed9-9c98-2fa851e4ac27-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2504.934753] env[63279]: DEBUG oslo_concurrency.lockutils [req-3aa528b5-7da4-4d73-8f5c-f8767357833d req-8b93b25e-f7bf-41c6-8d1a-b15dd12c8cce service nova] Lock "c5251417-7bf6-4ed9-9c98-2fa851e4ac27-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2504.934924] env[63279]: DEBUG nova.compute.manager [req-3aa528b5-7da4-4d73-8f5c-f8767357833d req-8b93b25e-f7bf-41c6-8d1a-b15dd12c8cce service nova] [instance: c5251417-7bf6-4ed9-9c98-2fa851e4ac27] No waiting events found dispatching network-vif-plugged-cb63f840-897f-4d96-aa93-bd770d4895d7 {{(pid=63279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 
2504.935260] env[63279]: WARNING nova.compute.manager [req-3aa528b5-7da4-4d73-8f5c-f8767357833d req-8b93b25e-f7bf-41c6-8d1a-b15dd12c8cce service nova] [instance: c5251417-7bf6-4ed9-9c98-2fa851e4ac27] Received unexpected event network-vif-plugged-cb63f840-897f-4d96-aa93-bd770d4895d7 for instance with vm_state building and task_state spawning. [ 2504.985663] env[63279]: DEBUG nova.virt.hardware [None req-a17de1bf-e154-41f4-a0a9-e3169eea4542 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-13T17:30:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-13T17:30:01Z,direct_url=,disk_format='vmdk',id=30887889-e45b-4f67-8b3c-16216e594a90,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a935e86eee0a4c38adfe0367d2097a61',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-13T17:30:02Z,virtual_size=,visibility=), allow threads: False {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2504.985807] env[63279]: DEBUG nova.virt.hardware [None req-a17de1bf-e154-41f4-a0a9-e3169eea4542 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Flavor limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2504.985893] env[63279]: DEBUG nova.virt.hardware [None req-a17de1bf-e154-41f4-a0a9-e3169eea4542 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Image limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2504.986097] env[63279]: DEBUG nova.virt.hardware [None req-a17de1bf-e154-41f4-a0a9-e3169eea4542 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Flavor pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2504.986272] env[63279]: DEBUG nova.virt.hardware [None req-a17de1bf-e154-41f4-a0a9-e3169eea4542 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Image pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2504.986419] env[63279]: DEBUG nova.virt.hardware [None req-a17de1bf-e154-41f4-a0a9-e3169eea4542 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2504.986623] env[63279]: DEBUG nova.virt.hardware [None req-a17de1bf-e154-41f4-a0a9-e3169eea4542 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 2504.986776] env[63279]: DEBUG nova.virt.hardware [None req-a17de1bf-e154-41f4-a0a9-e3169eea4542 
tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 2504.986945] env[63279]: DEBUG nova.virt.hardware [None req-a17de1bf-e154-41f4-a0a9-e3169eea4542 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Got 1 possible topologies {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2504.987270] env[63279]: DEBUG nova.virt.hardware [None req-a17de1bf-e154-41f4-a0a9-e3169eea4542 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2504.987513] env[63279]: DEBUG nova.virt.hardware [None req-a17de1bf-e154-41f4-a0a9-e3169eea4542 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2504.988443] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e89f29d9-843e-429a-aab3-7eb237ffe896 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2504.999014] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-928684b3-3181-4499-b502-25e569d3a3d6 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2505.012367] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-a17de1bf-e154-41f4-a0a9-e3169eea4542 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: 1520999b-e3e9-41b3-82e4-91bb556e96c4] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:8a:ea:54', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '15922696-dc08-44ef-97be-0b09a9dfeae8', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c82cf29b-2996-4e7e-a280-4c7943a6865a', 'vif_model': 'vmxnet3'}] {{(pid=63279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2505.021117] env[63279]: DEBUG oslo.service.loopingcall [None req-a17de1bf-e154-41f4-a0a9-e3169eea4542 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2505.021751] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1520999b-e3e9-41b3-82e4-91bb556e96c4] Creating VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2505.022142] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b1eb78d3-d3a1-40f4-8f77-ce9c9a0865b5 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2505.044851] env[63279]: DEBUG nova.network.neutron [None req-a2a2252e-af31-4639-9cb5-4e4941de7a04 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: c5251417-7bf6-4ed9-9c98-2fa851e4ac27] Successfully updated port: cb63f840-897f-4d96-aa93-bd770d4895d7 {{(pid=63279) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2505.053666] env[63279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2505.053666] env[63279]: value = "task-2088306" [ 2505.053666] env[63279]: _type = "Task" [ 2505.053666] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2505.061393] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2088306, 'name': CreateVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2505.288592] env[63279]: DEBUG nova.network.neutron [req-4575738a-181b-4765-9a4a-39b1e024f9fc req-a935f776-3462-4b66-a261-375889a66f53 service nova] [instance: 011af8fa-0f76-44a8-8b24-b3d65f5e841e] Updated VIF entry in instance network info cache for port 5fc4a7ed-d188-4387-a1a2-7dc3778475f8. 
{{(pid=63279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2505.289080] env[63279]: DEBUG nova.network.neutron [req-4575738a-181b-4765-9a4a-39b1e024f9fc req-a935f776-3462-4b66-a261-375889a66f53 service nova] [instance: 011af8fa-0f76-44a8-8b24-b3d65f5e841e] Updating instance_info_cache with network_info: [{"id": "5fc4a7ed-d188-4387-a1a2-7dc3778475f8", "address": "fa:16:3e:a5:22:54", "network": {"id": "1237aa69-f92f-4996-893d-4007ba590d1d", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-544783943-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.237", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "996f8d6e14a14ac39f207eced547ef33", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d88b750a-0e7d-4f16-8bd5-8e6d5743b720", "external-id": "nsx-vlan-transportzone-715", "segmentation_id": 715, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5fc4a7ed-d1", "ovs_interfaceid": "5fc4a7ed-d188-4387-a1a2-7dc3778475f8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2505.389565] env[63279]: DEBUG oslo_vmware.api [None req-b1ca3c89-f0d2-43ad-a74d-e35379800939 tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Task: {'id': task-2088305, 'name': ReconfigVM_Task, 'duration_secs': 0.12914} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2505.390079] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-b1ca3c89-f0d2-43ad-a74d-e35379800939 tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] [instance: 3832508d-5d12-42a2-93d8-61775907b2d2] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-427805', 'volume_id': '72669fdc-2a0d-482f-bba0-84d7930e2cb9', 'name': 'volume-72669fdc-2a0d-482f-bba0-84d7930e2cb9', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '3832508d-5d12-42a2-93d8-61775907b2d2', 'attached_at': '', 'detached_at': '', 'volume_id': '72669fdc-2a0d-482f-bba0-84d7930e2cb9', 'serial': '72669fdc-2a0d-482f-bba0-84d7930e2cb9'} {{(pid=63279) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 2505.547502] env[63279]: DEBUG oslo_concurrency.lockutils [None req-a2a2252e-af31-4639-9cb5-4e4941de7a04 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Acquiring lock "refresh_cache-c5251417-7bf6-4ed9-9c98-2fa851e4ac27" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2505.547811] env[63279]: DEBUG oslo_concurrency.lockutils [None req-a2a2252e-af31-4639-9cb5-4e4941de7a04 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Acquired lock "refresh_cache-c5251417-7bf6-4ed9-9c98-2fa851e4ac27" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2505.547864] env[63279]: DEBUG nova.network.neutron [None req-a2a2252e-af31-4639-9cb5-4e4941de7a04 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: c5251417-7bf6-4ed9-9c98-2fa851e4ac27] Building network info cache for instance {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2505.564203] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2088306, 'name': CreateVM_Task, 'duration_secs': 0.488378} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2505.564429] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1520999b-e3e9-41b3-82e4-91bb556e96c4] Created VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2505.565009] env[63279]: DEBUG oslo_concurrency.lockutils [None req-a17de1bf-e154-41f4-a0a9-e3169eea4542 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2505.565198] env[63279]: DEBUG oslo_concurrency.lockutils [None req-a17de1bf-e154-41f4-a0a9-e3169eea4542 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2505.565514] env[63279]: DEBUG oslo_concurrency.lockutils [None req-a17de1bf-e154-41f4-a0a9-e3169eea4542 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2505.565757] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d606deb8-f8b0-48ab-a827-e157ca627765 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2505.571125] env[63279]: DEBUG oslo_vmware.api [None req-a17de1bf-e154-41f4-a0a9-e3169eea4542 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Waiting for the task: (returnval){ [ 2505.571125] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]5234f024-f5d3-1fcb-6816-021f24ec1831" [ 2505.571125] env[63279]: _type = "Task" [ 2505.571125] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2505.579493] env[63279]: DEBUG oslo_vmware.api [None req-a17de1bf-e154-41f4-a0a9-e3169eea4542 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]5234f024-f5d3-1fcb-6816-021f24ec1831, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2505.791941] env[63279]: DEBUG oslo_concurrency.lockutils [req-4575738a-181b-4765-9a4a-39b1e024f9fc req-a935f776-3462-4b66-a261-375889a66f53 service nova] Releasing lock "refresh_cache-011af8fa-0f76-44a8-8b24-b3d65f5e841e" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2505.940145] env[63279]: DEBUG nova.objects.instance [None req-b1ca3c89-f0d2-43ad-a74d-e35379800939 tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Lazy-loading 'flavor' on Instance uuid 3832508d-5d12-42a2-93d8-61775907b2d2 {{(pid=63279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2506.080507] env[63279]: DEBUG oslo_vmware.api [None req-a17de1bf-e154-41f4-a0a9-e3169eea4542 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]5234f024-f5d3-1fcb-6816-021f24ec1831, 'name': SearchDatastore_Task, 'duration_secs': 0.0104} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2506.081257] env[63279]: DEBUG nova.network.neutron [None req-a2a2252e-af31-4639-9cb5-4e4941de7a04 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: c5251417-7bf6-4ed9-9c98-2fa851e4ac27] Instance cache missing network info. {{(pid=63279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2506.083093] env[63279]: DEBUG oslo_concurrency.lockutils [None req-a17de1bf-e154-41f4-a0a9-e3169eea4542 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2506.083335] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-a17de1bf-e154-41f4-a0a9-e3169eea4542 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: 1520999b-e3e9-41b3-82e4-91bb556e96c4] Processing image 30887889-e45b-4f67-8b3c-16216e594a90 {{(pid=63279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2506.083572] env[63279]: DEBUG oslo_concurrency.lockutils [None req-a17de1bf-e154-41f4-a0a9-e3169eea4542 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2506.083725] env[63279]: DEBUG oslo_concurrency.lockutils [None req-a17de1bf-e154-41f4-a0a9-e3169eea4542 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2506.083903] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-a17de1bf-e154-41f4-a0a9-e3169eea4542 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Creating directory with path 
[datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2506.084496] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1595d7b3-e8c0-40af-8161-dc1cf1531d6b {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2506.093163] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-a17de1bf-e154-41f4-a0a9-e3169eea4542 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2506.093344] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-a17de1bf-e154-41f4-a0a9-e3169eea4542 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2506.094037] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-78c2b2ee-78ef-45b2-b431-b266d9832b5c {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2506.098951] env[63279]: DEBUG oslo_vmware.api [None req-a17de1bf-e154-41f4-a0a9-e3169eea4542 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Waiting for the task: (returnval){ [ 2506.098951] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52c320e9-b575-75c1-8688-117643eca7db" [ 2506.098951] env[63279]: _type = "Task" [ 2506.098951] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2506.107921] env[63279]: DEBUG oslo_vmware.api [None req-a17de1bf-e154-41f4-a0a9-e3169eea4542 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52c320e9-b575-75c1-8688-117643eca7db, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2506.257548] env[63279]: DEBUG nova.network.neutron [None req-a2a2252e-af31-4639-9cb5-4e4941de7a04 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: c5251417-7bf6-4ed9-9c98-2fa851e4ac27] Updating instance_info_cache with network_info: [{"id": "cb63f840-897f-4d96-aa93-bd770d4895d7", "address": "fa:16:3e:db:98:eb", "network": {"id": "82ca068b-acea-4f60-b0b3-68ea1e08aebe", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-571699353-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f39174e9ff5649e0ade4391da383dfb2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "57d4be17-536f-4a81-bea9-6547bd50f4a3", "external-id": "nsx-vlan-transportzone-163", "segmentation_id": 163, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcb63f840-89", "ovs_interfaceid": "cb63f840-897f-4d96-aa93-bd770d4895d7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2506.436663] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2506.609511] env[63279]: DEBUG oslo_vmware.api [None req-a17de1bf-e154-41f4-a0a9-e3169eea4542 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52c320e9-b575-75c1-8688-117643eca7db, 'name': SearchDatastore_Task, 'duration_secs': 0.009034} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2506.610265] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d846fd93-bbfe-4219-92c8-444745be5c95 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2506.615310] env[63279]: DEBUG oslo_vmware.api [None req-a17de1bf-e154-41f4-a0a9-e3169eea4542 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Waiting for the task: (returnval){ [ 2506.615310] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]5298c1bc-2832-4253-b9e5-b688accd1fe6" [ 2506.615310] env[63279]: _type = "Task" [ 2506.615310] env[63279]: } to complete. 
{{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2506.622338] env[63279]: DEBUG oslo_vmware.api [None req-a17de1bf-e154-41f4-a0a9-e3169eea4542 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]5298c1bc-2832-4253-b9e5-b688accd1fe6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2506.759998] env[63279]: DEBUG oslo_concurrency.lockutils [None req-a2a2252e-af31-4639-9cb5-4e4941de7a04 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Releasing lock "refresh_cache-c5251417-7bf6-4ed9-9c98-2fa851e4ac27" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2506.760339] env[63279]: DEBUG nova.compute.manager [None req-a2a2252e-af31-4639-9cb5-4e4941de7a04 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: c5251417-7bf6-4ed9-9c98-2fa851e4ac27] Instance network_info: |[{"id": "cb63f840-897f-4d96-aa93-bd770d4895d7", "address": "fa:16:3e:db:98:eb", "network": {"id": "82ca068b-acea-4f60-b0b3-68ea1e08aebe", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-571699353-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f39174e9ff5649e0ade4391da383dfb2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "57d4be17-536f-4a81-bea9-6547bd50f4a3", "external-id": "nsx-vlan-transportzone-163", "segmentation_id": 163, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcb63f840-89", "ovs_interfaceid": "cb63f840-897f-4d96-aa93-bd770d4895d7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 2506.760748] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-a2a2252e-af31-4639-9cb5-4e4941de7a04 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: c5251417-7bf6-4ed9-9c98-2fa851e4ac27] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:db:98:eb', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '57d4be17-536f-4a81-bea9-6547bd50f4a3', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'cb63f840-897f-4d96-aa93-bd770d4895d7', 'vif_model': 'vmxnet3'}] {{(pid=63279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2506.768274] env[63279]: DEBUG oslo.service.loopingcall [None req-a2a2252e-af31-4639-9cb5-4e4941de7a04 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2506.768474] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c5251417-7bf6-4ed9-9c98-2fa851e4ac27] Creating VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2506.768693] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-53b5c3ec-3c90-46e2-8b91-4d6934e51d0d {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2506.788740] env[63279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2506.788740] env[63279]: value = "task-2088307" [ 2506.788740] env[63279]: _type = "Task" [ 2506.788740] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2506.797776] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2088307, 'name': CreateVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2506.948785] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b1ca3c89-f0d2-43ad-a74d-e35379800939 tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Lock "3832508d-5d12-42a2-93d8-61775907b2d2" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.238s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2506.954706] env[63279]: DEBUG oslo_concurrency.lockutils [None req-cc7bf279-0ef2-4b8d-8357-95ea2de63110 tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Acquiring lock "3832508d-5d12-42a2-93d8-61775907b2d2" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2506.954963] env[63279]: DEBUG oslo_concurrency.lockutils [None req-cc7bf279-0ef2-4b8d-8357-95ea2de63110 tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Lock "3832508d-5d12-42a2-93d8-61775907b2d2" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2506.962070] env[63279]: DEBUG nova.compute.manager [req-d498f922-8dd4-4d93-b9e8-22de4da98869 req-6b1d6431-ff5a-4dcd-a84f-1041902750dc service nova] [instance: c5251417-7bf6-4ed9-9c98-2fa851e4ac27] Received event network-changed-cb63f840-897f-4d96-aa93-bd770d4895d7 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2506.962286] env[63279]: DEBUG nova.compute.manager [req-d498f922-8dd4-4d93-b9e8-22de4da98869 req-6b1d6431-ff5a-4dcd-a84f-1041902750dc service nova] [instance: c5251417-7bf6-4ed9-9c98-2fa851e4ac27] Refreshing instance network info cache due to event network-changed-cb63f840-897f-4d96-aa93-bd770d4895d7. 
{{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11655}} [ 2506.962623] env[63279]: DEBUG oslo_concurrency.lockutils [req-d498f922-8dd4-4d93-b9e8-22de4da98869 req-6b1d6431-ff5a-4dcd-a84f-1041902750dc service nova] Acquiring lock "refresh_cache-c5251417-7bf6-4ed9-9c98-2fa851e4ac27" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2506.962623] env[63279]: DEBUG oslo_concurrency.lockutils [req-d498f922-8dd4-4d93-b9e8-22de4da98869 req-6b1d6431-ff5a-4dcd-a84f-1041902750dc service nova] Acquired lock "refresh_cache-c5251417-7bf6-4ed9-9c98-2fa851e4ac27" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2506.962777] env[63279]: DEBUG nova.network.neutron [req-d498f922-8dd4-4d93-b9e8-22de4da98869 req-6b1d6431-ff5a-4dcd-a84f-1041902750dc service nova] [instance: c5251417-7bf6-4ed9-9c98-2fa851e4ac27] Refreshing network info cache for port cb63f840-897f-4d96-aa93-bd770d4895d7 {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2507.125282] env[63279]: DEBUG oslo_vmware.api [None req-a17de1bf-e154-41f4-a0a9-e3169eea4542 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]5298c1bc-2832-4253-b9e5-b688accd1fe6, 'name': SearchDatastore_Task, 'duration_secs': 0.008698} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2507.125564] env[63279]: DEBUG oslo_concurrency.lockutils [None req-a17de1bf-e154-41f4-a0a9-e3169eea4542 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2507.125823] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-a17de1bf-e154-41f4-a0a9-e3169eea4542 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] 1520999b-e3e9-41b3-82e4-91bb556e96c4/1520999b-e3e9-41b3-82e4-91bb556e96c4.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2507.126104] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a4a0831c-9df1-4095-9786-fc72ff1bb950 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2507.133448] env[63279]: DEBUG oslo_vmware.api [None req-a17de1bf-e154-41f4-a0a9-e3169eea4542 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Waiting for the task: (returnval){ [ 2507.133448] env[63279]: value = "task-2088308" [ 2507.133448] env[63279]: _type = "Task" [ 2507.133448] env[63279]: } to complete. 
{{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2507.141014] env[63279]: DEBUG oslo_vmware.api [None req-a17de1bf-e154-41f4-a0a9-e3169eea4542 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Task: {'id': task-2088308, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2507.300340] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2088307, 'name': CreateVM_Task, 'duration_secs': 0.404484} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2507.300541] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c5251417-7bf6-4ed9-9c98-2fa851e4ac27] Created VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2507.301272] env[63279]: DEBUG oslo_concurrency.lockutils [None req-a2a2252e-af31-4639-9cb5-4e4941de7a04 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2507.301430] env[63279]: DEBUG oslo_concurrency.lockutils [None req-a2a2252e-af31-4639-9cb5-4e4941de7a04 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2507.301792] env[63279]: DEBUG oslo_concurrency.lockutils [None req-a2a2252e-af31-4639-9cb5-4e4941de7a04 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2507.302098] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-15c7a419-3a0b-408c-91a7-b3cda601055a {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2507.309207] env[63279]: DEBUG oslo_vmware.api [None req-a2a2252e-af31-4639-9cb5-4e4941de7a04 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Waiting for the task: (returnval){ [ 2507.309207] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]529d13bd-7423-b0dc-05f0-bbffd4843dbb" [ 2507.309207] env[63279]: _type = "Task" [ 2507.309207] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2507.318560] env[63279]: DEBUG oslo_vmware.api [None req-a2a2252e-af31-4639-9cb5-4e4941de7a04 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]529d13bd-7423-b0dc-05f0-bbffd4843dbb, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2507.441504] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2507.441726] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Starting heal instance info cache {{(pid=63279) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10447}} [ 2507.441726] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Rebuilding the list of instances to heal {{(pid=63279) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10451}} [ 2507.460075] env[63279]: INFO nova.compute.manager [None req-cc7bf279-0ef2-4b8d-8357-95ea2de63110 tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] [instance: 3832508d-5d12-42a2-93d8-61775907b2d2] Detaching volume 3322ea87-20ed-45ff-ac22-c43c5e5d03ff [ 2507.496066] env[63279]: INFO nova.virt.block_device [None req-cc7bf279-0ef2-4b8d-8357-95ea2de63110 tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] [instance: 3832508d-5d12-42a2-93d8-61775907b2d2] Attempting to driver detach volume 3322ea87-20ed-45ff-ac22-c43c5e5d03ff from mountpoint /dev/sdc [ 2507.496331] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-cc7bf279-0ef2-4b8d-8357-95ea2de63110 tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] [instance: 3832508d-5d12-42a2-93d8-61775907b2d2] Volume detach. 
Driver type: vmdk {{(pid=63279) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 2507.496527] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-cc7bf279-0ef2-4b8d-8357-95ea2de63110 tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] [instance: 3832508d-5d12-42a2-93d8-61775907b2d2] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-427808', 'volume_id': '3322ea87-20ed-45ff-ac22-c43c5e5d03ff', 'name': 'volume-3322ea87-20ed-45ff-ac22-c43c5e5d03ff', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '3832508d-5d12-42a2-93d8-61775907b2d2', 'attached_at': '', 'detached_at': '', 'volume_id': '3322ea87-20ed-45ff-ac22-c43c5e5d03ff', 'serial': '3322ea87-20ed-45ff-ac22-c43c5e5d03ff'} {{(pid=63279) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 2507.498280] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74979ea7-4a2c-4f46-a08c-9d7d8db86d8f {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2507.522990] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2e54fc6-177f-43af-a782-f5c7d097fa59 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2507.531154] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99790616-ac5d-42b0-8cf7-6c106fb825ee {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2507.551713] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-caaff498-52d1-456c-8727-eb1dbbfd58c6 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2507.568618] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-cc7bf279-0ef2-4b8d-8357-95ea2de63110 tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] The volume has not been displaced from its original location: [datastore1] volume-3322ea87-20ed-45ff-ac22-c43c5e5d03ff/volume-3322ea87-20ed-45ff-ac22-c43c5e5d03ff.vmdk. No consolidation needed. 
{{(pid=63279) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 2507.573977] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-cc7bf279-0ef2-4b8d-8357-95ea2de63110 tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] [instance: 3832508d-5d12-42a2-93d8-61775907b2d2] Reconfiguring VM instance instance-00000073 to detach disk 2002 {{(pid=63279) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 2507.576737] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-11b4824a-4625-48b4-8e1f-056de7343b4a {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2507.596601] env[63279]: DEBUG oslo_vmware.api [None req-cc7bf279-0ef2-4b8d-8357-95ea2de63110 tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Waiting for the task: (returnval){ [ 2507.596601] env[63279]: value = "task-2088309" [ 2507.596601] env[63279]: _type = "Task" [ 2507.596601] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2507.604815] env[63279]: DEBUG oslo_vmware.api [None req-cc7bf279-0ef2-4b8d-8357-95ea2de63110 tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Task: {'id': task-2088309, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2507.643625] env[63279]: DEBUG oslo_vmware.api [None req-a17de1bf-e154-41f4-a0a9-e3169eea4542 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Task: {'id': task-2088308, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.462269} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2507.646049] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-a17de1bf-e154-41f4-a0a9-e3169eea4542 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] 1520999b-e3e9-41b3-82e4-91bb556e96c4/1520999b-e3e9-41b3-82e4-91bb556e96c4.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2507.646276] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-a17de1bf-e154-41f4-a0a9-e3169eea4542 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: 1520999b-e3e9-41b3-82e4-91bb556e96c4] Extending root virtual disk to 1048576 {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2507.646533] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-408e17b6-c345-4618-a937-77d2080477df {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2507.653201] env[63279]: DEBUG oslo_vmware.api [None req-a17de1bf-e154-41f4-a0a9-e3169eea4542 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Waiting for the task: (returnval){ [ 2507.653201] env[63279]: value = "task-2088310" [ 2507.653201] env[63279]: _type = "Task" [ 2507.653201] env[63279]: } to complete. 
{{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2507.661308] env[63279]: DEBUG oslo_vmware.api [None req-a17de1bf-e154-41f4-a0a9-e3169eea4542 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Task: {'id': task-2088310, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2507.779336] env[63279]: DEBUG nova.network.neutron [req-d498f922-8dd4-4d93-b9e8-22de4da98869 req-6b1d6431-ff5a-4dcd-a84f-1041902750dc service nova] [instance: c5251417-7bf6-4ed9-9c98-2fa851e4ac27] Updated VIF entry in instance network info cache for port cb63f840-897f-4d96-aa93-bd770d4895d7. {{(pid=63279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2507.779707] env[63279]: DEBUG nova.network.neutron [req-d498f922-8dd4-4d93-b9e8-22de4da98869 req-6b1d6431-ff5a-4dcd-a84f-1041902750dc service nova] [instance: c5251417-7bf6-4ed9-9c98-2fa851e4ac27] Updating instance_info_cache with network_info: [{"id": "cb63f840-897f-4d96-aa93-bd770d4895d7", "address": "fa:16:3e:db:98:eb", "network": {"id": "82ca068b-acea-4f60-b0b3-68ea1e08aebe", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-571699353-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f39174e9ff5649e0ade4391da383dfb2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "57d4be17-536f-4a81-bea9-6547bd50f4a3", "external-id": "nsx-vlan-transportzone-163", "segmentation_id": 163, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcb63f840-89", "ovs_interfaceid": "cb63f840-897f-4d96-aa93-bd770d4895d7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2507.820150] env[63279]: DEBUG oslo_vmware.api [None req-a2a2252e-af31-4639-9cb5-4e4941de7a04 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]529d13bd-7423-b0dc-05f0-bbffd4843dbb, 'name': SearchDatastore_Task, 'duration_secs': 0.04659} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2507.820466] env[63279]: DEBUG oslo_concurrency.lockutils [None req-a2a2252e-af31-4639-9cb5-4e4941de7a04 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2507.820676] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-a2a2252e-af31-4639-9cb5-4e4941de7a04 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: c5251417-7bf6-4ed9-9c98-2fa851e4ac27] Processing image 30887889-e45b-4f67-8b3c-16216e594a90 {{(pid=63279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2507.820906] env[63279]: DEBUG oslo_concurrency.lockutils [None req-a2a2252e-af31-4639-9cb5-4e4941de7a04 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2507.821071] env[63279]: DEBUG oslo_concurrency.lockutils [None req-a2a2252e-af31-4639-9cb5-4e4941de7a04 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2507.821258] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-a2a2252e-af31-4639-9cb5-4e4941de7a04 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2507.821769] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8dbaeb40-008d-4362-b338-f7d6fc3b7acc {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2507.829471] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-a2a2252e-af31-4639-9cb5-4e4941de7a04 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2507.829651] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-a2a2252e-af31-4639-9cb5-4e4941de7a04 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2507.830353] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c420a97d-5a14-45a8-b93b-44f455f2632f {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2507.835310] env[63279]: DEBUG oslo_vmware.api [None req-a2a2252e-af31-4639-9cb5-4e4941de7a04 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Waiting for the task: (returnval){ [ 2507.835310] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52170d43-22b8-6e30-3ed3-e2ac8eac2586" [ 2507.835310] env[63279]: _type = "Task" [ 2507.835310] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2507.842707] env[63279]: DEBUG oslo_vmware.api [None req-a2a2252e-af31-4639-9cb5-4e4941de7a04 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52170d43-22b8-6e30-3ed3-e2ac8eac2586, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2507.945120] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] [instance: c5251417-7bf6-4ed9-9c98-2fa851e4ac27] Skipping network cache update for instance because it is Building. {{(pid=63279) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10460}} [ 2507.972364] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Acquiring lock "refresh_cache-befc985f-68e2-4a04-8de0-9ca9bb3fa504" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2507.972535] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Acquired lock "refresh_cache-befc985f-68e2-4a04-8de0-9ca9bb3fa504" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2507.972684] env[63279]: DEBUG nova.network.neutron [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] [instance: befc985f-68e2-4a04-8de0-9ca9bb3fa504] Forcefully refreshing network info cache for instance {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}} [ 2507.972840] env[63279]: DEBUG nova.objects.instance [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Lazy-loading 'info_cache' on Instance uuid befc985f-68e2-4a04-8de0-9ca9bb3fa504 {{(pid=63279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2508.106702] env[63279]: DEBUG oslo_vmware.api [None req-cc7bf279-0ef2-4b8d-8357-95ea2de63110 tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Task: {'id': task-2088309, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2508.162355] env[63279]: DEBUG oslo_vmware.api [None req-a17de1bf-e154-41f4-a0a9-e3169eea4542 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Task: {'id': task-2088310, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.061561} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2508.162613] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-a17de1bf-e154-41f4-a0a9-e3169eea4542 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: 1520999b-e3e9-41b3-82e4-91bb556e96c4] Extended root virtual disk {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2508.163363] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc16096c-1cc0-4c74-8b03-85ffcf274b9c {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2508.184410] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-a17de1bf-e154-41f4-a0a9-e3169eea4542 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: 1520999b-e3e9-41b3-82e4-91bb556e96c4] Reconfiguring VM instance instance-00000074 to attach disk [datastore1] 1520999b-e3e9-41b3-82e4-91bb556e96c4/1520999b-e3e9-41b3-82e4-91bb556e96c4.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2508.184669] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-97648a85-7275-4d87-a1d3-e936e7401b1d {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2508.202785] env[63279]: DEBUG oslo_vmware.api [None req-a17de1bf-e154-41f4-a0a9-e3169eea4542 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Waiting for the task: (returnval){ [ 2508.202785] env[63279]: value = "task-2088311" [ 2508.202785] env[63279]: _type = "Task" [ 2508.202785] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2508.210437] env[63279]: DEBUG oslo_vmware.api [None req-a17de1bf-e154-41f4-a0a9-e3169eea4542 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Task: {'id': task-2088311, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2508.282880] env[63279]: DEBUG oslo_concurrency.lockutils [req-d498f922-8dd4-4d93-b9e8-22de4da98869 req-6b1d6431-ff5a-4dcd-a84f-1041902750dc service nova] Releasing lock "refresh_cache-c5251417-7bf6-4ed9-9c98-2fa851e4ac27" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2508.345149] env[63279]: DEBUG oslo_vmware.api [None req-a2a2252e-af31-4639-9cb5-4e4941de7a04 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52170d43-22b8-6e30-3ed3-e2ac8eac2586, 'name': SearchDatastore_Task, 'duration_secs': 0.008297} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2508.345897] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3a922a4c-9c18-4b5d-be77-8ca9dba727d3 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2508.352368] env[63279]: DEBUG oslo_vmware.api [None req-a2a2252e-af31-4639-9cb5-4e4941de7a04 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Waiting for the task: (returnval){ [ 2508.352368] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52b4725c-a2ae-7aed-3e30-5fb6a5b63db8" [ 2508.352368] env[63279]: _type = "Task" [ 2508.352368] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2508.359252] env[63279]: DEBUG oslo_vmware.api [None req-a2a2252e-af31-4639-9cb5-4e4941de7a04 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52b4725c-a2ae-7aed-3e30-5fb6a5b63db8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2508.607531] env[63279]: DEBUG oslo_vmware.api [None req-cc7bf279-0ef2-4b8d-8357-95ea2de63110 tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Task: {'id': task-2088309, 'name': ReconfigVM_Task, 'duration_secs': 0.625592} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2508.607859] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-cc7bf279-0ef2-4b8d-8357-95ea2de63110 tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] [instance: 3832508d-5d12-42a2-93d8-61775907b2d2] Reconfigured VM instance instance-00000073 to detach disk 2002 {{(pid=63279) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 2508.612431] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-62556aa0-e567-4fa4-9240-50b2d7d53ad1 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2508.628159] env[63279]: DEBUG oslo_vmware.api [None req-cc7bf279-0ef2-4b8d-8357-95ea2de63110 tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Waiting for the task: (returnval){ [ 2508.628159] env[63279]: value = "task-2088312" [ 2508.628159] env[63279]: _type = "Task" [ 2508.628159] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2508.640744] env[63279]: DEBUG oslo_vmware.api [None req-cc7bf279-0ef2-4b8d-8357-95ea2de63110 tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Task: {'id': task-2088312, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2508.714600] env[63279]: DEBUG oslo_vmware.api [None req-a17de1bf-e154-41f4-a0a9-e3169eea4542 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Task: {'id': task-2088311, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2508.863108] env[63279]: DEBUG oslo_vmware.api [None req-a2a2252e-af31-4639-9cb5-4e4941de7a04 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52b4725c-a2ae-7aed-3e30-5fb6a5b63db8, 'name': SearchDatastore_Task, 'duration_secs': 0.009119} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2508.863470] env[63279]: DEBUG oslo_concurrency.lockutils [None req-a2a2252e-af31-4639-9cb5-4e4941de7a04 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2508.863750] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-a2a2252e-af31-4639-9cb5-4e4941de7a04 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] c5251417-7bf6-4ed9-9c98-2fa851e4ac27/c5251417-7bf6-4ed9-9c98-2fa851e4ac27.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2508.864032] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2151ed2e-299e-4add-a480-905b71143a2b {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2508.870651] env[63279]: DEBUG oslo_vmware.api [None req-a2a2252e-af31-4639-9cb5-4e4941de7a04 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Waiting for the task: (returnval){ [ 2508.870651] env[63279]: value = "task-2088313" [ 2508.870651] env[63279]: _type = "Task" [ 2508.870651] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2508.877830] env[63279]: DEBUG oslo_vmware.api [None req-a2a2252e-af31-4639-9cb5-4e4941de7a04 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Task: {'id': task-2088313, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2509.139136] env[63279]: DEBUG oslo_vmware.api [None req-cc7bf279-0ef2-4b8d-8357-95ea2de63110 tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Task: {'id': task-2088312, 'name': ReconfigVM_Task, 'duration_secs': 0.141209} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2509.139580] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-cc7bf279-0ef2-4b8d-8357-95ea2de63110 tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] [instance: 3832508d-5d12-42a2-93d8-61775907b2d2] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-427808', 'volume_id': '3322ea87-20ed-45ff-ac22-c43c5e5d03ff', 'name': 'volume-3322ea87-20ed-45ff-ac22-c43c5e5d03ff', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '3832508d-5d12-42a2-93d8-61775907b2d2', 'attached_at': '', 'detached_at': '', 'volume_id': '3322ea87-20ed-45ff-ac22-c43c5e5d03ff', 'serial': '3322ea87-20ed-45ff-ac22-c43c5e5d03ff'} {{(pid=63279) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 2509.214295] env[63279]: DEBUG oslo_vmware.api [None req-a17de1bf-e154-41f4-a0a9-e3169eea4542 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Task: {'id': task-2088311, 'name': ReconfigVM_Task, 'duration_secs': 0.625191} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2509.214604] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-a17de1bf-e154-41f4-a0a9-e3169eea4542 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: 1520999b-e3e9-41b3-82e4-91bb556e96c4] Reconfigured VM instance instance-00000074 to attach disk [datastore1] 1520999b-e3e9-41b3-82e4-91bb556e96c4/1520999b-e3e9-41b3-82e4-91bb556e96c4.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2509.215795] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-a17de1bf-e154-41f4-a0a9-e3169eea4542 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: 1520999b-e3e9-41b3-82e4-91bb556e96c4] Block device information present: {'root_device_name': '/dev/sda', 'image': [{'encryption_secret_uuid': None, 'disk_bus': None, 'size': 0, 'encryption_format': None, 'guest_format': None, 'encryption_options': None, 'encrypted': False, 'device_name': '/dev/sda', 'boot_index': 0, 'device_type': 'disk', 'image_id': '30887889-e45b-4f67-8b3c-16216e594a90'}], 'ephemerals': [], 'block_device_mapping': [{'disk_bus': None, 'guest_format': None, 'mount_device': '/dev/sdb', 'delete_on_termination': False, 'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-427807', 'volume_id': '38f45d1c-4e6e-4faa-8d8d-499e9e5fdd61', 'name': 'volume-38f45d1c-4e6e-4faa-8d8d-499e9e5fdd61', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '1520999b-e3e9-41b3-82e4-91bb556e96c4', 'attached_at': '', 'detached_at': '', 'volume_id': '38f45d1c-4e6e-4faa-8d8d-499e9e5fdd61', 'serial': '38f45d1c-4e6e-4faa-8d8d-499e9e5fdd61'}, 'boot_index': None, 'device_type': None, 'attachment_id': 'f19c2414-8d84-4ec0-80ca-5d038db1d171', 'volume_type': None}], 'swap': None} {{(pid=63279) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 2509.216015] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-a17de1bf-e154-41f4-a0a9-e3169eea4542 tempest-ServerActionsTestOtherA-605125166 
tempest-ServerActionsTestOtherA-605125166-project-member] [instance: 1520999b-e3e9-41b3-82e4-91bb556e96c4] Volume attach. Driver type: vmdk {{(pid=63279) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 2509.216225] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-a17de1bf-e154-41f4-a0a9-e3169eea4542 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: 1520999b-e3e9-41b3-82e4-91bb556e96c4] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-427807', 'volume_id': '38f45d1c-4e6e-4faa-8d8d-499e9e5fdd61', 'name': 'volume-38f45d1c-4e6e-4faa-8d8d-499e9e5fdd61', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '1520999b-e3e9-41b3-82e4-91bb556e96c4', 'attached_at': '', 'detached_at': '', 'volume_id': '38f45d1c-4e6e-4faa-8d8d-499e9e5fdd61', 'serial': '38f45d1c-4e6e-4faa-8d8d-499e9e5fdd61'} {{(pid=63279) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 2509.217054] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-499a4554-6b26-44cc-9fe9-9ff12381e47c {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2509.234186] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f7a74aa-4bc4-4a21-b788-d0bc934b97fb {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2509.259095] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-a17de1bf-e154-41f4-a0a9-e3169eea4542 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: 1520999b-e3e9-41b3-82e4-91bb556e96c4] Reconfiguring VM instance instance-00000074 to attach disk [datastore1] volume-38f45d1c-4e6e-4faa-8d8d-499e9e5fdd61/volume-38f45d1c-4e6e-4faa-8d8d-499e9e5fdd61.vmdk or device None with type thin {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2509.259444] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-27754bf4-d6e5-434b-bf8d-e00a9cc6b1ee {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2509.277978] env[63279]: DEBUG oslo_vmware.api [None req-a17de1bf-e154-41f4-a0a9-e3169eea4542 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Waiting for the task: (returnval){ [ 2509.277978] env[63279]: value = "task-2088314" [ 2509.277978] env[63279]: _type = "Task" [ 2509.277978] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2509.285598] env[63279]: DEBUG oslo_vmware.api [None req-a17de1bf-e154-41f4-a0a9-e3169eea4542 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Task: {'id': task-2088314, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2509.383208] env[63279]: DEBUG oslo_vmware.api [None req-a2a2252e-af31-4639-9cb5-4e4941de7a04 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Task: {'id': task-2088313, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.417841} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2509.383523] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-a2a2252e-af31-4639-9cb5-4e4941de7a04 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] c5251417-7bf6-4ed9-9c98-2fa851e4ac27/c5251417-7bf6-4ed9-9c98-2fa851e4ac27.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2509.383768] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-a2a2252e-af31-4639-9cb5-4e4941de7a04 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: c5251417-7bf6-4ed9-9c98-2fa851e4ac27] Extending root virtual disk to 1048576 {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2509.384067] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ea3be162-ec15-4dd2-a15f-fb9055d7c2a1 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2509.390694] env[63279]: DEBUG oslo_vmware.api [None req-a2a2252e-af31-4639-9cb5-4e4941de7a04 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Waiting for the task: (returnval){ [ 2509.390694] env[63279]: value = "task-2088315" [ 2509.390694] env[63279]: _type = "Task" [ 2509.390694] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2509.398332] env[63279]: DEBUG oslo_vmware.api [None req-a2a2252e-af31-4639-9cb5-4e4941de7a04 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Task: {'id': task-2088315, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2509.685697] env[63279]: DEBUG nova.objects.instance [None req-cc7bf279-0ef2-4b8d-8357-95ea2de63110 tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Lazy-loading 'flavor' on Instance uuid 3832508d-5d12-42a2-93d8-61775907b2d2 {{(pid=63279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2509.690410] env[63279]: DEBUG nova.network.neutron [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] [instance: befc985f-68e2-4a04-8de0-9ca9bb3fa504] Updating instance_info_cache with network_info: [{"id": "48a709b0-3a67-4115-a178-0872536d2417", "address": "fa:16:3e:75:52:17", "network": {"id": "1e35690a-f100-41b7-a461-7334966ef784", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1516696428-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.189", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f9427c264e8e41998f579af352cb48cf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "15922696-dc08-44ef-97be-0b09a9dfeae8", "external-id": "nsx-vlan-transportzone-791", "segmentation_id": 791, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap48a709b0-3a", "ovs_interfaceid": "48a709b0-3a67-4115-a178-0872536d2417", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2509.787824] env[63279]: DEBUG oslo_vmware.api [None req-a17de1bf-e154-41f4-a0a9-e3169eea4542 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Task: {'id': task-2088314, 'name': ReconfigVM_Task, 'duration_secs': 0.270271} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2509.788367] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-a17de1bf-e154-41f4-a0a9-e3169eea4542 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: 1520999b-e3e9-41b3-82e4-91bb556e96c4] Reconfigured VM instance instance-00000074 to attach disk [datastore1] volume-38f45d1c-4e6e-4faa-8d8d-499e9e5fdd61/volume-38f45d1c-4e6e-4faa-8d8d-499e9e5fdd61.vmdk or device None with type thin {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2509.792721] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-79219621-c889-4136-8e82-59c77c8493ee {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2509.806954] env[63279]: DEBUG oslo_vmware.api [None req-a17de1bf-e154-41f4-a0a9-e3169eea4542 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Waiting for the task: (returnval){ [ 2509.806954] env[63279]: value = "task-2088316" [ 2509.806954] env[63279]: _type = "Task" [ 2509.806954] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2509.814596] env[63279]: DEBUG oslo_vmware.api [None req-a17de1bf-e154-41f4-a0a9-e3169eea4542 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Task: {'id': task-2088316, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2509.900251] env[63279]: DEBUG oslo_vmware.api [None req-a2a2252e-af31-4639-9cb5-4e4941de7a04 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Task: {'id': task-2088315, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.061712} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2509.900539] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-a2a2252e-af31-4639-9cb5-4e4941de7a04 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: c5251417-7bf6-4ed9-9c98-2fa851e4ac27] Extended root virtual disk {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2509.901310] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8165ceff-4e64-43c4-a6ab-31639d282fc6 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2509.922524] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-a2a2252e-af31-4639-9cb5-4e4941de7a04 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: c5251417-7bf6-4ed9-9c98-2fa851e4ac27] Reconfiguring VM instance instance-00000077 to attach disk [datastore1] c5251417-7bf6-4ed9-9c98-2fa851e4ac27/c5251417-7bf6-4ed9-9c98-2fa851e4ac27.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2509.922793] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1e8d3b2e-f985-450e-ad2f-79d2cc6c7403 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2509.942473] env[63279]: DEBUG oslo_vmware.api [None req-a2a2252e-af31-4639-9cb5-4e4941de7a04 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Waiting for the task: (returnval){ [ 2509.942473] env[63279]: value = "task-2088317" [ 2509.942473] env[63279]: _type = "Task" [ 2509.942473] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2509.950977] env[63279]: DEBUG oslo_vmware.api [None req-a2a2252e-af31-4639-9cb5-4e4941de7a04 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Task: {'id': task-2088317, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2510.192369] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Releasing lock "refresh_cache-befc985f-68e2-4a04-8de0-9ca9bb3fa504" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2510.192369] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] [instance: befc985f-68e2-4a04-8de0-9ca9bb3fa504] Updated the network info_cache for instance {{(pid=63279) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10518}} [ 2510.192704] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2510.192872] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2510.193051] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager.update_available_resource {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2510.316497] env[63279]: DEBUG oslo_vmware.api [None req-a17de1bf-e154-41f4-a0a9-e3169eea4542 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Task: {'id': task-2088316, 'name': ReconfigVM_Task, 'duration_secs': 0.442882} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2510.316798] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-a17de1bf-e154-41f4-a0a9-e3169eea4542 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: 1520999b-e3e9-41b3-82e4-91bb556e96c4] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-427807', 'volume_id': '38f45d1c-4e6e-4faa-8d8d-499e9e5fdd61', 'name': 'volume-38f45d1c-4e6e-4faa-8d8d-499e9e5fdd61', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '1520999b-e3e9-41b3-82e4-91bb556e96c4', 'attached_at': '', 'detached_at': '', 'volume_id': '38f45d1c-4e6e-4faa-8d8d-499e9e5fdd61', 'serial': '38f45d1c-4e6e-4faa-8d8d-499e9e5fdd61'} {{(pid=63279) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 2510.317364] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a99d7d09-d838-4a52-87cc-315c697ef83f {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2510.324012] env[63279]: DEBUG oslo_vmware.api [None req-a17de1bf-e154-41f4-a0a9-e3169eea4542 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Waiting for the task: (returnval){ [ 2510.324012] env[63279]: value = "task-2088318" [ 2510.324012] env[63279]: _type = "Task" [ 2510.324012] env[63279]: } to complete. 
{{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2510.331756] env[63279]: DEBUG oslo_vmware.api [None req-a17de1bf-e154-41f4-a0a9-e3169eea4542 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Task: {'id': task-2088318, 'name': Rename_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2510.453132] env[63279]: DEBUG oslo_vmware.api [None req-a2a2252e-af31-4639-9cb5-4e4941de7a04 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Task: {'id': task-2088317, 'name': ReconfigVM_Task, 'duration_secs': 0.299756} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2510.453132] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-a2a2252e-af31-4639-9cb5-4e4941de7a04 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: c5251417-7bf6-4ed9-9c98-2fa851e4ac27] Reconfigured VM instance instance-00000077 to attach disk [datastore1] c5251417-7bf6-4ed9-9c98-2fa851e4ac27/c5251417-7bf6-4ed9-9c98-2fa851e4ac27.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2510.453132] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e7b95d1f-d806-41b7-a366-c931b770d0b8 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2510.462537] env[63279]: DEBUG oslo_vmware.api [None req-a2a2252e-af31-4639-9cb5-4e4941de7a04 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Waiting for the task: (returnval){ [ 2510.462537] env[63279]: value = "task-2088319" [ 2510.462537] env[63279]: _type = "Task" [ 2510.462537] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2510.471414] env[63279]: DEBUG oslo_vmware.api [None req-a2a2252e-af31-4639-9cb5-4e4941de7a04 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Task: {'id': task-2088319, 'name': Rename_Task} progress is 5%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2510.693415] env[63279]: DEBUG oslo_concurrency.lockutils [None req-cc7bf279-0ef2-4b8d-8357-95ea2de63110 tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Lock "3832508d-5d12-42a2-93d8-61775907b2d2" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.738s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2510.695532] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2510.695532] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2510.695714] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2510.695777] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=63279) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2510.696788] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7ef2f84-8019-4d22-993a-8a7cc50fe133 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2510.705130] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3894b075-b77b-4f55-984c-98e8502b27e2 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2510.719764] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1fcb473-6fb8-456e-af53-60a3859eb62b {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2510.726359] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9fea4c0f-3b05-4a16-922a-378adb0bc9ec {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2510.757107] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180614MB free_disk=170GB free_vcpus=48 pci_devices=None {{(pid=63279) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2510.757274] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Acquiring lock "compute_resources" by 
"nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2510.757475] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2510.834129] env[63279]: DEBUG oslo_vmware.api [None req-a17de1bf-e154-41f4-a0a9-e3169eea4542 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Task: {'id': task-2088318, 'name': Rename_Task, 'duration_secs': 0.228777} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2510.835405] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-a17de1bf-e154-41f4-a0a9-e3169eea4542 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: 1520999b-e3e9-41b3-82e4-91bb556e96c4] Powering on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2510.837751] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-05e56dd2-ad03-4560-ab0d-966072ad9a05 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2510.845722] env[63279]: DEBUG oslo_vmware.api [None req-a17de1bf-e154-41f4-a0a9-e3169eea4542 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Waiting for the task: (returnval){ [ 2510.845722] env[63279]: value = "task-2088320" [ 2510.845722] env[63279]: _type = "Task" [ 2510.845722] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2510.854496] env[63279]: DEBUG oslo_vmware.api [None req-a17de1bf-e154-41f4-a0a9-e3169eea4542 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Task: {'id': task-2088320, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2510.973115] env[63279]: DEBUG oslo_vmware.api [None req-a2a2252e-af31-4639-9cb5-4e4941de7a04 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Task: {'id': task-2088319, 'name': Rename_Task, 'duration_secs': 0.139182} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2510.973485] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-a2a2252e-af31-4639-9cb5-4e4941de7a04 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: c5251417-7bf6-4ed9-9c98-2fa851e4ac27] Powering on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2510.973740] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-879d5bd2-5b89-49df-a091-e8f51e9a1442 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2510.980683] env[63279]: DEBUG oslo_vmware.api [None req-a2a2252e-af31-4639-9cb5-4e4941de7a04 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Waiting for the task: (returnval){ [ 2510.980683] env[63279]: value = "task-2088321" [ 2510.980683] env[63279]: _type = "Task" [ 2510.980683] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2510.989202] env[63279]: DEBUG oslo_vmware.api [None req-a2a2252e-af31-4639-9cb5-4e4941de7a04 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Task: {'id': task-2088321, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2511.340829] env[63279]: DEBUG oslo_concurrency.lockutils [None req-dcda68d9-35e7-4abf-a025-ae6a510877fc tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Acquiring lock "3832508d-5d12-42a2-93d8-61775907b2d2" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2511.341127] env[63279]: DEBUG oslo_concurrency.lockutils [None req-dcda68d9-35e7-4abf-a025-ae6a510877fc tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Lock "3832508d-5d12-42a2-93d8-61775907b2d2" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2511.341322] env[63279]: DEBUG oslo_concurrency.lockutils [None req-dcda68d9-35e7-4abf-a025-ae6a510877fc tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Acquiring lock "3832508d-5d12-42a2-93d8-61775907b2d2-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2511.341511] env[63279]: DEBUG oslo_concurrency.lockutils [None req-dcda68d9-35e7-4abf-a025-ae6a510877fc tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Lock "3832508d-5d12-42a2-93d8-61775907b2d2-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2511.341682] env[63279]: DEBUG oslo_concurrency.lockutils [None req-dcda68d9-35e7-4abf-a025-ae6a510877fc tempest-AttachVolumeTestJSON-1872027629 
tempest-AttachVolumeTestJSON-1872027629-project-member] Lock "3832508d-5d12-42a2-93d8-61775907b2d2-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2511.344196] env[63279]: INFO nova.compute.manager [None req-dcda68d9-35e7-4abf-a025-ae6a510877fc tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] [instance: 3832508d-5d12-42a2-93d8-61775907b2d2] Terminating instance [ 2511.355435] env[63279]: DEBUG oslo_vmware.api [None req-a17de1bf-e154-41f4-a0a9-e3169eea4542 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Task: {'id': task-2088320, 'name': PowerOnVM_Task, 'duration_secs': 0.455053} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2511.355711] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-a17de1bf-e154-41f4-a0a9-e3169eea4542 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: 1520999b-e3e9-41b3-82e4-91bb556e96c4] Powered on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2511.355936] env[63279]: DEBUG nova.compute.manager [None req-a17de1bf-e154-41f4-a0a9-e3169eea4542 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: 1520999b-e3e9-41b3-82e4-91bb556e96c4] Checking state {{(pid=63279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2511.356997] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb708077-632a-4562-b885-05f8f86fc740 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2511.490095] env[63279]: DEBUG oslo_vmware.api [None req-a2a2252e-af31-4639-9cb5-4e4941de7a04 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Task: {'id': task-2088321, 'name': PowerOnVM_Task, 'duration_secs': 0.478059} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2511.490374] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-a2a2252e-af31-4639-9cb5-4e4941de7a04 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: c5251417-7bf6-4ed9-9c98-2fa851e4ac27] Powered on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2511.490575] env[63279]: INFO nova.compute.manager [None req-a2a2252e-af31-4639-9cb5-4e4941de7a04 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: c5251417-7bf6-4ed9-9c98-2fa851e4ac27] Took 6.73 seconds to spawn the instance on the hypervisor. 
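The Rename_Task and PowerOnVM_Task records above follow oslo.vmware's usual pattern: invoke the vSphere task method through the session, then block in wait_for_task while _poll_task logs progress and duration_secs. A minimal sketch of that pattern, assuming placeholder vCenter credentials and a made-up VM moref (constructor keyword names may vary slightly between oslo.vmware releases):

# Illustrative sketch only; host, credentials and the moref value are placeholders.
from oslo_vmware import api, vim_util

session = api.VMwareAPISession(
    'vc.example.test',                  # vCenter endpoint (placeholder)
    'administrator@vsphere.local',      # placeholder credentials
    'secret',
    api_retry_count=10,
    task_poll_interval=0.5)             # progress is polled roughly this often

# A VirtualMachine moref would normally come from a PropertyCollector query;
# 'vm-123' is a made-up value for illustration.
vm_ref = vim_util.get_moref('vm-123', 'VirtualMachine')

task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
session.wait_for_task(task)             # blocks, emitting progress/duration_secs records like those above
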
[ 2511.490753] env[63279]: DEBUG nova.compute.manager [None req-a2a2252e-af31-4639-9cb5-4e4941de7a04 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: c5251417-7bf6-4ed9-9c98-2fa851e4ac27] Checking state {{(pid=63279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2511.491499] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e2952a5-546c-4fe4-80c5-e783826b858c {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2511.787746] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Instance befc985f-68e2-4a04-8de0-9ca9bb3fa504 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2511.787959] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Instance 3832508d-5d12-42a2-93d8-61775907b2d2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2511.788142] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Instance 1520999b-e3e9-41b3-82e4-91bb556e96c4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2511.788295] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Instance 011af8fa-0f76-44a8-8b24-b3d65f5e841e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2511.788405] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Instance c5251417-7bf6-4ed9-9c98-2fa851e4ac27 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2511.788592] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Total usable vcpus: 48, total allocated vcpus: 5 {{(pid=63279) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2511.788730] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1472MB phys_disk=200GB used_disk=5GB total_vcpus=48 used_vcpus=5 pci_stats=[] {{(pid=63279) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2511.848341] env[63279]: DEBUG nova.compute.manager [None req-dcda68d9-35e7-4abf-a025-ae6a510877fc tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] [instance: 3832508d-5d12-42a2-93d8-61775907b2d2] Start destroying the instance on the hypervisor. {{(pid=63279) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2511.848593] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-dcda68d9-35e7-4abf-a025-ae6a510877fc tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] [instance: 3832508d-5d12-42a2-93d8-61775907b2d2] Destroying instance {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2511.849347] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1da692cf-e681-4878-81fc-f3534b812051 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2511.855323] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b6c3d01-7898-46b2-894d-b2003041c216 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2511.859512] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-dcda68d9-35e7-4abf-a025-ae6a510877fc tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] [instance: 3832508d-5d12-42a2-93d8-61775907b2d2] Powering off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2511.860060] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ca0b8ea9-ef8e-40d9-ad58-45e3d18bdccb {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2511.864679] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-315e4cc8-e8c4-44c2-a415-920efa39c96b {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2511.872274] env[63279]: DEBUG oslo_vmware.api [None req-dcda68d9-35e7-4abf-a025-ae6a510877fc tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Waiting for the task: (returnval){ [ 2511.872274] env[63279]: value = "task-2088322" [ 2511.872274] env[63279]: _type = "Task" [ 2511.872274] env[63279]: } to complete. 
{{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2511.901318] env[63279]: DEBUG oslo_concurrency.lockutils [None req-a17de1bf-e154-41f4-a0a9-e3169eea4542 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2511.902852] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6dfb669-e944-41c8-960b-412f281b2461 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2511.908542] env[63279]: DEBUG oslo_vmware.api [None req-dcda68d9-35e7-4abf-a025-ae6a510877fc tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Task: {'id': task-2088322, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2511.913501] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d1f31a5-4e30-46ff-9f42-e014e5f0a048 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2511.926855] env[63279]: DEBUG nova.compute.provider_tree [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Inventory has not changed in ProviderTree for provider: 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2512.007959] env[63279]: INFO nova.compute.manager [None req-a2a2252e-af31-4639-9cb5-4e4941de7a04 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: c5251417-7bf6-4ed9-9c98-2fa851e4ac27] Took 11.46 seconds to build instance. [ 2512.382034] env[63279]: DEBUG oslo_vmware.api [None req-dcda68d9-35e7-4abf-a025-ae6a510877fc tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Task: {'id': task-2088322, 'name': PowerOffVM_Task, 'duration_secs': 0.273467} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2512.382349] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-dcda68d9-35e7-4abf-a025-ae6a510877fc tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] [instance: 3832508d-5d12-42a2-93d8-61775907b2d2] Powered off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2512.382523] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-dcda68d9-35e7-4abf-a025-ae6a510877fc tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] [instance: 3832508d-5d12-42a2-93d8-61775907b2d2] Unregistering the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2512.382853] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e129f41b-0cb9-4572-a6b8-39d5a281b0f3 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2512.429616] env[63279]: DEBUG nova.scheduler.client.report [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Inventory has not changed for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2512.510937] env[63279]: DEBUG oslo_concurrency.lockutils [None req-a2a2252e-af31-4639-9cb5-4e4941de7a04 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Lock "c5251417-7bf6-4ed9-9c98-2fa851e4ac27" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 12.974s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2512.666774] env[63279]: DEBUG nova.compute.manager [req-c102b370-2b16-4799-8c07-f473a92a38a3 req-badc5eec-ba52-4020-8807-59b2acc8a4e6 service nova] [instance: c5251417-7bf6-4ed9-9c98-2fa851e4ac27] Received event network-changed-cb63f840-897f-4d96-aa93-bd770d4895d7 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2512.666993] env[63279]: DEBUG nova.compute.manager [req-c102b370-2b16-4799-8c07-f473a92a38a3 req-badc5eec-ba52-4020-8807-59b2acc8a4e6 service nova] [instance: c5251417-7bf6-4ed9-9c98-2fa851e4ac27] Refreshing instance network info cache due to event network-changed-cb63f840-897f-4d96-aa93-bd770d4895d7. 
{{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11655}} [ 2512.667229] env[63279]: DEBUG oslo_concurrency.lockutils [req-c102b370-2b16-4799-8c07-f473a92a38a3 req-badc5eec-ba52-4020-8807-59b2acc8a4e6 service nova] Acquiring lock "refresh_cache-c5251417-7bf6-4ed9-9c98-2fa851e4ac27" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2512.667379] env[63279]: DEBUG oslo_concurrency.lockutils [req-c102b370-2b16-4799-8c07-f473a92a38a3 req-badc5eec-ba52-4020-8807-59b2acc8a4e6 service nova] Acquired lock "refresh_cache-c5251417-7bf6-4ed9-9c98-2fa851e4ac27" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2512.667544] env[63279]: DEBUG nova.network.neutron [req-c102b370-2b16-4799-8c07-f473a92a38a3 req-badc5eec-ba52-4020-8807-59b2acc8a4e6 service nova] [instance: c5251417-7bf6-4ed9-9c98-2fa851e4ac27] Refreshing network info cache for port cb63f840-897f-4d96-aa93-bd770d4895d7 {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2512.934688] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=63279) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2512.935049] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.177s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2512.935497] env[63279]: DEBUG oslo_concurrency.lockutils [None req-a17de1bf-e154-41f4-a0a9-e3169eea4542 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 1.034s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2512.935686] env[63279]: DEBUG nova.objects.instance [None req-a17de1bf-e154-41f4-a0a9-e3169eea4542 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: 1520999b-e3e9-41b3-82e4-91bb556e96c4] Trying to apply a migration context that does not seem to be set for this instance {{(pid=63279) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 2513.186508] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2513.376247] env[63279]: DEBUG nova.network.neutron [req-c102b370-2b16-4799-8c07-f473a92a38a3 req-badc5eec-ba52-4020-8807-59b2acc8a4e6 service nova] [instance: c5251417-7bf6-4ed9-9c98-2fa851e4ac27] Updated VIF entry in instance network info cache for port cb63f840-897f-4d96-aa93-bd770d4895d7. 
{{(pid=63279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2513.376620] env[63279]: DEBUG nova.network.neutron [req-c102b370-2b16-4799-8c07-f473a92a38a3 req-badc5eec-ba52-4020-8807-59b2acc8a4e6 service nova] [instance: c5251417-7bf6-4ed9-9c98-2fa851e4ac27] Updating instance_info_cache with network_info: [{"id": "cb63f840-897f-4d96-aa93-bd770d4895d7", "address": "fa:16:3e:db:98:eb", "network": {"id": "82ca068b-acea-4f60-b0b3-68ea1e08aebe", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-571699353-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.165", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f39174e9ff5649e0ade4391da383dfb2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "57d4be17-536f-4a81-bea9-6547bd50f4a3", "external-id": "nsx-vlan-transportzone-163", "segmentation_id": 163, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcb63f840-89", "ovs_interfaceid": "cb63f840-897f-4d96-aa93-bd770d4895d7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2513.879190] env[63279]: DEBUG oslo_concurrency.lockutils [req-c102b370-2b16-4799-8c07-f473a92a38a3 req-badc5eec-ba52-4020-8807-59b2acc8a4e6 service nova] Releasing lock "refresh_cache-c5251417-7bf6-4ed9-9c98-2fa851e4ac27" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2513.944853] env[63279]: DEBUG oslo_concurrency.lockutils [None req-a17de1bf-e154-41f4-a0a9-e3169eea4542 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.009s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2514.441738] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2518.436714] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2519.208576] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-dcda68d9-35e7-4abf-a025-ae6a510877fc tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] [instance: 3832508d-5d12-42a2-93d8-61775907b2d2] Unregistered the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2519.208746] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-dcda68d9-35e7-4abf-a025-ae6a510877fc tempest-AttachVolumeTestJSON-1872027629 
tempest-AttachVolumeTestJSON-1872027629-project-member] [instance: 3832508d-5d12-42a2-93d8-61775907b2d2] Deleting contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2519.209021] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-dcda68d9-35e7-4abf-a025-ae6a510877fc tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Deleting the datastore file [datastore1] 3832508d-5d12-42a2-93d8-61775907b2d2 {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2519.209346] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-21342481-fa7a-405c-bd63-853ea753eebe {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2519.216496] env[63279]: DEBUG oslo_vmware.api [None req-dcda68d9-35e7-4abf-a025-ae6a510877fc tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Waiting for the task: (returnval){ [ 2519.216496] env[63279]: value = "task-2088324" [ 2519.216496] env[63279]: _type = "Task" [ 2519.216496] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2519.224912] env[63279]: DEBUG oslo_vmware.api [None req-dcda68d9-35e7-4abf-a025-ae6a510877fc tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Task: {'id': task-2088324, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2519.726110] env[63279]: DEBUG oslo_vmware.api [None req-dcda68d9-35e7-4abf-a025-ae6a510877fc tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Task: {'id': task-2088324, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.137823} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2519.726424] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-dcda68d9-35e7-4abf-a025-ae6a510877fc tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Deleted the datastore file {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2519.726562] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-dcda68d9-35e7-4abf-a025-ae6a510877fc tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] [instance: 3832508d-5d12-42a2-93d8-61775907b2d2] Deleted contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2519.726736] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-dcda68d9-35e7-4abf-a025-ae6a510877fc tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] [instance: 3832508d-5d12-42a2-93d8-61775907b2d2] Instance destroyed {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2519.726912] env[63279]: INFO nova.compute.manager [None req-dcda68d9-35e7-4abf-a025-ae6a510877fc tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] [instance: 3832508d-5d12-42a2-93d8-61775907b2d2] Took 7.88 seconds to destroy the instance on the hypervisor. 
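The "Final resource view" record above (used_ram=1472MB, used_disk=5GB, used_vcpus=5) is consistent with the five per-instance allocations logged just before it plus the 512 MB reserved in the provider inventory. A quick back-of-the-envelope check, assuming the tracker simply sums the reserved memory and the flavor sizes shown in the log:

# Rough sanity check of the tracker's "Final resource view" numbers above.
# The five allocations logged are each {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1};
# 512 MB is the reserved memory shown in the placement inventory records.
instances = 5
reserved_ram_mb = 512
used_ram_mb = reserved_ram_mb + instances * 192   # 512 + 960 = 1472 MB
used_disk_gb = instances * 1                      # 5 GB
used_vcpus = instances * 1                        # 5
print(used_ram_mb, used_disk_gb, used_vcpus)      # 1472 5 5, matching the log
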
[ 2519.727174] env[63279]: DEBUG oslo.service.loopingcall [None req-dcda68d9-35e7-4abf-a025-ae6a510877fc tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2519.727373] env[63279]: DEBUG nova.compute.manager [-] [instance: 3832508d-5d12-42a2-93d8-61775907b2d2] Deallocating network for instance {{(pid=63279) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2519.727468] env[63279]: DEBUG nova.network.neutron [-] [instance: 3832508d-5d12-42a2-93d8-61775907b2d2] deallocate_for_instance() {{(pid=63279) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2520.175776] env[63279]: DEBUG nova.compute.manager [req-773be31d-943f-4e82-b4e3-96d8a5990a57 req-a2bfbf73-8928-4857-91d4-6fd7e4759e0b service nova] [instance: 3832508d-5d12-42a2-93d8-61775907b2d2] Received event network-vif-deleted-07a9377a-5b9b-41f4-9f94-eb47c9b38bc4 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2520.175992] env[63279]: INFO nova.compute.manager [req-773be31d-943f-4e82-b4e3-96d8a5990a57 req-a2bfbf73-8928-4857-91d4-6fd7e4759e0b service nova] [instance: 3832508d-5d12-42a2-93d8-61775907b2d2] Neutron deleted interface 07a9377a-5b9b-41f4-9f94-eb47c9b38bc4; detaching it from the instance and deleting it from the info cache [ 2520.176220] env[63279]: DEBUG nova.network.neutron [req-773be31d-943f-4e82-b4e3-96d8a5990a57 req-a2bfbf73-8928-4857-91d4-6fd7e4759e0b service nova] [instance: 3832508d-5d12-42a2-93d8-61775907b2d2] Updating instance_info_cache with network_info: [] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2520.654937] env[63279]: DEBUG nova.network.neutron [-] [instance: 3832508d-5d12-42a2-93d8-61775907b2d2] Updating instance_info_cache with network_info: [] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2520.678911] env[63279]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-0eff9736-04a7-413b-a513-896e76929cb5 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2520.688840] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b138f64-6cdd-4808-952b-a29e9117989b {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2520.715872] env[63279]: DEBUG nova.compute.manager [req-773be31d-943f-4e82-b4e3-96d8a5990a57 req-a2bfbf73-8928-4857-91d4-6fd7e4759e0b service nova] [instance: 3832508d-5d12-42a2-93d8-61775907b2d2] Detach interface failed, port_id=07a9377a-5b9b-41f4-9f94-eb47c9b38bc4, reason: Instance 3832508d-5d12-42a2-93d8-61775907b2d2 could not be found. {{(pid=63279) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11484}} [ 2521.157591] env[63279]: INFO nova.compute.manager [-] [instance: 3832508d-5d12-42a2-93d8-61775907b2d2] Took 1.43 seconds to deallocate network for instance. 
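The recurring 'Acquiring lock … / acquired … waited Ns / "released" … held Ns' triplets throughout this log (compute_resources, per-instance UUIDs, refresh_cache-*) come from oslo.concurrency's lockutils wrappers. A minimal sketch of the two usual forms, with lock names taken from the surrounding records and the bodies left as placeholders:

from oslo_concurrency import lockutils

# Decorator form: serializes every call on the in-process "compute_resources" lock
# and logs the waited/held timings seen in the records above and below.
@lockutils.synchronized('compute_resources')
def update_usage():
    # stand-in for the resource tracker work done under the lock
    pass

# Context-manager form, as used for ad-hoc per-instance locks:
with lockutils.lock('3832508d-5d12-42a2-93d8-61775907b2d2'):
    pass
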
[ 2521.663589] env[63279]: DEBUG oslo_concurrency.lockutils [None req-dcda68d9-35e7-4abf-a025-ae6a510877fc tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2521.663869] env[63279]: DEBUG oslo_concurrency.lockutils [None req-dcda68d9-35e7-4abf-a025-ae6a510877fc tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2521.664116] env[63279]: DEBUG nova.objects.instance [None req-dcda68d9-35e7-4abf-a025-ae6a510877fc tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Lazy-loading 'resources' on Instance uuid 3832508d-5d12-42a2-93d8-61775907b2d2 {{(pid=63279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2522.233885] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69e3160e-a180-4f2b-bd91-73792f0a4246 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2522.241766] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d936cb1e-51ec-445f-b033-813fb87f9be8 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2522.271753] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-923006c3-80d8-4f83-9d7c-a263e111f4cf {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2522.278554] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9d450af-8ace-441f-a38c-e9b85b47e379 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2522.291283] env[63279]: DEBUG nova.compute.provider_tree [None req-dcda68d9-35e7-4abf-a025-ae6a510877fc tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Inventory has not changed in ProviderTree for provider: 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2522.794319] env[63279]: DEBUG nova.scheduler.client.report [None req-dcda68d9-35e7-4abf-a025-ae6a510877fc tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Inventory has not changed for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2523.299426] env[63279]: DEBUG oslo_concurrency.lockutils [None req-dcda68d9-35e7-4abf-a025-ae6a510877fc tempest-AttachVolumeTestJSON-1872027629 
tempest-AttachVolumeTestJSON-1872027629-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.635s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2523.320144] env[63279]: INFO nova.scheduler.client.report [None req-dcda68d9-35e7-4abf-a025-ae6a510877fc tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Deleted allocations for instance 3832508d-5d12-42a2-93d8-61775907b2d2 [ 2523.828335] env[63279]: DEBUG oslo_concurrency.lockutils [None req-dcda68d9-35e7-4abf-a025-ae6a510877fc tempest-AttachVolumeTestJSON-1872027629 tempest-AttachVolumeTestJSON-1872027629-project-member] Lock "3832508d-5d12-42a2-93d8-61775907b2d2" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 12.487s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2537.091084] env[63279]: DEBUG oslo_concurrency.lockutils [None req-bc2fa236-5cd3-40d8-86be-454f71d4f6b8 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Acquiring lock "30ac4320-5ee0-424b-9bbb-e2d53277be80" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2537.091394] env[63279]: DEBUG oslo_concurrency.lockutils [None req-bc2fa236-5cd3-40d8-86be-454f71d4f6b8 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Lock "30ac4320-5ee0-424b-9bbb-e2d53277be80" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2537.593691] env[63279]: DEBUG nova.compute.manager [None req-bc2fa236-5cd3-40d8-86be-454f71d4f6b8 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] [instance: 30ac4320-5ee0-424b-9bbb-e2d53277be80] Starting instance... 
{{(pid=63279) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 2538.118377] env[63279]: DEBUG oslo_concurrency.lockutils [None req-bc2fa236-5cd3-40d8-86be-454f71d4f6b8 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2538.118714] env[63279]: DEBUG oslo_concurrency.lockutils [None req-bc2fa236-5cd3-40d8-86be-454f71d4f6b8 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2538.120239] env[63279]: INFO nova.compute.claims [None req-bc2fa236-5cd3-40d8-86be-454f71d4f6b8 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] [instance: 30ac4320-5ee0-424b-9bbb-e2d53277be80] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2539.196709] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8cbccdba-06cd-44e9-b73a-90bc74bb9058 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2539.204170] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77bf6681-8810-4d55-a03d-9e8b36ed6d2b {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2539.233405] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44e6dc3e-2500-4b0c-b073-fc51b5e2c85a {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2539.240253] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7de51fd-63cf-488c-93ff-cfa81366da75 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2539.252930] env[63279]: DEBUG nova.compute.provider_tree [None req-bc2fa236-5cd3-40d8-86be-454f71d4f6b8 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Updating inventory in ProviderTree for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2539.772974] env[63279]: ERROR nova.scheduler.client.report [None req-bc2fa236-5cd3-40d8-86be-454f71d4f6b8 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] [req-ee5649bb-88db-407e-ad7a-92b027e40c0c] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 
'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 0ba7c625-a0fc-4d3c-b804-196d00f00137. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-ee5649bb-88db-407e-ad7a-92b027e40c0c"}]} [ 2539.789040] env[63279]: DEBUG nova.scheduler.client.report [None req-bc2fa236-5cd3-40d8-86be-454f71d4f6b8 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Refreshing inventories for resource provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 2539.800572] env[63279]: DEBUG nova.scheduler.client.report [None req-bc2fa236-5cd3-40d8-86be-454f71d4f6b8 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Updating ProviderTree inventory for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 2539.800779] env[63279]: DEBUG nova.compute.provider_tree [None req-bc2fa236-5cd3-40d8-86be-454f71d4f6b8 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Updating inventory in ProviderTree for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2539.810075] env[63279]: DEBUG nova.scheduler.client.report [None req-bc2fa236-5cd3-40d8-86be-454f71d4f6b8 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Refreshing aggregate associations for resource provider 0ba7c625-a0fc-4d3c-b804-196d00f00137, aggregates: None {{(pid=63279) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 2539.825586] env[63279]: DEBUG nova.scheduler.client.report [None req-bc2fa236-5cd3-40d8-86be-454f71d4f6b8 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Refreshing trait associations for resource provider 0ba7c625-a0fc-4d3c-b804-196d00f00137, traits: COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK {{(pid=63279) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 2539.883564] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2bcb120a-7999-479a-a152-df6c07f983eb {{(pid=63279) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2539.891174] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f883c57-467e-4615-8edd-1fb8438314fa {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2539.920941] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06b45fc4-3489-4bd6-955f-dfa7a16dc5c6 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2539.928218] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cbb74ea1-ef7f-406c-a878-4ab9b81cd344 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2539.941008] env[63279]: DEBUG nova.compute.provider_tree [None req-bc2fa236-5cd3-40d8-86be-454f71d4f6b8 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Updating inventory in ProviderTree for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2540.472686] env[63279]: DEBUG nova.scheduler.client.report [None req-bc2fa236-5cd3-40d8-86be-454f71d4f6b8 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Updated inventory for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 with generation 181 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 2540.473036] env[63279]: DEBUG nova.compute.provider_tree [None req-bc2fa236-5cd3-40d8-86be-454f71d4f6b8 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Updating resource provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 generation from 181 to 182 during operation: update_inventory {{(pid=63279) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 2540.473137] env[63279]: DEBUG nova.compute.provider_tree [None req-bc2fa236-5cd3-40d8-86be-454f71d4f6b8 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Updating inventory in ProviderTree for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) 
update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2540.978058] env[63279]: DEBUG oslo_concurrency.lockutils [None req-bc2fa236-5cd3-40d8-86be-454f71d4f6b8 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.859s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2540.978575] env[63279]: DEBUG nova.compute.manager [None req-bc2fa236-5cd3-40d8-86be-454f71d4f6b8 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] [instance: 30ac4320-5ee0-424b-9bbb-e2d53277be80] Start building networks asynchronously for instance. {{(pid=63279) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 2541.484360] env[63279]: DEBUG nova.compute.utils [None req-bc2fa236-5cd3-40d8-86be-454f71d4f6b8 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Using /dev/sd instead of None {{(pid=63279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2541.485771] env[63279]: DEBUG nova.compute.manager [None req-bc2fa236-5cd3-40d8-86be-454f71d4f6b8 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] [instance: 30ac4320-5ee0-424b-9bbb-e2d53277be80] Allocating IP information in the background. {{(pid=63279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 2541.485938] env[63279]: DEBUG nova.network.neutron [None req-bc2fa236-5cd3-40d8-86be-454f71d4f6b8 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] [instance: 30ac4320-5ee0-424b-9bbb-e2d53277be80] allocate_for_instance() {{(pid=63279) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2541.524753] env[63279]: DEBUG nova.policy [None req-bc2fa236-5cd3-40d8-86be-454f71d4f6b8 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '233f5d339e7b438e910eb03d33891c16', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '5fa421934ecd4054a70528644a40349e', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63279) authorize /opt/stack/nova/nova/policy.py:192}} [ 2541.816950] env[63279]: DEBUG nova.network.neutron [None req-bc2fa236-5cd3-40d8-86be-454f71d4f6b8 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] [instance: 30ac4320-5ee0-424b-9bbb-e2d53277be80] Successfully created port: 0db48ddb-8067-45e9-a073-cbfb47873a8b {{(pid=63279) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2541.989632] env[63279]: DEBUG nova.compute.manager [None req-bc2fa236-5cd3-40d8-86be-454f71d4f6b8 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] [instance: 30ac4320-5ee0-424b-9bbb-e2d53277be80] Start building block device mappings for instance. 
{{(pid=63279) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 2542.257070] env[63279]: DEBUG oslo_concurrency.lockutils [None req-4a55d837-c41a-499c-94b3-d2e9de675875 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Acquiring lock "011af8fa-0f76-44a8-8b24-b3d65f5e841e" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2542.257315] env[63279]: DEBUG oslo_concurrency.lockutils [None req-4a55d837-c41a-499c-94b3-d2e9de675875 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Lock "011af8fa-0f76-44a8-8b24-b3d65f5e841e" acquired by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: waited 0.001s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2542.257508] env[63279]: INFO nova.compute.manager [None req-4a55d837-c41a-499c-94b3-d2e9de675875 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 011af8fa-0f76-44a8-8b24-b3d65f5e841e] Shelving [ 2542.997854] env[63279]: DEBUG nova.compute.manager [None req-bc2fa236-5cd3-40d8-86be-454f71d4f6b8 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] [instance: 30ac4320-5ee0-424b-9bbb-e2d53277be80] Start spawning the instance on the hypervisor. {{(pid=63279) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 2543.022808] env[63279]: DEBUG nova.virt.hardware [None req-bc2fa236-5cd3-40d8-86be-454f71d4f6b8 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-13T17:30:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-13T17:30:01Z,direct_url=,disk_format='vmdk',id=30887889-e45b-4f67-8b3c-16216e594a90,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a935e86eee0a4c38adfe0367d2097a61',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-13T17:30:02Z,virtual_size=,visibility=), allow threads: False {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2543.023070] env[63279]: DEBUG nova.virt.hardware [None req-bc2fa236-5cd3-40d8-86be-454f71d4f6b8 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Flavor limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2543.023236] env[63279]: DEBUG nova.virt.hardware [None req-bc2fa236-5cd3-40d8-86be-454f71d4f6b8 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Image limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2543.023426] env[63279]: DEBUG nova.virt.hardware [None req-bc2fa236-5cd3-40d8-86be-454f71d4f6b8 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Flavor pref 0:0:0 
{{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2543.023599] env[63279]: DEBUG nova.virt.hardware [None req-bc2fa236-5cd3-40d8-86be-454f71d4f6b8 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Image pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2543.023754] env[63279]: DEBUG nova.virt.hardware [None req-bc2fa236-5cd3-40d8-86be-454f71d4f6b8 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2543.023966] env[63279]: DEBUG nova.virt.hardware [None req-bc2fa236-5cd3-40d8-86be-454f71d4f6b8 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 2543.024170] env[63279]: DEBUG nova.virt.hardware [None req-bc2fa236-5cd3-40d8-86be-454f71d4f6b8 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 2543.024319] env[63279]: DEBUG nova.virt.hardware [None req-bc2fa236-5cd3-40d8-86be-454f71d4f6b8 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Got 1 possible topologies {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2543.024537] env[63279]: DEBUG nova.virt.hardware [None req-bc2fa236-5cd3-40d8-86be-454f71d4f6b8 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2543.024653] env[63279]: DEBUG nova.virt.hardware [None req-bc2fa236-5cd3-40d8-86be-454f71d4f6b8 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2543.026024] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28f0395d-b1f0-452c-beb0-12d17543502c {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2543.033999] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6aa57c4a-3adc-42fd-8548-10f0b9024907 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2543.233682] env[63279]: DEBUG nova.compute.manager [req-869bbf88-decf-4139-b3f2-76c43bba3f87 req-b7147389-38b2-4c99-9819-87428a099549 service nova] [instance: 30ac4320-5ee0-424b-9bbb-e2d53277be80] Received event network-vif-plugged-0db48ddb-8067-45e9-a073-cbfb47873a8b {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2543.233915] env[63279]: DEBUG oslo_concurrency.lockutils [req-869bbf88-decf-4139-b3f2-76c43bba3f87 
req-b7147389-38b2-4c99-9819-87428a099549 service nova] Acquiring lock "30ac4320-5ee0-424b-9bbb-e2d53277be80-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2543.234172] env[63279]: DEBUG oslo_concurrency.lockutils [req-869bbf88-decf-4139-b3f2-76c43bba3f87 req-b7147389-38b2-4c99-9819-87428a099549 service nova] Lock "30ac4320-5ee0-424b-9bbb-e2d53277be80-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2543.235092] env[63279]: DEBUG oslo_concurrency.lockutils [req-869bbf88-decf-4139-b3f2-76c43bba3f87 req-b7147389-38b2-4c99-9819-87428a099549 service nova] Lock "30ac4320-5ee0-424b-9bbb-e2d53277be80-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2543.235092] env[63279]: DEBUG nova.compute.manager [req-869bbf88-decf-4139-b3f2-76c43bba3f87 req-b7147389-38b2-4c99-9819-87428a099549 service nova] [instance: 30ac4320-5ee0-424b-9bbb-e2d53277be80] No waiting events found dispatching network-vif-plugged-0db48ddb-8067-45e9-a073-cbfb47873a8b {{(pid=63279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 2543.235092] env[63279]: WARNING nova.compute.manager [req-869bbf88-decf-4139-b3f2-76c43bba3f87 req-b7147389-38b2-4c99-9819-87428a099549 service nova] [instance: 30ac4320-5ee0-424b-9bbb-e2d53277be80] Received unexpected event network-vif-plugged-0db48ddb-8067-45e9-a073-cbfb47873a8b for instance with vm_state building and task_state spawning. [ 2543.267937] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-4a55d837-c41a-499c-94b3-d2e9de675875 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 011af8fa-0f76-44a8-8b24-b3d65f5e841e] Powering off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2543.268219] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a4894646-d3ed-4157-bf6d-c98f7f09e749 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2543.275223] env[63279]: DEBUG oslo_vmware.api [None req-4a55d837-c41a-499c-94b3-d2e9de675875 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Waiting for the task: (returnval){ [ 2543.275223] env[63279]: value = "task-2088328" [ 2543.275223] env[63279]: _type = "Task" [ 2543.275223] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2543.283872] env[63279]: DEBUG oslo_vmware.api [None req-4a55d837-c41a-499c-94b3-d2e9de675875 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Task: {'id': task-2088328, 'name': PowerOffVM_Task} progress is 0%. 
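The nova.virt.hardware lines above step from the flavor/image limits (65536 sockets/cores/threads) down to the single possible topology for one vCPU. A rough, self-contained sketch of that enumeration idea, assuming the simple rule that sockets*cores*threads must equal the vCPU count within the per-dimension maximums (an illustration only, not Nova's _get_possible_cpu_topologies):

    from collections import namedtuple

    Topology = namedtuple('Topology', 'sockets cores threads')

    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536,
                            max_threads=65536):
        # Enumerate every (sockets, cores, threads) factorisation of the vCPU
        # count that stays within the limits, mirroring the
        # "Build topologies for 1 vcpu(s) 1:1:1" -> "[1,1,1]" lines above.
        topologies = []
        for sockets in range(1, min(vcpus, max_sockets) + 1):
            for cores in range(1, min(vcpus, max_cores) + 1):
                for threads in range(1, min(vcpus, max_threads) + 1):
                    if sockets * cores * threads == vcpus:
                        topologies.append(Topology(sockets, cores, threads))
        return topologies

    print(possible_topologies(1))  # [Topology(sockets=1, cores=1, threads=1)]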
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2543.324255] env[63279]: DEBUG nova.network.neutron [None req-bc2fa236-5cd3-40d8-86be-454f71d4f6b8 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] [instance: 30ac4320-5ee0-424b-9bbb-e2d53277be80] Successfully updated port: 0db48ddb-8067-45e9-a073-cbfb47873a8b {{(pid=63279) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2543.785750] env[63279]: DEBUG oslo_vmware.api [None req-4a55d837-c41a-499c-94b3-d2e9de675875 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Task: {'id': task-2088328, 'name': PowerOffVM_Task, 'duration_secs': 0.181724} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2543.786014] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-4a55d837-c41a-499c-94b3-d2e9de675875 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 011af8fa-0f76-44a8-8b24-b3d65f5e841e] Powered off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2543.786802] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16bba837-94cd-4567-99e3-30ff9404b338 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2543.804679] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6365338-97b0-4672-bccf-e97759831a4a {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2543.827382] env[63279]: DEBUG oslo_concurrency.lockutils [None req-bc2fa236-5cd3-40d8-86be-454f71d4f6b8 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Acquiring lock "refresh_cache-30ac4320-5ee0-424b-9bbb-e2d53277be80" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2543.827566] env[63279]: DEBUG oslo_concurrency.lockutils [None req-bc2fa236-5cd3-40d8-86be-454f71d4f6b8 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Acquired lock "refresh_cache-30ac4320-5ee0-424b-9bbb-e2d53277be80" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2543.827660] env[63279]: DEBUG nova.network.neutron [None req-bc2fa236-5cd3-40d8-86be-454f71d4f6b8 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] [instance: 30ac4320-5ee0-424b-9bbb-e2d53277be80] Building network info cache for instance {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2544.314810] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-4a55d837-c41a-499c-94b3-d2e9de675875 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 011af8fa-0f76-44a8-8b24-b3d65f5e841e] Creating Snapshot of the VM instance {{(pid=63279) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 2544.315235] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-50480696-f341-47c6-9ae9-02b29d612b8a {{(pid=63279) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2544.322644] env[63279]: DEBUG oslo_vmware.api [None req-4a55d837-c41a-499c-94b3-d2e9de675875 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Waiting for the task: (returnval){ [ 2544.322644] env[63279]: value = "task-2088329" [ 2544.322644] env[63279]: _type = "Task" [ 2544.322644] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2544.331759] env[63279]: DEBUG oslo_vmware.api [None req-4a55d837-c41a-499c-94b3-d2e9de675875 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Task: {'id': task-2088329, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2544.359775] env[63279]: DEBUG nova.network.neutron [None req-bc2fa236-5cd3-40d8-86be-454f71d4f6b8 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] [instance: 30ac4320-5ee0-424b-9bbb-e2d53277be80] Instance cache missing network info. {{(pid=63279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2544.492385] env[63279]: DEBUG nova.network.neutron [None req-bc2fa236-5cd3-40d8-86be-454f71d4f6b8 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] [instance: 30ac4320-5ee0-424b-9bbb-e2d53277be80] Updating instance_info_cache with network_info: [{"id": "0db48ddb-8067-45e9-a073-cbfb47873a8b", "address": "fa:16:3e:2a:5d:36", "network": {"id": "08f1618e-b8a4-4fea-847f-a6d01449f46c", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1512363099-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5fa421934ecd4054a70528644a40349e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e41070eb-3ac1-4ca9-a3d0-fd65893a97de", "external-id": "nsx-vlan-transportzone-596", "segmentation_id": 596, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0db48ddb-80", "ovs_interfaceid": "0db48ddb-8067-45e9-a073-cbfb47873a8b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2544.834388] env[63279]: DEBUG oslo_vmware.api [None req-4a55d837-c41a-499c-94b3-d2e9de675875 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Task: {'id': task-2088329, 'name': CreateSnapshot_Task, 'duration_secs': 0.421312} completed successfully. 
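The instance_info_cache update above carries the full network_info structure for port 0db48ddb-8067-45e9-a073-cbfb47873a8b. A small, hypothetical helper showing how the MAC, fixed IPs and MTU could be pulled out of that list-of-VIFs shape (field names are taken from the logged entry itself):

    def summarize_network_info(network_info):
        # network_info is a list of VIF dicts like the one logged above: each
        # has "address" (MAC), "network" -> "subnets" -> "ips", and
        # "network" -> "meta" -> "mtu".
        summary = []
        for vif in network_info:
            ips = [ip['address']
                   for subnet in vif['network']['subnets']
                   for ip in subnet['ips']]
            summary.append({'port_id': vif['id'],
                            'mac': vif['address'],
                            'fixed_ips': ips,
                            'mtu': vif['network']['meta'].get('mtu')})
        return summary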
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2544.834662] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-4a55d837-c41a-499c-94b3-d2e9de675875 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 011af8fa-0f76-44a8-8b24-b3d65f5e841e] Created Snapshot of the VM instance {{(pid=63279) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 2544.835442] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93ad080e-e2bd-4dc9-a6dd-e105bc3812d2 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2544.994982] env[63279]: DEBUG oslo_concurrency.lockutils [None req-bc2fa236-5cd3-40d8-86be-454f71d4f6b8 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Releasing lock "refresh_cache-30ac4320-5ee0-424b-9bbb-e2d53277be80" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2544.995352] env[63279]: DEBUG nova.compute.manager [None req-bc2fa236-5cd3-40d8-86be-454f71d4f6b8 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] [instance: 30ac4320-5ee0-424b-9bbb-e2d53277be80] Instance network_info: |[{"id": "0db48ddb-8067-45e9-a073-cbfb47873a8b", "address": "fa:16:3e:2a:5d:36", "network": {"id": "08f1618e-b8a4-4fea-847f-a6d01449f46c", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1512363099-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5fa421934ecd4054a70528644a40349e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e41070eb-3ac1-4ca9-a3d0-fd65893a97de", "external-id": "nsx-vlan-transportzone-596", "segmentation_id": 596, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0db48ddb-80", "ovs_interfaceid": "0db48ddb-8067-45e9-a073-cbfb47873a8b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 2544.995798] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-bc2fa236-5cd3-40d8-86be-454f71d4f6b8 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] [instance: 30ac4320-5ee0-424b-9bbb-e2d53277be80] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:2a:5d:36', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e41070eb-3ac1-4ca9-a3d0-fd65893a97de', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '0db48ddb-8067-45e9-a073-cbfb47873a8b', 'vif_model': 'vmxnet3'}] {{(pid=63279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2545.003171] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-bc2fa236-5cd3-40d8-86be-454f71d4f6b8 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] 
Creating folder: Project (5fa421934ecd4054a70528644a40349e). Parent ref: group-v427491. {{(pid=63279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 2545.003440] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-1a2774be-d9a3-4121-88eb-cb2c484ed6ca {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2545.015301] env[63279]: INFO nova.virt.vmwareapi.vm_util [None req-bc2fa236-5cd3-40d8-86be-454f71d4f6b8 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Created folder: Project (5fa421934ecd4054a70528644a40349e) in parent group-v427491. [ 2545.015462] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-bc2fa236-5cd3-40d8-86be-454f71d4f6b8 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Creating folder: Instances. Parent ref: group-v427813. {{(pid=63279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 2545.015675] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a4caf82e-77bf-4497-bb53-97b1945addad {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2545.024422] env[63279]: INFO nova.virt.vmwareapi.vm_util [None req-bc2fa236-5cd3-40d8-86be-454f71d4f6b8 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Created folder: Instances in parent group-v427813. [ 2545.024639] env[63279]: DEBUG oslo.service.loopingcall [None req-bc2fa236-5cd3-40d8-86be-454f71d4f6b8 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2545.024814] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 30ac4320-5ee0-424b-9bbb-e2d53277be80] Creating VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2545.024997] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d423cd9e-fc85-43ad-8744-d4ea5a4937f2 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2545.043663] env[63279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2545.043663] env[63279]: value = "task-2088332" [ 2545.043663] env[63279]: _type = "Task" [ 2545.043663] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2545.050956] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2088332, 'name': CreateVM_Task} progress is 0%. 
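The repeated "Waiting for the task ... to complete" / "progress is N%" pairs are oslo.vmware polling vCenter task objects. A hedged, stand-alone sketch of that polling loop written directly against pyVmomi task handles (an illustration of the pattern, not oslo.vmware's wait_for_task implementation; the interval and error handling are simplified):

    import time
    from pyVmomi import vim

    def wait_for_task(task, poll_interval=0.5):
        # Poll TaskInfo until the task leaves the queued/running states, the
        # same state machine behind the "progress is 0%" ...
        # "completed successfully" lines in this log.
        while task.info.state in (vim.TaskInfo.State.queued,
                                  vim.TaskInfo.State.running):
            time.sleep(poll_interval)
        if task.info.state == vim.TaskInfo.State.error:
            raise task.info.error  # a vim fault describing the failure
        return task.info.result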
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2545.265670] env[63279]: DEBUG nova.compute.manager [req-5e07ba1d-6b40-438b-af07-2db049c28731 req-3de9f3c5-2722-437e-8a6b-93eb7e95fdc6 service nova] [instance: 30ac4320-5ee0-424b-9bbb-e2d53277be80] Received event network-changed-0db48ddb-8067-45e9-a073-cbfb47873a8b {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2545.265818] env[63279]: DEBUG nova.compute.manager [req-5e07ba1d-6b40-438b-af07-2db049c28731 req-3de9f3c5-2722-437e-8a6b-93eb7e95fdc6 service nova] [instance: 30ac4320-5ee0-424b-9bbb-e2d53277be80] Refreshing instance network info cache due to event network-changed-0db48ddb-8067-45e9-a073-cbfb47873a8b. {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11655}} [ 2545.266046] env[63279]: DEBUG oslo_concurrency.lockutils [req-5e07ba1d-6b40-438b-af07-2db049c28731 req-3de9f3c5-2722-437e-8a6b-93eb7e95fdc6 service nova] Acquiring lock "refresh_cache-30ac4320-5ee0-424b-9bbb-e2d53277be80" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2545.266196] env[63279]: DEBUG oslo_concurrency.lockutils [req-5e07ba1d-6b40-438b-af07-2db049c28731 req-3de9f3c5-2722-437e-8a6b-93eb7e95fdc6 service nova] Acquired lock "refresh_cache-30ac4320-5ee0-424b-9bbb-e2d53277be80" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2545.266356] env[63279]: DEBUG nova.network.neutron [req-5e07ba1d-6b40-438b-af07-2db049c28731 req-3de9f3c5-2722-437e-8a6b-93eb7e95fdc6 service nova] [instance: 30ac4320-5ee0-424b-9bbb-e2d53277be80] Refreshing network info cache for port 0db48ddb-8067-45e9-a073-cbfb47873a8b {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2545.352489] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-4a55d837-c41a-499c-94b3-d2e9de675875 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 011af8fa-0f76-44a8-8b24-b3d65f5e841e] Creating linked-clone VM from snapshot {{(pid=63279) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 2545.352971] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-35b99e12-65b8-49b0-878e-9f735c5b533e {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2545.362214] env[63279]: DEBUG oslo_vmware.api [None req-4a55d837-c41a-499c-94b3-d2e9de675875 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Waiting for the task: (returnval){ [ 2545.362214] env[63279]: value = "task-2088333" [ 2545.362214] env[63279]: _type = "Task" [ 2545.362214] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2545.369863] env[63279]: DEBUG oslo_vmware.api [None req-4a55d837-c41a-499c-94b3-d2e9de675875 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Task: {'id': task-2088333, 'name': CloneVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2545.554686] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2088332, 'name': CreateVM_Task, 'duration_secs': 0.467767} completed successfully. 
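The shelve path above snapshots the source VM and then clones from that snapshot ("Creating linked-clone VM from snapshot"). A hypothetical pyVmomi sketch of the same two-step sequence; vm, dest_folder and wait_for_task are placeholders and the spec is deliberately minimal, not Nova's vmops code:

    from pyVmomi import vim

    def linked_clone(vm, dest_folder, clone_name, wait_for_task):
        # 1. CreateSnapshot_Task on the (already powered-off) VM.
        wait_for_task(vm.CreateSnapshot_Task(name='shelve-snapshot',
                                             description='',
                                             memory=False, quiesce=False))
        # 2. CloneVM_Task from that snapshot; "createNewChildDiskBacking"
        #    keeps the parent disk shared, i.e. a linked clone.
        relocate = vim.vm.RelocateSpec(diskMoveType='createNewChildDiskBacking')
        spec = vim.vm.CloneSpec(location=relocate, powerOn=False,
                                snapshot=vm.snapshot.currentSnapshot)
        return wait_for_task(vm.CloneVM_Task(folder=dest_folder,
                                             name=clone_name, spec=spec))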
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2545.554910] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 30ac4320-5ee0-424b-9bbb-e2d53277be80] Created VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2545.555593] env[63279]: DEBUG oslo_concurrency.lockutils [None req-bc2fa236-5cd3-40d8-86be-454f71d4f6b8 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2545.555815] env[63279]: DEBUG oslo_concurrency.lockutils [None req-bc2fa236-5cd3-40d8-86be-454f71d4f6b8 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2545.556165] env[63279]: DEBUG oslo_concurrency.lockutils [None req-bc2fa236-5cd3-40d8-86be-454f71d4f6b8 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2545.556437] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-43ccc6ce-ef70-473a-8f2f-526fc3bc8810 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2545.560766] env[63279]: DEBUG oslo_vmware.api [None req-bc2fa236-5cd3-40d8-86be-454f71d4f6b8 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Waiting for the task: (returnval){ [ 2545.560766] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52c71c3c-5d1e-3abb-b5cd-e8d71c4a923d" [ 2545.560766] env[63279]: _type = "Task" [ 2545.560766] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2545.568226] env[63279]: DEBUG oslo_vmware.api [None req-bc2fa236-5cd3-40d8-86be-454f71d4f6b8 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52c71c3c-5d1e-3abb-b5cd-e8d71c4a923d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2545.873931] env[63279]: DEBUG oslo_vmware.api [None req-4a55d837-c41a-499c-94b3-d2e9de675875 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Task: {'id': task-2088333, 'name': CloneVM_Task} progress is 94%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2545.962981] env[63279]: DEBUG nova.network.neutron [req-5e07ba1d-6b40-438b-af07-2db049c28731 req-3de9f3c5-2722-437e-8a6b-93eb7e95fdc6 service nova] [instance: 30ac4320-5ee0-424b-9bbb-e2d53277be80] Updated VIF entry in instance network info cache for port 0db48ddb-8067-45e9-a073-cbfb47873a8b. 
{{(pid=63279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2545.963405] env[63279]: DEBUG nova.network.neutron [req-5e07ba1d-6b40-438b-af07-2db049c28731 req-3de9f3c5-2722-437e-8a6b-93eb7e95fdc6 service nova] [instance: 30ac4320-5ee0-424b-9bbb-e2d53277be80] Updating instance_info_cache with network_info: [{"id": "0db48ddb-8067-45e9-a073-cbfb47873a8b", "address": "fa:16:3e:2a:5d:36", "network": {"id": "08f1618e-b8a4-4fea-847f-a6d01449f46c", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1512363099-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5fa421934ecd4054a70528644a40349e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e41070eb-3ac1-4ca9-a3d0-fd65893a97de", "external-id": "nsx-vlan-transportzone-596", "segmentation_id": 596, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0db48ddb-80", "ovs_interfaceid": "0db48ddb-8067-45e9-a073-cbfb47873a8b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2546.071352] env[63279]: DEBUG oslo_vmware.api [None req-bc2fa236-5cd3-40d8-86be-454f71d4f6b8 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52c71c3c-5d1e-3abb-b5cd-e8d71c4a923d, 'name': SearchDatastore_Task, 'duration_secs': 0.009862} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2546.071599] env[63279]: DEBUG oslo_concurrency.lockutils [None req-bc2fa236-5cd3-40d8-86be-454f71d4f6b8 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2546.071861] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-bc2fa236-5cd3-40d8-86be-454f71d4f6b8 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] [instance: 30ac4320-5ee0-424b-9bbb-e2d53277be80] Processing image 30887889-e45b-4f67-8b3c-16216e594a90 {{(pid=63279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2546.072128] env[63279]: DEBUG oslo_concurrency.lockutils [None req-bc2fa236-5cd3-40d8-86be-454f71d4f6b8 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2546.072286] env[63279]: DEBUG oslo_concurrency.lockutils [None req-bc2fa236-5cd3-40d8-86be-454f71d4f6b8 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2546.072515] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-bc2fa236-5cd3-40d8-86be-454f71d4f6b8 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2546.072792] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6d432347-9079-4745-8f74-33b5beac9a5d {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2546.080872] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-bc2fa236-5cd3-40d8-86be-454f71d4f6b8 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2546.081068] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-bc2fa236-5cd3-40d8-86be-454f71d4f6b8 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Folder [datastore1] devstack-image-cache_base created. 
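The image-cache handling above takes a per-image lock and creates the [datastore1] devstack-image-cache_base folder only when it is missing. A hedged sketch of that guard combining oslo.concurrency's lock context manager with pyVmomi's FileManager; content (the vCenter ServiceContent), datacenter and the path are assumptions for illustration, not the driver's ds_util code:

    from oslo_concurrency import lockutils
    from pyVmomi import vim

    def ensure_cache_dir(content, datacenter, image_id,
                         path='[datastore1] devstack-image-cache_base'):
        # Serialize per image so concurrent spawns do not race on the cache.
        with lockutils.lock('%s/%s' % (path, image_id)):
            try:
                content.fileManager.MakeDirectory(
                    name=path, datacenter=datacenter,
                    createParentDirectories=True)
            except vim.fault.FileAlreadyExists:
                pass  # another worker created it first; nothing to do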
{{(pid=63279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2546.081780] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-affd6426-91bc-4d15-8ba2-d49ff6f7ce03 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2546.087154] env[63279]: DEBUG oslo_vmware.api [None req-bc2fa236-5cd3-40d8-86be-454f71d4f6b8 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Waiting for the task: (returnval){ [ 2546.087154] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52d82e10-468e-c890-cc9a-871969c42636" [ 2546.087154] env[63279]: _type = "Task" [ 2546.087154] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2546.094751] env[63279]: DEBUG oslo_vmware.api [None req-bc2fa236-5cd3-40d8-86be-454f71d4f6b8 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52d82e10-468e-c890-cc9a-871969c42636, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2546.373111] env[63279]: DEBUG oslo_vmware.api [None req-4a55d837-c41a-499c-94b3-d2e9de675875 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Task: {'id': task-2088333, 'name': CloneVM_Task} progress is 95%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2546.466112] env[63279]: DEBUG oslo_concurrency.lockutils [req-5e07ba1d-6b40-438b-af07-2db049c28731 req-3de9f3c5-2722-437e-8a6b-93eb7e95fdc6 service nova] Releasing lock "refresh_cache-30ac4320-5ee0-424b-9bbb-e2d53277be80" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2546.598231] env[63279]: DEBUG oslo_vmware.api [None req-bc2fa236-5cd3-40d8-86be-454f71d4f6b8 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52d82e10-468e-c890-cc9a-871969c42636, 'name': SearchDatastore_Task, 'duration_secs': 0.008265} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2546.598945] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-86a5596c-e475-4a65-8bdb-b4abceb18052 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2546.603875] env[63279]: DEBUG oslo_vmware.api [None req-bc2fa236-5cd3-40d8-86be-454f71d4f6b8 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Waiting for the task: (returnval){ [ 2546.603875] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]524ecde4-68ec-9189-3a37-ae1f1f82c33d" [ 2546.603875] env[63279]: _type = "Task" [ 2546.603875] env[63279]: } to complete. 
{{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2546.610904] env[63279]: DEBUG oslo_vmware.api [None req-bc2fa236-5cd3-40d8-86be-454f71d4f6b8 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]524ecde4-68ec-9189-3a37-ae1f1f82c33d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2546.873936] env[63279]: DEBUG oslo_vmware.api [None req-4a55d837-c41a-499c-94b3-d2e9de675875 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Task: {'id': task-2088333, 'name': CloneVM_Task, 'duration_secs': 1.310948} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2546.873936] env[63279]: INFO nova.virt.vmwareapi.vmops [None req-4a55d837-c41a-499c-94b3-d2e9de675875 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 011af8fa-0f76-44a8-8b24-b3d65f5e841e] Created linked-clone VM from snapshot [ 2546.875151] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f52076e-1d86-4cd0-9313-a6a3d632b2d9 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2546.882075] env[63279]: DEBUG nova.virt.vmwareapi.images [None req-4a55d837-c41a-499c-94b3-d2e9de675875 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 011af8fa-0f76-44a8-8b24-b3d65f5e841e] Uploading image 7dcfd34e-94d0-4b88-ab7e-c1e1e3da8f20 {{(pid=63279) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 2546.913840] env[63279]: DEBUG oslo_vmware.rw_handles [None req-4a55d837-c41a-499c-94b3-d2e9de675875 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 2546.913840] env[63279]: value = "vm-427816" [ 2546.913840] env[63279]: _type = "VirtualMachine" [ 2546.913840] env[63279]: }. {{(pid=63279) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 2546.914132] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-f2233c63-26a2-4ae2-a846-e50e13c58ced {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2546.921265] env[63279]: DEBUG oslo_vmware.rw_handles [None req-4a55d837-c41a-499c-94b3-d2e9de675875 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Lease: (returnval){ [ 2546.921265] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]527369f7-e9b4-7754-0b01-8cc6aa29e7fd" [ 2546.921265] env[63279]: _type = "HttpNfcLease" [ 2546.921265] env[63279]: } obtained for exporting VM: (result){ [ 2546.921265] env[63279]: value = "vm-427816" [ 2546.921265] env[63279]: _type = "VirtualMachine" [ 2546.921265] env[63279]: }. 
{{(pid=63279) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 2546.921551] env[63279]: DEBUG oslo_vmware.api [None req-4a55d837-c41a-499c-94b3-d2e9de675875 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Waiting for the lease: (returnval){ [ 2546.921551] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]527369f7-e9b4-7754-0b01-8cc6aa29e7fd" [ 2546.921551] env[63279]: _type = "HttpNfcLease" [ 2546.921551] env[63279]: } to be ready. {{(pid=63279) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 2546.928372] env[63279]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 2546.928372] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]527369f7-e9b4-7754-0b01-8cc6aa29e7fd" [ 2546.928372] env[63279]: _type = "HttpNfcLease" [ 2546.928372] env[63279]: } is initializing. {{(pid=63279) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 2547.114547] env[63279]: DEBUG oslo_vmware.api [None req-bc2fa236-5cd3-40d8-86be-454f71d4f6b8 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]524ecde4-68ec-9189-3a37-ae1f1f82c33d, 'name': SearchDatastore_Task, 'duration_secs': 0.008643} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2547.114814] env[63279]: DEBUG oslo_concurrency.lockutils [None req-bc2fa236-5cd3-40d8-86be-454f71d4f6b8 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2547.115077] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-bc2fa236-5cd3-40d8-86be-454f71d4f6b8 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] 30ac4320-5ee0-424b-9bbb-e2d53277be80/30ac4320-5ee0-424b-9bbb-e2d53277be80.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2547.115331] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-65f5af62-0c57-4206-9776-94ae07e54536 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2547.121489] env[63279]: DEBUG oslo_vmware.api [None req-bc2fa236-5cd3-40d8-86be-454f71d4f6b8 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Waiting for the task: (returnval){ [ 2547.121489] env[63279]: value = "task-2088335" [ 2547.121489] env[63279]: _type = "Task" [ 2547.121489] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2547.128705] env[63279]: DEBUG oslo_vmware.api [None req-bc2fa236-5cd3-40d8-86be-454f71d4f6b8 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Task: {'id': task-2088335, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2547.430093] env[63279]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 2547.430093] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]527369f7-e9b4-7754-0b01-8cc6aa29e7fd" [ 2547.430093] env[63279]: _type = "HttpNfcLease" [ 2547.430093] env[63279]: } is ready. {{(pid=63279) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 2547.430584] env[63279]: DEBUG oslo_vmware.rw_handles [None req-4a55d837-c41a-499c-94b3-d2e9de675875 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 2547.430584] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]527369f7-e9b4-7754-0b01-8cc6aa29e7fd" [ 2547.430584] env[63279]: _type = "HttpNfcLease" [ 2547.430584] env[63279]: }. {{(pid=63279) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 2547.431158] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3b55008-7854-4fe8-a9d9-50b54823d45c {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2547.439179] env[63279]: DEBUG oslo_vmware.rw_handles [None req-4a55d837-c41a-499c-94b3-d2e9de675875 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5278f3a2-1e5c-08cd-5a1e-ccea962b8154/disk-0.vmdk from lease info. {{(pid=63279) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 2547.439384] env[63279]: DEBUG oslo_vmware.rw_handles [None req-4a55d837-c41a-499c-94b3-d2e9de675875 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5278f3a2-1e5c-08cd-5a1e-ccea962b8154/disk-0.vmdk for reading. {{(pid=63279) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 2547.550034] env[63279]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-6682e4f5-73b7-4fa3-becb-3c8402b0109e {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2547.631083] env[63279]: DEBUG oslo_vmware.api [None req-bc2fa236-5cd3-40d8-86be-454f71d4f6b8 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Task: {'id': task-2088335, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.434327} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2547.631350] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-bc2fa236-5cd3-40d8-86be-454f71d4f6b8 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] 30ac4320-5ee0-424b-9bbb-e2d53277be80/30ac4320-5ee0-424b-9bbb-e2d53277be80.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2547.631567] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-bc2fa236-5cd3-40d8-86be-454f71d4f6b8 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] [instance: 30ac4320-5ee0-424b-9bbb-e2d53277be80] Extending root virtual disk to 1048576 {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2547.631817] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-2498c10f-23fb-4943-9c57-2ad738b7ee74 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2547.637674] env[63279]: DEBUG oslo_vmware.api [None req-bc2fa236-5cd3-40d8-86be-454f71d4f6b8 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Waiting for the task: (returnval){ [ 2547.637674] env[63279]: value = "task-2088336" [ 2547.637674] env[63279]: _type = "Task" [ 2547.637674] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2547.644688] env[63279]: DEBUG oslo_vmware.api [None req-bc2fa236-5cd3-40d8-86be-454f71d4f6b8 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Task: {'id': task-2088336, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2548.147566] env[63279]: DEBUG oslo_vmware.api [None req-bc2fa236-5cd3-40d8-86be-454f71d4f6b8 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Task: {'id': task-2088336, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.066426} completed successfully. 
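Spawning copies the cached image VMDK into the instance directory and then extends the root disk to the flavor size (root_gb=1, i.e. the 1048576 KB logged above). A hypothetical pyVmomi equivalent of those two VirtualDiskManager tasks; content, dc and wait_for_task are placeholder inputs:

    def copy_and_extend_root_disk(content, dc, wait_for_task,
                                  src, dest, new_capacity_kb=1048576):
        vdm = content.virtualDiskManager
        # CopyVirtualDisk_Task: cache path -> per-instance path, e.g.
        # "[datastore1] devstack-image-cache_base/<image>.vmdk" ->
        # "[datastore1] <instance-uuid>/<instance-uuid>.vmdk".
        wait_for_task(vdm.CopyVirtualDisk_Task(sourceName=src,
                                               sourceDatacenter=dc,
                                               destName=dest,
                                               destDatacenter=dc))
        # ExtendVirtualDisk_Task grows the copy to the flavor's root disk size.
        wait_for_task(vdm.ExtendVirtualDisk_Task(name=dest, datacenter=dc,
                                                 newCapacityKb=new_capacity_kb,
                                                 eagerZero=False))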
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2548.147946] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-bc2fa236-5cd3-40d8-86be-454f71d4f6b8 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] [instance: 30ac4320-5ee0-424b-9bbb-e2d53277be80] Extended root virtual disk {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2548.148764] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c877527f-9275-470a-8941-125da1ff6e7b {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2548.171640] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-bc2fa236-5cd3-40d8-86be-454f71d4f6b8 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] [instance: 30ac4320-5ee0-424b-9bbb-e2d53277be80] Reconfiguring VM instance instance-00000078 to attach disk [datastore1] 30ac4320-5ee0-424b-9bbb-e2d53277be80/30ac4320-5ee0-424b-9bbb-e2d53277be80.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2548.172201] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-805a899f-e72c-4f62-bb45-f79be26a1ff0 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2548.192127] env[63279]: DEBUG oslo_vmware.api [None req-bc2fa236-5cd3-40d8-86be-454f71d4f6b8 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Waiting for the task: (returnval){ [ 2548.192127] env[63279]: value = "task-2088337" [ 2548.192127] env[63279]: _type = "Task" [ 2548.192127] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2548.202612] env[63279]: DEBUG oslo_vmware.api [None req-bc2fa236-5cd3-40d8-86be-454f71d4f6b8 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Task: {'id': task-2088337, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2548.368545] env[63279]: DEBUG oslo_concurrency.lockutils [None req-9c7fe352-7c45-49c0-a7a0-e6b770340d11 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Acquiring lock "1520999b-e3e9-41b3-82e4-91bb556e96c4" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2548.368923] env[63279]: DEBUG oslo_concurrency.lockutils [None req-9c7fe352-7c45-49c0-a7a0-e6b770340d11 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Lock "1520999b-e3e9-41b3-82e4-91bb556e96c4" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.001s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2548.704042] env[63279]: DEBUG oslo_vmware.api [None req-bc2fa236-5cd3-40d8-86be-454f71d4f6b8 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Task: {'id': task-2088337, 'name': ReconfigVM_Task, 'duration_secs': 0.3314} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2548.704452] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-bc2fa236-5cd3-40d8-86be-454f71d4f6b8 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] [instance: 30ac4320-5ee0-424b-9bbb-e2d53277be80] Reconfigured VM instance instance-00000078 to attach disk [datastore1] 30ac4320-5ee0-424b-9bbb-e2d53277be80/30ac4320-5ee0-424b-9bbb-e2d53277be80.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2548.705262] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-94a4b214-f5a0-4d96-acfa-8c30ba26e028 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2548.712202] env[63279]: DEBUG oslo_vmware.api [None req-bc2fa236-5cd3-40d8-86be-454f71d4f6b8 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Waiting for the task: (returnval){ [ 2548.712202] env[63279]: value = "task-2088338" [ 2548.712202] env[63279]: _type = "Task" [ 2548.712202] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2548.719843] env[63279]: DEBUG oslo_vmware.api [None req-bc2fa236-5cd3-40d8-86be-454f71d4f6b8 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Task: {'id': task-2088338, 'name': Rename_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2548.872963] env[63279]: INFO nova.compute.manager [None req-9c7fe352-7c45-49c0-a7a0-e6b770340d11 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: 1520999b-e3e9-41b3-82e4-91bb556e96c4] Detaching volume 38f45d1c-4e6e-4faa-8d8d-499e9e5fdd61 [ 2548.905187] env[63279]: INFO nova.virt.block_device [None req-9c7fe352-7c45-49c0-a7a0-e6b770340d11 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: 1520999b-e3e9-41b3-82e4-91bb556e96c4] Attempting to driver detach volume 38f45d1c-4e6e-4faa-8d8d-499e9e5fdd61 from mountpoint /dev/sdb [ 2548.905493] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-9c7fe352-7c45-49c0-a7a0-e6b770340d11 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: 1520999b-e3e9-41b3-82e4-91bb556e96c4] Volume detach. Driver type: vmdk {{(pid=63279) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 2548.905701] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-9c7fe352-7c45-49c0-a7a0-e6b770340d11 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: 1520999b-e3e9-41b3-82e4-91bb556e96c4] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-427807', 'volume_id': '38f45d1c-4e6e-4faa-8d8d-499e9e5fdd61', 'name': 'volume-38f45d1c-4e6e-4faa-8d8d-499e9e5fdd61', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '1520999b-e3e9-41b3-82e4-91bb556e96c4', 'attached_at': '', 'detached_at': '', 'volume_id': '38f45d1c-4e6e-4faa-8d8d-499e9e5fdd61', 'serial': '38f45d1c-4e6e-4faa-8d8d-499e9e5fdd61'} {{(pid=63279) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 2548.906750] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ac61aeb-965c-42a2-8f87-d91d4296d5b9 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2548.929435] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7f18d79-8c60-4971-8396-4b04d967312c {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2548.937147] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23e0aed3-45c8-47f2-aad0-49683c7a42f1 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2548.959045] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4eca2fda-2fed-4c82-aa2a-108fd58c83d2 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2548.973990] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-9c7fe352-7c45-49c0-a7a0-e6b770340d11 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] The volume has not been displaced from its original location: [datastore1] volume-38f45d1c-4e6e-4faa-8d8d-499e9e5fdd61/volume-38f45d1c-4e6e-4faa-8d8d-499e9e5fdd61.vmdk. No consolidation needed. 
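Driver-detaching the volume ends in a ReconfigVM_Task that drops the corresponding virtual disk from the VM (the "Reconfiguring VM instance ... to detach disk 2001" step that follows below). A minimal pyVmomi sketch of such a device-removal reconfigure; vm, disk and wait_for_task are assumed placeholders:

    from pyVmomi import vim

    def detach_volume_disk(vm, disk, wait_for_task):
        # Remove the VirtualDisk device from the VM's hardware without a
        # fileOperation, so the backing VMDK (the Cinder volume) is kept.
        change = vim.vm.device.VirtualDeviceSpec(
            operation=vim.vm.device.VirtualDeviceSpec.Operation.remove,
            device=disk)
        config = vim.vm.ConfigSpec(deviceChange=[change])
        wait_for_task(vm.ReconfigVM_Task(spec=config))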
{{(pid=63279) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 2548.979266] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-9c7fe352-7c45-49c0-a7a0-e6b770340d11 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: 1520999b-e3e9-41b3-82e4-91bb556e96c4] Reconfiguring VM instance instance-00000074 to detach disk 2001 {{(pid=63279) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 2548.979656] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9fd7933f-10fc-4b2d-be1d-97266e0ea106 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2548.998357] env[63279]: DEBUG oslo_vmware.api [None req-9c7fe352-7c45-49c0-a7a0-e6b770340d11 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Waiting for the task: (returnval){ [ 2548.998357] env[63279]: value = "task-2088339" [ 2548.998357] env[63279]: _type = "Task" [ 2548.998357] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2549.006344] env[63279]: DEBUG oslo_vmware.api [None req-9c7fe352-7c45-49c0-a7a0-e6b770340d11 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Task: {'id': task-2088339, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2549.222225] env[63279]: DEBUG oslo_vmware.api [None req-bc2fa236-5cd3-40d8-86be-454f71d4f6b8 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Task: {'id': task-2088338, 'name': Rename_Task, 'duration_secs': 0.145693} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2549.222549] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-bc2fa236-5cd3-40d8-86be-454f71d4f6b8 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] [instance: 30ac4320-5ee0-424b-9bbb-e2d53277be80] Powering on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2549.222904] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-42be8ce3-cc1b-4bce-9cbc-0fffa8a10434 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2549.228901] env[63279]: DEBUG oslo_vmware.api [None req-bc2fa236-5cd3-40d8-86be-454f71d4f6b8 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Waiting for the task: (returnval){ [ 2549.228901] env[63279]: value = "task-2088340" [ 2549.228901] env[63279]: _type = "Task" [ 2549.228901] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2549.236641] env[63279]: DEBUG oslo_vmware.api [None req-bc2fa236-5cd3-40d8-86be-454f71d4f6b8 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Task: {'id': task-2088340, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2549.508969] env[63279]: DEBUG oslo_vmware.api [None req-9c7fe352-7c45-49c0-a7a0-e6b770340d11 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Task: {'id': task-2088339, 'name': ReconfigVM_Task, 'duration_secs': 0.332657} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2549.509286] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-9c7fe352-7c45-49c0-a7a0-e6b770340d11 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: 1520999b-e3e9-41b3-82e4-91bb556e96c4] Reconfigured VM instance instance-00000074 to detach disk 2001 {{(pid=63279) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 2549.514141] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2cdcbf17-d10d-4d7c-a4ff-ca35a80b1f52 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2549.530673] env[63279]: DEBUG oslo_vmware.api [None req-9c7fe352-7c45-49c0-a7a0-e6b770340d11 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Waiting for the task: (returnval){ [ 2549.530673] env[63279]: value = "task-2088341" [ 2549.530673] env[63279]: _type = "Task" [ 2549.530673] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2549.539466] env[63279]: DEBUG oslo_vmware.api [None req-9c7fe352-7c45-49c0-a7a0-e6b770340d11 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Task: {'id': task-2088341, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2549.738603] env[63279]: DEBUG oslo_vmware.api [None req-bc2fa236-5cd3-40d8-86be-454f71d4f6b8 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Task: {'id': task-2088340, 'name': PowerOnVM_Task, 'duration_secs': 0.465982} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2549.739070] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-bc2fa236-5cd3-40d8-86be-454f71d4f6b8 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] [instance: 30ac4320-5ee0-424b-9bbb-e2d53277be80] Powered on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2549.739070] env[63279]: INFO nova.compute.manager [None req-bc2fa236-5cd3-40d8-86be-454f71d4f6b8 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] [instance: 30ac4320-5ee0-424b-9bbb-e2d53277be80] Took 6.74 seconds to spawn the instance on the hypervisor. 
[ 2549.739251] env[63279]: DEBUG nova.compute.manager [None req-bc2fa236-5cd3-40d8-86be-454f71d4f6b8 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] [instance: 30ac4320-5ee0-424b-9bbb-e2d53277be80] Checking state {{(pid=63279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2549.740043] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47a5a99e-6df8-4f6d-a883-4c943d41d26e {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2549.905942] env[63279]: DEBUG oslo_concurrency.lockutils [None req-35afa1f0-4a50-4e3e-acee-df581056ec6a tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Acquiring lock "c5251417-7bf6-4ed9-9c98-2fa851e4ac27" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2549.906272] env[63279]: DEBUG oslo_concurrency.lockutils [None req-35afa1f0-4a50-4e3e-acee-df581056ec6a tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Lock "c5251417-7bf6-4ed9-9c98-2fa851e4ac27" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.001s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2549.906471] env[63279]: DEBUG nova.compute.manager [None req-35afa1f0-4a50-4e3e-acee-df581056ec6a tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: c5251417-7bf6-4ed9-9c98-2fa851e4ac27] Checking state {{(pid=63279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2549.907407] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fae5da9d-c01f-42f2-b545-8e21459fb314 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2549.914291] env[63279]: DEBUG nova.compute.manager [None req-35afa1f0-4a50-4e3e-acee-df581056ec6a tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: c5251417-7bf6-4ed9-9c98-2fa851e4ac27] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=63279) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 2549.914856] env[63279]: DEBUG nova.objects.instance [None req-35afa1f0-4a50-4e3e-acee-df581056ec6a tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Lazy-loading 'flavor' on Instance uuid c5251417-7bf6-4ed9-9c98-2fa851e4ac27 {{(pid=63279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2550.041317] env[63279]: DEBUG oslo_vmware.api [None req-9c7fe352-7c45-49c0-a7a0-e6b770340d11 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Task: {'id': task-2088341, 'name': ReconfigVM_Task, 'duration_secs': 0.179477} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2550.041631] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-9c7fe352-7c45-49c0-a7a0-e6b770340d11 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: 1520999b-e3e9-41b3-82e4-91bb556e96c4] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-427807', 'volume_id': '38f45d1c-4e6e-4faa-8d8d-499e9e5fdd61', 'name': 'volume-38f45d1c-4e6e-4faa-8d8d-499e9e5fdd61', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '1520999b-e3e9-41b3-82e4-91bb556e96c4', 'attached_at': '', 'detached_at': '', 'volume_id': '38f45d1c-4e6e-4faa-8d8d-499e9e5fdd61', 'serial': '38f45d1c-4e6e-4faa-8d8d-499e9e5fdd61'} {{(pid=63279) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 2550.257153] env[63279]: INFO nova.compute.manager [None req-bc2fa236-5cd3-40d8-86be-454f71d4f6b8 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] [instance: 30ac4320-5ee0-424b-9bbb-e2d53277be80] Took 12.16 seconds to build instance. [ 2550.586506] env[63279]: DEBUG nova.objects.instance [None req-9c7fe352-7c45-49c0-a7a0-e6b770340d11 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Lazy-loading 'flavor' on Instance uuid 1520999b-e3e9-41b3-82e4-91bb556e96c4 {{(pid=63279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2550.759681] env[63279]: DEBUG oslo_concurrency.lockutils [None req-bc2fa236-5cd3-40d8-86be-454f71d4f6b8 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Lock "30ac4320-5ee0-424b-9bbb-e2d53277be80" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 13.668s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2550.921338] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-35afa1f0-4a50-4e3e-acee-df581056ec6a tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: c5251417-7bf6-4ed9-9c98-2fa851e4ac27] Powering off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2550.921953] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-63738544-75c2-4c66-95f7-9769b721b6fe {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2550.929778] env[63279]: DEBUG oslo_vmware.api [None req-35afa1f0-4a50-4e3e-acee-df581056ec6a tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Waiting for the task: (returnval){ [ 2550.929778] env[63279]: value = "task-2088342" [ 2550.929778] env[63279]: _type = "Task" [ 2550.929778] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2550.937588] env[63279]: DEBUG oslo_vmware.api [None req-35afa1f0-4a50-4e3e-acee-df581056ec6a tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Task: {'id': task-2088342, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2551.353910] env[63279]: DEBUG nova.compute.manager [req-59e0edf2-6736-4b1c-8152-17fa422fc364 req-4081b63a-21ce-4ea7-8f07-5303259d22f7 service nova] [instance: 30ac4320-5ee0-424b-9bbb-e2d53277be80] Received event network-changed-0db48ddb-8067-45e9-a073-cbfb47873a8b {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2551.353910] env[63279]: DEBUG nova.compute.manager [req-59e0edf2-6736-4b1c-8152-17fa422fc364 req-4081b63a-21ce-4ea7-8f07-5303259d22f7 service nova] [instance: 30ac4320-5ee0-424b-9bbb-e2d53277be80] Refreshing instance network info cache due to event network-changed-0db48ddb-8067-45e9-a073-cbfb47873a8b. {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11655}} [ 2551.353910] env[63279]: DEBUG oslo_concurrency.lockutils [req-59e0edf2-6736-4b1c-8152-17fa422fc364 req-4081b63a-21ce-4ea7-8f07-5303259d22f7 service nova] Acquiring lock "refresh_cache-30ac4320-5ee0-424b-9bbb-e2d53277be80" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2551.353910] env[63279]: DEBUG oslo_concurrency.lockutils [req-59e0edf2-6736-4b1c-8152-17fa422fc364 req-4081b63a-21ce-4ea7-8f07-5303259d22f7 service nova] Acquired lock "refresh_cache-30ac4320-5ee0-424b-9bbb-e2d53277be80" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2551.354389] env[63279]: DEBUG nova.network.neutron [req-59e0edf2-6736-4b1c-8152-17fa422fc364 req-4081b63a-21ce-4ea7-8f07-5303259d22f7 service nova] [instance: 30ac4320-5ee0-424b-9bbb-e2d53277be80] Refreshing network info cache for port 0db48ddb-8067-45e9-a073-cbfb47873a8b {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2551.441704] env[63279]: DEBUG oslo_vmware.api [None req-35afa1f0-4a50-4e3e-acee-df581056ec6a tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Task: {'id': task-2088342, 'name': PowerOffVM_Task, 'duration_secs': 0.311422} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2551.442017] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-35afa1f0-4a50-4e3e-acee-df581056ec6a tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: c5251417-7bf6-4ed9-9c98-2fa851e4ac27] Powered off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2551.442280] env[63279]: DEBUG nova.compute.manager [None req-35afa1f0-4a50-4e3e-acee-df581056ec6a tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: c5251417-7bf6-4ed9-9c98-2fa851e4ac27] Checking state {{(pid=63279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2551.443100] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3adc1109-4cd9-4dc2-96c2-f946cd6e989e {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2551.594986] env[63279]: DEBUG oslo_concurrency.lockutils [None req-9c7fe352-7c45-49c0-a7a0-e6b770340d11 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Lock "1520999b-e3e9-41b3-82e4-91bb556e96c4" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.226s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2551.956049] env[63279]: DEBUG oslo_concurrency.lockutils [None req-35afa1f0-4a50-4e3e-acee-df581056ec6a tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Lock "c5251417-7bf6-4ed9-9c98-2fa851e4ac27" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.050s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2552.108907] env[63279]: DEBUG nova.network.neutron [req-59e0edf2-6736-4b1c-8152-17fa422fc364 req-4081b63a-21ce-4ea7-8f07-5303259d22f7 service nova] [instance: 30ac4320-5ee0-424b-9bbb-e2d53277be80] Updated VIF entry in instance network info cache for port 0db48ddb-8067-45e9-a073-cbfb47873a8b. 
{{(pid=63279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2552.109301] env[63279]: DEBUG nova.network.neutron [req-59e0edf2-6736-4b1c-8152-17fa422fc364 req-4081b63a-21ce-4ea7-8f07-5303259d22f7 service nova] [instance: 30ac4320-5ee0-424b-9bbb-e2d53277be80] Updating instance_info_cache with network_info: [{"id": "0db48ddb-8067-45e9-a073-cbfb47873a8b", "address": "fa:16:3e:2a:5d:36", "network": {"id": "08f1618e-b8a4-4fea-847f-a6d01449f46c", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1512363099-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.197", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5fa421934ecd4054a70528644a40349e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e41070eb-3ac1-4ca9-a3d0-fd65893a97de", "external-id": "nsx-vlan-transportzone-596", "segmentation_id": 596, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0db48ddb-80", "ovs_interfaceid": "0db48ddb-8067-45e9-a073-cbfb47873a8b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2552.370676] env[63279]: DEBUG nova.objects.instance [None req-a13a7fa6-a8a4-4dbc-b6f1-7f2aa02feebf tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Lazy-loading 'flavor' on Instance uuid c5251417-7bf6-4ed9-9c98-2fa851e4ac27 {{(pid=63279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2552.611952] env[63279]: DEBUG oslo_concurrency.lockutils [req-59e0edf2-6736-4b1c-8152-17fa422fc364 req-4081b63a-21ce-4ea7-8f07-5303259d22f7 service nova] Releasing lock "refresh_cache-30ac4320-5ee0-424b-9bbb-e2d53277be80" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2552.632643] env[63279]: DEBUG oslo_concurrency.lockutils [None req-7de96668-355a-4479-8ef9-703ea9b4f9a1 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Acquiring lock "1520999b-e3e9-41b3-82e4-91bb556e96c4" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2552.632930] env[63279]: DEBUG oslo_concurrency.lockutils [None req-7de96668-355a-4479-8ef9-703ea9b4f9a1 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Lock "1520999b-e3e9-41b3-82e4-91bb556e96c4" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2552.633164] env[63279]: DEBUG oslo_concurrency.lockutils [None req-7de96668-355a-4479-8ef9-703ea9b4f9a1 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Acquiring lock "1520999b-e3e9-41b3-82e4-91bb556e96c4-events" 
by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2552.633358] env[63279]: DEBUG oslo_concurrency.lockutils [None req-7de96668-355a-4479-8ef9-703ea9b4f9a1 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Lock "1520999b-e3e9-41b3-82e4-91bb556e96c4-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2552.633541] env[63279]: DEBUG oslo_concurrency.lockutils [None req-7de96668-355a-4479-8ef9-703ea9b4f9a1 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Lock "1520999b-e3e9-41b3-82e4-91bb556e96c4-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2552.636203] env[63279]: INFO nova.compute.manager [None req-7de96668-355a-4479-8ef9-703ea9b4f9a1 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: 1520999b-e3e9-41b3-82e4-91bb556e96c4] Terminating instance [ 2552.875882] env[63279]: DEBUG oslo_concurrency.lockutils [None req-a13a7fa6-a8a4-4dbc-b6f1-7f2aa02feebf tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Acquiring lock "refresh_cache-c5251417-7bf6-4ed9-9c98-2fa851e4ac27" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2552.876144] env[63279]: DEBUG oslo_concurrency.lockutils [None req-a13a7fa6-a8a4-4dbc-b6f1-7f2aa02feebf tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Acquired lock "refresh_cache-c5251417-7bf6-4ed9-9c98-2fa851e4ac27" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2552.876265] env[63279]: DEBUG nova.network.neutron [None req-a13a7fa6-a8a4-4dbc-b6f1-7f2aa02feebf tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: c5251417-7bf6-4ed9-9c98-2fa851e4ac27] Building network info cache for instance {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2552.876446] env[63279]: DEBUG nova.objects.instance [None req-a13a7fa6-a8a4-4dbc-b6f1-7f2aa02feebf tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Lazy-loading 'info_cache' on Instance uuid c5251417-7bf6-4ed9-9c98-2fa851e4ac27 {{(pid=63279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2553.141000] env[63279]: DEBUG nova.compute.manager [None req-7de96668-355a-4479-8ef9-703ea9b4f9a1 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: 1520999b-e3e9-41b3-82e4-91bb556e96c4] Start destroying the instance on the hypervisor. 
{{(pid=63279) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2553.141398] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-7de96668-355a-4479-8ef9-703ea9b4f9a1 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: 1520999b-e3e9-41b3-82e4-91bb556e96c4] Destroying instance {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2553.142269] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05957638-8d78-466b-9ef4-04a2c702f8ca {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2553.150801] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-7de96668-355a-4479-8ef9-703ea9b4f9a1 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: 1520999b-e3e9-41b3-82e4-91bb556e96c4] Powering off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2553.151095] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9327abf9-ce95-49b0-96e1-c0bcb94326c0 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2553.157415] env[63279]: DEBUG oslo_vmware.api [None req-7de96668-355a-4479-8ef9-703ea9b4f9a1 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Waiting for the task: (returnval){ [ 2553.157415] env[63279]: value = "task-2088343" [ 2553.157415] env[63279]: _type = "Task" [ 2553.157415] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2553.165545] env[63279]: DEBUG oslo_vmware.api [None req-7de96668-355a-4479-8ef9-703ea9b4f9a1 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Task: {'id': task-2088343, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2553.380328] env[63279]: DEBUG nova.objects.base [None req-a13a7fa6-a8a4-4dbc-b6f1-7f2aa02feebf tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Object Instance lazy-loaded attributes: flavor,info_cache {{(pid=63279) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 2553.668065] env[63279]: DEBUG oslo_vmware.api [None req-7de96668-355a-4479-8ef9-703ea9b4f9a1 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Task: {'id': task-2088343, 'name': PowerOffVM_Task, 'duration_secs': 0.261464} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2553.668065] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-7de96668-355a-4479-8ef9-703ea9b4f9a1 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: 1520999b-e3e9-41b3-82e4-91bb556e96c4] Powered off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2553.668065] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-7de96668-355a-4479-8ef9-703ea9b4f9a1 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: 1520999b-e3e9-41b3-82e4-91bb556e96c4] Unregistering the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2553.668065] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-20154b5b-44c1-4f4f-87e5-1e876f20d822 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2554.097626] env[63279]: DEBUG nova.network.neutron [None req-a13a7fa6-a8a4-4dbc-b6f1-7f2aa02feebf tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: c5251417-7bf6-4ed9-9c98-2fa851e4ac27] Updating instance_info_cache with network_info: [{"id": "cb63f840-897f-4d96-aa93-bd770d4895d7", "address": "fa:16:3e:db:98:eb", "network": {"id": "82ca068b-acea-4f60-b0b3-68ea1e08aebe", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-571699353-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.165", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f39174e9ff5649e0ade4391da383dfb2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "57d4be17-536f-4a81-bea9-6547bd50f4a3", "external-id": "nsx-vlan-transportzone-163", "segmentation_id": 163, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcb63f840-89", "ovs_interfaceid": "cb63f840-897f-4d96-aa93-bd770d4895d7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2554.600884] env[63279]: DEBUG oslo_concurrency.lockutils [None req-a13a7fa6-a8a4-4dbc-b6f1-7f2aa02feebf tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Releasing lock "refresh_cache-c5251417-7bf6-4ed9-9c98-2fa851e4ac27" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2555.582960] env[63279]: DEBUG oslo_vmware.rw_handles [None req-4a55d837-c41a-499c-94b3-d2e9de675875 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5278f3a2-1e5c-08cd-5a1e-ccea962b8154/disk-0.vmdk. 
{{(pid=63279) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 2555.583926] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d952656a-9826-4b41-a3e2-909cc3e77ce8 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2555.590278] env[63279]: DEBUG oslo_vmware.rw_handles [None req-4a55d837-c41a-499c-94b3-d2e9de675875 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5278f3a2-1e5c-08cd-5a1e-ccea962b8154/disk-0.vmdk is in state: ready. {{(pid=63279) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 2555.590458] env[63279]: ERROR oslo_vmware.rw_handles [None req-4a55d837-c41a-499c-94b3-d2e9de675875 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5278f3a2-1e5c-08cd-5a1e-ccea962b8154/disk-0.vmdk due to incomplete transfer. [ 2555.590696] env[63279]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-d7c05919-d068-439b-9873-6fe2f5c5ce99 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2555.597564] env[63279]: DEBUG oslo_vmware.rw_handles [None req-4a55d837-c41a-499c-94b3-d2e9de675875 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5278f3a2-1e5c-08cd-5a1e-ccea962b8154/disk-0.vmdk. {{(pid=63279) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 2555.597763] env[63279]: DEBUG nova.virt.vmwareapi.images [None req-4a55d837-c41a-499c-94b3-d2e9de675875 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 011af8fa-0f76-44a8-8b24-b3d65f5e841e] Uploaded image 7dcfd34e-94d0-4b88-ab7e-c1e1e3da8f20 to the Glance image server {{(pid=63279) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 2555.600157] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-4a55d837-c41a-499c-94b3-d2e9de675875 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 011af8fa-0f76-44a8-8b24-b3d65f5e841e] Destroying the VM {{(pid=63279) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 2555.600388] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-290f4fea-76b8-464b-89b3-24afe19a5172 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2555.606258] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-a13a7fa6-a8a4-4dbc-b6f1-7f2aa02feebf tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: c5251417-7bf6-4ed9-9c98-2fa851e4ac27] Powering on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2555.606548] env[63279]: DEBUG oslo_vmware.api [None req-4a55d837-c41a-499c-94b3-d2e9de675875 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Waiting for the task: (returnval){ [ 2555.606548] env[63279]: value = 
"task-2088345" [ 2555.606548] env[63279]: _type = "Task" [ 2555.606548] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2555.606756] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-dbfd3077-c272-4753-8dda-9b6bdea4d3d7 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2555.617044] env[63279]: DEBUG oslo_vmware.api [None req-4a55d837-c41a-499c-94b3-d2e9de675875 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Task: {'id': task-2088345, 'name': Destroy_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2555.618147] env[63279]: DEBUG oslo_vmware.api [None req-a13a7fa6-a8a4-4dbc-b6f1-7f2aa02feebf tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Waiting for the task: (returnval){ [ 2555.618147] env[63279]: value = "task-2088346" [ 2555.618147] env[63279]: _type = "Task" [ 2555.618147] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2555.625022] env[63279]: DEBUG oslo_vmware.api [None req-a13a7fa6-a8a4-4dbc-b6f1-7f2aa02feebf tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Task: {'id': task-2088346, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2556.117825] env[63279]: DEBUG oslo_vmware.api [None req-4a55d837-c41a-499c-94b3-d2e9de675875 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Task: {'id': task-2088345, 'name': Destroy_Task} progress is 33%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2556.126505] env[63279]: DEBUG oslo_vmware.api [None req-a13a7fa6-a8a4-4dbc-b6f1-7f2aa02feebf tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Task: {'id': task-2088346, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2556.618924] env[63279]: DEBUG oslo_vmware.api [None req-4a55d837-c41a-499c-94b3-d2e9de675875 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Task: {'id': task-2088345, 'name': Destroy_Task} progress is 33%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2556.627822] env[63279]: DEBUG oslo_vmware.api [None req-a13a7fa6-a8a4-4dbc-b6f1-7f2aa02feebf tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Task: {'id': task-2088346, 'name': PowerOnVM_Task, 'duration_secs': 0.982679} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2556.628090] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-a13a7fa6-a8a4-4dbc-b6f1-7f2aa02feebf tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: c5251417-7bf6-4ed9-9c98-2fa851e4ac27] Powered on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2556.628298] env[63279]: DEBUG nova.compute.manager [None req-a13a7fa6-a8a4-4dbc-b6f1-7f2aa02feebf tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: c5251417-7bf6-4ed9-9c98-2fa851e4ac27] Checking state {{(pid=63279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2556.629027] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0288701d-e47c-469f-99a2-98f87d31d091 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2557.120123] env[63279]: DEBUG oslo_vmware.api [None req-4a55d837-c41a-499c-94b3-d2e9de675875 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Task: {'id': task-2088345, 'name': Destroy_Task} progress is 33%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2557.621798] env[63279]: DEBUG oslo_vmware.api [None req-4a55d837-c41a-499c-94b3-d2e9de675875 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Task: {'id': task-2088345, 'name': Destroy_Task} progress is 33%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2558.122824] env[63279]: DEBUG oslo_vmware.api [None req-4a55d837-c41a-499c-94b3-d2e9de675875 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Task: {'id': task-2088345, 'name': Destroy_Task} progress is 33%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2558.421830] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-7de96668-355a-4479-8ef9-703ea9b4f9a1 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: 1520999b-e3e9-41b3-82e4-91bb556e96c4] Unregistered the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2558.422064] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-7de96668-355a-4479-8ef9-703ea9b4f9a1 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: 1520999b-e3e9-41b3-82e4-91bb556e96c4] Deleting contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2558.422258] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-7de96668-355a-4479-8ef9-703ea9b4f9a1 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Deleting the datastore file [datastore1] 1520999b-e3e9-41b3-82e4-91bb556e96c4 {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2558.422579] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-971ffaab-bc56-4e2f-a6d3-9ecd387973f8 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2558.429254] env[63279]: DEBUG oslo_vmware.api [None req-7de96668-355a-4479-8ef9-703ea9b4f9a1 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Waiting for the task: (returnval){ [ 2558.429254] env[63279]: value = "task-2088347" [ 2558.429254] env[63279]: _type = "Task" [ 2558.429254] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2558.436861] env[63279]: DEBUG oslo_vmware.api [None req-7de96668-355a-4479-8ef9-703ea9b4f9a1 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Task: {'id': task-2088347, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2558.467169] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-920ab417-255e-4891-a1b3-c571be9dd43d {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2558.473668] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-963694a4-cbd8-4a46-8744-d2582950d2c4 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: c5251417-7bf6-4ed9-9c98-2fa851e4ac27] Suspending the VM {{(pid=63279) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1162}} [ 2558.473896] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.SuspendVM_Task with opID=oslo.vmware-3b6279e6-f9c3-455c-b9b9-dbc3839c538d {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2558.479755] env[63279]: DEBUG oslo_vmware.api [None req-963694a4-cbd8-4a46-8744-d2582950d2c4 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Waiting for the task: (returnval){ [ 2558.479755] env[63279]: value = "task-2088348" [ 2558.479755] env[63279]: _type = "Task" [ 2558.479755] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2558.487549] env[63279]: DEBUG oslo_vmware.api [None req-963694a4-cbd8-4a46-8744-d2582950d2c4 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Task: {'id': task-2088348, 'name': SuspendVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2558.622790] env[63279]: DEBUG oslo_vmware.api [None req-4a55d837-c41a-499c-94b3-d2e9de675875 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Task: {'id': task-2088345, 'name': Destroy_Task, 'duration_secs': 2.940183} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2558.623211] env[63279]: INFO nova.virt.vmwareapi.vm_util [None req-4a55d837-c41a-499c-94b3-d2e9de675875 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 011af8fa-0f76-44a8-8b24-b3d65f5e841e] Destroyed the VM [ 2558.623378] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-4a55d837-c41a-499c-94b3-d2e9de675875 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 011af8fa-0f76-44a8-8b24-b3d65f5e841e] Deleting Snapshot of the VM instance {{(pid=63279) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 2558.623641] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-c65b3a3f-f876-41ee-88ce-05620e4e4ecf {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2558.630228] env[63279]: DEBUG oslo_vmware.api [None req-4a55d837-c41a-499c-94b3-d2e9de675875 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Waiting for the task: (returnval){ [ 2558.630228] env[63279]: value = "task-2088349" [ 2558.630228] env[63279]: _type = "Task" [ 2558.630228] env[63279]: } to complete. 
{{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2558.638059] env[63279]: DEBUG oslo_vmware.api [None req-4a55d837-c41a-499c-94b3-d2e9de675875 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Task: {'id': task-2088349, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2558.939399] env[63279]: DEBUG oslo_vmware.api [None req-7de96668-355a-4479-8ef9-703ea9b4f9a1 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Task: {'id': task-2088347, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.191749} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2558.939699] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-7de96668-355a-4479-8ef9-703ea9b4f9a1 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Deleted the datastore file {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2558.939900] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-7de96668-355a-4479-8ef9-703ea9b4f9a1 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: 1520999b-e3e9-41b3-82e4-91bb556e96c4] Deleted contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2558.940092] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-7de96668-355a-4479-8ef9-703ea9b4f9a1 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: 1520999b-e3e9-41b3-82e4-91bb556e96c4] Instance destroyed {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2558.940270] env[63279]: INFO nova.compute.manager [None req-7de96668-355a-4479-8ef9-703ea9b4f9a1 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: 1520999b-e3e9-41b3-82e4-91bb556e96c4] Took 5.80 seconds to destroy the instance on the hypervisor. [ 2558.940509] env[63279]: DEBUG oslo.service.loopingcall [None req-7de96668-355a-4479-8ef9-703ea9b4f9a1 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2558.940701] env[63279]: DEBUG nova.compute.manager [-] [instance: 1520999b-e3e9-41b3-82e4-91bb556e96c4] Deallocating network for instance {{(pid=63279) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2558.940796] env[63279]: DEBUG nova.network.neutron [-] [instance: 1520999b-e3e9-41b3-82e4-91bb556e96c4] deallocate_for_instance() {{(pid=63279) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2558.989464] env[63279]: DEBUG oslo_vmware.api [None req-963694a4-cbd8-4a46-8744-d2582950d2c4 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Task: {'id': task-2088348, 'name': SuspendVM_Task} progress is 70%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2559.140314] env[63279]: DEBUG oslo_vmware.api [None req-4a55d837-c41a-499c-94b3-d2e9de675875 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Task: {'id': task-2088349, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2559.493646] env[63279]: DEBUG oslo_vmware.api [None req-963694a4-cbd8-4a46-8744-d2582950d2c4 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Task: {'id': task-2088348, 'name': SuspendVM_Task, 'duration_secs': 0.96784} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2559.493646] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-963694a4-cbd8-4a46-8744-d2582950d2c4 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: c5251417-7bf6-4ed9-9c98-2fa851e4ac27] Suspended the VM {{(pid=63279) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1166}} [ 2559.493646] env[63279]: DEBUG nova.compute.manager [None req-963694a4-cbd8-4a46-8744-d2582950d2c4 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: c5251417-7bf6-4ed9-9c98-2fa851e4ac27] Checking state {{(pid=63279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2559.494183] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-abb8b1f2-d17e-43fb-ad90-51c5f7d511cd {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2559.641770] env[63279]: DEBUG oslo_vmware.api [None req-4a55d837-c41a-499c-94b3-d2e9de675875 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Task: {'id': task-2088349, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2559.643851] env[63279]: DEBUG nova.compute.manager [req-d572e5e4-a67e-4ffa-8131-b4e7b1bf2033 req-ab298e8e-ec9d-4b08-950a-68dcf77ae7ce service nova] [instance: 1520999b-e3e9-41b3-82e4-91bb556e96c4] Received event network-vif-deleted-c82cf29b-2996-4e7e-a280-4c7943a6865a {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2559.644058] env[63279]: INFO nova.compute.manager [req-d572e5e4-a67e-4ffa-8131-b4e7b1bf2033 req-ab298e8e-ec9d-4b08-950a-68dcf77ae7ce service nova] [instance: 1520999b-e3e9-41b3-82e4-91bb556e96c4] Neutron deleted interface c82cf29b-2996-4e7e-a280-4c7943a6865a; detaching it from the instance and deleting it from the info cache [ 2559.644332] env[63279]: DEBUG nova.network.neutron [req-d572e5e4-a67e-4ffa-8131-b4e7b1bf2033 req-ab298e8e-ec9d-4b08-950a-68dcf77ae7ce service nova] [instance: 1520999b-e3e9-41b3-82e4-91bb556e96c4] Updating instance_info_cache with network_info: [] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2560.115207] env[63279]: DEBUG nova.network.neutron [-] [instance: 1520999b-e3e9-41b3-82e4-91bb556e96c4] Updating instance_info_cache with network_info: [] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2560.142259] env[63279]: DEBUG oslo_vmware.api [None req-4a55d837-c41a-499c-94b3-d2e9de675875 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Task: {'id': task-2088349, 'name': RemoveSnapshot_Task, 'duration_secs': 1.251977} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2560.142556] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-4a55d837-c41a-499c-94b3-d2e9de675875 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 011af8fa-0f76-44a8-8b24-b3d65f5e841e] Deleted Snapshot of the VM instance {{(pid=63279) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 2560.142835] env[63279]: DEBUG nova.compute.manager [None req-4a55d837-c41a-499c-94b3-d2e9de675875 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 011af8fa-0f76-44a8-8b24-b3d65f5e841e] Checking state {{(pid=63279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2560.143617] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc07554b-f5af-4e52-a5cc-1bd212157316 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2560.146405] env[63279]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-3d488b4e-9f94-4b42-a328-e4f7ce2e5c1c {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2560.156619] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f5e3f1e-509d-4763-94fe-1c3e076f126a {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2560.183175] env[63279]: DEBUG nova.compute.manager [req-d572e5e4-a67e-4ffa-8131-b4e7b1bf2033 req-ab298e8e-ec9d-4b08-950a-68dcf77ae7ce service nova] [instance: 1520999b-e3e9-41b3-82e4-91bb556e96c4] Detach interface failed, 
port_id=c82cf29b-2996-4e7e-a280-4c7943a6865a, reason: Instance 1520999b-e3e9-41b3-82e4-91bb556e96c4 could not be found. {{(pid=63279) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11484}} [ 2560.617932] env[63279]: INFO nova.compute.manager [-] [instance: 1520999b-e3e9-41b3-82e4-91bb556e96c4] Took 1.68 seconds to deallocate network for instance. [ 2560.657331] env[63279]: INFO nova.compute.manager [None req-4a55d837-c41a-499c-94b3-d2e9de675875 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 011af8fa-0f76-44a8-8b24-b3d65f5e841e] Shelve offloading [ 2561.044039] env[63279]: INFO nova.compute.manager [None req-0f45aae6-592f-4597-b6d5-97a31310f2d3 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: c5251417-7bf6-4ed9-9c98-2fa851e4ac27] Resuming [ 2561.044660] env[63279]: DEBUG nova.objects.instance [None req-0f45aae6-592f-4597-b6d5-97a31310f2d3 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Lazy-loading 'flavor' on Instance uuid c5251417-7bf6-4ed9-9c98-2fa851e4ac27 {{(pid=63279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2561.124296] env[63279]: DEBUG oslo_concurrency.lockutils [None req-7de96668-355a-4479-8ef9-703ea9b4f9a1 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2561.124556] env[63279]: DEBUG oslo_concurrency.lockutils [None req-7de96668-355a-4479-8ef9-703ea9b4f9a1 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2561.124782] env[63279]: DEBUG nova.objects.instance [None req-7de96668-355a-4479-8ef9-703ea9b4f9a1 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Lazy-loading 'resources' on Instance uuid 1520999b-e3e9-41b3-82e4-91bb556e96c4 {{(pid=63279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2561.160426] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-4a55d837-c41a-499c-94b3-d2e9de675875 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 011af8fa-0f76-44a8-8b24-b3d65f5e841e] Powering off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2561.160736] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b43ba047-3f1f-4867-8cc3-6cf2b4ee64e4 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2561.168747] env[63279]: DEBUG oslo_vmware.api [None req-4a55d837-c41a-499c-94b3-d2e9de675875 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Waiting for the task: (returnval){ [ 2561.168747] env[63279]: value = "task-2088350" [ 2561.168747] env[63279]: _type = "Task" [ 2561.168747] env[63279]: } to complete. 
{{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2561.176572] env[63279]: DEBUG oslo_vmware.api [None req-4a55d837-c41a-499c-94b3-d2e9de675875 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Task: {'id': task-2088350, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2561.681221] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-4a55d837-c41a-499c-94b3-d2e9de675875 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 011af8fa-0f76-44a8-8b24-b3d65f5e841e] VM already powered off {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 2561.681478] env[63279]: DEBUG nova.compute.manager [None req-4a55d837-c41a-499c-94b3-d2e9de675875 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 011af8fa-0f76-44a8-8b24-b3d65f5e841e] Checking state {{(pid=63279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2561.682384] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2a00993-5e25-4881-8c91-68bbc39defed {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2561.690440] env[63279]: DEBUG oslo_concurrency.lockutils [None req-4a55d837-c41a-499c-94b3-d2e9de675875 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Acquiring lock "refresh_cache-011af8fa-0f76-44a8-8b24-b3d65f5e841e" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2561.690603] env[63279]: DEBUG oslo_concurrency.lockutils [None req-4a55d837-c41a-499c-94b3-d2e9de675875 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Acquired lock "refresh_cache-011af8fa-0f76-44a8-8b24-b3d65f5e841e" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2561.690779] env[63279]: DEBUG nova.network.neutron [None req-4a55d837-c41a-499c-94b3-d2e9de675875 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 011af8fa-0f76-44a8-8b24-b3d65f5e841e] Building network info cache for instance {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2561.704659] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54b12e04-cacc-4de8-9e00-a52eeee3f49b {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2561.711017] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce46181a-3b73-43bf-bad2-896600538a04 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2561.740319] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7fa9afd3-831f-40a0-9e8f-0f1a8d666dfc {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2561.747048] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-0ba33882-a1f9-47d2-8318-382ae9bf90a6 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2561.759502] env[63279]: DEBUG nova.compute.provider_tree [None req-7de96668-355a-4479-8ef9-703ea9b4f9a1 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Updating inventory in ProviderTree for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2562.291935] env[63279]: DEBUG nova.scheduler.client.report [None req-7de96668-355a-4479-8ef9-703ea9b4f9a1 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Updated inventory for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 with generation 182 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 2562.292189] env[63279]: DEBUG nova.compute.provider_tree [None req-7de96668-355a-4479-8ef9-703ea9b4f9a1 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Updating resource provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 generation from 182 to 183 during operation: update_inventory {{(pid=63279) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 2562.292377] env[63279]: DEBUG nova.compute.provider_tree [None req-7de96668-355a-4479-8ef9-703ea9b4f9a1 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Updating inventory in ProviderTree for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2562.402705] env[63279]: DEBUG nova.network.neutron [None req-4a55d837-c41a-499c-94b3-d2e9de675875 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 011af8fa-0f76-44a8-8b24-b3d65f5e841e] Updating instance_info_cache with network_info: [{"id": "5fc4a7ed-d188-4387-a1a2-7dc3778475f8", "address": "fa:16:3e:a5:22:54", "network": {"id": "1237aa69-f92f-4996-893d-4007ba590d1d", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-544783943-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", 
"type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.237", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "996f8d6e14a14ac39f207eced547ef33", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d88b750a-0e7d-4f16-8bd5-8e6d5743b720", "external-id": "nsx-vlan-transportzone-715", "segmentation_id": 715, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5fc4a7ed-d1", "ovs_interfaceid": "5fc4a7ed-d188-4387-a1a2-7dc3778475f8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2562.440824] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2562.440987] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=63279) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11066}} [ 2562.554113] env[63279]: DEBUG oslo_concurrency.lockutils [None req-0f45aae6-592f-4597-b6d5-97a31310f2d3 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Acquiring lock "refresh_cache-c5251417-7bf6-4ed9-9c98-2fa851e4ac27" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2562.554268] env[63279]: DEBUG oslo_concurrency.lockutils [None req-0f45aae6-592f-4597-b6d5-97a31310f2d3 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Acquired lock "refresh_cache-c5251417-7bf6-4ed9-9c98-2fa851e4ac27" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2562.554437] env[63279]: DEBUG nova.network.neutron [None req-0f45aae6-592f-4597-b6d5-97a31310f2d3 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: c5251417-7bf6-4ed9-9c98-2fa851e4ac27] Building network info cache for instance {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2562.796866] env[63279]: DEBUG oslo_concurrency.lockutils [None req-7de96668-355a-4479-8ef9-703ea9b4f9a1 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.672s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2562.814753] env[63279]: INFO nova.scheduler.client.report [None req-7de96668-355a-4479-8ef9-703ea9b4f9a1 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Deleted allocations for instance 1520999b-e3e9-41b3-82e4-91bb556e96c4 [ 2562.906082] env[63279]: DEBUG oslo_concurrency.lockutils [None req-4a55d837-c41a-499c-94b3-d2e9de675875 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Releasing lock 
"refresh_cache-011af8fa-0f76-44a8-8b24-b3d65f5e841e" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2563.274836] env[63279]: DEBUG nova.compute.manager [req-245cacbe-7363-4ff6-b8f4-d3817ab2e9e8 req-9fb6c43e-5a2f-403f-9558-bff0d7e6b201 service nova] [instance: 011af8fa-0f76-44a8-8b24-b3d65f5e841e] Received event network-vif-unplugged-5fc4a7ed-d188-4387-a1a2-7dc3778475f8 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2563.275077] env[63279]: DEBUG oslo_concurrency.lockutils [req-245cacbe-7363-4ff6-b8f4-d3817ab2e9e8 req-9fb6c43e-5a2f-403f-9558-bff0d7e6b201 service nova] Acquiring lock "011af8fa-0f76-44a8-8b24-b3d65f5e841e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2563.275300] env[63279]: DEBUG oslo_concurrency.lockutils [req-245cacbe-7363-4ff6-b8f4-d3817ab2e9e8 req-9fb6c43e-5a2f-403f-9558-bff0d7e6b201 service nova] Lock "011af8fa-0f76-44a8-8b24-b3d65f5e841e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2563.275474] env[63279]: DEBUG oslo_concurrency.lockutils [req-245cacbe-7363-4ff6-b8f4-d3817ab2e9e8 req-9fb6c43e-5a2f-403f-9558-bff0d7e6b201 service nova] Lock "011af8fa-0f76-44a8-8b24-b3d65f5e841e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2563.275645] env[63279]: DEBUG nova.compute.manager [req-245cacbe-7363-4ff6-b8f4-d3817ab2e9e8 req-9fb6c43e-5a2f-403f-9558-bff0d7e6b201 service nova] [instance: 011af8fa-0f76-44a8-8b24-b3d65f5e841e] No waiting events found dispatching network-vif-unplugged-5fc4a7ed-d188-4387-a1a2-7dc3778475f8 {{(pid=63279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 2563.275818] env[63279]: WARNING nova.compute.manager [req-245cacbe-7363-4ff6-b8f4-d3817ab2e9e8 req-9fb6c43e-5a2f-403f-9558-bff0d7e6b201 service nova] [instance: 011af8fa-0f76-44a8-8b24-b3d65f5e841e] Received unexpected event network-vif-unplugged-5fc4a7ed-d188-4387-a1a2-7dc3778475f8 for instance with vm_state shelved and task_state shelving_offloading. 
[ 2563.322361] env[63279]: DEBUG oslo_concurrency.lockutils [None req-7de96668-355a-4479-8ef9-703ea9b4f9a1 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Lock "1520999b-e3e9-41b3-82e4-91bb556e96c4" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 10.689s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2563.452579] env[63279]: DEBUG nova.network.neutron [None req-0f45aae6-592f-4597-b6d5-97a31310f2d3 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: c5251417-7bf6-4ed9-9c98-2fa851e4ac27] Updating instance_info_cache with network_info: [{"id": "cb63f840-897f-4d96-aa93-bd770d4895d7", "address": "fa:16:3e:db:98:eb", "network": {"id": "82ca068b-acea-4f60-b0b3-68ea1e08aebe", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-571699353-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.165", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f39174e9ff5649e0ade4391da383dfb2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "57d4be17-536f-4a81-bea9-6547bd50f4a3", "external-id": "nsx-vlan-transportzone-163", "segmentation_id": 163, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcb63f840-89", "ovs_interfaceid": "cb63f840-897f-4d96-aa93-bd770d4895d7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2563.462242] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-4a55d837-c41a-499c-94b3-d2e9de675875 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 011af8fa-0f76-44a8-8b24-b3d65f5e841e] Destroying instance {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2563.463183] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1fc9b0c-2e66-44fb-9942-a76a2fa66f4c {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2563.472325] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-4a55d837-c41a-499c-94b3-d2e9de675875 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 011af8fa-0f76-44a8-8b24-b3d65f5e841e] Unregistering the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2563.472589] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9cbda1c8-ff15-4282-8209-6617c5e425a9 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2563.587866] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-4a55d837-c41a-499c-94b3-d2e9de675875 tempest-ServerActionsTestOtherB-605016737 
tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 011af8fa-0f76-44a8-8b24-b3d65f5e841e] Unregistered the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2563.588144] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-4a55d837-c41a-499c-94b3-d2e9de675875 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 011af8fa-0f76-44a8-8b24-b3d65f5e841e] Deleting contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2563.588343] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-4a55d837-c41a-499c-94b3-d2e9de675875 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Deleting the datastore file [datastore1] 011af8fa-0f76-44a8-8b24-b3d65f5e841e {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2563.588609] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f628249c-6a3a-4f4a-9556-d0f8fc790df2 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2563.595822] env[63279]: DEBUG oslo_vmware.api [None req-4a55d837-c41a-499c-94b3-d2e9de675875 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Waiting for the task: (returnval){ [ 2563.595822] env[63279]: value = "task-2088352" [ 2563.595822] env[63279]: _type = "Task" [ 2563.595822] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2563.604537] env[63279]: DEBUG oslo_vmware.api [None req-4a55d837-c41a-499c-94b3-d2e9de675875 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Task: {'id': task-2088352, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2563.955406] env[63279]: DEBUG oslo_concurrency.lockutils [None req-0f45aae6-592f-4597-b6d5-97a31310f2d3 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Releasing lock "refresh_cache-c5251417-7bf6-4ed9-9c98-2fa851e4ac27" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2563.956438] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4555813d-eb5b-444f-ade3-b16bd826b395 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2563.963569] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-0f45aae6-592f-4597-b6d5-97a31310f2d3 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: c5251417-7bf6-4ed9-9c98-2fa851e4ac27] Resuming the VM {{(pid=63279) resume /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1183}} [ 2563.963812] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-49ecc09a-8a9e-4de2-9196-34e5f91dfc3e {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2563.969482] env[63279]: DEBUG oslo_vmware.api [None req-0f45aae6-592f-4597-b6d5-97a31310f2d3 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Waiting for the task: (returnval){ [ 2563.969482] env[63279]: value = "task-2088353" [ 2563.969482] env[63279]: _type = "Task" [ 2563.969482] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2563.978165] env[63279]: DEBUG oslo_vmware.api [None req-0f45aae6-592f-4597-b6d5-97a31310f2d3 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Task: {'id': task-2088353, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2564.107912] env[63279]: DEBUG oslo_vmware.api [None req-4a55d837-c41a-499c-94b3-d2e9de675875 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Task: {'id': task-2088352, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.128481} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2564.107912] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-4a55d837-c41a-499c-94b3-d2e9de675875 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Deleted the datastore file {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2564.107912] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-4a55d837-c41a-499c-94b3-d2e9de675875 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 011af8fa-0f76-44a8-8b24-b3d65f5e841e] Deleted contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2564.108208] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-4a55d837-c41a-499c-94b3-d2e9de675875 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 011af8fa-0f76-44a8-8b24-b3d65f5e841e] Instance destroyed {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2564.129095] env[63279]: INFO nova.scheduler.client.report [None req-4a55d837-c41a-499c-94b3-d2e9de675875 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Deleted allocations for instance 011af8fa-0f76-44a8-8b24-b3d65f5e841e [ 2564.441050] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2564.482206] env[63279]: DEBUG oslo_vmware.api [None req-0f45aae6-592f-4597-b6d5-97a31310f2d3 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Task: {'id': task-2088353, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2564.590198] env[63279]: DEBUG oslo_concurrency.lockutils [None req-5e650757-0c4d-48e0-8082-a37179e67256 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Acquiring lock "cc0d3870-41fa-4cd4-a16d-e52e705f29a3" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2564.590502] env[63279]: DEBUG oslo_concurrency.lockutils [None req-5e650757-0c4d-48e0-8082-a37179e67256 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Lock "cc0d3870-41fa-4cd4-a16d-e52e705f29a3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2564.633806] env[63279]: DEBUG oslo_concurrency.lockutils [None req-4a55d837-c41a-499c-94b3-d2e9de675875 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2564.634121] env[63279]: DEBUG oslo_concurrency.lockutils [None req-4a55d837-c41a-499c-94b3-d2e9de675875 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2564.634378] env[63279]: DEBUG nova.objects.instance [None req-4a55d837-c41a-499c-94b3-d2e9de675875 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Lazy-loading 'resources' on Instance uuid 011af8fa-0f76-44a8-8b24-b3d65f5e841e {{(pid=63279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2564.982211] env[63279]: DEBUG oslo_vmware.api [None req-0f45aae6-592f-4597-b6d5-97a31310f2d3 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Task: {'id': task-2088353, 'name': PowerOnVM_Task, 'duration_secs': 0.530053} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2564.982593] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-0f45aae6-592f-4597-b6d5-97a31310f2d3 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: c5251417-7bf6-4ed9-9c98-2fa851e4ac27] Resumed the VM {{(pid=63279) resume /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1188}} [ 2564.982593] env[63279]: DEBUG nova.compute.manager [None req-0f45aae6-592f-4597-b6d5-97a31310f2d3 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: c5251417-7bf6-4ed9-9c98-2fa851e4ac27] Checking state {{(pid=63279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2564.983378] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76f69cb4-b8f1-4f64-abb6-8562da3261bd {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2565.093262] env[63279]: DEBUG nova.compute.manager [None req-5e650757-0c4d-48e0-8082-a37179e67256 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: cc0d3870-41fa-4cd4-a16d-e52e705f29a3] Starting instance... {{(pid=63279) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 2565.137514] env[63279]: DEBUG nova.objects.instance [None req-4a55d837-c41a-499c-94b3-d2e9de675875 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Lazy-loading 'numa_topology' on Instance uuid 011af8fa-0f76-44a8-8b24-b3d65f5e841e {{(pid=63279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2565.302755] env[63279]: DEBUG nova.compute.manager [req-7d9719be-ca76-4f09-af3b-5059de474068 req-42694bee-93c0-4a21-83e9-e695de3f909f service nova] [instance: 011af8fa-0f76-44a8-8b24-b3d65f5e841e] Received event network-changed-5fc4a7ed-d188-4387-a1a2-7dc3778475f8 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2565.302952] env[63279]: DEBUG nova.compute.manager [req-7d9719be-ca76-4f09-af3b-5059de474068 req-42694bee-93c0-4a21-83e9-e695de3f909f service nova] [instance: 011af8fa-0f76-44a8-8b24-b3d65f5e841e] Refreshing instance network info cache due to event network-changed-5fc4a7ed-d188-4387-a1a2-7dc3778475f8. 
{{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11655}} [ 2565.303215] env[63279]: DEBUG oslo_concurrency.lockutils [req-7d9719be-ca76-4f09-af3b-5059de474068 req-42694bee-93c0-4a21-83e9-e695de3f909f service nova] Acquiring lock "refresh_cache-011af8fa-0f76-44a8-8b24-b3d65f5e841e" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2565.303384] env[63279]: DEBUG oslo_concurrency.lockutils [req-7d9719be-ca76-4f09-af3b-5059de474068 req-42694bee-93c0-4a21-83e9-e695de3f909f service nova] Acquired lock "refresh_cache-011af8fa-0f76-44a8-8b24-b3d65f5e841e" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2565.303541] env[63279]: DEBUG nova.network.neutron [req-7d9719be-ca76-4f09-af3b-5059de474068 req-42694bee-93c0-4a21-83e9-e695de3f909f service nova] [instance: 011af8fa-0f76-44a8-8b24-b3d65f5e841e] Refreshing network info cache for port 5fc4a7ed-d188-4387-a1a2-7dc3778475f8 {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2565.615221] env[63279]: DEBUG oslo_concurrency.lockutils [None req-5e650757-0c4d-48e0-8082-a37179e67256 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2565.640274] env[63279]: DEBUG nova.objects.base [None req-4a55d837-c41a-499c-94b3-d2e9de675875 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Object Instance<011af8fa-0f76-44a8-8b24-b3d65f5e841e> lazy-loaded attributes: resources,numa_topology {{(pid=63279) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 2565.701129] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7518e97d-d5bb-490b-8893-247458d28038 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2565.708763] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f84508ba-8e48-4433-b85b-6cb4f0ccca1d {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2565.738797] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a120096-e01d-4c20-bd07-fa5fc3b17344 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2565.746391] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fcd3319e-4e99-45f6-80d2-c0f34fa2996f {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2565.761909] env[63279]: DEBUG nova.compute.provider_tree [None req-4a55d837-c41a-499c-94b3-d2e9de675875 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Inventory has not changed in ProviderTree for provider: 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2566.015594] env[63279]: DEBUG nova.network.neutron [req-7d9719be-ca76-4f09-af3b-5059de474068 req-42694bee-93c0-4a21-83e9-e695de3f909f service nova] [instance: 
011af8fa-0f76-44a8-8b24-b3d65f5e841e] Updated VIF entry in instance network info cache for port 5fc4a7ed-d188-4387-a1a2-7dc3778475f8. {{(pid=63279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2566.015985] env[63279]: DEBUG nova.network.neutron [req-7d9719be-ca76-4f09-af3b-5059de474068 req-42694bee-93c0-4a21-83e9-e695de3f909f service nova] [instance: 011af8fa-0f76-44a8-8b24-b3d65f5e841e] Updating instance_info_cache with network_info: [{"id": "5fc4a7ed-d188-4387-a1a2-7dc3778475f8", "address": "fa:16:3e:a5:22:54", "network": {"id": "1237aa69-f92f-4996-893d-4007ba590d1d", "bridge": null, "label": "tempest-ServerActionsTestOtherB-544783943-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.237", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "996f8d6e14a14ac39f207eced547ef33", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tap5fc4a7ed-d1", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2566.266482] env[63279]: DEBUG nova.scheduler.client.report [None req-4a55d837-c41a-499c-94b3-d2e9de675875 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Inventory has not changed for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2566.519099] env[63279]: DEBUG oslo_concurrency.lockutils [req-7d9719be-ca76-4f09-af3b-5059de474068 req-42694bee-93c0-4a21-83e9-e695de3f909f service nova] Releasing lock "refresh_cache-011af8fa-0f76-44a8-8b24-b3d65f5e841e" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2566.674639] env[63279]: DEBUG oslo_concurrency.lockutils [None req-5ba02c89-d40c-49c3-9dd2-38b76ecb11a9 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Acquiring lock "011af8fa-0f76-44a8-8b24-b3d65f5e841e" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2566.772815] env[63279]: DEBUG oslo_concurrency.lockutils [None req-4a55d837-c41a-499c-94b3-d2e9de675875 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.139s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 
2566.775294] env[63279]: DEBUG oslo_concurrency.lockutils [None req-5e650757-0c4d-48e0-8082-a37179e67256 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.160s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2566.776709] env[63279]: INFO nova.compute.claims [None req-5e650757-0c4d-48e0-8082-a37179e67256 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: cc0d3870-41fa-4cd4-a16d-e52e705f29a3] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2566.838020] env[63279]: DEBUG oslo_concurrency.lockutils [None req-da81f0e1-3685-4bff-a62b-b1669abf5122 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Acquiring lock "c5251417-7bf6-4ed9-9c98-2fa851e4ac27" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2566.838355] env[63279]: DEBUG oslo_concurrency.lockutils [None req-da81f0e1-3685-4bff-a62b-b1669abf5122 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Lock "c5251417-7bf6-4ed9-9c98-2fa851e4ac27" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2566.839027] env[63279]: DEBUG oslo_concurrency.lockutils [None req-da81f0e1-3685-4bff-a62b-b1669abf5122 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Acquiring lock "c5251417-7bf6-4ed9-9c98-2fa851e4ac27-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2566.839027] env[63279]: DEBUG oslo_concurrency.lockutils [None req-da81f0e1-3685-4bff-a62b-b1669abf5122 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Lock "c5251417-7bf6-4ed9-9c98-2fa851e4ac27-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2566.839027] env[63279]: DEBUG oslo_concurrency.lockutils [None req-da81f0e1-3685-4bff-a62b-b1669abf5122 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Lock "c5251417-7bf6-4ed9-9c98-2fa851e4ac27-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2566.841333] env[63279]: INFO nova.compute.manager [None req-da81f0e1-3685-4bff-a62b-b1669abf5122 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: c5251417-7bf6-4ed9-9c98-2fa851e4ac27] Terminating instance [ 2567.285794] env[63279]: DEBUG oslo_concurrency.lockutils [None req-4a55d837-c41a-499c-94b3-d2e9de675875 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Lock 
"011af8fa-0f76-44a8-8b24-b3d65f5e841e" "released" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: held 25.028s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2567.286753] env[63279]: DEBUG oslo_concurrency.lockutils [None req-5ba02c89-d40c-49c3-9dd2-38b76ecb11a9 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Lock "011af8fa-0f76-44a8-8b24-b3d65f5e841e" acquired by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: waited 0.612s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2567.286981] env[63279]: INFO nova.compute.manager [None req-5ba02c89-d40c-49c3-9dd2-38b76ecb11a9 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 011af8fa-0f76-44a8-8b24-b3d65f5e841e] Unshelving [ 2567.345807] env[63279]: DEBUG nova.compute.manager [None req-da81f0e1-3685-4bff-a62b-b1669abf5122 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: c5251417-7bf6-4ed9-9c98-2fa851e4ac27] Start destroying the instance on the hypervisor. {{(pid=63279) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2567.346107] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-da81f0e1-3685-4bff-a62b-b1669abf5122 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: c5251417-7bf6-4ed9-9c98-2fa851e4ac27] Destroying instance {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2567.347015] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a95aeeda-339a-48ab-9c8c-ba185aa8be39 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2567.354830] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-da81f0e1-3685-4bff-a62b-b1669abf5122 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: c5251417-7bf6-4ed9-9c98-2fa851e4ac27] Powering off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2567.355115] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3b6b6e1f-0ac6-47ef-96b7-6ea8ade4430b {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2567.361391] env[63279]: DEBUG oslo_vmware.api [None req-da81f0e1-3685-4bff-a62b-b1669abf5122 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Waiting for the task: (returnval){ [ 2567.361391] env[63279]: value = "task-2088354" [ 2567.361391] env[63279]: _type = "Task" [ 2567.361391] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2567.368777] env[63279]: DEBUG oslo_vmware.api [None req-da81f0e1-3685-4bff-a62b-b1669abf5122 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Task: {'id': task-2088354, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2567.436277] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2567.861421] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce233c8a-577f-4677-bd18-f57613d84184 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2567.873516] env[63279]: DEBUG oslo_vmware.api [None req-da81f0e1-3685-4bff-a62b-b1669abf5122 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Task: {'id': task-2088354, 'name': PowerOffVM_Task, 'duration_secs': 0.191522} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2567.875347] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-da81f0e1-3685-4bff-a62b-b1669abf5122 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: c5251417-7bf6-4ed9-9c98-2fa851e4ac27] Powered off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2567.875525] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-da81f0e1-3685-4bff-a62b-b1669abf5122 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: c5251417-7bf6-4ed9-9c98-2fa851e4ac27] Unregistering the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2567.875795] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a9d56214-9d4f-41a4-ac6f-18be89e72710 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2567.877794] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7870793-b449-46ba-bc7f-c2c38d0230a6 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2567.908252] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9fd7cd4b-9a44-40e3-a69b-ebb355bd83e6 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2567.915408] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-193133b3-80e5-4efa-bb55-100fb143cc0d {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2567.928403] env[63279]: DEBUG nova.compute.provider_tree [None req-5e650757-0c4d-48e0-8082-a37179e67256 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Inventory has not changed in ProviderTree for provider: 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2568.092464] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-da81f0e1-3685-4bff-a62b-b1669abf5122 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: c5251417-7bf6-4ed9-9c98-2fa851e4ac27] Unregistered the VM {{(pid=63279) _destroy_instance 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2568.092683] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-da81f0e1-3685-4bff-a62b-b1669abf5122 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: c5251417-7bf6-4ed9-9c98-2fa851e4ac27] Deleting contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2568.092841] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-da81f0e1-3685-4bff-a62b-b1669abf5122 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Deleting the datastore file [datastore1] c5251417-7bf6-4ed9-9c98-2fa851e4ac27 {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2568.093142] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-28735518-08e1-4c43-8406-e59a347acce0 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2568.099570] env[63279]: DEBUG oslo_vmware.api [None req-da81f0e1-3685-4bff-a62b-b1669abf5122 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Waiting for the task: (returnval){ [ 2568.099570] env[63279]: value = "task-2088356" [ 2568.099570] env[63279]: _type = "Task" [ 2568.099570] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2568.106717] env[63279]: DEBUG oslo_vmware.api [None req-da81f0e1-3685-4bff-a62b-b1669abf5122 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Task: {'id': task-2088356, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2568.306541] env[63279]: DEBUG oslo_concurrency.lockutils [None req-5ba02c89-d40c-49c3-9dd2-38b76ecb11a9 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2568.431540] env[63279]: DEBUG nova.scheduler.client.report [None req-5e650757-0c4d-48e0-8082-a37179e67256 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Inventory has not changed for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2568.609379] env[63279]: DEBUG oslo_vmware.api [None req-da81f0e1-3685-4bff-a62b-b1669abf5122 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Task: {'id': task-2088356, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.165703} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2568.609633] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-da81f0e1-3685-4bff-a62b-b1669abf5122 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Deleted the datastore file {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2568.610030] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-da81f0e1-3685-4bff-a62b-b1669abf5122 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: c5251417-7bf6-4ed9-9c98-2fa851e4ac27] Deleted contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2568.610030] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-da81f0e1-3685-4bff-a62b-b1669abf5122 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: c5251417-7bf6-4ed9-9c98-2fa851e4ac27] Instance destroyed {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2568.610192] env[63279]: INFO nova.compute.manager [None req-da81f0e1-3685-4bff-a62b-b1669abf5122 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] [instance: c5251417-7bf6-4ed9-9c98-2fa851e4ac27] Took 1.26 seconds to destroy the instance on the hypervisor. [ 2568.610443] env[63279]: DEBUG oslo.service.loopingcall [None req-da81f0e1-3685-4bff-a62b-b1669abf5122 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2568.610641] env[63279]: DEBUG nova.compute.manager [-] [instance: c5251417-7bf6-4ed9-9c98-2fa851e4ac27] Deallocating network for instance {{(pid=63279) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2568.610739] env[63279]: DEBUG nova.network.neutron [-] [instance: c5251417-7bf6-4ed9-9c98-2fa851e4ac27] deallocate_for_instance() {{(pid=63279) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2568.937136] env[63279]: DEBUG oslo_concurrency.lockutils [None req-5e650757-0c4d-48e0-8082-a37179e67256 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.162s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2568.937701] env[63279]: DEBUG nova.compute.manager [None req-5e650757-0c4d-48e0-8082-a37179e67256 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: cc0d3870-41fa-4cd4-a16d-e52e705f29a3] Start building networks asynchronously for instance. 
{{(pid=63279) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 2568.941072] env[63279]: DEBUG oslo_concurrency.lockutils [None req-5ba02c89-d40c-49c3-9dd2-38b76ecb11a9 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.635s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2568.941386] env[63279]: DEBUG nova.objects.instance [None req-5ba02c89-d40c-49c3-9dd2-38b76ecb11a9 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Lazy-loading 'pci_requests' on Instance uuid 011af8fa-0f76-44a8-8b24-b3d65f5e841e {{(pid=63279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2569.119070] env[63279]: DEBUG nova.compute.manager [req-9cad4e95-6e5b-4f40-80f0-0b0a05d3fe57 req-31e8afb3-7d20-4c1a-a00d-d993c2b2c893 service nova] [instance: c5251417-7bf6-4ed9-9c98-2fa851e4ac27] Received event network-vif-deleted-cb63f840-897f-4d96-aa93-bd770d4895d7 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2569.119290] env[63279]: INFO nova.compute.manager [req-9cad4e95-6e5b-4f40-80f0-0b0a05d3fe57 req-31e8afb3-7d20-4c1a-a00d-d993c2b2c893 service nova] [instance: c5251417-7bf6-4ed9-9c98-2fa851e4ac27] Neutron deleted interface cb63f840-897f-4d96-aa93-bd770d4895d7; detaching it from the instance and deleting it from the info cache [ 2569.119464] env[63279]: DEBUG nova.network.neutron [req-9cad4e95-6e5b-4f40-80f0-0b0a05d3fe57 req-31e8afb3-7d20-4c1a-a00d-d993c2b2c893 service nova] [instance: c5251417-7bf6-4ed9-9c98-2fa851e4ac27] Updating instance_info_cache with network_info: [] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2569.440644] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2569.441168] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Starting heal instance info cache {{(pid=63279) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10447}} [ 2569.443794] env[63279]: DEBUG nova.compute.utils [None req-5e650757-0c4d-48e0-8082-a37179e67256 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Using /dev/sd instead of None {{(pid=63279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2569.446189] env[63279]: DEBUG nova.objects.instance [None req-5ba02c89-d40c-49c3-9dd2-38b76ecb11a9 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Lazy-loading 'numa_topology' on Instance uuid 011af8fa-0f76-44a8-8b24-b3d65f5e841e {{(pid=63279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2569.447393] env[63279]: DEBUG nova.compute.manager [None req-5e650757-0c4d-48e0-8082-a37179e67256 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: cc0d3870-41fa-4cd4-a16d-e52e705f29a3] Allocating IP information in the background. 
{{(pid=63279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 2569.447562] env[63279]: DEBUG nova.network.neutron [None req-5e650757-0c4d-48e0-8082-a37179e67256 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: cc0d3870-41fa-4cd4-a16d-e52e705f29a3] allocate_for_instance() {{(pid=63279) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2569.487066] env[63279]: DEBUG nova.policy [None req-5e650757-0c4d-48e0-8082-a37179e67256 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c7cd64afda9f4eb488bdf76a36f2fee6', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f9427c264e8e41998f579af352cb48cf', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63279) authorize /opt/stack/nova/nova/policy.py:192}} [ 2569.601407] env[63279]: DEBUG nova.network.neutron [-] [instance: c5251417-7bf6-4ed9-9c98-2fa851e4ac27] Updating instance_info_cache with network_info: [] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2569.622327] env[63279]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-5e266fd7-6564-4699-b04c-d2f0778a322e {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2569.632858] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5ac328f-8796-4728-9170-c2e4a00dfe15 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2569.659329] env[63279]: DEBUG nova.compute.manager [req-9cad4e95-6e5b-4f40-80f0-0b0a05d3fe57 req-31e8afb3-7d20-4c1a-a00d-d993c2b2c893 service nova] [instance: c5251417-7bf6-4ed9-9c98-2fa851e4ac27] Detach interface failed, port_id=cb63f840-897f-4d96-aa93-bd770d4895d7, reason: Instance c5251417-7bf6-4ed9-9c98-2fa851e4ac27 could not be found. {{(pid=63279) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11484}} [ 2569.743357] env[63279]: DEBUG nova.network.neutron [None req-5e650757-0c4d-48e0-8082-a37179e67256 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: cc0d3870-41fa-4cd4-a16d-e52e705f29a3] Successfully created port: 71e3fcb5-bcb8-4ad9-b60f-3cfb2943cff0 {{(pid=63279) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2569.948831] env[63279]: DEBUG nova.compute.manager [None req-5e650757-0c4d-48e0-8082-a37179e67256 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: cc0d3870-41fa-4cd4-a16d-e52e705f29a3] Start building block device mappings for instance. 
{{(pid=63279) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 2569.956021] env[63279]: INFO nova.compute.claims [None req-5ba02c89-d40c-49c3-9dd2-38b76ecb11a9 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 011af8fa-0f76-44a8-8b24-b3d65f5e841e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2570.105069] env[63279]: INFO nova.compute.manager [-] [instance: c5251417-7bf6-4ed9-9c98-2fa851e4ac27] Took 1.49 seconds to deallocate network for instance. [ 2570.611363] env[63279]: DEBUG oslo_concurrency.lockutils [None req-da81f0e1-3685-4bff-a62b-b1669abf5122 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2570.959052] env[63279]: DEBUG nova.compute.manager [None req-5e650757-0c4d-48e0-8082-a37179e67256 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: cc0d3870-41fa-4cd4-a16d-e52e705f29a3] Start spawning the instance on the hypervisor. {{(pid=63279) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 2570.963982] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Acquiring lock "refresh_cache-011af8fa-0f76-44a8-8b24-b3d65f5e841e" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2570.963982] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Acquired lock "refresh_cache-011af8fa-0f76-44a8-8b24-b3d65f5e841e" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2570.963982] env[63279]: DEBUG nova.network.neutron [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] [instance: 011af8fa-0f76-44a8-8b24-b3d65f5e841e] Forcefully refreshing network info cache for instance {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}} [ 2570.986500] env[63279]: DEBUG nova.virt.hardware [None req-5e650757-0c4d-48e0-8082-a37179e67256 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-13T17:30:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-13T17:30:01Z,direct_url=,disk_format='vmdk',id=30887889-e45b-4f67-8b3c-16216e594a90,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a935e86eee0a4c38adfe0367d2097a61',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-13T17:30:02Z,virtual_size=,visibility=), allow threads: False {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2570.986743] env[63279]: DEBUG nova.virt.hardware [None req-5e650757-0c4d-48e0-8082-a37179e67256 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Flavor limits 0:0:0 {{(pid=63279) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2570.986903] env[63279]: DEBUG nova.virt.hardware [None req-5e650757-0c4d-48e0-8082-a37179e67256 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Image limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2570.987100] env[63279]: DEBUG nova.virt.hardware [None req-5e650757-0c4d-48e0-8082-a37179e67256 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Flavor pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2570.987254] env[63279]: DEBUG nova.virt.hardware [None req-5e650757-0c4d-48e0-8082-a37179e67256 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Image pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2570.987404] env[63279]: DEBUG nova.virt.hardware [None req-5e650757-0c4d-48e0-8082-a37179e67256 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2570.987614] env[63279]: DEBUG nova.virt.hardware [None req-5e650757-0c4d-48e0-8082-a37179e67256 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 2570.987778] env[63279]: DEBUG nova.virt.hardware [None req-5e650757-0c4d-48e0-8082-a37179e67256 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 2570.987950] env[63279]: DEBUG nova.virt.hardware [None req-5e650757-0c4d-48e0-8082-a37179e67256 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Got 1 possible topologies {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2570.988131] env[63279]: DEBUG nova.virt.hardware [None req-5e650757-0c4d-48e0-8082-a37179e67256 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2570.988309] env[63279]: DEBUG nova.virt.hardware [None req-5e650757-0c4d-48e0-8082-a37179e67256 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2570.989384] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9333988d-1549-4faa-a23d-25ffa7bd6817 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2570.998816] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-0d9352c5-2b0c-463a-aea5-3a6ca96f71b5 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2571.032518] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-961174c2-7d62-465b-98a4-064657d9ea2a {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2571.039437] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2d95ecc-97eb-42dc-aa69-b25f5874c72a {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2571.069316] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ccf59569-1ec6-48c0-9e16-1958194fd04c {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2571.076035] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4e4d804-d4e2-42ca-8acd-77c3a4b7ace8 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2571.088413] env[63279]: DEBUG nova.compute.provider_tree [None req-5ba02c89-d40c-49c3-9dd2-38b76ecb11a9 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Updating inventory in ProviderTree for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2571.608945] env[63279]: ERROR nova.scheduler.client.report [None req-5ba02c89-d40c-49c3-9dd2-38b76ecb11a9 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [req-83f88e3c-8123-4fd3-95c8-23f717b36286] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 0ba7c625-a0fc-4d3c-b804-196d00f00137. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-83f88e3c-8123-4fd3-95c8-23f717b36286"}]} [ 2571.628214] env[63279]: DEBUG nova.scheduler.client.report [None req-5ba02c89-d40c-49c3-9dd2-38b76ecb11a9 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Refreshing inventories for resource provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 2571.639772] env[63279]: DEBUG nova.scheduler.client.report [None req-5ba02c89-d40c-49c3-9dd2-38b76ecb11a9 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Updating ProviderTree inventory for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 2571.640000] env[63279]: DEBUG nova.compute.provider_tree [None req-5ba02c89-d40c-49c3-9dd2-38b76ecb11a9 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Updating inventory in ProviderTree for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2571.649378] env[63279]: DEBUG nova.scheduler.client.report [None req-5ba02c89-d40c-49c3-9dd2-38b76ecb11a9 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Refreshing aggregate associations for resource provider 0ba7c625-a0fc-4d3c-b804-196d00f00137, aggregates: None {{(pid=63279) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 2571.664767] env[63279]: DEBUG nova.scheduler.client.report [None req-5ba02c89-d40c-49c3-9dd2-38b76ecb11a9 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Refreshing trait associations for resource provider 0ba7c625-a0fc-4d3c-b804-196d00f00137, traits: COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK {{(pid=63279) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 2571.726143] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-282fe72f-d573-4232-b419-8038d149a9c0 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2571.733422] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8420d743-5f48-48cf-8f46-027fe2b95729 
{{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2571.763105] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b30377ea-897b-4224-b406-7736386b15e4 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2571.769467] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9844e70-f85c-4242-933f-d9096a36ba47 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2571.782147] env[63279]: DEBUG nova.compute.provider_tree [None req-5ba02c89-d40c-49c3-9dd2-38b76ecb11a9 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Updating inventory in ProviderTree for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2572.177010] env[63279]: DEBUG nova.network.neutron [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] [instance: 011af8fa-0f76-44a8-8b24-b3d65f5e841e] Updating instance_info_cache with network_info: [{"id": "5fc4a7ed-d188-4387-a1a2-7dc3778475f8", "address": "fa:16:3e:a5:22:54", "network": {"id": "1237aa69-f92f-4996-893d-4007ba590d1d", "bridge": null, "label": "tempest-ServerActionsTestOtherB-544783943-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.237", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "996f8d6e14a14ac39f207eced547ef33", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tap5fc4a7ed-d1", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2572.311822] env[63279]: DEBUG nova.scheduler.client.report [None req-5ba02c89-d40c-49c3-9dd2-38b76ecb11a9 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Updated inventory for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 with generation 185 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 2572.313122] env[63279]: DEBUG 
nova.compute.provider_tree [None req-5ba02c89-d40c-49c3-9dd2-38b76ecb11a9 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Updating resource provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 generation from 185 to 186 during operation: update_inventory {{(pid=63279) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 2572.313122] env[63279]: DEBUG nova.compute.provider_tree [None req-5ba02c89-d40c-49c3-9dd2-38b76ecb11a9 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Updating inventory in ProviderTree for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2572.679591] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Releasing lock "refresh_cache-011af8fa-0f76-44a8-8b24-b3d65f5e841e" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2572.679893] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] [instance: 011af8fa-0f76-44a8-8b24-b3d65f5e841e] Updated the network info_cache for instance {{(pid=63279) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10518}} [ 2572.680036] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2572.680208] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2572.680362] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2572.680512] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager.update_available_resource {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2572.817325] env[63279]: DEBUG oslo_concurrency.lockutils [None req-5ba02c89-d40c-49c3-9dd2-38b76ecb11a9 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.876s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2572.819582] env[63279]: DEBUG oslo_concurrency.lockutils [None req-da81f0e1-3685-4bff-a62b-b1669abf5122 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Lock 
"compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 2.209s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2572.819832] env[63279]: DEBUG nova.objects.instance [None req-da81f0e1-3685-4bff-a62b-b1669abf5122 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Lazy-loading 'resources' on Instance uuid c5251417-7bf6-4ed9-9c98-2fa851e4ac27 {{(pid=63279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2572.846451] env[63279]: INFO nova.network.neutron [None req-5ba02c89-d40c-49c3-9dd2-38b76ecb11a9 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 011af8fa-0f76-44a8-8b24-b3d65f5e841e] Updating port 5fc4a7ed-d188-4387-a1a2-7dc3778475f8 with attributes {'binding:host_id': 'cpu-1', 'device_owner': 'compute:nova'} [ 2573.183747] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2573.384700] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de5f07ce-f7db-401f-8173-c56ed67db19c {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2573.392224] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8df8833d-19f8-4c76-980e-ea65082d5c16 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2573.422272] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3204d2f7-040a-42c4-8321-0a9b332bd192 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2573.429426] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d98b49a-0dc0-493a-adfd-5de4aad3e2c5 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2573.442144] env[63279]: DEBUG nova.compute.provider_tree [None req-da81f0e1-3685-4bff-a62b-b1669abf5122 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Inventory has not changed in ProviderTree for provider: 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2573.945230] env[63279]: DEBUG nova.scheduler.client.report [None req-da81f0e1-3685-4bff-a62b-b1669abf5122 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Inventory has not changed for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2574.450837] env[63279]: DEBUG 
oslo_concurrency.lockutils [None req-da81f0e1-3685-4bff-a62b-b1669abf5122 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.631s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2574.453758] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 1.270s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2574.453758] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2574.453758] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=63279) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2574.454678] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-800b7df6-bc25-45d9-9b35-f14880541f45 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2574.462902] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-453065c5-6638-4841-9e4f-8bf23605011c {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2574.467959] env[63279]: INFO nova.scheduler.client.report [None req-da81f0e1-3685-4bff-a62b-b1669abf5122 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Deleted allocations for instance c5251417-7bf6-4ed9-9c98-2fa851e4ac27 [ 2574.479287] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af650778-d1f0-481b-ac9d-23e7379652ad {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2574.486044] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3b03f19-9993-4259-bad0-1302df010366 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2574.515627] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180508MB free_disk=171GB free_vcpus=48 pci_devices=None {{(pid=63279) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2574.515768] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2574.515969] env[63279]: DEBUG oslo_concurrency.lockutils 
[None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2574.985506] env[63279]: DEBUG oslo_concurrency.lockutils [None req-da81f0e1-3685-4bff-a62b-b1669abf5122 tempest-ServerActionsTestJSON-783283487 tempest-ServerActionsTestJSON-783283487-project-member] Lock "c5251417-7bf6-4ed9-9c98-2fa851e4ac27" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 8.147s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2575.542309] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Instance befc985f-68e2-4a04-8de0-9ca9bb3fa504 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2575.542475] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Instance 30ac4320-5ee0-424b-9bbb-e2d53277be80 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2575.542598] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Instance cc0d3870-41fa-4cd4-a16d-e52e705f29a3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2575.542736] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Instance 011af8fa-0f76-44a8-8b24-b3d65f5e841e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2575.542928] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Total usable vcpus: 48, total allocated vcpus: 4 {{(pid=63279) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2575.543077] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1280MB phys_disk=200GB used_disk=4GB total_vcpus=48 used_vcpus=4 pci_stats=[] {{(pid=63279) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2575.604679] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-450a143a-ce6a-49f8-aec3-f05a9a229054 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2575.612602] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4936a6c-3c73-48dd-bf57-437296d22899 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2575.640920] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac0ff23f-2840-498e-98d5-3948e90d74cc {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2575.647558] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7eea9c2-03eb-411e-a3d0-e1e82db6f465 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2575.660976] env[63279]: DEBUG nova.compute.provider_tree [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Inventory has not changed in ProviderTree for provider: 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2576.164051] env[63279]: DEBUG nova.scheduler.client.report [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Inventory has not changed for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2576.670510] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=63279) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2576.670720] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.155s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2578.136167] env[63279]: DEBUG nova.compute.manager 
[req-b3e10363-a484-4773-8b0a-be712f65101a req-e560efc9-6db4-48b0-8d9f-e94d535cd0b3 service nova] [instance: cc0d3870-41fa-4cd4-a16d-e52e705f29a3] Received event network-vif-plugged-71e3fcb5-bcb8-4ad9-b60f-3cfb2943cff0 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2578.136428] env[63279]: DEBUG oslo_concurrency.lockutils [req-b3e10363-a484-4773-8b0a-be712f65101a req-e560efc9-6db4-48b0-8d9f-e94d535cd0b3 service nova] Acquiring lock "cc0d3870-41fa-4cd4-a16d-e52e705f29a3-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2578.136596] env[63279]: DEBUG oslo_concurrency.lockutils [req-b3e10363-a484-4773-8b0a-be712f65101a req-e560efc9-6db4-48b0-8d9f-e94d535cd0b3 service nova] Lock "cc0d3870-41fa-4cd4-a16d-e52e705f29a3-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2578.136771] env[63279]: DEBUG oslo_concurrency.lockutils [req-b3e10363-a484-4773-8b0a-be712f65101a req-e560efc9-6db4-48b0-8d9f-e94d535cd0b3 service nova] Lock "cc0d3870-41fa-4cd4-a16d-e52e705f29a3-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2578.136959] env[63279]: DEBUG nova.compute.manager [req-b3e10363-a484-4773-8b0a-be712f65101a req-e560efc9-6db4-48b0-8d9f-e94d535cd0b3 service nova] [instance: cc0d3870-41fa-4cd4-a16d-e52e705f29a3] No waiting events found dispatching network-vif-plugged-71e3fcb5-bcb8-4ad9-b60f-3cfb2943cff0 {{(pid=63279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 2578.137100] env[63279]: WARNING nova.compute.manager [req-b3e10363-a484-4773-8b0a-be712f65101a req-e560efc9-6db4-48b0-8d9f-e94d535cd0b3 service nova] [instance: cc0d3870-41fa-4cd4-a16d-e52e705f29a3] Received unexpected event network-vif-plugged-71e3fcb5-bcb8-4ad9-b60f-3cfb2943cff0 for instance with vm_state building and task_state spawning. 
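[editor's note] The entries above trace two things worth calling out. First, the resource-tracker audit numbers are internally consistent: the "Final resource view" of used_ram=1280MB, used_disk=4GB, used_vcpus=4 matches the 512 MB reserved in the MEMORY_MB inventory plus four m1.nano instances at 192 MB / 1 GB root / 1 vCPU each. Second, the ERROR with "409 ... placement.concurrent_update" followed by "Refreshing inventories" and a successful update that moves the provider generation from 185 to 186 is Placement's generation-based optimistic concurrency at work: a writer that carries a stale generation gets a 409, re-reads, and retries. The sketch below is a minimal illustration of that retry loop against the Placement HTTP API using requests; it is not Nova's scheduler report client, and PLACEMENT_URL, the token, and the microversion header value are assumptions, not values taken from this log.

```python
# Illustrative sketch only -- NOT nova.scheduler.client.report. It shows the
# generation-based optimistic concurrency behind the 409
# "placement.concurrent_update" / refresh / retry sequence seen in the log.
# PLACEMENT_URL and the token are placeholders (assumptions).
import requests

PLACEMENT_URL = "http://placement.example/placement"   # assumed endpoint
HEADERS = {
    "X-Auth-Token": "ADMIN_TOKEN",                      # assumed credential
    "OpenStack-API-Version": "placement 1.26",          # assumed microversion
}


def set_inventory(rp_uuid, inventories, retries=3):
    """PUT the full inventory for a resource provider, retrying on 409."""
    url = f"{PLACEMENT_URL}/resource_providers/{rp_uuid}/inventories"
    for _ in range(retries):
        # Refresh the provider generation first; writing with a stale
        # generation is exactly what triggers the conflict in the log.
        current = requests.get(url, headers=HEADERS).json()
        body = {
            "resource_provider_generation": current["resource_provider_generation"],
            "inventories": inventories,
        }
        resp = requests.put(url, json=body, headers=HEADERS)
        if resp.status_code == 200:
            return resp.json()          # response carries the new generation
        if resp.status_code != 409:
            resp.raise_for_status()
        # 409 placement.concurrent_update: another writer bumped the
        # generation; loop to re-read and try again.
    raise RuntimeError("gave up after repeated generation conflicts")
```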
[ 2578.219984] env[63279]: DEBUG nova.network.neutron [None req-5e650757-0c4d-48e0-8082-a37179e67256 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: cc0d3870-41fa-4cd4-a16d-e52e705f29a3] Successfully updated port: 71e3fcb5-bcb8-4ad9-b60f-3cfb2943cff0 {{(pid=63279) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2578.431799] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2578.723476] env[63279]: DEBUG oslo_concurrency.lockutils [None req-5e650757-0c4d-48e0-8082-a37179e67256 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Acquiring lock "refresh_cache-cc0d3870-41fa-4cd4-a16d-e52e705f29a3" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2578.723724] env[63279]: DEBUG oslo_concurrency.lockutils [None req-5e650757-0c4d-48e0-8082-a37179e67256 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Acquired lock "refresh_cache-cc0d3870-41fa-4cd4-a16d-e52e705f29a3" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2578.723946] env[63279]: DEBUG nova.network.neutron [None req-5e650757-0c4d-48e0-8082-a37179e67256 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: cc0d3870-41fa-4cd4-a16d-e52e705f29a3] Building network info cache for instance {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2578.831361] env[63279]: DEBUG oslo_concurrency.lockutils [None req-5ba02c89-d40c-49c3-9dd2-38b76ecb11a9 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Acquiring lock "refresh_cache-011af8fa-0f76-44a8-8b24-b3d65f5e841e" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2578.831547] env[63279]: DEBUG oslo_concurrency.lockutils [None req-5ba02c89-d40c-49c3-9dd2-38b76ecb11a9 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Acquired lock "refresh_cache-011af8fa-0f76-44a8-8b24-b3d65f5e841e" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2578.831709] env[63279]: DEBUG nova.network.neutron [None req-5ba02c89-d40c-49c3-9dd2-38b76ecb11a9 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 011af8fa-0f76-44a8-8b24-b3d65f5e841e] Building network info cache for instance {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2579.256606] env[63279]: DEBUG nova.network.neutron [None req-5e650757-0c4d-48e0-8082-a37179e67256 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: cc0d3870-41fa-4cd4-a16d-e52e705f29a3] Instance cache missing network info. 
{{(pid=63279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2579.421442] env[63279]: DEBUG nova.network.neutron [None req-5e650757-0c4d-48e0-8082-a37179e67256 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: cc0d3870-41fa-4cd4-a16d-e52e705f29a3] Updating instance_info_cache with network_info: [{"id": "71e3fcb5-bcb8-4ad9-b60f-3cfb2943cff0", "address": "fa:16:3e:dc:94:2d", "network": {"id": "1e35690a-f100-41b7-a461-7334966ef784", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1516696428-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f9427c264e8e41998f579af352cb48cf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "15922696-dc08-44ef-97be-0b09a9dfeae8", "external-id": "nsx-vlan-transportzone-791", "segmentation_id": 791, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap71e3fcb5-bc", "ovs_interfaceid": "71e3fcb5-bcb8-4ad9-b60f-3cfb2943cff0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2579.563410] env[63279]: DEBUG nova.network.neutron [None req-5ba02c89-d40c-49c3-9dd2-38b76ecb11a9 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 011af8fa-0f76-44a8-8b24-b3d65f5e841e] Updating instance_info_cache with network_info: [{"id": "5fc4a7ed-d188-4387-a1a2-7dc3778475f8", "address": "fa:16:3e:a5:22:54", "network": {"id": "1237aa69-f92f-4996-893d-4007ba590d1d", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-544783943-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.237", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "996f8d6e14a14ac39f207eced547ef33", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d88b750a-0e7d-4f16-8bd5-8e6d5743b720", "external-id": "nsx-vlan-transportzone-715", "segmentation_id": 715, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5fc4a7ed-d1", "ovs_interfaceid": "5fc4a7ed-d188-4387-a1a2-7dc3778475f8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2579.924190] env[63279]: DEBUG oslo_concurrency.lockutils [None req-5e650757-0c4d-48e0-8082-a37179e67256 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Releasing lock 
"refresh_cache-cc0d3870-41fa-4cd4-a16d-e52e705f29a3" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2579.924508] env[63279]: DEBUG nova.compute.manager [None req-5e650757-0c4d-48e0-8082-a37179e67256 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: cc0d3870-41fa-4cd4-a16d-e52e705f29a3] Instance network_info: |[{"id": "71e3fcb5-bcb8-4ad9-b60f-3cfb2943cff0", "address": "fa:16:3e:dc:94:2d", "network": {"id": "1e35690a-f100-41b7-a461-7334966ef784", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1516696428-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f9427c264e8e41998f579af352cb48cf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "15922696-dc08-44ef-97be-0b09a9dfeae8", "external-id": "nsx-vlan-transportzone-791", "segmentation_id": 791, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap71e3fcb5-bc", "ovs_interfaceid": "71e3fcb5-bcb8-4ad9-b60f-3cfb2943cff0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 2579.924958] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-5e650757-0c4d-48e0-8082-a37179e67256 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: cc0d3870-41fa-4cd4-a16d-e52e705f29a3] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:dc:94:2d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '15922696-dc08-44ef-97be-0b09a9dfeae8', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '71e3fcb5-bcb8-4ad9-b60f-3cfb2943cff0', 'vif_model': 'vmxnet3'}] {{(pid=63279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2579.932418] env[63279]: DEBUG oslo.service.loopingcall [None req-5e650757-0c4d-48e0-8082-a37179e67256 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2579.932626] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cc0d3870-41fa-4cd4-a16d-e52e705f29a3] Creating VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2579.932878] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-4c0da00b-fe64-426b-adc1-020d4c839b96 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2579.952964] env[63279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2579.952964] env[63279]: value = "task-2088357" [ 2579.952964] env[63279]: _type = "Task" [ 2579.952964] env[63279]: } to complete. 
{{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2579.960182] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2088357, 'name': CreateVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2580.066311] env[63279]: DEBUG oslo_concurrency.lockutils [None req-5ba02c89-d40c-49c3-9dd2-38b76ecb11a9 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Releasing lock "refresh_cache-011af8fa-0f76-44a8-8b24-b3d65f5e841e" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2580.092663] env[63279]: DEBUG nova.virt.hardware [None req-5ba02c89-d40c-49c3-9dd2-38b76ecb11a9 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-13T17:30:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='b77dd02c0ae41bec6b8a4629c9d0ed6a',container_format='bare',created_at=2025-01-13T18:04:11Z,direct_url=,disk_format='vmdk',id=7dcfd34e-94d0-4b88-ab7e-c1e1e3da8f20,min_disk=1,min_ram=0,name='tempest-ServerActionsTestOtherB-server-266855964-shelved',owner='996f8d6e14a14ac39f207eced547ef33',properties=ImageMetaProps,protected=,size=31668736,status='active',tags=,updated_at=2025-01-13T18:04:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2580.092943] env[63279]: DEBUG nova.virt.hardware [None req-5ba02c89-d40c-49c3-9dd2-38b76ecb11a9 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Flavor limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2580.093127] env[63279]: DEBUG nova.virt.hardware [None req-5ba02c89-d40c-49c3-9dd2-38b76ecb11a9 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Image limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2580.093321] env[63279]: DEBUG nova.virt.hardware [None req-5ba02c89-d40c-49c3-9dd2-38b76ecb11a9 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Flavor pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2580.093475] env[63279]: DEBUG nova.virt.hardware [None req-5ba02c89-d40c-49c3-9dd2-38b76ecb11a9 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Image pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2580.093625] env[63279]: DEBUG nova.virt.hardware [None req-5ba02c89-d40c-49c3-9dd2-38b76ecb11a9 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2580.093848] env[63279]: DEBUG nova.virt.hardware [None req-5ba02c89-d40c-49c3-9dd2-38b76ecb11a9 tempest-ServerActionsTestOtherB-605016737 
tempest-ServerActionsTestOtherB-605016737-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 2580.094054] env[63279]: DEBUG nova.virt.hardware [None req-5ba02c89-d40c-49c3-9dd2-38b76ecb11a9 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 2580.094240] env[63279]: DEBUG nova.virt.hardware [None req-5ba02c89-d40c-49c3-9dd2-38b76ecb11a9 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Got 1 possible topologies {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2580.094411] env[63279]: DEBUG nova.virt.hardware [None req-5ba02c89-d40c-49c3-9dd2-38b76ecb11a9 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2580.094589] env[63279]: DEBUG nova.virt.hardware [None req-5ba02c89-d40c-49c3-9dd2-38b76ecb11a9 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2580.095453] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4686f34b-d357-42be-ad10-b51695be66cc {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2580.103740] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fcaa1ce9-b1e5-494c-97ab-9113843f0cc6 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2580.118812] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-5ba02c89-d40c-49c3-9dd2-38b76ecb11a9 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 011af8fa-0f76-44a8-8b24-b3d65f5e841e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a5:22:54', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'd88b750a-0e7d-4f16-8bd5-8e6d5743b720', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '5fc4a7ed-d188-4387-a1a2-7dc3778475f8', 'vif_model': 'vmxnet3'}] {{(pid=63279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2580.126488] env[63279]: DEBUG oslo.service.loopingcall [None req-5ba02c89-d40c-49c3-9dd2-38b76ecb11a9 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2580.126769] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 011af8fa-0f76-44a8-8b24-b3d65f5e841e] Creating VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2580.127017] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e5c5a0c9-db07-47d8-b37e-20291493558a {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2580.146245] env[63279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2580.146245] env[63279]: value = "task-2088358" [ 2580.146245] env[63279]: _type = "Task" [ 2580.146245] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2580.154173] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2088358, 'name': CreateVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2580.164375] env[63279]: DEBUG nova.compute.manager [req-a3738798-79d9-4982-94a3-aa69303a29ed req-4dc598f8-5bb8-4190-b67b-b66219bb5149 service nova] [instance: cc0d3870-41fa-4cd4-a16d-e52e705f29a3] Received event network-changed-71e3fcb5-bcb8-4ad9-b60f-3cfb2943cff0 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2580.164506] env[63279]: DEBUG nova.compute.manager [req-a3738798-79d9-4982-94a3-aa69303a29ed req-4dc598f8-5bb8-4190-b67b-b66219bb5149 service nova] [instance: cc0d3870-41fa-4cd4-a16d-e52e705f29a3] Refreshing instance network info cache due to event network-changed-71e3fcb5-bcb8-4ad9-b60f-3cfb2943cff0. {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11655}} [ 2580.164671] env[63279]: DEBUG oslo_concurrency.lockutils [req-a3738798-79d9-4982-94a3-aa69303a29ed req-4dc598f8-5bb8-4190-b67b-b66219bb5149 service nova] Acquiring lock "refresh_cache-cc0d3870-41fa-4cd4-a16d-e52e705f29a3" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2580.164820] env[63279]: DEBUG oslo_concurrency.lockutils [req-a3738798-79d9-4982-94a3-aa69303a29ed req-4dc598f8-5bb8-4190-b67b-b66219bb5149 service nova] Acquired lock "refresh_cache-cc0d3870-41fa-4cd4-a16d-e52e705f29a3" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2580.165076] env[63279]: DEBUG nova.network.neutron [req-a3738798-79d9-4982-94a3-aa69303a29ed req-4dc598f8-5bb8-4190-b67b-b66219bb5149 service nova] [instance: cc0d3870-41fa-4cd4-a16d-e52e705f29a3] Refreshing network info cache for port 71e3fcb5-bcb8-4ad9-b60f-3cfb2943cff0 {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2580.463386] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2088357, 'name': CreateVM_Task, 'duration_secs': 0.341518} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2580.463764] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cc0d3870-41fa-4cd4-a16d-e52e705f29a3] Created VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2580.464253] env[63279]: DEBUG oslo_concurrency.lockutils [None req-5e650757-0c4d-48e0-8082-a37179e67256 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2580.464413] env[63279]: DEBUG oslo_concurrency.lockutils [None req-5e650757-0c4d-48e0-8082-a37179e67256 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2580.464747] env[63279]: DEBUG oslo_concurrency.lockutils [None req-5e650757-0c4d-48e0-8082-a37179e67256 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2580.465020] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7639765c-710d-4520-80b1-ba3e39b2afca {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2580.469591] env[63279]: DEBUG oslo_vmware.api [None req-5e650757-0c4d-48e0-8082-a37179e67256 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Waiting for the task: (returnval){ [ 2580.469591] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]523bd381-a399-49b0-aa52-fc5330fa772f" [ 2580.469591] env[63279]: _type = "Task" [ 2580.469591] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2580.477565] env[63279]: DEBUG oslo_vmware.api [None req-5e650757-0c4d-48e0-8082-a37179e67256 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]523bd381-a399-49b0-aa52-fc5330fa772f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2580.656458] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2088358, 'name': CreateVM_Task, 'duration_secs': 0.399961} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2580.656637] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 011af8fa-0f76-44a8-8b24-b3d65f5e841e] Created VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2580.657256] env[63279]: DEBUG oslo_concurrency.lockutils [None req-5ba02c89-d40c-49c3-9dd2-38b76ecb11a9 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/7dcfd34e-94d0-4b88-ab7e-c1e1e3da8f20" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2580.657426] env[63279]: DEBUG oslo_concurrency.lockutils [None req-5ba02c89-d40c-49c3-9dd2-38b76ecb11a9 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Acquired lock "[datastore1] devstack-image-cache_base/7dcfd34e-94d0-4b88-ab7e-c1e1e3da8f20" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2580.657794] env[63279]: DEBUG oslo_concurrency.lockutils [None req-5ba02c89-d40c-49c3-9dd2-38b76ecb11a9 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/7dcfd34e-94d0-4b88-ab7e-c1e1e3da8f20" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2580.658054] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7f47772c-7846-4708-b914-8c4ae05dd9a4 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2580.662368] env[63279]: DEBUG oslo_vmware.api [None req-5ba02c89-d40c-49c3-9dd2-38b76ecb11a9 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Waiting for the task: (returnval){ [ 2580.662368] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52a1502c-3b8e-9407-9b7a-823e97b15a47" [ 2580.662368] env[63279]: _type = "Task" [ 2580.662368] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2580.671226] env[63279]: DEBUG oslo_vmware.api [None req-5ba02c89-d40c-49c3-9dd2-38b76ecb11a9 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52a1502c-3b8e-9407-9b7a-823e97b15a47, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2580.861246] env[63279]: DEBUG nova.network.neutron [req-a3738798-79d9-4982-94a3-aa69303a29ed req-4dc598f8-5bb8-4190-b67b-b66219bb5149 service nova] [instance: cc0d3870-41fa-4cd4-a16d-e52e705f29a3] Updated VIF entry in instance network info cache for port 71e3fcb5-bcb8-4ad9-b60f-3cfb2943cff0. 
{{(pid=63279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2580.861603] env[63279]: DEBUG nova.network.neutron [req-a3738798-79d9-4982-94a3-aa69303a29ed req-4dc598f8-5bb8-4190-b67b-b66219bb5149 service nova] [instance: cc0d3870-41fa-4cd4-a16d-e52e705f29a3] Updating instance_info_cache with network_info: [{"id": "71e3fcb5-bcb8-4ad9-b60f-3cfb2943cff0", "address": "fa:16:3e:dc:94:2d", "network": {"id": "1e35690a-f100-41b7-a461-7334966ef784", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1516696428-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f9427c264e8e41998f579af352cb48cf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "15922696-dc08-44ef-97be-0b09a9dfeae8", "external-id": "nsx-vlan-transportzone-791", "segmentation_id": 791, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap71e3fcb5-bc", "ovs_interfaceid": "71e3fcb5-bcb8-4ad9-b60f-3cfb2943cff0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2580.979991] env[63279]: DEBUG oslo_vmware.api [None req-5e650757-0c4d-48e0-8082-a37179e67256 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]523bd381-a399-49b0-aa52-fc5330fa772f, 'name': SearchDatastore_Task, 'duration_secs': 0.011562} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2580.980285] env[63279]: DEBUG oslo_concurrency.lockutils [None req-5e650757-0c4d-48e0-8082-a37179e67256 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2580.980515] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-5e650757-0c4d-48e0-8082-a37179e67256 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: cc0d3870-41fa-4cd4-a16d-e52e705f29a3] Processing image 30887889-e45b-4f67-8b3c-16216e594a90 {{(pid=63279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2580.980782] env[63279]: DEBUG oslo_concurrency.lockutils [None req-5e650757-0c4d-48e0-8082-a37179e67256 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2580.980937] env[63279]: DEBUG oslo_concurrency.lockutils [None req-5e650757-0c4d-48e0-8082-a37179e67256 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2580.981155] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-5e650757-0c4d-48e0-8082-a37179e67256 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2580.981417] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2a6a167d-02bf-4598-bcf2-23fe2db0bb55 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2580.997858] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-5e650757-0c4d-48e0-8082-a37179e67256 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2580.998043] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-5e650757-0c4d-48e0-8082-a37179e67256 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2580.998730] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f0b68b23-2949-45c9-8516-3b89235ee450 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2581.004357] env[63279]: DEBUG oslo_vmware.api [None req-5e650757-0c4d-48e0-8082-a37179e67256 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Waiting for the task: (returnval){ [ 2581.004357] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]527cf719-c1a9-9d34-b331-6618b933411b" [ 2581.004357] env[63279]: _type = "Task" [ 2581.004357] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2581.011708] env[63279]: DEBUG oslo_vmware.api [None req-5e650757-0c4d-48e0-8082-a37179e67256 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]527cf719-c1a9-9d34-b331-6618b933411b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2581.172310] env[63279]: DEBUG oslo_concurrency.lockutils [None req-5ba02c89-d40c-49c3-9dd2-38b76ecb11a9 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Releasing lock "[datastore1] devstack-image-cache_base/7dcfd34e-94d0-4b88-ab7e-c1e1e3da8f20" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2581.172551] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-5ba02c89-d40c-49c3-9dd2-38b76ecb11a9 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 011af8fa-0f76-44a8-8b24-b3d65f5e841e] Processing image 7dcfd34e-94d0-4b88-ab7e-c1e1e3da8f20 {{(pid=63279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2581.172816] env[63279]: DEBUG oslo_concurrency.lockutils [None req-5ba02c89-d40c-49c3-9dd2-38b76ecb11a9 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/7dcfd34e-94d0-4b88-ab7e-c1e1e3da8f20/7dcfd34e-94d0-4b88-ab7e-c1e1e3da8f20.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2581.172976] env[63279]: DEBUG oslo_concurrency.lockutils [None req-5ba02c89-d40c-49c3-9dd2-38b76ecb11a9 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Acquired lock "[datastore1] devstack-image-cache_base/7dcfd34e-94d0-4b88-ab7e-c1e1e3da8f20/7dcfd34e-94d0-4b88-ab7e-c1e1e3da8f20.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2581.173176] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-5ba02c89-d40c-49c3-9dd2-38b76ecb11a9 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2581.173416] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-066c57a2-fe7a-497c-adf7-9092b97424f9 {{(pid=63279) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2581.181068] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-5ba02c89-d40c-49c3-9dd2-38b76ecb11a9 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2581.181244] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-5ba02c89-d40c-49c3-9dd2-38b76ecb11a9 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2581.181896] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f0e35a05-0334-41e9-83d3-06eff4cb35c9 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2581.186516] env[63279]: DEBUG oslo_vmware.api [None req-5ba02c89-d40c-49c3-9dd2-38b76ecb11a9 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Waiting for the task: (returnval){ [ 2581.186516] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]523eaca1-63c0-bf49-ea8c-bc18db30ca0b" [ 2581.186516] env[63279]: _type = "Task" [ 2581.186516] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2581.193514] env[63279]: DEBUG oslo_vmware.api [None req-5ba02c89-d40c-49c3-9dd2-38b76ecb11a9 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]523eaca1-63c0-bf49-ea8c-bc18db30ca0b, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2581.364934] env[63279]: DEBUG oslo_concurrency.lockutils [req-a3738798-79d9-4982-94a3-aa69303a29ed req-4dc598f8-5bb8-4190-b67b-b66219bb5149 service nova] Releasing lock "refresh_cache-cc0d3870-41fa-4cd4-a16d-e52e705f29a3" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2581.365256] env[63279]: DEBUG nova.compute.manager [req-a3738798-79d9-4982-94a3-aa69303a29ed req-4dc598f8-5bb8-4190-b67b-b66219bb5149 service nova] [instance: 011af8fa-0f76-44a8-8b24-b3d65f5e841e] Received event network-vif-plugged-5fc4a7ed-d188-4387-a1a2-7dc3778475f8 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2581.365459] env[63279]: DEBUG oslo_concurrency.lockutils [req-a3738798-79d9-4982-94a3-aa69303a29ed req-4dc598f8-5bb8-4190-b67b-b66219bb5149 service nova] Acquiring lock "011af8fa-0f76-44a8-8b24-b3d65f5e841e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2581.365671] env[63279]: DEBUG oslo_concurrency.lockutils [req-a3738798-79d9-4982-94a3-aa69303a29ed req-4dc598f8-5bb8-4190-b67b-b66219bb5149 service nova] Lock "011af8fa-0f76-44a8-8b24-b3d65f5e841e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2581.365915] env[63279]: DEBUG oslo_concurrency.lockutils [req-a3738798-79d9-4982-94a3-aa69303a29ed req-4dc598f8-5bb8-4190-b67b-b66219bb5149 service nova] Lock "011af8fa-0f76-44a8-8b24-b3d65f5e841e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2581.366119] env[63279]: DEBUG nova.compute.manager [req-a3738798-79d9-4982-94a3-aa69303a29ed req-4dc598f8-5bb8-4190-b67b-b66219bb5149 service nova] [instance: 011af8fa-0f76-44a8-8b24-b3d65f5e841e] No waiting events found dispatching network-vif-plugged-5fc4a7ed-d188-4387-a1a2-7dc3778475f8 {{(pid=63279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 2581.366297] env[63279]: WARNING nova.compute.manager [req-a3738798-79d9-4982-94a3-aa69303a29ed req-4dc598f8-5bb8-4190-b67b-b66219bb5149 service nova] [instance: 011af8fa-0f76-44a8-8b24-b3d65f5e841e] Received unexpected event network-vif-plugged-5fc4a7ed-d188-4387-a1a2-7dc3778475f8 for instance with vm_state shelved_offloaded and task_state spawning. [ 2581.366461] env[63279]: DEBUG nova.compute.manager [req-a3738798-79d9-4982-94a3-aa69303a29ed req-4dc598f8-5bb8-4190-b67b-b66219bb5149 service nova] [instance: 011af8fa-0f76-44a8-8b24-b3d65f5e841e] Received event network-changed-5fc4a7ed-d188-4387-a1a2-7dc3778475f8 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2581.366620] env[63279]: DEBUG nova.compute.manager [req-a3738798-79d9-4982-94a3-aa69303a29ed req-4dc598f8-5bb8-4190-b67b-b66219bb5149 service nova] [instance: 011af8fa-0f76-44a8-8b24-b3d65f5e841e] Refreshing instance network info cache due to event network-changed-5fc4a7ed-d188-4387-a1a2-7dc3778475f8. 
{{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11655}} [ 2581.366811] env[63279]: DEBUG oslo_concurrency.lockutils [req-a3738798-79d9-4982-94a3-aa69303a29ed req-4dc598f8-5bb8-4190-b67b-b66219bb5149 service nova] Acquiring lock "refresh_cache-011af8fa-0f76-44a8-8b24-b3d65f5e841e" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2581.366954] env[63279]: DEBUG oslo_concurrency.lockutils [req-a3738798-79d9-4982-94a3-aa69303a29ed req-4dc598f8-5bb8-4190-b67b-b66219bb5149 service nova] Acquired lock "refresh_cache-011af8fa-0f76-44a8-8b24-b3d65f5e841e" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2581.367126] env[63279]: DEBUG nova.network.neutron [req-a3738798-79d9-4982-94a3-aa69303a29ed req-4dc598f8-5bb8-4190-b67b-b66219bb5149 service nova] [instance: 011af8fa-0f76-44a8-8b24-b3d65f5e841e] Refreshing network info cache for port 5fc4a7ed-d188-4387-a1a2-7dc3778475f8 {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2581.515818] env[63279]: DEBUG oslo_vmware.api [None req-5e650757-0c4d-48e0-8082-a37179e67256 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]527cf719-c1a9-9d34-b331-6618b933411b, 'name': SearchDatastore_Task, 'duration_secs': 0.008498} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2581.516586] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-da5728b2-4efb-4bd4-be1d-093fb48bd5b9 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2581.521392] env[63279]: DEBUG oslo_vmware.api [None req-5e650757-0c4d-48e0-8082-a37179e67256 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Waiting for the task: (returnval){ [ 2581.521392] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52577123-6b1a-a83a-4524-3d69899c1800" [ 2581.521392] env[63279]: _type = "Task" [ 2581.521392] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2581.528445] env[63279]: DEBUG oslo_vmware.api [None req-5e650757-0c4d-48e0-8082-a37179e67256 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52577123-6b1a-a83a-4524-3d69899c1800, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2581.696213] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-5ba02c89-d40c-49c3-9dd2-38b76ecb11a9 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 011af8fa-0f76-44a8-8b24-b3d65f5e841e] Preparing fetch location {{(pid=63279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2581.696458] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-5ba02c89-d40c-49c3-9dd2-38b76ecb11a9 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 011af8fa-0f76-44a8-8b24-b3d65f5e841e] Fetch image to [datastore1] OSTACK_IMG_23447fa1-a748-47f2-b1a7-3a2979f3f1e9/OSTACK_IMG_23447fa1-a748-47f2-b1a7-3a2979f3f1e9.vmdk {{(pid=63279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2581.696641] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-5ba02c89-d40c-49c3-9dd2-38b76ecb11a9 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 011af8fa-0f76-44a8-8b24-b3d65f5e841e] Downloading stream optimized image 7dcfd34e-94d0-4b88-ab7e-c1e1e3da8f20 to [datastore1] OSTACK_IMG_23447fa1-a748-47f2-b1a7-3a2979f3f1e9/OSTACK_IMG_23447fa1-a748-47f2-b1a7-3a2979f3f1e9.vmdk on the data store datastore1 as vApp {{(pid=63279) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 2581.696807] env[63279]: DEBUG nova.virt.vmwareapi.images [None req-5ba02c89-d40c-49c3-9dd2-38b76ecb11a9 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 011af8fa-0f76-44a8-8b24-b3d65f5e841e] Downloading image file data 7dcfd34e-94d0-4b88-ab7e-c1e1e3da8f20 to the ESX as VM named 'OSTACK_IMG_23447fa1-a748-47f2-b1a7-3a2979f3f1e9' {{(pid=63279) fetch_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 2581.762522] env[63279]: DEBUG oslo_vmware.rw_handles [None req-5ba02c89-d40c-49c3-9dd2-38b76ecb11a9 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 2581.762522] env[63279]: value = "resgroup-9" [ 2581.762522] env[63279]: _type = "ResourcePool" [ 2581.762522] env[63279]: }. {{(pid=63279) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 2581.762810] env[63279]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-354ec820-552e-400f-9a53-68d2e1115637 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2581.783480] env[63279]: DEBUG oslo_vmware.rw_handles [None req-5ba02c89-d40c-49c3-9dd2-38b76ecb11a9 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Lease: (returnval){ [ 2581.783480] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]5240946d-14f3-b642-e1e3-964ca4b8ab2b" [ 2581.783480] env[63279]: _type = "HttpNfcLease" [ 2581.783480] env[63279]: } obtained for vApp import into resource pool (val){ [ 2581.783480] env[63279]: value = "resgroup-9" [ 2581.783480] env[63279]: _type = "ResourcePool" [ 2581.783480] env[63279]: }. 
{{(pid=63279) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 2581.783858] env[63279]: DEBUG oslo_vmware.api [None req-5ba02c89-d40c-49c3-9dd2-38b76ecb11a9 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Waiting for the lease: (returnval){ [ 2581.783858] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]5240946d-14f3-b642-e1e3-964ca4b8ab2b" [ 2581.783858] env[63279]: _type = "HttpNfcLease" [ 2581.783858] env[63279]: } to be ready. {{(pid=63279) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 2581.789564] env[63279]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 2581.789564] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]5240946d-14f3-b642-e1e3-964ca4b8ab2b" [ 2581.789564] env[63279]: _type = "HttpNfcLease" [ 2581.789564] env[63279]: } is initializing. {{(pid=63279) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 2582.032013] env[63279]: DEBUG oslo_vmware.api [None req-5e650757-0c4d-48e0-8082-a37179e67256 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52577123-6b1a-a83a-4524-3d69899c1800, 'name': SearchDatastore_Task, 'duration_secs': 0.017515} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2582.032281] env[63279]: DEBUG oslo_concurrency.lockutils [None req-5e650757-0c4d-48e0-8082-a37179e67256 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2582.032535] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-5e650757-0c4d-48e0-8082-a37179e67256 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] cc0d3870-41fa-4cd4-a16d-e52e705f29a3/cc0d3870-41fa-4cd4-a16d-e52e705f29a3.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2582.032813] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-fbffad55-5bee-4417-bd90-fa498d05b24a {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2582.039518] env[63279]: DEBUG oslo_vmware.api [None req-5e650757-0c4d-48e0-8082-a37179e67256 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Waiting for the task: (returnval){ [ 2582.039518] env[63279]: value = "task-2088360" [ 2582.039518] env[63279]: _type = "Task" [ 2582.039518] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2582.047165] env[63279]: DEBUG oslo_vmware.api [None req-5e650757-0c4d-48e0-8082-a37179e67256 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Task: {'id': task-2088360, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2582.071220] env[63279]: DEBUG nova.network.neutron [req-a3738798-79d9-4982-94a3-aa69303a29ed req-4dc598f8-5bb8-4190-b67b-b66219bb5149 service nova] [instance: 011af8fa-0f76-44a8-8b24-b3d65f5e841e] Updated VIF entry in instance network info cache for port 5fc4a7ed-d188-4387-a1a2-7dc3778475f8. {{(pid=63279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2582.071561] env[63279]: DEBUG nova.network.neutron [req-a3738798-79d9-4982-94a3-aa69303a29ed req-4dc598f8-5bb8-4190-b67b-b66219bb5149 service nova] [instance: 011af8fa-0f76-44a8-8b24-b3d65f5e841e] Updating instance_info_cache with network_info: [{"id": "5fc4a7ed-d188-4387-a1a2-7dc3778475f8", "address": "fa:16:3e:a5:22:54", "network": {"id": "1237aa69-f92f-4996-893d-4007ba590d1d", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-544783943-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.237", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "996f8d6e14a14ac39f207eced547ef33", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d88b750a-0e7d-4f16-8bd5-8e6d5743b720", "external-id": "nsx-vlan-transportzone-715", "segmentation_id": 715, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5fc4a7ed-d1", "ovs_interfaceid": "5fc4a7ed-d188-4387-a1a2-7dc3778475f8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2582.294650] env[63279]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 2582.294650] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]5240946d-14f3-b642-e1e3-964ca4b8ab2b" [ 2582.294650] env[63279]: _type = "HttpNfcLease" [ 2582.294650] env[63279]: } is initializing. {{(pid=63279) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 2582.549818] env[63279]: DEBUG oslo_vmware.api [None req-5e650757-0c4d-48e0-8082-a37179e67256 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Task: {'id': task-2088360, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.417078} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2582.550095] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-5e650757-0c4d-48e0-8082-a37179e67256 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] cc0d3870-41fa-4cd4-a16d-e52e705f29a3/cc0d3870-41fa-4cd4-a16d-e52e705f29a3.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2582.550310] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-5e650757-0c4d-48e0-8082-a37179e67256 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: cc0d3870-41fa-4cd4-a16d-e52e705f29a3] Extending root virtual disk to 1048576 {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2582.550550] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-49ccc385-3c3a-48cb-ad21-64cf0512ec67 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2582.556735] env[63279]: DEBUG oslo_vmware.api [None req-5e650757-0c4d-48e0-8082-a37179e67256 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Waiting for the task: (returnval){ [ 2582.556735] env[63279]: value = "task-2088361" [ 2582.556735] env[63279]: _type = "Task" [ 2582.556735] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2582.563852] env[63279]: DEBUG oslo_vmware.api [None req-5e650757-0c4d-48e0-8082-a37179e67256 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Task: {'id': task-2088361, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2582.574433] env[63279]: DEBUG oslo_concurrency.lockutils [req-a3738798-79d9-4982-94a3-aa69303a29ed req-4dc598f8-5bb8-4190-b67b-b66219bb5149 service nova] Releasing lock "refresh_cache-011af8fa-0f76-44a8-8b24-b3d65f5e841e" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2582.792549] env[63279]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 2582.792549] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]5240946d-14f3-b642-e1e3-964ca4b8ab2b" [ 2582.792549] env[63279]: _type = "HttpNfcLease" [ 2582.792549] env[63279]: } is ready. {{(pid=63279) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 2582.792860] env[63279]: DEBUG oslo_vmware.rw_handles [None req-5ba02c89-d40c-49c3-9dd2-38b76ecb11a9 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 2582.792860] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]5240946d-14f3-b642-e1e3-964ca4b8ab2b" [ 2582.792860] env[63279]: _type = "HttpNfcLease" [ 2582.792860] env[63279]: }. 
{{(pid=63279) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 2582.793582] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93f3dc78-7b64-4834-abfd-e418f9549f05 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2582.800485] env[63279]: DEBUG oslo_vmware.rw_handles [None req-5ba02c89-d40c-49c3-9dd2-38b76ecb11a9 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/529f425d-4d0b-422a-d6e5-2433be2e8330/disk-0.vmdk from lease info. {{(pid=63279) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 2582.800670] env[63279]: DEBUG oslo_vmware.rw_handles [None req-5ba02c89-d40c-49c3-9dd2-38b76ecb11a9 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Creating HTTP connection to write to file with size = 31668736 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/529f425d-4d0b-422a-d6e5-2433be2e8330/disk-0.vmdk. {{(pid=63279) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2582.861984] env[63279]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-aa344314-03e0-4cc0-a8d1-2d4c6e2efaab {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2583.069161] env[63279]: DEBUG oslo_vmware.api [None req-5e650757-0c4d-48e0-8082-a37179e67256 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Task: {'id': task-2088361, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.059264} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2583.071381] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-5e650757-0c4d-48e0-8082-a37179e67256 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: cc0d3870-41fa-4cd4-a16d-e52e705f29a3] Extended root virtual disk {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2583.072299] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1354b3f8-9796-4456-b0b6-854c61bfa780 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2583.097467] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-5e650757-0c4d-48e0-8082-a37179e67256 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: cc0d3870-41fa-4cd4-a16d-e52e705f29a3] Reconfiguring VM instance instance-00000079 to attach disk [datastore1] cc0d3870-41fa-4cd4-a16d-e52e705f29a3/cc0d3870-41fa-4cd4-a16d-e52e705f29a3.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2583.101058] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fb56bdc2-0d41-46f2-8c0e-831ff864bb2a {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2583.129402] env[63279]: DEBUG oslo_vmware.api [None req-5e650757-0c4d-48e0-8082-a37179e67256 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Waiting for the task: (returnval){ [ 2583.129402] env[63279]: value = "task-2088362" [ 2583.129402] env[63279]: _type = "Task" [ 2583.129402] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2583.140342] env[63279]: DEBUG oslo_vmware.api [None req-5e650757-0c4d-48e0-8082-a37179e67256 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Task: {'id': task-2088362, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2583.642127] env[63279]: DEBUG oslo_vmware.api [None req-5e650757-0c4d-48e0-8082-a37179e67256 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Task: {'id': task-2088362, 'name': ReconfigVM_Task, 'duration_secs': 0.310144} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2583.644216] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-5e650757-0c4d-48e0-8082-a37179e67256 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: cc0d3870-41fa-4cd4-a16d-e52e705f29a3] Reconfigured VM instance instance-00000079 to attach disk [datastore1] cc0d3870-41fa-4cd4-a16d-e52e705f29a3/cc0d3870-41fa-4cd4-a16d-e52e705f29a3.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2583.644535] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d9c3fb35-bdb7-423b-bce4-833bd53d43b7 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2583.650985] env[63279]: DEBUG oslo_vmware.api [None req-5e650757-0c4d-48e0-8082-a37179e67256 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Waiting for the task: (returnval){ [ 2583.650985] env[63279]: value = "task-2088363" [ 2583.650985] env[63279]: _type = "Task" [ 2583.650985] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2583.659331] env[63279]: DEBUG oslo_vmware.api [None req-5e650757-0c4d-48e0-8082-a37179e67256 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Task: {'id': task-2088363, 'name': Rename_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2583.961733] env[63279]: DEBUG oslo_vmware.rw_handles [None req-5ba02c89-d40c-49c3-9dd2-38b76ecb11a9 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Completed reading data from the image iterator. {{(pid=63279) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2583.962064] env[63279]: DEBUG oslo_vmware.rw_handles [None req-5ba02c89-d40c-49c3-9dd2-38b76ecb11a9 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/529f425d-4d0b-422a-d6e5-2433be2e8330/disk-0.vmdk. {{(pid=63279) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 2583.962964] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2658e2f-5e97-4402-a888-7e241218903b {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2583.969657] env[63279]: DEBUG oslo_vmware.rw_handles [None req-5ba02c89-d40c-49c3-9dd2-38b76ecb11a9 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/529f425d-4d0b-422a-d6e5-2433be2e8330/disk-0.vmdk is in state: ready. {{(pid=63279) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 2583.969873] env[63279]: DEBUG oslo_vmware.rw_handles [None req-5ba02c89-d40c-49c3-9dd2-38b76ecb11a9 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Releasing lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/529f425d-4d0b-422a-d6e5-2433be2e8330/disk-0.vmdk. 
{{(pid=63279) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 2583.970131] env[63279]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-bab11365-8dcf-4d41-8164-5eab57a7b0db {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2584.160434] env[63279]: DEBUG oslo_vmware.api [None req-5e650757-0c4d-48e0-8082-a37179e67256 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Task: {'id': task-2088363, 'name': Rename_Task, 'duration_secs': 0.165416} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2584.161548] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-5e650757-0c4d-48e0-8082-a37179e67256 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: cc0d3870-41fa-4cd4-a16d-e52e705f29a3] Powering on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2584.161966] env[63279]: DEBUG oslo_vmware.rw_handles [None req-5ba02c89-d40c-49c3-9dd2-38b76ecb11a9 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Closed VMDK write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/529f425d-4d0b-422a-d6e5-2433be2e8330/disk-0.vmdk. {{(pid=63279) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 2584.162181] env[63279]: INFO nova.virt.vmwareapi.images [None req-5ba02c89-d40c-49c3-9dd2-38b76ecb11a9 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 011af8fa-0f76-44a8-8b24-b3d65f5e841e] Downloaded image file data 7dcfd34e-94d0-4b88-ab7e-c1e1e3da8f20 [ 2584.162409] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-444acfa2-f02c-4687-9a8d-e96badd99931 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2584.164331] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4488cb90-90d4-4a53-9df0-94612d62477e {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2584.179425] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-fe8ea94b-31fd-4752-acdc-87f1e0ec7031 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2584.180740] env[63279]: DEBUG oslo_vmware.api [None req-5e650757-0c4d-48e0-8082-a37179e67256 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Waiting for the task: (returnval){ [ 2584.180740] env[63279]: value = "task-2088364" [ 2584.180740] env[63279]: _type = "Task" [ 2584.180740] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2584.188477] env[63279]: DEBUG oslo_vmware.api [None req-5e650757-0c4d-48e0-8082-a37179e67256 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Task: {'id': task-2088364, 'name': PowerOnVM_Task} progress is 33%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2584.237813] env[63279]: INFO nova.virt.vmwareapi.images [None req-5ba02c89-d40c-49c3-9dd2-38b76ecb11a9 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 011af8fa-0f76-44a8-8b24-b3d65f5e841e] The imported VM was unregistered [ 2584.240439] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-5ba02c89-d40c-49c3-9dd2-38b76ecb11a9 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 011af8fa-0f76-44a8-8b24-b3d65f5e841e] Caching image {{(pid=63279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2584.240708] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-5ba02c89-d40c-49c3-9dd2-38b76ecb11a9 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Creating directory with path [datastore1] devstack-image-cache_base/7dcfd34e-94d0-4b88-ab7e-c1e1e3da8f20 {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2584.240982] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-65e0773e-80c5-4662-9a84-18246a693749 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2584.251833] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-5ba02c89-d40c-49c3-9dd2-38b76ecb11a9 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Created directory with path [datastore1] devstack-image-cache_base/7dcfd34e-94d0-4b88-ab7e-c1e1e3da8f20 {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2584.252077] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-5ba02c89-d40c-49c3-9dd2-38b76ecb11a9 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Moving virtual disk from [datastore1] OSTACK_IMG_23447fa1-a748-47f2-b1a7-3a2979f3f1e9/OSTACK_IMG_23447fa1-a748-47f2-b1a7-3a2979f3f1e9.vmdk to [datastore1] devstack-image-cache_base/7dcfd34e-94d0-4b88-ab7e-c1e1e3da8f20/7dcfd34e-94d0-4b88-ab7e-c1e1e3da8f20.vmdk. {{(pid=63279) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 2584.252281] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-f7e07332-68a9-4271-a017-8b9c68f8a84a {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2584.258548] env[63279]: DEBUG oslo_vmware.api [None req-5ba02c89-d40c-49c3-9dd2-38b76ecb11a9 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Waiting for the task: (returnval){ [ 2584.258548] env[63279]: value = "task-2088366" [ 2584.258548] env[63279]: _type = "Task" [ 2584.258548] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2584.266222] env[63279]: DEBUG oslo_vmware.api [None req-5ba02c89-d40c-49c3-9dd2-38b76ecb11a9 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Task: {'id': task-2088366, 'name': MoveVirtualDisk_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2584.693820] env[63279]: DEBUG oslo_vmware.api [None req-5e650757-0c4d-48e0-8082-a37179e67256 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Task: {'id': task-2088364, 'name': PowerOnVM_Task, 'duration_secs': 0.52116} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2584.694227] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-5e650757-0c4d-48e0-8082-a37179e67256 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: cc0d3870-41fa-4cd4-a16d-e52e705f29a3] Powered on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2584.694349] env[63279]: INFO nova.compute.manager [None req-5e650757-0c4d-48e0-8082-a37179e67256 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: cc0d3870-41fa-4cd4-a16d-e52e705f29a3] Took 13.74 seconds to spawn the instance on the hypervisor. [ 2584.694529] env[63279]: DEBUG nova.compute.manager [None req-5e650757-0c4d-48e0-8082-a37179e67256 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: cc0d3870-41fa-4cd4-a16d-e52e705f29a3] Checking state {{(pid=63279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2584.695355] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dcf3b5a5-e147-4499-8f05-ad8dc3922d0c {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2584.771315] env[63279]: DEBUG oslo_vmware.api [None req-5ba02c89-d40c-49c3-9dd2-38b76ecb11a9 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Task: {'id': task-2088366, 'name': MoveVirtualDisk_Task} progress is 24%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2585.213728] env[63279]: INFO nova.compute.manager [None req-5e650757-0c4d-48e0-8082-a37179e67256 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: cc0d3870-41fa-4cd4-a16d-e52e705f29a3] Took 19.62 seconds to build instance. [ 2585.271741] env[63279]: DEBUG oslo_vmware.api [None req-5ba02c89-d40c-49c3-9dd2-38b76ecb11a9 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Task: {'id': task-2088366, 'name': MoveVirtualDisk_Task} progress is 46%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2585.716611] env[63279]: DEBUG oslo_concurrency.lockutils [None req-5e650757-0c4d-48e0-8082-a37179e67256 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Lock "cc0d3870-41fa-4cd4-a16d-e52e705f29a3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 21.126s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2585.772750] env[63279]: DEBUG oslo_vmware.api [None req-5ba02c89-d40c-49c3-9dd2-38b76ecb11a9 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Task: {'id': task-2088366, 'name': MoveVirtualDisk_Task} progress is 69%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2586.272371] env[63279]: DEBUG oslo_vmware.api [None req-5ba02c89-d40c-49c3-9dd2-38b76ecb11a9 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Task: {'id': task-2088366, 'name': MoveVirtualDisk_Task} progress is 88%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2586.573036] env[63279]: DEBUG nova.compute.manager [req-dc2a758b-28cc-414e-b25d-80866241f012 req-b9e6f1ad-3c6b-4a01-9c62-310cd6bc9b44 service nova] [instance: cc0d3870-41fa-4cd4-a16d-e52e705f29a3] Received event network-changed-71e3fcb5-bcb8-4ad9-b60f-3cfb2943cff0 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2586.573278] env[63279]: DEBUG nova.compute.manager [req-dc2a758b-28cc-414e-b25d-80866241f012 req-b9e6f1ad-3c6b-4a01-9c62-310cd6bc9b44 service nova] [instance: cc0d3870-41fa-4cd4-a16d-e52e705f29a3] Refreshing instance network info cache due to event network-changed-71e3fcb5-bcb8-4ad9-b60f-3cfb2943cff0. {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11655}} [ 2586.573484] env[63279]: DEBUG oslo_concurrency.lockutils [req-dc2a758b-28cc-414e-b25d-80866241f012 req-b9e6f1ad-3c6b-4a01-9c62-310cd6bc9b44 service nova] Acquiring lock "refresh_cache-cc0d3870-41fa-4cd4-a16d-e52e705f29a3" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2586.573635] env[63279]: DEBUG oslo_concurrency.lockutils [req-dc2a758b-28cc-414e-b25d-80866241f012 req-b9e6f1ad-3c6b-4a01-9c62-310cd6bc9b44 service nova] Acquired lock "refresh_cache-cc0d3870-41fa-4cd4-a16d-e52e705f29a3" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2586.573819] env[63279]: DEBUG nova.network.neutron [req-dc2a758b-28cc-414e-b25d-80866241f012 req-b9e6f1ad-3c6b-4a01-9c62-310cd6bc9b44 service nova] [instance: cc0d3870-41fa-4cd4-a16d-e52e705f29a3] Refreshing network info cache for port 71e3fcb5-bcb8-4ad9-b60f-3cfb2943cff0 {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2586.770713] env[63279]: DEBUG oslo_vmware.api [None req-5ba02c89-d40c-49c3-9dd2-38b76ecb11a9 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Task: {'id': task-2088366, 'name': MoveVirtualDisk_Task} progress is 100%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2587.273130] env[63279]: DEBUG oslo_vmware.api [None req-5ba02c89-d40c-49c3-9dd2-38b76ecb11a9 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Task: {'id': task-2088366, 'name': MoveVirtualDisk_Task, 'duration_secs': 2.732076} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2587.273421] env[63279]: INFO nova.virt.vmwareapi.ds_util [None req-5ba02c89-d40c-49c3-9dd2-38b76ecb11a9 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Moved virtual disk from [datastore1] OSTACK_IMG_23447fa1-a748-47f2-b1a7-3a2979f3f1e9/OSTACK_IMG_23447fa1-a748-47f2-b1a7-3a2979f3f1e9.vmdk to [datastore1] devstack-image-cache_base/7dcfd34e-94d0-4b88-ab7e-c1e1e3da8f20/7dcfd34e-94d0-4b88-ab7e-c1e1e3da8f20.vmdk. [ 2587.273611] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-5ba02c89-d40c-49c3-9dd2-38b76ecb11a9 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 011af8fa-0f76-44a8-8b24-b3d65f5e841e] Cleaning up location [datastore1] OSTACK_IMG_23447fa1-a748-47f2-b1a7-3a2979f3f1e9 {{(pid=63279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 2587.273773] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-5ba02c89-d40c-49c3-9dd2-38b76ecb11a9 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Deleting the datastore file [datastore1] OSTACK_IMG_23447fa1-a748-47f2-b1a7-3a2979f3f1e9 {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2587.274576] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-bb03c7c0-586a-4abf-b300-969ece56a9a0 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2587.285445] env[63279]: DEBUG oslo_vmware.api [None req-5ba02c89-d40c-49c3-9dd2-38b76ecb11a9 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Waiting for the task: (returnval){ [ 2587.285445] env[63279]: value = "task-2088367" [ 2587.285445] env[63279]: _type = "Task" [ 2587.285445] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2587.306978] env[63279]: DEBUG oslo_vmware.api [None req-5ba02c89-d40c-49c3-9dd2-38b76ecb11a9 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Task: {'id': task-2088367, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2587.306978] env[63279]: DEBUG nova.network.neutron [req-dc2a758b-28cc-414e-b25d-80866241f012 req-b9e6f1ad-3c6b-4a01-9c62-310cd6bc9b44 service nova] [instance: cc0d3870-41fa-4cd4-a16d-e52e705f29a3] Updated VIF entry in instance network info cache for port 71e3fcb5-bcb8-4ad9-b60f-3cfb2943cff0. 
{{(pid=63279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2587.306978] env[63279]: DEBUG nova.network.neutron [req-dc2a758b-28cc-414e-b25d-80866241f012 req-b9e6f1ad-3c6b-4a01-9c62-310cd6bc9b44 service nova] [instance: cc0d3870-41fa-4cd4-a16d-e52e705f29a3] Updating instance_info_cache with network_info: [{"id": "71e3fcb5-bcb8-4ad9-b60f-3cfb2943cff0", "address": "fa:16:3e:dc:94:2d", "network": {"id": "1e35690a-f100-41b7-a461-7334966ef784", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1516696428-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f9427c264e8e41998f579af352cb48cf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "15922696-dc08-44ef-97be-0b09a9dfeae8", "external-id": "nsx-vlan-transportzone-791", "segmentation_id": 791, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap71e3fcb5-bc", "ovs_interfaceid": "71e3fcb5-bcb8-4ad9-b60f-3cfb2943cff0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2587.794384] env[63279]: DEBUG oslo_vmware.api [None req-5ba02c89-d40c-49c3-9dd2-38b76ecb11a9 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Task: {'id': task-2088367, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.049994} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2587.794779] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-5ba02c89-d40c-49c3-9dd2-38b76ecb11a9 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Deleted the datastore file {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2587.794779] env[63279]: DEBUG oslo_concurrency.lockutils [None req-5ba02c89-d40c-49c3-9dd2-38b76ecb11a9 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Releasing lock "[datastore1] devstack-image-cache_base/7dcfd34e-94d0-4b88-ab7e-c1e1e3da8f20/7dcfd34e-94d0-4b88-ab7e-c1e1e3da8f20.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2587.795032] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-5ba02c89-d40c-49c3-9dd2-38b76ecb11a9 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/7dcfd34e-94d0-4b88-ab7e-c1e1e3da8f20/7dcfd34e-94d0-4b88-ab7e-c1e1e3da8f20.vmdk to [datastore1] 011af8fa-0f76-44a8-8b24-b3d65f5e841e/011af8fa-0f76-44a8-8b24-b3d65f5e841e.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2587.821385] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ceadce31-c43d-42ed-9dce-27a393fdbb71 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2587.821385] env[63279]: DEBUG oslo_concurrency.lockutils [req-dc2a758b-28cc-414e-b25d-80866241f012 req-b9e6f1ad-3c6b-4a01-9c62-310cd6bc9b44 service nova] Releasing lock "refresh_cache-cc0d3870-41fa-4cd4-a16d-e52e705f29a3" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2587.821385] env[63279]: DEBUG oslo_vmware.api [None req-5ba02c89-d40c-49c3-9dd2-38b76ecb11a9 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Waiting for the task: (returnval){ [ 2587.821385] env[63279]: value = "task-2088369" [ 2587.821385] env[63279]: _type = "Task" [ 2587.821385] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2587.821385] env[63279]: DEBUG oslo_vmware.api [None req-5ba02c89-d40c-49c3-9dd2-38b76ecb11a9 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Task: {'id': task-2088369, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2588.314888] env[63279]: DEBUG oslo_vmware.api [None req-5ba02c89-d40c-49c3-9dd2-38b76ecb11a9 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Task: {'id': task-2088369, 'name': CopyVirtualDisk_Task} progress is 24%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2588.498547] env[63279]: DEBUG oslo_concurrency.lockutils [None req-5ebb6114-b63d-43a3-b9ce-5c83731b291e tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Acquiring lock "30ac4320-5ee0-424b-9bbb-e2d53277be80" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2588.498802] env[63279]: DEBUG oslo_concurrency.lockutils [None req-5ebb6114-b63d-43a3-b9ce-5c83731b291e tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Lock "30ac4320-5ee0-424b-9bbb-e2d53277be80" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2588.815905] env[63279]: DEBUG oslo_vmware.api [None req-5ba02c89-d40c-49c3-9dd2-38b76ecb11a9 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Task: {'id': task-2088369, 'name': CopyVirtualDisk_Task} progress is 46%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2589.002574] env[63279]: DEBUG nova.compute.utils [None req-5ebb6114-b63d-43a3-b9ce-5c83731b291e tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Using /dev/sd instead of None {{(pid=63279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2589.316418] env[63279]: DEBUG oslo_vmware.api [None req-5ba02c89-d40c-49c3-9dd2-38b76ecb11a9 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Task: {'id': task-2088369, 'name': CopyVirtualDisk_Task} progress is 69%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2589.506651] env[63279]: DEBUG oslo_concurrency.lockutils [None req-5ebb6114-b63d-43a3-b9ce-5c83731b291e tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Lock "30ac4320-5ee0-424b-9bbb-e2d53277be80" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.008s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2589.817954] env[63279]: DEBUG oslo_vmware.api [None req-5ba02c89-d40c-49c3-9dd2-38b76ecb11a9 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Task: {'id': task-2088369, 'name': CopyVirtualDisk_Task} progress is 91%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2590.318517] env[63279]: DEBUG oslo_vmware.api [None req-5ba02c89-d40c-49c3-9dd2-38b76ecb11a9 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Task: {'id': task-2088369, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.173781} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2590.318810] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-5ba02c89-d40c-49c3-9dd2-38b76ecb11a9 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/7dcfd34e-94d0-4b88-ab7e-c1e1e3da8f20/7dcfd34e-94d0-4b88-ab7e-c1e1e3da8f20.vmdk to [datastore1] 011af8fa-0f76-44a8-8b24-b3d65f5e841e/011af8fa-0f76-44a8-8b24-b3d65f5e841e.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2590.319620] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76afd14d-6fd9-43d9-be18-f1eb823d1eb0 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2590.342516] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-5ba02c89-d40c-49c3-9dd2-38b76ecb11a9 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 011af8fa-0f76-44a8-8b24-b3d65f5e841e] Reconfiguring VM instance instance-00000076 to attach disk [datastore1] 011af8fa-0f76-44a8-8b24-b3d65f5e841e/011af8fa-0f76-44a8-8b24-b3d65f5e841e.vmdk or device None with type streamOptimized {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2590.343262] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0c5ad026-3f2c-46e7-bc45-3c9e95965821 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2590.364172] env[63279]: DEBUG oslo_vmware.api [None req-5ba02c89-d40c-49c3-9dd2-38b76ecb11a9 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Waiting for the task: (returnval){ [ 2590.364172] env[63279]: value = "task-2088370" [ 2590.364172] env[63279]: _type = "Task" [ 2590.364172] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2590.372504] env[63279]: DEBUG oslo_vmware.api [None req-5ba02c89-d40c-49c3-9dd2-38b76ecb11a9 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Task: {'id': task-2088370, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2590.579771] env[63279]: DEBUG oslo_concurrency.lockutils [None req-5ebb6114-b63d-43a3-b9ce-5c83731b291e tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Acquiring lock "30ac4320-5ee0-424b-9bbb-e2d53277be80" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2590.580016] env[63279]: DEBUG oslo_concurrency.lockutils [None req-5ebb6114-b63d-43a3-b9ce-5c83731b291e tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Lock "30ac4320-5ee0-424b-9bbb-e2d53277be80" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2590.580275] env[63279]: INFO nova.compute.manager [None req-5ebb6114-b63d-43a3-b9ce-5c83731b291e tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] [instance: 30ac4320-5ee0-424b-9bbb-e2d53277be80] Attaching volume 2a14c4d9-b71a-4abd-ba55-429105dd6867 to /dev/sdb [ 2590.611830] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d023417-8d8e-4d1e-96e5-276890396970 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2590.619109] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-532e1382-b8fa-4d12-b8f7-965da2466dcf {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2590.632957] env[63279]: DEBUG nova.virt.block_device [None req-5ebb6114-b63d-43a3-b9ce-5c83731b291e tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] [instance: 30ac4320-5ee0-424b-9bbb-e2d53277be80] Updating existing volume attachment record: 3eea93b7-48f7-4db3-9dce-cd680eb04212 {{(pid=63279) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 2590.876936] env[63279]: DEBUG oslo_vmware.api [None req-5ba02c89-d40c-49c3-9dd2-38b76ecb11a9 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Task: {'id': task-2088370, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2591.375843] env[63279]: DEBUG oslo_vmware.api [None req-5ba02c89-d40c-49c3-9dd2-38b76ecb11a9 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Task: {'id': task-2088370, 'name': ReconfigVM_Task, 'duration_secs': 0.926185} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2591.376140] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-5ba02c89-d40c-49c3-9dd2-38b76ecb11a9 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 011af8fa-0f76-44a8-8b24-b3d65f5e841e] Reconfigured VM instance instance-00000076 to attach disk [datastore1] 011af8fa-0f76-44a8-8b24-b3d65f5e841e/011af8fa-0f76-44a8-8b24-b3d65f5e841e.vmdk or device None with type streamOptimized {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2591.376763] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-5988c700-5f9a-462e-a0e5-c1c09ac26246 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2591.383228] env[63279]: DEBUG oslo_vmware.api [None req-5ba02c89-d40c-49c3-9dd2-38b76ecb11a9 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Waiting for the task: (returnval){ [ 2591.383228] env[63279]: value = "task-2088375" [ 2591.383228] env[63279]: _type = "Task" [ 2591.383228] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2591.390706] env[63279]: DEBUG oslo_vmware.api [None req-5ba02c89-d40c-49c3-9dd2-38b76ecb11a9 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Task: {'id': task-2088375, 'name': Rename_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2591.893986] env[63279]: DEBUG oslo_vmware.api [None req-5ba02c89-d40c-49c3-9dd2-38b76ecb11a9 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Task: {'id': task-2088375, 'name': Rename_Task} progress is 14%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2592.394701] env[63279]: DEBUG oslo_vmware.api [None req-5ba02c89-d40c-49c3-9dd2-38b76ecb11a9 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Task: {'id': task-2088375, 'name': Rename_Task, 'duration_secs': 0.589637} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2592.394941] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-5ba02c89-d40c-49c3-9dd2-38b76ecb11a9 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 011af8fa-0f76-44a8-8b24-b3d65f5e841e] Powering on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2592.395224] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-015bf92d-944d-4a2b-be88-9001cd6d3d82 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2592.401540] env[63279]: DEBUG oslo_vmware.api [None req-5ba02c89-d40c-49c3-9dd2-38b76ecb11a9 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Waiting for the task: (returnval){ [ 2592.401540] env[63279]: value = "task-2088376" [ 2592.401540] env[63279]: _type = "Task" [ 2592.401540] env[63279]: } to complete. 
{{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2592.411827] env[63279]: DEBUG oslo_vmware.api [None req-5ba02c89-d40c-49c3-9dd2-38b76ecb11a9 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Task: {'id': task-2088376, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2592.911862] env[63279]: DEBUG oslo_vmware.api [None req-5ba02c89-d40c-49c3-9dd2-38b76ecb11a9 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Task: {'id': task-2088376, 'name': PowerOnVM_Task} progress is 89%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2593.412368] env[63279]: DEBUG oslo_vmware.api [None req-5ba02c89-d40c-49c3-9dd2-38b76ecb11a9 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Task: {'id': task-2088376, 'name': PowerOnVM_Task, 'duration_secs': 0.582393} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2593.412639] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-5ba02c89-d40c-49c3-9dd2-38b76ecb11a9 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 011af8fa-0f76-44a8-8b24-b3d65f5e841e] Powered on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2593.502665] env[63279]: DEBUG nova.compute.manager [None req-5ba02c89-d40c-49c3-9dd2-38b76ecb11a9 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 011af8fa-0f76-44a8-8b24-b3d65f5e841e] Checking state {{(pid=63279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2593.504313] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b6b2027-71d3-450d-b6e5-ceac6f32d3ee {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2594.020359] env[63279]: DEBUG oslo_concurrency.lockutils [None req-5ba02c89-d40c-49c3-9dd2-38b76ecb11a9 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Lock "011af8fa-0f76-44a8-8b24-b3d65f5e841e" "released" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: held 26.732s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2594.352641] env[63279]: DEBUG oslo_concurrency.lockutils [None req-fadc7386-d772-43bc-84c1-71fa11034213 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Acquiring lock "011af8fa-0f76-44a8-8b24-b3d65f5e841e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2594.352956] env[63279]: DEBUG oslo_concurrency.lockutils [None req-fadc7386-d772-43bc-84c1-71fa11034213 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Lock "011af8fa-0f76-44a8-8b24-b3d65f5e841e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63279) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2594.353197] env[63279]: DEBUG oslo_concurrency.lockutils [None req-fadc7386-d772-43bc-84c1-71fa11034213 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Acquiring lock "011af8fa-0f76-44a8-8b24-b3d65f5e841e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2594.353393] env[63279]: DEBUG oslo_concurrency.lockutils [None req-fadc7386-d772-43bc-84c1-71fa11034213 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Lock "011af8fa-0f76-44a8-8b24-b3d65f5e841e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2594.353573] env[63279]: DEBUG oslo_concurrency.lockutils [None req-fadc7386-d772-43bc-84c1-71fa11034213 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Lock "011af8fa-0f76-44a8-8b24-b3d65f5e841e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2594.355638] env[63279]: INFO nova.compute.manager [None req-fadc7386-d772-43bc-84c1-71fa11034213 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 011af8fa-0f76-44a8-8b24-b3d65f5e841e] Terminating instance [ 2594.860025] env[63279]: DEBUG nova.compute.manager [None req-fadc7386-d772-43bc-84c1-71fa11034213 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 011af8fa-0f76-44a8-8b24-b3d65f5e841e] Start destroying the instance on the hypervisor. 
{{(pid=63279) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2594.860273] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-fadc7386-d772-43bc-84c1-71fa11034213 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 011af8fa-0f76-44a8-8b24-b3d65f5e841e] Destroying instance {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2594.861235] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b051ad1b-2c42-4a25-9cfb-9d729e871e8c {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2594.868880] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-fadc7386-d772-43bc-84c1-71fa11034213 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 011af8fa-0f76-44a8-8b24-b3d65f5e841e] Powering off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2594.869116] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-dac25635-a78a-4dbe-882a-a28b73b396ad {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2594.874787] env[63279]: DEBUG oslo_vmware.api [None req-fadc7386-d772-43bc-84c1-71fa11034213 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Waiting for the task: (returnval){ [ 2594.874787] env[63279]: value = "task-2088380" [ 2594.874787] env[63279]: _type = "Task" [ 2594.874787] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2594.882415] env[63279]: DEBUG oslo_vmware.api [None req-fadc7386-d772-43bc-84c1-71fa11034213 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Task: {'id': task-2088380, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2595.178674] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-5ebb6114-b63d-43a3-b9ce-5c83731b291e tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] [instance: 30ac4320-5ee0-424b-9bbb-e2d53277be80] Volume attach. 
Driver type: vmdk {{(pid=63279) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 2595.178968] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-5ebb6114-b63d-43a3-b9ce-5c83731b291e tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] [instance: 30ac4320-5ee0-424b-9bbb-e2d53277be80] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-427822', 'volume_id': '2a14c4d9-b71a-4abd-ba55-429105dd6867', 'name': 'volume-2a14c4d9-b71a-4abd-ba55-429105dd6867', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '30ac4320-5ee0-424b-9bbb-e2d53277be80', 'attached_at': '', 'detached_at': '', 'volume_id': '2a14c4d9-b71a-4abd-ba55-429105dd6867', 'serial': '2a14c4d9-b71a-4abd-ba55-429105dd6867'} {{(pid=63279) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 2595.179856] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e17dbb3d-eba0-4ec7-91b2-6d15245ee39c {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2595.195956] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8cf5e9fe-b2d8-4f2f-8af6-cf97bcd4b79e {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2595.220075] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-5ebb6114-b63d-43a3-b9ce-5c83731b291e tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] [instance: 30ac4320-5ee0-424b-9bbb-e2d53277be80] Reconfiguring VM instance instance-00000078 to attach disk [datastore1] volume-2a14c4d9-b71a-4abd-ba55-429105dd6867/volume-2a14c4d9-b71a-4abd-ba55-429105dd6867.vmdk or device None with type thin {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2595.220337] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-86a9ecaa-fd36-4897-84ff-494b018afee5 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2595.237221] env[63279]: DEBUG oslo_vmware.api [None req-5ebb6114-b63d-43a3-b9ce-5c83731b291e tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Waiting for the task: (returnval){ [ 2595.237221] env[63279]: value = "task-2088381" [ 2595.237221] env[63279]: _type = "Task" [ 2595.237221] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2595.246200] env[63279]: DEBUG oslo_vmware.api [None req-5ebb6114-b63d-43a3-b9ce-5c83731b291e tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Task: {'id': task-2088381, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2595.384796] env[63279]: DEBUG oslo_vmware.api [None req-fadc7386-d772-43bc-84c1-71fa11034213 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Task: {'id': task-2088380, 'name': PowerOffVM_Task, 'duration_secs': 0.177403} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2595.385070] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-fadc7386-d772-43bc-84c1-71fa11034213 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 011af8fa-0f76-44a8-8b24-b3d65f5e841e] Powered off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2595.385249] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-fadc7386-d772-43bc-84c1-71fa11034213 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 011af8fa-0f76-44a8-8b24-b3d65f5e841e] Unregistering the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2595.385496] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9aa8710c-c3c8-4f04-936b-392203dddfb5 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2595.747365] env[63279]: DEBUG oslo_vmware.api [None req-5ebb6114-b63d-43a3-b9ce-5c83731b291e tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Task: {'id': task-2088381, 'name': ReconfigVM_Task, 'duration_secs': 0.369534} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2595.747587] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-5ebb6114-b63d-43a3-b9ce-5c83731b291e tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] [instance: 30ac4320-5ee0-424b-9bbb-e2d53277be80] Reconfigured VM instance instance-00000078 to attach disk [datastore1] volume-2a14c4d9-b71a-4abd-ba55-429105dd6867/volume-2a14c4d9-b71a-4abd-ba55-429105dd6867.vmdk or device None with type thin {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2595.752201] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bba049d3-ac3b-4f30-b698-03a9c6ac7390 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2595.766571] env[63279]: DEBUG oslo_vmware.api [None req-5ebb6114-b63d-43a3-b9ce-5c83731b291e tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Waiting for the task: (returnval){ [ 2595.766571] env[63279]: value = "task-2088383" [ 2595.766571] env[63279]: _type = "Task" [ 2595.766571] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2595.774378] env[63279]: DEBUG oslo_vmware.api [None req-5ebb6114-b63d-43a3-b9ce-5c83731b291e tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Task: {'id': task-2088383, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2596.276633] env[63279]: DEBUG oslo_vmware.api [None req-5ebb6114-b63d-43a3-b9ce-5c83731b291e tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Task: {'id': task-2088383, 'name': ReconfigVM_Task, 'duration_secs': 0.154533} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2596.276909] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-5ebb6114-b63d-43a3-b9ce-5c83731b291e tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] [instance: 30ac4320-5ee0-424b-9bbb-e2d53277be80] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-427822', 'volume_id': '2a14c4d9-b71a-4abd-ba55-429105dd6867', 'name': 'volume-2a14c4d9-b71a-4abd-ba55-429105dd6867', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '30ac4320-5ee0-424b-9bbb-e2d53277be80', 'attached_at': '', 'detached_at': '', 'volume_id': '2a14c4d9-b71a-4abd-ba55-429105dd6867', 'serial': '2a14c4d9-b71a-4abd-ba55-429105dd6867'} {{(pid=63279) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 2597.316574] env[63279]: DEBUG nova.objects.instance [None req-5ebb6114-b63d-43a3-b9ce-5c83731b291e tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Lazy-loading 'flavor' on Instance uuid 30ac4320-5ee0-424b-9bbb-e2d53277be80 {{(pid=63279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2597.822845] env[63279]: DEBUG oslo_concurrency.lockutils [None req-5ebb6114-b63d-43a3-b9ce-5c83731b291e tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Lock "30ac4320-5ee0-424b-9bbb-e2d53277be80" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.243s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2598.954604] env[63279]: DEBUG oslo_concurrency.lockutils [None req-4f78e2f8-a484-4579-ae26-282747501a66 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Acquiring lock "70f2ea82-1b68-478e-8195-f9754eb051ae" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2598.954880] env[63279]: DEBUG oslo_concurrency.lockutils [None req-4f78e2f8-a484-4579-ae26-282747501a66 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Lock "70f2ea82-1b68-478e-8195-f9754eb051ae" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2599.078797] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-fadc7386-d772-43bc-84c1-71fa11034213 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 011af8fa-0f76-44a8-8b24-b3d65f5e841e] Unregistered the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2599.079012] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-fadc7386-d772-43bc-84c1-71fa11034213 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 011af8fa-0f76-44a8-8b24-b3d65f5e841e] Deleting contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2599.079220] env[63279]: DEBUG 
nova.virt.vmwareapi.ds_util [None req-fadc7386-d772-43bc-84c1-71fa11034213 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Deleting the datastore file [datastore1] 011af8fa-0f76-44a8-8b24-b3d65f5e841e {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2599.079545] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6b7cfb73-f623-4541-a37e-80e0cc1e98d1 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2599.086469] env[63279]: DEBUG oslo_vmware.api [None req-fadc7386-d772-43bc-84c1-71fa11034213 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Waiting for the task: (returnval){ [ 2599.086469] env[63279]: value = "task-2088386" [ 2599.086469] env[63279]: _type = "Task" [ 2599.086469] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2599.094322] env[63279]: DEBUG oslo_vmware.api [None req-fadc7386-d772-43bc-84c1-71fa11034213 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Task: {'id': task-2088386, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2599.457644] env[63279]: DEBUG nova.compute.manager [None req-4f78e2f8-a484-4579-ae26-282747501a66 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] [instance: 70f2ea82-1b68-478e-8195-f9754eb051ae] Starting instance... {{(pid=63279) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 2599.597162] env[63279]: DEBUG oslo_vmware.api [None req-fadc7386-d772-43bc-84c1-71fa11034213 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Task: {'id': task-2088386, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.179366} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2599.597429] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-fadc7386-d772-43bc-84c1-71fa11034213 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Deleted the datastore file {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2599.597616] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-fadc7386-d772-43bc-84c1-71fa11034213 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 011af8fa-0f76-44a8-8b24-b3d65f5e841e] Deleted contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2599.597796] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-fadc7386-d772-43bc-84c1-71fa11034213 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 011af8fa-0f76-44a8-8b24-b3d65f5e841e] Instance destroyed {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2599.597976] env[63279]: INFO nova.compute.manager [None req-fadc7386-d772-43bc-84c1-71fa11034213 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] [instance: 011af8fa-0f76-44a8-8b24-b3d65f5e841e] Took 4.74 seconds to destroy the instance on the hypervisor. [ 2599.598253] env[63279]: DEBUG oslo.service.loopingcall [None req-fadc7386-d772-43bc-84c1-71fa11034213 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2599.598429] env[63279]: DEBUG nova.compute.manager [-] [instance: 011af8fa-0f76-44a8-8b24-b3d65f5e841e] Deallocating network for instance {{(pid=63279) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2599.598525] env[63279]: DEBUG nova.network.neutron [-] [instance: 011af8fa-0f76-44a8-8b24-b3d65f5e841e] deallocate_for_instance() {{(pid=63279) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2599.980290] env[63279]: DEBUG oslo_concurrency.lockutils [None req-4f78e2f8-a484-4579-ae26-282747501a66 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2599.980571] env[63279]: DEBUG oslo_concurrency.lockutils [None req-4f78e2f8-a484-4579-ae26-282747501a66 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2599.982214] env[63279]: INFO nova.compute.claims [None req-4f78e2f8-a484-4579-ae26-282747501a66 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] [instance: 70f2ea82-1b68-478e-8195-f9754eb051ae] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2600.020113] env[63279]: DEBUG nova.compute.manager [req-030f0d1a-b10f-4b2e-a6f1-1e81b751bc49 req-38f3efd9-d346-495a-91af-ae8e2e5b788b service nova] [instance: 011af8fa-0f76-44a8-8b24-b3d65f5e841e] Received event network-vif-deleted-5fc4a7ed-d188-4387-a1a2-7dc3778475f8 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2600.020113] env[63279]: INFO nova.compute.manager [req-030f0d1a-b10f-4b2e-a6f1-1e81b751bc49 req-38f3efd9-d346-495a-91af-ae8e2e5b788b service nova] [instance: 011af8fa-0f76-44a8-8b24-b3d65f5e841e] Neutron deleted interface 5fc4a7ed-d188-4387-a1a2-7dc3778475f8; detaching it from the instance and deleting it from the info cache [ 2600.020499] env[63279]: DEBUG nova.network.neutron [req-030f0d1a-b10f-4b2e-a6f1-1e81b751bc49 req-38f3efd9-d346-495a-91af-ae8e2e5b788b service nova] [instance: 011af8fa-0f76-44a8-8b24-b3d65f5e841e] Updating instance_info_cache with network_info: [] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2600.497853] env[63279]: DEBUG nova.network.neutron [-] [instance: 011af8fa-0f76-44a8-8b24-b3d65f5e841e] Updating instance_info_cache with network_info: [] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2600.524511] env[63279]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-4fe765f5-c62d-49a2-bf70-25513318a1b2 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2600.534939] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be067f93-2c84-434c-b927-4ed679f40f43 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2600.562742] 
env[63279]: DEBUG nova.compute.manager [req-030f0d1a-b10f-4b2e-a6f1-1e81b751bc49 req-38f3efd9-d346-495a-91af-ae8e2e5b788b service nova] [instance: 011af8fa-0f76-44a8-8b24-b3d65f5e841e] Detach interface failed, port_id=5fc4a7ed-d188-4387-a1a2-7dc3778475f8, reason: Instance 011af8fa-0f76-44a8-8b24-b3d65f5e841e could not be found. {{(pid=63279) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11484}} [ 2601.000696] env[63279]: INFO nova.compute.manager [-] [instance: 011af8fa-0f76-44a8-8b24-b3d65f5e841e] Took 1.40 seconds to deallocate network for instance. [ 2601.063670] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f64b03ab-1cfd-4c52-94df-601c42532411 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2601.071903] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d2f2213-0609-4361-8505-073d26b51c49 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2601.102877] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f8456f0-8fe9-4e9e-acc6-5ccda0c9ee1b {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2601.110584] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1e4b1a5-a28b-4ad9-88d8-d2f214b4d1bb {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2601.124157] env[63279]: DEBUG nova.compute.provider_tree [None req-4f78e2f8-a484-4579-ae26-282747501a66 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Updating inventory in ProviderTree for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2601.507546] env[63279]: DEBUG oslo_concurrency.lockutils [None req-fadc7386-d772-43bc-84c1-71fa11034213 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2601.643668] env[63279]: ERROR nova.scheduler.client.report [None req-4f78e2f8-a484-4579-ae26-282747501a66 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] [req-9c9b8561-de9e-465f-b77f-204fd3465fe9] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 0ba7c625-a0fc-4d3c-b804-196d00f00137. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-9c9b8561-de9e-465f-b77f-204fd3465fe9"}]} [ 2601.661500] env[63279]: DEBUG nova.scheduler.client.report [None req-4f78e2f8-a484-4579-ae26-282747501a66 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Refreshing inventories for resource provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 2601.673578] env[63279]: DEBUG nova.scheduler.client.report [None req-4f78e2f8-a484-4579-ae26-282747501a66 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Updating ProviderTree inventory for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 2601.673793] env[63279]: DEBUG nova.compute.provider_tree [None req-4f78e2f8-a484-4579-ae26-282747501a66 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Updating inventory in ProviderTree for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2601.684907] env[63279]: DEBUG nova.scheduler.client.report [None req-4f78e2f8-a484-4579-ae26-282747501a66 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Refreshing aggregate associations for resource provider 0ba7c625-a0fc-4d3c-b804-196d00f00137, aggregates: None {{(pid=63279) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 2601.701773] env[63279]: DEBUG nova.scheduler.client.report [None req-4f78e2f8-a484-4579-ae26-282747501a66 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Refreshing trait associations for resource provider 0ba7c625-a0fc-4d3c-b804-196d00f00137, traits: COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK {{(pid=63279) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 2601.762869] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e338c56-f80b-4979-8f0f-29df94636bab {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2601.770965] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-8233979c-fa12-4f86-8a9a-3c46f7c12330 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2601.799903] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da12d3c6-68ee-4ac1-bf0f-b6b27277af65 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2601.806778] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7fea47bd-2d69-4d94-939e-b6045d034077 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2601.819399] env[63279]: DEBUG nova.compute.provider_tree [None req-4f78e2f8-a484-4579-ae26-282747501a66 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Updating inventory in ProviderTree for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2602.356641] env[63279]: DEBUG nova.scheduler.client.report [None req-4f78e2f8-a484-4579-ae26-282747501a66 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Updated inventory for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 with generation 187 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 2602.356921] env[63279]: DEBUG nova.compute.provider_tree [None req-4f78e2f8-a484-4579-ae26-282747501a66 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Updating resource provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 generation from 187 to 188 during operation: update_inventory {{(pid=63279) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 2602.357103] env[63279]: DEBUG nova.compute.provider_tree [None req-4f78e2f8-a484-4579-ae26-282747501a66 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Updating inventory in ProviderTree for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2602.862105] env[63279]: DEBUG oslo_concurrency.lockutils [None req-4f78e2f8-a484-4579-ae26-282747501a66 
tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.881s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2602.862636] env[63279]: DEBUG nova.compute.manager [None req-4f78e2f8-a484-4579-ae26-282747501a66 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] [instance: 70f2ea82-1b68-478e-8195-f9754eb051ae] Start building networks asynchronously for instance. {{(pid=63279) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 2602.867328] env[63279]: DEBUG oslo_concurrency.lockutils [None req-fadc7386-d772-43bc-84c1-71fa11034213 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.360s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2602.867585] env[63279]: DEBUG nova.objects.instance [None req-fadc7386-d772-43bc-84c1-71fa11034213 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Lazy-loading 'resources' on Instance uuid 011af8fa-0f76-44a8-8b24-b3d65f5e841e {{(pid=63279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2603.370787] env[63279]: DEBUG nova.compute.utils [None req-4f78e2f8-a484-4579-ae26-282747501a66 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Using /dev/sd instead of None {{(pid=63279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2603.374914] env[63279]: DEBUG nova.compute.manager [None req-4f78e2f8-a484-4579-ae26-282747501a66 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] [instance: 70f2ea82-1b68-478e-8195-f9754eb051ae] Allocating IP information in the background. 
{{(pid=63279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 2603.375096] env[63279]: DEBUG nova.network.neutron [None req-4f78e2f8-a484-4579-ae26-282747501a66 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] [instance: 70f2ea82-1b68-478e-8195-f9754eb051ae] allocate_for_instance() {{(pid=63279) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2603.422843] env[63279]: DEBUG nova.policy [None req-4f78e2f8-a484-4579-ae26-282747501a66 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '233f5d339e7b438e910eb03d33891c16', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '5fa421934ecd4054a70528644a40349e', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63279) authorize /opt/stack/nova/nova/policy.py:192}} [ 2603.438061] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84cf6108-b3a9-4773-9ded-33ab07b8107c {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2603.446397] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ecb7dedf-e938-4c45-a16d-4294b113065d {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2603.478390] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b31e66c9-6cdf-4afe-97a8-84f4e2793a3f {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2603.485918] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5996181-bdc8-4563-b58f-dfd4ae5cac63 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2603.498924] env[63279]: DEBUG nova.compute.provider_tree [None req-fadc7386-d772-43bc-84c1-71fa11034213 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Inventory has not changed in ProviderTree for provider: 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2603.775935] env[63279]: DEBUG nova.network.neutron [None req-4f78e2f8-a484-4579-ae26-282747501a66 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] [instance: 70f2ea82-1b68-478e-8195-f9754eb051ae] Successfully created port: 7d6d4bb4-a996-4572-a198-cad6b5f0105c {{(pid=63279) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2603.876060] env[63279]: DEBUG nova.compute.manager [None req-4f78e2f8-a484-4579-ae26-282747501a66 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] [instance: 70f2ea82-1b68-478e-8195-f9754eb051ae] Start building block device mappings for instance. 
{{(pid=63279) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 2604.002508] env[63279]: DEBUG nova.scheduler.client.report [None req-fadc7386-d772-43bc-84c1-71fa11034213 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Inventory has not changed for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2604.507707] env[63279]: DEBUG oslo_concurrency.lockutils [None req-fadc7386-d772-43bc-84c1-71fa11034213 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.640s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2604.539150] env[63279]: INFO nova.scheduler.client.report [None req-fadc7386-d772-43bc-84c1-71fa11034213 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Deleted allocations for instance 011af8fa-0f76-44a8-8b24-b3d65f5e841e [ 2604.886101] env[63279]: DEBUG nova.compute.manager [None req-4f78e2f8-a484-4579-ae26-282747501a66 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] [instance: 70f2ea82-1b68-478e-8195-f9754eb051ae] Start spawning the instance on the hypervisor. 
{{(pid=63279) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 2604.913945] env[63279]: DEBUG nova.virt.hardware [None req-4f78e2f8-a484-4579-ae26-282747501a66 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-13T17:30:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-13T17:30:01Z,direct_url=,disk_format='vmdk',id=30887889-e45b-4f67-8b3c-16216e594a90,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a935e86eee0a4c38adfe0367d2097a61',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-13T17:30:02Z,virtual_size=,visibility=), allow threads: False {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2604.914199] env[63279]: DEBUG nova.virt.hardware [None req-4f78e2f8-a484-4579-ae26-282747501a66 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Flavor limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2604.914365] env[63279]: DEBUG nova.virt.hardware [None req-4f78e2f8-a484-4579-ae26-282747501a66 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Image limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2604.914550] env[63279]: DEBUG nova.virt.hardware [None req-4f78e2f8-a484-4579-ae26-282747501a66 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Flavor pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2604.914704] env[63279]: DEBUG nova.virt.hardware [None req-4f78e2f8-a484-4579-ae26-282747501a66 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Image pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2604.914849] env[63279]: DEBUG nova.virt.hardware [None req-4f78e2f8-a484-4579-ae26-282747501a66 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2604.915073] env[63279]: DEBUG nova.virt.hardware [None req-4f78e2f8-a484-4579-ae26-282747501a66 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 2604.915247] env[63279]: DEBUG nova.virt.hardware [None req-4f78e2f8-a484-4579-ae26-282747501a66 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 2604.915500] env[63279]: DEBUG 
nova.virt.hardware [None req-4f78e2f8-a484-4579-ae26-282747501a66 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Got 1 possible topologies {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2604.915673] env[63279]: DEBUG nova.virt.hardware [None req-4f78e2f8-a484-4579-ae26-282747501a66 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2604.915852] env[63279]: DEBUG nova.virt.hardware [None req-4f78e2f8-a484-4579-ae26-282747501a66 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2604.916751] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa709129-5402-4a5b-ae4d-e4aaed17efb3 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2604.925406] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8206e5af-9000-42cf-b7c1-16feb71e73cf {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2605.046862] env[63279]: DEBUG oslo_concurrency.lockutils [None req-fadc7386-d772-43bc-84c1-71fa11034213 tempest-ServerActionsTestOtherB-605016737 tempest-ServerActionsTestOtherB-605016737-project-member] Lock "011af8fa-0f76-44a8-8b24-b3d65f5e841e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 10.694s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2605.147639] env[63279]: DEBUG nova.compute.manager [req-a7667822-7545-4486-bc75-8a49903a99cd req-c732492d-4d01-43ec-b7c4-c93147429d41 service nova] [instance: 70f2ea82-1b68-478e-8195-f9754eb051ae] Received event network-vif-plugged-7d6d4bb4-a996-4572-a198-cad6b5f0105c {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2605.147860] env[63279]: DEBUG oslo_concurrency.lockutils [req-a7667822-7545-4486-bc75-8a49903a99cd req-c732492d-4d01-43ec-b7c4-c93147429d41 service nova] Acquiring lock "70f2ea82-1b68-478e-8195-f9754eb051ae-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2605.148086] env[63279]: DEBUG oslo_concurrency.lockutils [req-a7667822-7545-4486-bc75-8a49903a99cd req-c732492d-4d01-43ec-b7c4-c93147429d41 service nova] Lock "70f2ea82-1b68-478e-8195-f9754eb051ae-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2605.148259] env[63279]: DEBUG oslo_concurrency.lockutils [req-a7667822-7545-4486-bc75-8a49903a99cd req-c732492d-4d01-43ec-b7c4-c93147429d41 service nova] Lock "70f2ea82-1b68-478e-8195-f9754eb051ae-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63279) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2605.148429] env[63279]: DEBUG nova.compute.manager [req-a7667822-7545-4486-bc75-8a49903a99cd req-c732492d-4d01-43ec-b7c4-c93147429d41 service nova] [instance: 70f2ea82-1b68-478e-8195-f9754eb051ae] No waiting events found dispatching network-vif-plugged-7d6d4bb4-a996-4572-a198-cad6b5f0105c {{(pid=63279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 2605.148596] env[63279]: WARNING nova.compute.manager [req-a7667822-7545-4486-bc75-8a49903a99cd req-c732492d-4d01-43ec-b7c4-c93147429d41 service nova] [instance: 70f2ea82-1b68-478e-8195-f9754eb051ae] Received unexpected event network-vif-plugged-7d6d4bb4-a996-4572-a198-cad6b5f0105c for instance with vm_state building and task_state spawning. [ 2605.229761] env[63279]: DEBUG nova.network.neutron [None req-4f78e2f8-a484-4579-ae26-282747501a66 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] [instance: 70f2ea82-1b68-478e-8195-f9754eb051ae] Successfully updated port: 7d6d4bb4-a996-4572-a198-cad6b5f0105c {{(pid=63279) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2605.732340] env[63279]: DEBUG oslo_concurrency.lockutils [None req-4f78e2f8-a484-4579-ae26-282747501a66 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Acquiring lock "refresh_cache-70f2ea82-1b68-478e-8195-f9754eb051ae" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2605.732683] env[63279]: DEBUG oslo_concurrency.lockutils [None req-4f78e2f8-a484-4579-ae26-282747501a66 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Acquired lock "refresh_cache-70f2ea82-1b68-478e-8195-f9754eb051ae" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2605.732683] env[63279]: DEBUG nova.network.neutron [None req-4f78e2f8-a484-4579-ae26-282747501a66 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] [instance: 70f2ea82-1b68-478e-8195-f9754eb051ae] Building network info cache for instance {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2606.265473] env[63279]: DEBUG nova.network.neutron [None req-4f78e2f8-a484-4579-ae26-282747501a66 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] [instance: 70f2ea82-1b68-478e-8195-f9754eb051ae] Instance cache missing network info. 
{{(pid=63279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2606.398471] env[63279]: DEBUG nova.network.neutron [None req-4f78e2f8-a484-4579-ae26-282747501a66 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] [instance: 70f2ea82-1b68-478e-8195-f9754eb051ae] Updating instance_info_cache with network_info: [{"id": "7d6d4bb4-a996-4572-a198-cad6b5f0105c", "address": "fa:16:3e:58:15:8e", "network": {"id": "08f1618e-b8a4-4fea-847f-a6d01449f46c", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1512363099-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5fa421934ecd4054a70528644a40349e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e41070eb-3ac1-4ca9-a3d0-fd65893a97de", "external-id": "nsx-vlan-transportzone-596", "segmentation_id": 596, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7d6d4bb4-a9", "ovs_interfaceid": "7d6d4bb4-a996-4572-a198-cad6b5f0105c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2606.901671] env[63279]: DEBUG oslo_concurrency.lockutils [None req-4f78e2f8-a484-4579-ae26-282747501a66 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Releasing lock "refresh_cache-70f2ea82-1b68-478e-8195-f9754eb051ae" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2606.902089] env[63279]: DEBUG nova.compute.manager [None req-4f78e2f8-a484-4579-ae26-282747501a66 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] [instance: 70f2ea82-1b68-478e-8195-f9754eb051ae] Instance network_info: |[{"id": "7d6d4bb4-a996-4572-a198-cad6b5f0105c", "address": "fa:16:3e:58:15:8e", "network": {"id": "08f1618e-b8a4-4fea-847f-a6d01449f46c", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1512363099-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5fa421934ecd4054a70528644a40349e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e41070eb-3ac1-4ca9-a3d0-fd65893a97de", "external-id": "nsx-vlan-transportzone-596", "segmentation_id": 596, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7d6d4bb4-a9", "ovs_interfaceid": "7d6d4bb4-a996-4572-a198-cad6b5f0105c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63279) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:2003}} [ 2606.902461] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-4f78e2f8-a484-4579-ae26-282747501a66 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] [instance: 70f2ea82-1b68-478e-8195-f9754eb051ae] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:58:15:8e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e41070eb-3ac1-4ca9-a3d0-fd65893a97de', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '7d6d4bb4-a996-4572-a198-cad6b5f0105c', 'vif_model': 'vmxnet3'}] {{(pid=63279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2606.910020] env[63279]: DEBUG oslo.service.loopingcall [None req-4f78e2f8-a484-4579-ae26-282747501a66 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2606.911388] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 70f2ea82-1b68-478e-8195-f9754eb051ae] Creating VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2606.913910] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3cf0b825-6bbe-4268-b1ec-8627aad5335b {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2606.929643] env[63279]: DEBUG oslo_concurrency.lockutils [None req-d0a24156-19b3-449c-ac6a-b0ac67fc425b tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Acquiring lock "033a1d6d-db52-4902-8994-4d1537ab8658" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2606.929939] env[63279]: DEBUG oslo_concurrency.lockutils [None req-d0a24156-19b3-449c-ac6a-b0ac67fc425b tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Lock "033a1d6d-db52-4902-8994-4d1537ab8658" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2606.938889] env[63279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2606.938889] env[63279]: value = "task-2088390" [ 2606.938889] env[63279]: _type = "Task" [ 2606.938889] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2606.952064] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2088390, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2607.175879] env[63279]: DEBUG nova.compute.manager [req-2b3ea742-73f4-4deb-8a57-e697615410ef req-0e4a78d6-b68f-45b0-9d57-752bddf2559b service nova] [instance: 70f2ea82-1b68-478e-8195-f9754eb051ae] Received event network-changed-7d6d4bb4-a996-4572-a198-cad6b5f0105c {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2607.176077] env[63279]: DEBUG nova.compute.manager [req-2b3ea742-73f4-4deb-8a57-e697615410ef req-0e4a78d6-b68f-45b0-9d57-752bddf2559b service nova] [instance: 70f2ea82-1b68-478e-8195-f9754eb051ae] Refreshing instance network info cache due to event network-changed-7d6d4bb4-a996-4572-a198-cad6b5f0105c. {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11655}} [ 2607.176305] env[63279]: DEBUG oslo_concurrency.lockutils [req-2b3ea742-73f4-4deb-8a57-e697615410ef req-0e4a78d6-b68f-45b0-9d57-752bddf2559b service nova] Acquiring lock "refresh_cache-70f2ea82-1b68-478e-8195-f9754eb051ae" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2607.176455] env[63279]: DEBUG oslo_concurrency.lockutils [req-2b3ea742-73f4-4deb-8a57-e697615410ef req-0e4a78d6-b68f-45b0-9d57-752bddf2559b service nova] Acquired lock "refresh_cache-70f2ea82-1b68-478e-8195-f9754eb051ae" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2607.176619] env[63279]: DEBUG nova.network.neutron [req-2b3ea742-73f4-4deb-8a57-e697615410ef req-0e4a78d6-b68f-45b0-9d57-752bddf2559b service nova] [instance: 70f2ea82-1b68-478e-8195-f9754eb051ae] Refreshing network info cache for port 7d6d4bb4-a996-4572-a198-cad6b5f0105c {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2607.432990] env[63279]: DEBUG nova.compute.manager [None req-d0a24156-19b3-449c-ac6a-b0ac67fc425b tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: 033a1d6d-db52-4902-8994-4d1537ab8658] Starting instance... {{(pid=63279) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 2607.449082] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2088390, 'name': CreateVM_Task, 'duration_secs': 0.34894} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2607.449245] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 70f2ea82-1b68-478e-8195-f9754eb051ae] Created VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2607.449930] env[63279]: DEBUG oslo_concurrency.lockutils [None req-4f78e2f8-a484-4579-ae26-282747501a66 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2607.450121] env[63279]: DEBUG oslo_concurrency.lockutils [None req-4f78e2f8-a484-4579-ae26-282747501a66 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2607.450441] env[63279]: DEBUG oslo_concurrency.lockutils [None req-4f78e2f8-a484-4579-ae26-282747501a66 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2607.450686] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6e96c559-9e7e-439b-bfb7-101ab22d29dd {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2607.456119] env[63279]: DEBUG oslo_vmware.api [None req-4f78e2f8-a484-4579-ae26-282747501a66 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Waiting for the task: (returnval){ [ 2607.456119] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]529cb14d-8b86-fbc9-1706-6aa0d85ac4d5" [ 2607.456119] env[63279]: _type = "Task" [ 2607.456119] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2607.463872] env[63279]: DEBUG oslo_vmware.api [None req-4f78e2f8-a484-4579-ae26-282747501a66 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]529cb14d-8b86-fbc9-1706-6aa0d85ac4d5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2607.874611] env[63279]: DEBUG nova.network.neutron [req-2b3ea742-73f4-4deb-8a57-e697615410ef req-0e4a78d6-b68f-45b0-9d57-752bddf2559b service nova] [instance: 70f2ea82-1b68-478e-8195-f9754eb051ae] Updated VIF entry in instance network info cache for port 7d6d4bb4-a996-4572-a198-cad6b5f0105c. 
{{(pid=63279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2607.874967] env[63279]: DEBUG nova.network.neutron [req-2b3ea742-73f4-4deb-8a57-e697615410ef req-0e4a78d6-b68f-45b0-9d57-752bddf2559b service nova] [instance: 70f2ea82-1b68-478e-8195-f9754eb051ae] Updating instance_info_cache with network_info: [{"id": "7d6d4bb4-a996-4572-a198-cad6b5f0105c", "address": "fa:16:3e:58:15:8e", "network": {"id": "08f1618e-b8a4-4fea-847f-a6d01449f46c", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1512363099-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5fa421934ecd4054a70528644a40349e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e41070eb-3ac1-4ca9-a3d0-fd65893a97de", "external-id": "nsx-vlan-transportzone-596", "segmentation_id": 596, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7d6d4bb4-a9", "ovs_interfaceid": "7d6d4bb4-a996-4572-a198-cad6b5f0105c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2607.956872] env[63279]: DEBUG oslo_concurrency.lockutils [None req-d0a24156-19b3-449c-ac6a-b0ac67fc425b tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2607.957159] env[63279]: DEBUG oslo_concurrency.lockutils [None req-d0a24156-19b3-449c-ac6a-b0ac67fc425b tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2607.958774] env[63279]: INFO nova.compute.claims [None req-d0a24156-19b3-449c-ac6a-b0ac67fc425b tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: 033a1d6d-db52-4902-8994-4d1537ab8658] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2607.971831] env[63279]: DEBUG oslo_vmware.api [None req-4f78e2f8-a484-4579-ae26-282747501a66 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]529cb14d-8b86-fbc9-1706-6aa0d85ac4d5, 'name': SearchDatastore_Task, 'duration_secs': 0.013049} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2607.972136] env[63279]: DEBUG oslo_concurrency.lockutils [None req-4f78e2f8-a484-4579-ae26-282747501a66 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2607.972365] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-4f78e2f8-a484-4579-ae26-282747501a66 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] [instance: 70f2ea82-1b68-478e-8195-f9754eb051ae] Processing image 30887889-e45b-4f67-8b3c-16216e594a90 {{(pid=63279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2607.972597] env[63279]: DEBUG oslo_concurrency.lockutils [None req-4f78e2f8-a484-4579-ae26-282747501a66 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2607.972747] env[63279]: DEBUG oslo_concurrency.lockutils [None req-4f78e2f8-a484-4579-ae26-282747501a66 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2607.972930] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-4f78e2f8-a484-4579-ae26-282747501a66 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2607.973248] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4f114b55-c93f-4067-b444-171f4fa662fd {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2607.982009] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-4f78e2f8-a484-4579-ae26-282747501a66 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2607.982114] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-4f78e2f8-a484-4579-ae26-282747501a66 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2607.982773] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f7000c31-cb91-4bb8-be11-1692e8480e9e {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2607.988117] env[63279]: DEBUG oslo_vmware.api [None req-4f78e2f8-a484-4579-ae26-282747501a66 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Waiting for the task: (returnval){ [ 2607.988117] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52f11ec1-55a5-4a0f-630e-64432968f000" [ 2607.988117] env[63279]: _type = "Task" [ 2607.988117] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2607.996332] env[63279]: DEBUG oslo_vmware.api [None req-4f78e2f8-a484-4579-ae26-282747501a66 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52f11ec1-55a5-4a0f-630e-64432968f000, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2608.377736] env[63279]: DEBUG oslo_concurrency.lockutils [req-2b3ea742-73f4-4deb-8a57-e697615410ef req-0e4a78d6-b68f-45b0-9d57-752bddf2559b service nova] Releasing lock "refresh_cache-70f2ea82-1b68-478e-8195-f9754eb051ae" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2608.498492] env[63279]: DEBUG oslo_vmware.api [None req-4f78e2f8-a484-4579-ae26-282747501a66 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52f11ec1-55a5-4a0f-630e-64432968f000, 'name': SearchDatastore_Task, 'duration_secs': 0.009465} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2608.499211] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0247e6eb-12a3-4fa9-b627-e997b16edd9b {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2608.505132] env[63279]: DEBUG oslo_vmware.api [None req-4f78e2f8-a484-4579-ae26-282747501a66 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Waiting for the task: (returnval){ [ 2608.505132] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52735c5f-0c8a-eab4-f3d3-8fc764955b20" [ 2608.505132] env[63279]: _type = "Task" [ 2608.505132] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2608.512551] env[63279]: DEBUG oslo_vmware.api [None req-4f78e2f8-a484-4579-ae26-282747501a66 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52735c5f-0c8a-eab4-f3d3-8fc764955b20, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2609.019327] env[63279]: DEBUG oslo_vmware.api [None req-4f78e2f8-a484-4579-ae26-282747501a66 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52735c5f-0c8a-eab4-f3d3-8fc764955b20, 'name': SearchDatastore_Task, 'duration_secs': 0.011306} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2609.019605] env[63279]: DEBUG oslo_concurrency.lockutils [None req-4f78e2f8-a484-4579-ae26-282747501a66 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2609.019864] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-4f78e2f8-a484-4579-ae26-282747501a66 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] 70f2ea82-1b68-478e-8195-f9754eb051ae/70f2ea82-1b68-478e-8195-f9754eb051ae.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2609.020102] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7050795c-0c52-4438-973a-ff28c257e5f5 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2609.029505] env[63279]: DEBUG oslo_vmware.api [None req-4f78e2f8-a484-4579-ae26-282747501a66 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Waiting for the task: (returnval){ [ 2609.029505] env[63279]: value = "task-2088391" [ 2609.029505] env[63279]: _type = "Task" [ 2609.029505] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2609.040169] env[63279]: DEBUG oslo_vmware.api [None req-4f78e2f8-a484-4579-ae26-282747501a66 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Task: {'id': task-2088391, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2609.045286] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-658992e9-4520-4fc4-b3fc-b359cfed221f {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2609.052336] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5329f67a-14dd-48e1-bf7f-43ef1a3c3d20 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2609.082529] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-688f59f7-3f9a-4e0b-819f-f8c89a3584f8 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2609.090401] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3be44821-3b2c-473a-9de0-5e5dccdd3748 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2609.104421] env[63279]: DEBUG nova.compute.provider_tree [None req-d0a24156-19b3-449c-ac6a-b0ac67fc425b tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Updating inventory in ProviderTree for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2609.540176] env[63279]: DEBUG oslo_vmware.api [None req-4f78e2f8-a484-4579-ae26-282747501a66 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Task: {'id': task-2088391, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.483282} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2609.543818] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-4f78e2f8-a484-4579-ae26-282747501a66 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] 70f2ea82-1b68-478e-8195-f9754eb051ae/70f2ea82-1b68-478e-8195-f9754eb051ae.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2609.543818] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-4f78e2f8-a484-4579-ae26-282747501a66 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] [instance: 70f2ea82-1b68-478e-8195-f9754eb051ae] Extending root virtual disk to 1048576 {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2609.543818] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-65a5fb38-63db-4d96-9e6e-1df5dd9db183 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2609.557396] env[63279]: DEBUG oslo_vmware.api [None req-4f78e2f8-a484-4579-ae26-282747501a66 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Waiting for the task: (returnval){ [ 2609.557396] env[63279]: value = "task-2088392" [ 2609.557396] env[63279]: _type = "Task" [ 2609.557396] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2609.569106] env[63279]: DEBUG oslo_vmware.api [None req-4f78e2f8-a484-4579-ae26-282747501a66 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Task: {'id': task-2088392, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2609.625955] env[63279]: ERROR nova.scheduler.client.report [None req-d0a24156-19b3-449c-ac6a-b0ac67fc425b tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [req-f656f00d-4f8c-45aa-928d-a30364201f8d] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 0ba7c625-a0fc-4d3c-b804-196d00f00137. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-f656f00d-4f8c-45aa-928d-a30364201f8d"}]} [ 2609.647477] env[63279]: DEBUG nova.scheduler.client.report [None req-d0a24156-19b3-449c-ac6a-b0ac67fc425b tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Refreshing inventories for resource provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 2609.667960] env[63279]: DEBUG nova.scheduler.client.report [None req-d0a24156-19b3-449c-ac6a-b0ac67fc425b tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Updating ProviderTree inventory for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 2609.668232] env[63279]: DEBUG nova.compute.provider_tree [None req-d0a24156-19b3-449c-ac6a-b0ac67fc425b tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Updating inventory in ProviderTree for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2609.683326] env[63279]: DEBUG nova.scheduler.client.report [None req-d0a24156-19b3-449c-ac6a-b0ac67fc425b tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Refreshing aggregate associations for resource provider 0ba7c625-a0fc-4d3c-b804-196d00f00137, aggregates: None {{(pid=63279) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 2609.701249] env[63279]: DEBUG nova.scheduler.client.report [None req-d0a24156-19b3-449c-ac6a-b0ac67fc425b tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Refreshing trait associations for resource provider 0ba7c625-a0fc-4d3c-b804-196d00f00137, traits: COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK {{(pid=63279) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 2609.784318] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9221b7f6-6722-415d-bbec-103a9cdd2884 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2609.793774] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-473f7dfe-e800-45aa-a832-2598be0f8783 
{{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2609.826146] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e5f5e85-df46-47a1-99e6-e89748db3f3a {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2609.836961] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a64610b-c749-42f2-b1b2-1fe7ba95f140 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2609.855323] env[63279]: DEBUG nova.compute.provider_tree [None req-d0a24156-19b3-449c-ac6a-b0ac67fc425b tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Inventory has not changed in ProviderTree for provider: 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2610.068576] env[63279]: DEBUG oslo_vmware.api [None req-4f78e2f8-a484-4579-ae26-282747501a66 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Task: {'id': task-2088392, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.080965} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2610.068890] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-4f78e2f8-a484-4579-ae26-282747501a66 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] [instance: 70f2ea82-1b68-478e-8195-f9754eb051ae] Extended root virtual disk {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2610.074189] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f001d3e3-ac14-4231-8ca3-1a8e9393438c {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2610.092567] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-4f78e2f8-a484-4579-ae26-282747501a66 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] [instance: 70f2ea82-1b68-478e-8195-f9754eb051ae] Reconfiguring VM instance instance-0000007a to attach disk [datastore1] 70f2ea82-1b68-478e-8195-f9754eb051ae/70f2ea82-1b68-478e-8195-f9754eb051ae.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2610.093947] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-439c66a6-ca39-4364-82bc-bf64237aa5fd {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2610.114871] env[63279]: DEBUG oslo_vmware.api [None req-4f78e2f8-a484-4579-ae26-282747501a66 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Waiting for the task: (returnval){ [ 2610.114871] env[63279]: value = "task-2088393" [ 2610.114871] env[63279]: _type = "Task" [ 2610.114871] env[63279]: } to complete. 
{{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2610.123362] env[63279]: DEBUG oslo_vmware.api [None req-4f78e2f8-a484-4579-ae26-282747501a66 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Task: {'id': task-2088393, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2610.359890] env[63279]: DEBUG nova.scheduler.client.report [None req-d0a24156-19b3-449c-ac6a-b0ac67fc425b tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Inventory has not changed for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2610.625251] env[63279]: DEBUG oslo_vmware.api [None req-4f78e2f8-a484-4579-ae26-282747501a66 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Task: {'id': task-2088393, 'name': ReconfigVM_Task, 'duration_secs': 0.318325} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2610.625526] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-4f78e2f8-a484-4579-ae26-282747501a66 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] [instance: 70f2ea82-1b68-478e-8195-f9754eb051ae] Reconfigured VM instance instance-0000007a to attach disk [datastore1] 70f2ea82-1b68-478e-8195-f9754eb051ae/70f2ea82-1b68-478e-8195-f9754eb051ae.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2610.626185] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-18c69a17-f1c8-4170-9250-f7f714610503 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2610.633443] env[63279]: DEBUG oslo_vmware.api [None req-4f78e2f8-a484-4579-ae26-282747501a66 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Waiting for the task: (returnval){ [ 2610.633443] env[63279]: value = "task-2088394" [ 2610.633443] env[63279]: _type = "Task" [ 2610.633443] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2610.642193] env[63279]: DEBUG oslo_vmware.api [None req-4f78e2f8-a484-4579-ae26-282747501a66 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Task: {'id': task-2088394, 'name': Rename_Task} progress is 5%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2610.865554] env[63279]: DEBUG oslo_concurrency.lockutils [None req-d0a24156-19b3-449c-ac6a-b0ac67fc425b tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.908s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2610.866039] env[63279]: DEBUG nova.compute.manager [None req-d0a24156-19b3-449c-ac6a-b0ac67fc425b tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: 033a1d6d-db52-4902-8994-4d1537ab8658] Start building networks asynchronously for instance. {{(pid=63279) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 2611.144298] env[63279]: DEBUG oslo_vmware.api [None req-4f78e2f8-a484-4579-ae26-282747501a66 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Task: {'id': task-2088394, 'name': Rename_Task, 'duration_secs': 0.143024} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2611.144629] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-4f78e2f8-a484-4579-ae26-282747501a66 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] [instance: 70f2ea82-1b68-478e-8195-f9754eb051ae] Powering on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2611.144800] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-87da7891-4177-413c-987b-04dcca6f3431 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2611.151882] env[63279]: DEBUG oslo_vmware.api [None req-4f78e2f8-a484-4579-ae26-282747501a66 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Waiting for the task: (returnval){ [ 2611.151882] env[63279]: value = "task-2088395" [ 2611.151882] env[63279]: _type = "Task" [ 2611.151882] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2611.161567] env[63279]: DEBUG oslo_vmware.api [None req-4f78e2f8-a484-4579-ae26-282747501a66 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Task: {'id': task-2088395, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2611.371205] env[63279]: DEBUG nova.compute.utils [None req-d0a24156-19b3-449c-ac6a-b0ac67fc425b tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Using /dev/sd instead of None {{(pid=63279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2611.372666] env[63279]: DEBUG nova.compute.manager [None req-d0a24156-19b3-449c-ac6a-b0ac67fc425b tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: 033a1d6d-db52-4902-8994-4d1537ab8658] Allocating IP information in the background. 
{{(pid=63279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 2611.372804] env[63279]: DEBUG nova.network.neutron [None req-d0a24156-19b3-449c-ac6a-b0ac67fc425b tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: 033a1d6d-db52-4902-8994-4d1537ab8658] allocate_for_instance() {{(pid=63279) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2611.415565] env[63279]: DEBUG nova.policy [None req-d0a24156-19b3-449c-ac6a-b0ac67fc425b tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c7cd64afda9f4eb488bdf76a36f2fee6', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f9427c264e8e41998f579af352cb48cf', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63279) authorize /opt/stack/nova/nova/policy.py:192}} [ 2611.662162] env[63279]: DEBUG oslo_vmware.api [None req-4f78e2f8-a484-4579-ae26-282747501a66 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Task: {'id': task-2088395, 'name': PowerOnVM_Task, 'duration_secs': 0.44431} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2611.662515] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-4f78e2f8-a484-4579-ae26-282747501a66 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] [instance: 70f2ea82-1b68-478e-8195-f9754eb051ae] Powered on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2611.662761] env[63279]: INFO nova.compute.manager [None req-4f78e2f8-a484-4579-ae26-282747501a66 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] [instance: 70f2ea82-1b68-478e-8195-f9754eb051ae] Took 6.78 seconds to spawn the instance on the hypervisor. 
[ 2611.666603] env[63279]: DEBUG nova.compute.manager [None req-4f78e2f8-a484-4579-ae26-282747501a66 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] [instance: 70f2ea82-1b68-478e-8195-f9754eb051ae] Checking state {{(pid=63279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2611.666603] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc16fca0-d6b7-4ff4-981e-1fe8d69522ed {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2611.693855] env[63279]: DEBUG nova.network.neutron [None req-d0a24156-19b3-449c-ac6a-b0ac67fc425b tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: 033a1d6d-db52-4902-8994-4d1537ab8658] Successfully created port: 75bab2c4-7019-4de5-a02b-1b81e9419409 {{(pid=63279) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2611.876972] env[63279]: DEBUG nova.compute.manager [None req-d0a24156-19b3-449c-ac6a-b0ac67fc425b tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: 033a1d6d-db52-4902-8994-4d1537ab8658] Start building block device mappings for instance. {{(pid=63279) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 2612.182693] env[63279]: INFO nova.compute.manager [None req-4f78e2f8-a484-4579-ae26-282747501a66 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] [instance: 70f2ea82-1b68-478e-8195-f9754eb051ae] Took 12.22 seconds to build instance. [ 2612.382559] env[63279]: INFO nova.virt.block_device [None req-d0a24156-19b3-449c-ac6a-b0ac67fc425b tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: 033a1d6d-db52-4902-8994-4d1537ab8658] Booting with volume b193588b-4d25-4ebf-8400-07b3cc22c215 at /dev/sda [ 2612.419363] env[63279]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-26b139c6-dc48-405a-995c-8c0131bbb53f {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2612.429663] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1cd7638-aae1-404f-95a1-e1748ae67883 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2612.456947] env[63279]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-63a97605-086e-4bc7-8bbb-0a6354e9cd44 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2612.465245] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b288cafc-a65c-4f55-9df5-528b000d003a {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2612.492269] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34a0ed77-9017-4d5c-9dd0-eeee943b02e3 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2612.498225] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-477d15b3-f7c5-46b3-8ccf-cf7584a64095 {{(pid=63279) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2612.512862] env[63279]: DEBUG nova.virt.block_device [None req-d0a24156-19b3-449c-ac6a-b0ac67fc425b tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: 033a1d6d-db52-4902-8994-4d1537ab8658] Updating existing volume attachment record: 68cf9314-7af6-4ec5-a434-8c1f8cd3f47a {{(pid=63279) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 2612.684852] env[63279]: DEBUG oslo_concurrency.lockutils [None req-4f78e2f8-a484-4579-ae26-282747501a66 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Lock "70f2ea82-1b68-478e-8195-f9754eb051ae" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 13.730s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2613.200131] env[63279]: DEBUG nova.compute.manager [req-27360fdf-9f43-4611-a10b-80a88c155632 req-276043a8-4def-462a-917d-56173cc05017 service nova] [instance: 70f2ea82-1b68-478e-8195-f9754eb051ae] Received event network-changed-7d6d4bb4-a996-4572-a198-cad6b5f0105c {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2613.200364] env[63279]: DEBUG nova.compute.manager [req-27360fdf-9f43-4611-a10b-80a88c155632 req-276043a8-4def-462a-917d-56173cc05017 service nova] [instance: 70f2ea82-1b68-478e-8195-f9754eb051ae] Refreshing instance network info cache due to event network-changed-7d6d4bb4-a996-4572-a198-cad6b5f0105c. {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11655}} [ 2613.200569] env[63279]: DEBUG oslo_concurrency.lockutils [req-27360fdf-9f43-4611-a10b-80a88c155632 req-276043a8-4def-462a-917d-56173cc05017 service nova] Acquiring lock "refresh_cache-70f2ea82-1b68-478e-8195-f9754eb051ae" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2613.200715] env[63279]: DEBUG oslo_concurrency.lockutils [req-27360fdf-9f43-4611-a10b-80a88c155632 req-276043a8-4def-462a-917d-56173cc05017 service nova] Acquired lock "refresh_cache-70f2ea82-1b68-478e-8195-f9754eb051ae" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2613.200877] env[63279]: DEBUG nova.network.neutron [req-27360fdf-9f43-4611-a10b-80a88c155632 req-276043a8-4def-462a-917d-56173cc05017 service nova] [instance: 70f2ea82-1b68-478e-8195-f9754eb051ae] Refreshing network info cache for port 7d6d4bb4-a996-4572-a198-cad6b5f0105c {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2613.912283] env[63279]: DEBUG nova.network.neutron [req-27360fdf-9f43-4611-a10b-80a88c155632 req-276043a8-4def-462a-917d-56173cc05017 service nova] [instance: 70f2ea82-1b68-478e-8195-f9754eb051ae] Updated VIF entry in instance network info cache for port 7d6d4bb4-a996-4572-a198-cad6b5f0105c. 
{{(pid=63279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2613.912659] env[63279]: DEBUG nova.network.neutron [req-27360fdf-9f43-4611-a10b-80a88c155632 req-276043a8-4def-462a-917d-56173cc05017 service nova] [instance: 70f2ea82-1b68-478e-8195-f9754eb051ae] Updating instance_info_cache with network_info: [{"id": "7d6d4bb4-a996-4572-a198-cad6b5f0105c", "address": "fa:16:3e:58:15:8e", "network": {"id": "08f1618e-b8a4-4fea-847f-a6d01449f46c", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1512363099-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.239", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5fa421934ecd4054a70528644a40349e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e41070eb-3ac1-4ca9-a3d0-fd65893a97de", "external-id": "nsx-vlan-transportzone-596", "segmentation_id": 596, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7d6d4bb4-a9", "ovs_interfaceid": "7d6d4bb4-a996-4572-a198-cad6b5f0105c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2614.414979] env[63279]: DEBUG oslo_concurrency.lockutils [req-27360fdf-9f43-4611-a10b-80a88c155632 req-276043a8-4def-462a-917d-56173cc05017 service nova] Releasing lock "refresh_cache-70f2ea82-1b68-478e-8195-f9754eb051ae" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2614.595891] env[63279]: DEBUG nova.compute.manager [None req-d0a24156-19b3-449c-ac6a-b0ac67fc425b tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: 033a1d6d-db52-4902-8994-4d1537ab8658] Start spawning the instance on the hypervisor. 
{{(pid=63279) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 2614.596489] env[63279]: DEBUG nova.virt.hardware [None req-d0a24156-19b3-449c-ac6a-b0ac67fc425b tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-13T17:30:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=0,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=1073741824,status='active',tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2614.596714] env[63279]: DEBUG nova.virt.hardware [None req-d0a24156-19b3-449c-ac6a-b0ac67fc425b tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Flavor limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2614.596894] env[63279]: DEBUG nova.virt.hardware [None req-d0a24156-19b3-449c-ac6a-b0ac67fc425b tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Image limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2614.597110] env[63279]: DEBUG nova.virt.hardware [None req-d0a24156-19b3-449c-ac6a-b0ac67fc425b tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Flavor pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2614.597269] env[63279]: DEBUG nova.virt.hardware [None req-d0a24156-19b3-449c-ac6a-b0ac67fc425b tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Image pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2614.597443] env[63279]: DEBUG nova.virt.hardware [None req-d0a24156-19b3-449c-ac6a-b0ac67fc425b tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2614.597672] env[63279]: DEBUG nova.virt.hardware [None req-d0a24156-19b3-449c-ac6a-b0ac67fc425b tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 2614.597845] env[63279]: DEBUG nova.virt.hardware [None req-d0a24156-19b3-449c-ac6a-b0ac67fc425b tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 2614.598185] env[63279]: DEBUG nova.virt.hardware [None req-d0a24156-19b3-449c-ac6a-b0ac67fc425b tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Got 1 possible topologies {{(pid=63279) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2614.598402] env[63279]: DEBUG nova.virt.hardware [None req-d0a24156-19b3-449c-ac6a-b0ac67fc425b tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2614.598609] env[63279]: DEBUG nova.virt.hardware [None req-d0a24156-19b3-449c-ac6a-b0ac67fc425b tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2614.599549] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5f8b94a-5104-4ef9-8d69-4a12089a4b4d {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2614.609231] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f609c77-6c0b-460d-a785-e345809b2350 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2618.582507] env[63279]: DEBUG nova.compute.manager [req-7ef7471d-517e-482a-b3a5-43043d980abb req-190ad6e8-e79d-4526-90d4-e5aedc0ebb20 service nova] [instance: 033a1d6d-db52-4902-8994-4d1537ab8658] Received event network-vif-plugged-75bab2c4-7019-4de5-a02b-1b81e9419409 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2618.582794] env[63279]: DEBUG oslo_concurrency.lockutils [req-7ef7471d-517e-482a-b3a5-43043d980abb req-190ad6e8-e79d-4526-90d4-e5aedc0ebb20 service nova] Acquiring lock "033a1d6d-db52-4902-8994-4d1537ab8658-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2618.582960] env[63279]: DEBUG oslo_concurrency.lockutils [req-7ef7471d-517e-482a-b3a5-43043d980abb req-190ad6e8-e79d-4526-90d4-e5aedc0ebb20 service nova] Lock "033a1d6d-db52-4902-8994-4d1537ab8658-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2618.583133] env[63279]: DEBUG oslo_concurrency.lockutils [req-7ef7471d-517e-482a-b3a5-43043d980abb req-190ad6e8-e79d-4526-90d4-e5aedc0ebb20 service nova] Lock "033a1d6d-db52-4902-8994-4d1537ab8658-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2618.583349] env[63279]: DEBUG nova.compute.manager [req-7ef7471d-517e-482a-b3a5-43043d980abb req-190ad6e8-e79d-4526-90d4-e5aedc0ebb20 service nova] [instance: 033a1d6d-db52-4902-8994-4d1537ab8658] No waiting events found dispatching network-vif-plugged-75bab2c4-7019-4de5-a02b-1b81e9419409 {{(pid=63279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 2618.583520] env[63279]: WARNING nova.compute.manager [req-7ef7471d-517e-482a-b3a5-43043d980abb req-190ad6e8-e79d-4526-90d4-e5aedc0ebb20 service nova] [instance: 033a1d6d-db52-4902-8994-4d1537ab8658] Received unexpected event 
network-vif-plugged-75bab2c4-7019-4de5-a02b-1b81e9419409 for instance with vm_state building and task_state spawning. [ 2618.677446] env[63279]: DEBUG nova.network.neutron [None req-d0a24156-19b3-449c-ac6a-b0ac67fc425b tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: 033a1d6d-db52-4902-8994-4d1537ab8658] Successfully updated port: 75bab2c4-7019-4de5-a02b-1b81e9419409 {{(pid=63279) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2619.179116] env[63279]: DEBUG oslo_concurrency.lockutils [None req-d0a24156-19b3-449c-ac6a-b0ac67fc425b tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Acquiring lock "refresh_cache-033a1d6d-db52-4902-8994-4d1537ab8658" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2619.179320] env[63279]: DEBUG oslo_concurrency.lockutils [None req-d0a24156-19b3-449c-ac6a-b0ac67fc425b tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Acquired lock "refresh_cache-033a1d6d-db52-4902-8994-4d1537ab8658" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2619.179369] env[63279]: DEBUG nova.network.neutron [None req-d0a24156-19b3-449c-ac6a-b0ac67fc425b tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: 033a1d6d-db52-4902-8994-4d1537ab8658] Building network info cache for instance {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2619.710510] env[63279]: DEBUG nova.network.neutron [None req-d0a24156-19b3-449c-ac6a-b0ac67fc425b tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: 033a1d6d-db52-4902-8994-4d1537ab8658] Instance cache missing network info. 
{{(pid=63279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2619.831893] env[63279]: DEBUG nova.network.neutron [None req-d0a24156-19b3-449c-ac6a-b0ac67fc425b tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: 033a1d6d-db52-4902-8994-4d1537ab8658] Updating instance_info_cache with network_info: [{"id": "75bab2c4-7019-4de5-a02b-1b81e9419409", "address": "fa:16:3e:33:b2:0d", "network": {"id": "1e35690a-f100-41b7-a461-7334966ef784", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1516696428-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f9427c264e8e41998f579af352cb48cf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "15922696-dc08-44ef-97be-0b09a9dfeae8", "external-id": "nsx-vlan-transportzone-791", "segmentation_id": 791, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap75bab2c4-70", "ovs_interfaceid": "75bab2c4-7019-4de5-a02b-1b81e9419409", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2620.334813] env[63279]: DEBUG oslo_concurrency.lockutils [None req-d0a24156-19b3-449c-ac6a-b0ac67fc425b tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Releasing lock "refresh_cache-033a1d6d-db52-4902-8994-4d1537ab8658" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2620.335119] env[63279]: DEBUG nova.compute.manager [None req-d0a24156-19b3-449c-ac6a-b0ac67fc425b tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: 033a1d6d-db52-4902-8994-4d1537ab8658] Instance network_info: |[{"id": "75bab2c4-7019-4de5-a02b-1b81e9419409", "address": "fa:16:3e:33:b2:0d", "network": {"id": "1e35690a-f100-41b7-a461-7334966ef784", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1516696428-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f9427c264e8e41998f579af352cb48cf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "15922696-dc08-44ef-97be-0b09a9dfeae8", "external-id": "nsx-vlan-transportzone-791", "segmentation_id": 791, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap75bab2c4-70", "ovs_interfaceid": "75bab2c4-7019-4de5-a02b-1b81e9419409", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63279) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:2003}} [ 2620.335597] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-d0a24156-19b3-449c-ac6a-b0ac67fc425b tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: 033a1d6d-db52-4902-8994-4d1537ab8658] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:33:b2:0d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '15922696-dc08-44ef-97be-0b09a9dfeae8', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '75bab2c4-7019-4de5-a02b-1b81e9419409', 'vif_model': 'vmxnet3'}] {{(pid=63279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2620.342872] env[63279]: DEBUG oslo.service.loopingcall [None req-d0a24156-19b3-449c-ac6a-b0ac67fc425b tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2620.343085] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 033a1d6d-db52-4902-8994-4d1537ab8658] Creating VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2620.343342] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e8cd1edd-b76c-4b97-9569-118e613a97f3 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2620.364064] env[63279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2620.364064] env[63279]: value = "task-2088396" [ 2620.364064] env[63279]: _type = "Task" [ 2620.364064] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2620.371623] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2088396, 'name': CreateVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2620.607369] env[63279]: DEBUG nova.compute.manager [req-e4f110f3-518c-4aa4-87d3-fcdfbd9f9077 req-a4d1e36d-2034-48da-8763-88b2053aca41 service nova] [instance: 033a1d6d-db52-4902-8994-4d1537ab8658] Received event network-changed-75bab2c4-7019-4de5-a02b-1b81e9419409 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2620.607558] env[63279]: DEBUG nova.compute.manager [req-e4f110f3-518c-4aa4-87d3-fcdfbd9f9077 req-a4d1e36d-2034-48da-8763-88b2053aca41 service nova] [instance: 033a1d6d-db52-4902-8994-4d1537ab8658] Refreshing instance network info cache due to event network-changed-75bab2c4-7019-4de5-a02b-1b81e9419409. 
{{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11655}} [ 2620.607781] env[63279]: DEBUG oslo_concurrency.lockutils [req-e4f110f3-518c-4aa4-87d3-fcdfbd9f9077 req-a4d1e36d-2034-48da-8763-88b2053aca41 service nova] Acquiring lock "refresh_cache-033a1d6d-db52-4902-8994-4d1537ab8658" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2620.607967] env[63279]: DEBUG oslo_concurrency.lockutils [req-e4f110f3-518c-4aa4-87d3-fcdfbd9f9077 req-a4d1e36d-2034-48da-8763-88b2053aca41 service nova] Acquired lock "refresh_cache-033a1d6d-db52-4902-8994-4d1537ab8658" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2620.608111] env[63279]: DEBUG nova.network.neutron [req-e4f110f3-518c-4aa4-87d3-fcdfbd9f9077 req-a4d1e36d-2034-48da-8763-88b2053aca41 service nova] [instance: 033a1d6d-db52-4902-8994-4d1537ab8658] Refreshing network info cache for port 75bab2c4-7019-4de5-a02b-1b81e9419409 {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2620.874669] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2088396, 'name': CreateVM_Task, 'duration_secs': 0.463762} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2620.875155] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 033a1d6d-db52-4902-8994-4d1537ab8658] Created VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2620.875762] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-d0a24156-19b3-449c-ac6a-b0ac67fc425b tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: 033a1d6d-db52-4902-8994-4d1537ab8658] Block device information present: {'root_device_name': '/dev/sda', 'image': [], 'ephemerals': [], 'block_device_mapping': [{'disk_bus': None, 'guest_format': None, 'mount_device': '/dev/sda', 'delete_on_termination': True, 'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-427823', 'volume_id': 'b193588b-4d25-4ebf-8400-07b3cc22c215', 'name': 'volume-b193588b-4d25-4ebf-8400-07b3cc22c215', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '033a1d6d-db52-4902-8994-4d1537ab8658', 'attached_at': '', 'detached_at': '', 'volume_id': 'b193588b-4d25-4ebf-8400-07b3cc22c215', 'serial': 'b193588b-4d25-4ebf-8400-07b3cc22c215'}, 'boot_index': 0, 'device_type': None, 'attachment_id': '68cf9314-7af6-4ec5-a434-8c1f8cd3f47a', 'volume_type': None}], 'swap': None} {{(pid=63279) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 2620.875957] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-d0a24156-19b3-449c-ac6a-b0ac67fc425b tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: 033a1d6d-db52-4902-8994-4d1537ab8658] Root volume attach. 
Driver type: vmdk {{(pid=63279) attach_root_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:661}} [ 2620.876742] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb8cb9df-066d-47c7-a47f-bcbe2c9c9abd {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2620.884147] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c0c36c8-5ded-4b93-82ac-32c445b7698e {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2620.890140] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-525be7dd-f9ae-41a4-bcff-b2e58b3879a8 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2620.896370] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.RelocateVM_Task with opID=oslo.vmware-ea001d75-8e11-4233-8ec8-b8942f2c6f55 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2620.903954] env[63279]: DEBUG oslo_vmware.api [None req-d0a24156-19b3-449c-ac6a-b0ac67fc425b tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Waiting for the task: (returnval){ [ 2620.903954] env[63279]: value = "task-2088397" [ 2620.903954] env[63279]: _type = "Task" [ 2620.903954] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2620.911622] env[63279]: DEBUG oslo_vmware.api [None req-d0a24156-19b3-449c-ac6a-b0ac67fc425b tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Task: {'id': task-2088397, 'name': RelocateVM_Task} progress is 5%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2621.310755] env[63279]: DEBUG nova.network.neutron [req-e4f110f3-518c-4aa4-87d3-fcdfbd9f9077 req-a4d1e36d-2034-48da-8763-88b2053aca41 service nova] [instance: 033a1d6d-db52-4902-8994-4d1537ab8658] Updated VIF entry in instance network info cache for port 75bab2c4-7019-4de5-a02b-1b81e9419409. 
{{(pid=63279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2621.311143] env[63279]: DEBUG nova.network.neutron [req-e4f110f3-518c-4aa4-87d3-fcdfbd9f9077 req-a4d1e36d-2034-48da-8763-88b2053aca41 service nova] [instance: 033a1d6d-db52-4902-8994-4d1537ab8658] Updating instance_info_cache with network_info: [{"id": "75bab2c4-7019-4de5-a02b-1b81e9419409", "address": "fa:16:3e:33:b2:0d", "network": {"id": "1e35690a-f100-41b7-a461-7334966ef784", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1516696428-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f9427c264e8e41998f579af352cb48cf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "15922696-dc08-44ef-97be-0b09a9dfeae8", "external-id": "nsx-vlan-transportzone-791", "segmentation_id": 791, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap75bab2c4-70", "ovs_interfaceid": "75bab2c4-7019-4de5-a02b-1b81e9419409", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2621.413259] env[63279]: DEBUG oslo_vmware.api [None req-d0a24156-19b3-449c-ac6a-b0ac67fc425b tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Task: {'id': task-2088397, 'name': RelocateVM_Task, 'duration_secs': 0.394346} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2621.413469] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-d0a24156-19b3-449c-ac6a-b0ac67fc425b tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: 033a1d6d-db52-4902-8994-4d1537ab8658] Volume attach. 
Driver type: vmdk {{(pid=63279) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 2621.413677] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-d0a24156-19b3-449c-ac6a-b0ac67fc425b tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: 033a1d6d-db52-4902-8994-4d1537ab8658] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-427823', 'volume_id': 'b193588b-4d25-4ebf-8400-07b3cc22c215', 'name': 'volume-b193588b-4d25-4ebf-8400-07b3cc22c215', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '033a1d6d-db52-4902-8994-4d1537ab8658', 'attached_at': '', 'detached_at': '', 'volume_id': 'b193588b-4d25-4ebf-8400-07b3cc22c215', 'serial': 'b193588b-4d25-4ebf-8400-07b3cc22c215'} {{(pid=63279) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 2621.414462] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5dd7d502-2ecf-40d8-ad27-3f09243f840d {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2621.429404] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-abfdfff0-d34c-480b-908f-3b298fcb89a7 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2621.451500] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-d0a24156-19b3-449c-ac6a-b0ac67fc425b tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: 033a1d6d-db52-4902-8994-4d1537ab8658] Reconfiguring VM instance instance-0000007b to attach disk [datastore1] volume-b193588b-4d25-4ebf-8400-07b3cc22c215/volume-b193588b-4d25-4ebf-8400-07b3cc22c215.vmdk or device None with type thin {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2621.452098] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0f3b9a22-101c-4493-b45e-8b5d7b7499c0 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2621.471744] env[63279]: DEBUG oslo_vmware.api [None req-d0a24156-19b3-449c-ac6a-b0ac67fc425b tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Waiting for the task: (returnval){ [ 2621.471744] env[63279]: value = "task-2088398" [ 2621.471744] env[63279]: _type = "Task" [ 2621.471744] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2621.479642] env[63279]: DEBUG oslo_vmware.api [None req-d0a24156-19b3-449c-ac6a-b0ac67fc425b tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Task: {'id': task-2088398, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2621.814260] env[63279]: DEBUG oslo_concurrency.lockutils [req-e4f110f3-518c-4aa4-87d3-fcdfbd9f9077 req-a4d1e36d-2034-48da-8763-88b2053aca41 service nova] Releasing lock "refresh_cache-033a1d6d-db52-4902-8994-4d1537ab8658" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2621.982030] env[63279]: DEBUG oslo_vmware.api [None req-d0a24156-19b3-449c-ac6a-b0ac67fc425b tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Task: {'id': task-2088398, 'name': ReconfigVM_Task, 'duration_secs': 0.261019} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2621.982385] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-d0a24156-19b3-449c-ac6a-b0ac67fc425b tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: 033a1d6d-db52-4902-8994-4d1537ab8658] Reconfigured VM instance instance-0000007b to attach disk [datastore1] volume-b193588b-4d25-4ebf-8400-07b3cc22c215/volume-b193588b-4d25-4ebf-8400-07b3cc22c215.vmdk or device None with type thin {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2621.987024] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-11704b8d-aeb2-49f2-b1cc-2da58e6ea6a6 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2622.002051] env[63279]: DEBUG oslo_vmware.api [None req-d0a24156-19b3-449c-ac6a-b0ac67fc425b tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Waiting for the task: (returnval){ [ 2622.002051] env[63279]: value = "task-2088399" [ 2622.002051] env[63279]: _type = "Task" [ 2622.002051] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2622.010133] env[63279]: DEBUG oslo_vmware.api [None req-d0a24156-19b3-449c-ac6a-b0ac67fc425b tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Task: {'id': task-2088399, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2622.512633] env[63279]: DEBUG oslo_vmware.api [None req-d0a24156-19b3-449c-ac6a-b0ac67fc425b tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Task: {'id': task-2088399, 'name': ReconfigVM_Task, 'duration_secs': 0.126776} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2622.512941] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-d0a24156-19b3-449c-ac6a-b0ac67fc425b tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: 033a1d6d-db52-4902-8994-4d1537ab8658] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-427823', 'volume_id': 'b193588b-4d25-4ebf-8400-07b3cc22c215', 'name': 'volume-b193588b-4d25-4ebf-8400-07b3cc22c215', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '033a1d6d-db52-4902-8994-4d1537ab8658', 'attached_at': '', 'detached_at': '', 'volume_id': 'b193588b-4d25-4ebf-8400-07b3cc22c215', 'serial': 'b193588b-4d25-4ebf-8400-07b3cc22c215'} {{(pid=63279) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 2622.513521] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a0aef68e-4125-4a6f-9cb5-3103e91cb401 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2622.520769] env[63279]: DEBUG oslo_vmware.api [None req-d0a24156-19b3-449c-ac6a-b0ac67fc425b tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Waiting for the task: (returnval){ [ 2622.520769] env[63279]: value = "task-2088400" [ 2622.520769] env[63279]: _type = "Task" [ 2622.520769] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2622.528665] env[63279]: DEBUG oslo_vmware.api [None req-d0a24156-19b3-449c-ac6a-b0ac67fc425b tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Task: {'id': task-2088400, 'name': Rename_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2623.033056] env[63279]: DEBUG oslo_vmware.api [None req-d0a24156-19b3-449c-ac6a-b0ac67fc425b tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Task: {'id': task-2088400, 'name': Rename_Task, 'duration_secs': 0.13356} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2623.033056] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-d0a24156-19b3-449c-ac6a-b0ac67fc425b tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: 033a1d6d-db52-4902-8994-4d1537ab8658] Powering on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2623.033056] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d4d94691-4520-4987-87db-58190e0e296e {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2623.040732] env[63279]: DEBUG oslo_vmware.api [None req-d0a24156-19b3-449c-ac6a-b0ac67fc425b tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Waiting for the task: (returnval){ [ 2623.040732] env[63279]: value = "task-2088401" [ 2623.040732] env[63279]: _type = "Task" [ 2623.040732] env[63279]: } to complete. 
{{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2623.048798] env[63279]: DEBUG oslo_vmware.api [None req-d0a24156-19b3-449c-ac6a-b0ac67fc425b tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Task: {'id': task-2088401, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2623.440882] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2623.441081] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=63279) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11066}} [ 2623.551264] env[63279]: DEBUG oslo_vmware.api [None req-d0a24156-19b3-449c-ac6a-b0ac67fc425b tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Task: {'id': task-2088401, 'name': PowerOnVM_Task, 'duration_secs': 0.445221} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2623.551553] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-d0a24156-19b3-449c-ac6a-b0ac67fc425b tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: 033a1d6d-db52-4902-8994-4d1537ab8658] Powered on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2623.551763] env[63279]: INFO nova.compute.manager [None req-d0a24156-19b3-449c-ac6a-b0ac67fc425b tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: 033a1d6d-db52-4902-8994-4d1537ab8658] Took 8.96 seconds to spawn the instance on the hypervisor. [ 2623.551942] env[63279]: DEBUG nova.compute.manager [None req-d0a24156-19b3-449c-ac6a-b0ac67fc425b tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: 033a1d6d-db52-4902-8994-4d1537ab8658] Checking state {{(pid=63279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2623.552749] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02625c77-2fed-47b5-a847-b51a1b5688b5 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2624.070892] env[63279]: INFO nova.compute.manager [None req-d0a24156-19b3-449c-ac6a-b0ac67fc425b tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: 033a1d6d-db52-4902-8994-4d1537ab8658] Took 16.13 seconds to build instance. 
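Note on the spawn sequence above: instance 033a1d6d-db52-4902-8994-4d1537ab8658 is booted from volume by submitting a chain of vCenter tasks (task-2088396 CreateVM_Task, task-2088397 RelocateVM_Task, task-2088398/2088399 ReconfigVM_Task, task-2088400 Rename_Task, task-2088401 PowerOnVM_Task), each blocked on by oslo_vmware.api's wait_for_task/_poll_task loop, which produces the "progress is N%" and "completed successfully ... duration_secs" lines. The sketch below is a simplified, hypothetical illustration of that polling pattern, not the actual oslo.vmware implementation; poll_task() and the 0.5 s interval are stand-ins assumed for the example.

# Simplified sketch of the wait_for_task / _poll_task pattern seen above.
# NOT the oslo.vmware code: poll_task() is a hypothetical callable returning
# the current task info, and POLL_INTERVAL is an assumed default.
import time

POLL_INTERVAL = 0.5  # seconds (assumed; the real interval is configurable)


class TaskFailed(Exception):
    """Terminal error state reported by the (hypothetical) task API."""


def wait_for_task(poll_task, task_ref):
    """Poll task_ref until it reaches a terminal state.

    poll_task(task_ref) is assumed to return an object with .state
    ('queued' | 'running' | 'success' | 'error'), .progress and .error.
    Returns (task_info, duration_secs) on success.
    """
    start = time.monotonic()
    while True:
        info = poll_task(task_ref)
        if info.state == "success":
            # Corresponds to the "completed successfully" / duration_secs lines.
            return info, time.monotonic() - start
        if info.state == "error":
            raise TaskFailed(str(info.error))
        # Still queued/running: corresponds to the "progress is N%" lines.
        print(f"Task {task_ref}: {info.state}, progress {info.progress}%")
        time.sleep(POLL_INTERVAL)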
[ 2624.512183] env[63279]: DEBUG nova.compute.manager [req-b44be0c0-4a12-4dd6-94de-298faa52e28b req-f9fd3836-5ba5-4b43-9e78-6654e8f1b2b1 service nova] [instance: befc985f-68e2-4a04-8de0-9ca9bb3fa504] Received event network-changed-48a709b0-3a67-4115-a178-0872536d2417 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2624.512183] env[63279]: DEBUG nova.compute.manager [req-b44be0c0-4a12-4dd6-94de-298faa52e28b req-f9fd3836-5ba5-4b43-9e78-6654e8f1b2b1 service nova] [instance: befc985f-68e2-4a04-8de0-9ca9bb3fa504] Refreshing instance network info cache due to event network-changed-48a709b0-3a67-4115-a178-0872536d2417. {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11655}} [ 2624.512840] env[63279]: DEBUG oslo_concurrency.lockutils [req-b44be0c0-4a12-4dd6-94de-298faa52e28b req-f9fd3836-5ba5-4b43-9e78-6654e8f1b2b1 service nova] Acquiring lock "refresh_cache-befc985f-68e2-4a04-8de0-9ca9bb3fa504" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2624.513127] env[63279]: DEBUG oslo_concurrency.lockutils [req-b44be0c0-4a12-4dd6-94de-298faa52e28b req-f9fd3836-5ba5-4b43-9e78-6654e8f1b2b1 service nova] Acquired lock "refresh_cache-befc985f-68e2-4a04-8de0-9ca9bb3fa504" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2624.513438] env[63279]: DEBUG nova.network.neutron [req-b44be0c0-4a12-4dd6-94de-298faa52e28b req-f9fd3836-5ba5-4b43-9e78-6654e8f1b2b1 service nova] [instance: befc985f-68e2-4a04-8de0-9ca9bb3fa504] Refreshing network info cache for port 48a709b0-3a67-4115-a178-0872536d2417 {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2624.573910] env[63279]: DEBUG oslo_concurrency.lockutils [None req-d0a24156-19b3-449c-ac6a-b0ac67fc425b tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Lock "033a1d6d-db52-4902-8994-4d1537ab8658" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 17.644s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2625.441754] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2625.461206] env[63279]: DEBUG nova.network.neutron [req-b44be0c0-4a12-4dd6-94de-298faa52e28b req-f9fd3836-5ba5-4b43-9e78-6654e8f1b2b1 service nova] [instance: befc985f-68e2-4a04-8de0-9ca9bb3fa504] Updated VIF entry in instance network info cache for port 48a709b0-3a67-4115-a178-0872536d2417. 
{{(pid=63279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2625.461586] env[63279]: DEBUG nova.network.neutron [req-b44be0c0-4a12-4dd6-94de-298faa52e28b req-f9fd3836-5ba5-4b43-9e78-6654e8f1b2b1 service nova] [instance: befc985f-68e2-4a04-8de0-9ca9bb3fa504] Updating instance_info_cache with network_info: [{"id": "48a709b0-3a67-4115-a178-0872536d2417", "address": "fa:16:3e:75:52:17", "network": {"id": "1e35690a-f100-41b7-a461-7334966ef784", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1516696428-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f9427c264e8e41998f579af352cb48cf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "15922696-dc08-44ef-97be-0b09a9dfeae8", "external-id": "nsx-vlan-transportzone-791", "segmentation_id": 791, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap48a709b0-3a", "ovs_interfaceid": "48a709b0-3a67-4115-a178-0872536d2417", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2625.964716] env[63279]: DEBUG oslo_concurrency.lockutils [req-b44be0c0-4a12-4dd6-94de-298faa52e28b req-f9fd3836-5ba5-4b43-9e78-6654e8f1b2b1 service nova] Releasing lock "refresh_cache-befc985f-68e2-4a04-8de0-9ca9bb3fa504" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2626.125156] env[63279]: DEBUG nova.compute.manager [None req-13f38629-fd42-46a6-8f9a-9cb01db9c5a8 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: 033a1d6d-db52-4902-8994-4d1537ab8658] Stashing vm_state: active {{(pid=63279) _prep_resize /opt/stack/nova/nova/compute/manager.py:6136}} [ 2626.535949] env[63279]: DEBUG nova.compute.manager [req-0c302e59-107f-4245-8bc1-1ebb668cdf81 req-0eafeffe-b495-4b81-a462-d175671a683e service nova] [instance: 033a1d6d-db52-4902-8994-4d1537ab8658] Received event network-changed-75bab2c4-7019-4de5-a02b-1b81e9419409 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2626.536181] env[63279]: DEBUG nova.compute.manager [req-0c302e59-107f-4245-8bc1-1ebb668cdf81 req-0eafeffe-b495-4b81-a462-d175671a683e service nova] [instance: 033a1d6d-db52-4902-8994-4d1537ab8658] Refreshing instance network info cache due to event network-changed-75bab2c4-7019-4de5-a02b-1b81e9419409. 
{{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11655}} [ 2626.536388] env[63279]: DEBUG oslo_concurrency.lockutils [req-0c302e59-107f-4245-8bc1-1ebb668cdf81 req-0eafeffe-b495-4b81-a462-d175671a683e service nova] Acquiring lock "refresh_cache-033a1d6d-db52-4902-8994-4d1537ab8658" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2626.536545] env[63279]: DEBUG oslo_concurrency.lockutils [req-0c302e59-107f-4245-8bc1-1ebb668cdf81 req-0eafeffe-b495-4b81-a462-d175671a683e service nova] Acquired lock "refresh_cache-033a1d6d-db52-4902-8994-4d1537ab8658" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2626.536709] env[63279]: DEBUG nova.network.neutron [req-0c302e59-107f-4245-8bc1-1ebb668cdf81 req-0eafeffe-b495-4b81-a462-d175671a683e service nova] [instance: 033a1d6d-db52-4902-8994-4d1537ab8658] Refreshing network info cache for port 75bab2c4-7019-4de5-a02b-1b81e9419409 {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2626.644542] env[63279]: DEBUG oslo_concurrency.lockutils [None req-13f38629-fd42-46a6-8f9a-9cb01db9c5a8 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2626.644818] env[63279]: DEBUG oslo_concurrency.lockutils [None req-13f38629-fd42-46a6-8f9a-9cb01db9c5a8 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2627.149921] env[63279]: INFO nova.compute.claims [None req-13f38629-fd42-46a6-8f9a-9cb01db9c5a8 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: 033a1d6d-db52-4902-8994-4d1537ab8658] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2627.264357] env[63279]: DEBUG nova.network.neutron [req-0c302e59-107f-4245-8bc1-1ebb668cdf81 req-0eafeffe-b495-4b81-a462-d175671a683e service nova] [instance: 033a1d6d-db52-4902-8994-4d1537ab8658] Updated VIF entry in instance network info cache for port 75bab2c4-7019-4de5-a02b-1b81e9419409. 
{{(pid=63279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2627.264750] env[63279]: DEBUG nova.network.neutron [req-0c302e59-107f-4245-8bc1-1ebb668cdf81 req-0eafeffe-b495-4b81-a462-d175671a683e service nova] [instance: 033a1d6d-db52-4902-8994-4d1537ab8658] Updating instance_info_cache with network_info: [{"id": "75bab2c4-7019-4de5-a02b-1b81e9419409", "address": "fa:16:3e:33:b2:0d", "network": {"id": "1e35690a-f100-41b7-a461-7334966ef784", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1516696428-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.189", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f9427c264e8e41998f579af352cb48cf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "15922696-dc08-44ef-97be-0b09a9dfeae8", "external-id": "nsx-vlan-transportzone-791", "segmentation_id": 791, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap75bab2c4-70", "ovs_interfaceid": "75bab2c4-7019-4de5-a02b-1b81e9419409", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2627.658014] env[63279]: INFO nova.compute.resource_tracker [None req-13f38629-fd42-46a6-8f9a-9cb01db9c5a8 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: 033a1d6d-db52-4902-8994-4d1537ab8658] Updating resource usage from migration ac3aa168-1428-4675-89a9-7eb051bc8509 [ 2627.736470] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5aeb4ae-119b-4f5a-9d79-1d3d37cf2bad {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2627.744346] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1cef2b1-c669-4001-b3e6-67eeba8bd514 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2628.421091] env[63279]: DEBUG oslo_concurrency.lockutils [req-0c302e59-107f-4245-8bc1-1ebb668cdf81 req-0eafeffe-b495-4b81-a462-d175671a683e service nova] Releasing lock "refresh_cache-033a1d6d-db52-4902-8994-4d1537ab8658" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2628.422557] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2402e23-9e98-4ccc-8793-1c02eedbf80b {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2628.430925] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1bf68abb-9f38-4d06-809b-380fc0965556 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2628.444710] env[63279]: DEBUG nova.compute.provider_tree [None req-13f38629-fd42-46a6-8f9a-9cb01db9c5a8 
tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Inventory has not changed in ProviderTree for provider: 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2628.948341] env[63279]: DEBUG nova.scheduler.client.report [None req-13f38629-fd42-46a6-8f9a-9cb01db9c5a8 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Inventory has not changed for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2629.436868] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2629.440478] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2629.440639] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Starting heal instance info cache {{(pid=63279) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10447}} [ 2629.440758] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Rebuilding the list of instances to heal {{(pid=63279) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10451}} [ 2629.453919] env[63279]: DEBUG oslo_concurrency.lockutils [None req-13f38629-fd42-46a6-8f9a-9cb01db9c5a8 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.809s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2629.454154] env[63279]: INFO nova.compute.manager [None req-13f38629-fd42-46a6-8f9a-9cb01db9c5a8 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: 033a1d6d-db52-4902-8994-4d1537ab8658] Migrating [ 2629.968872] env[63279]: DEBUG oslo_concurrency.lockutils [None req-13f38629-fd42-46a6-8f9a-9cb01db9c5a8 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Acquiring lock "refresh_cache-033a1d6d-db52-4902-8994-4d1537ab8658" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2629.969294] env[63279]: DEBUG oslo_concurrency.lockutils [None req-13f38629-fd42-46a6-8f9a-9cb01db9c5a8 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Acquired lock "refresh_cache-033a1d6d-db52-4902-8994-4d1537ab8658" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 
2629.969294] env[63279]: DEBUG nova.network.neutron [None req-13f38629-fd42-46a6-8f9a-9cb01db9c5a8 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: 033a1d6d-db52-4902-8994-4d1537ab8658] Building network info cache for instance {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2629.972111] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Acquiring lock "refresh_cache-befc985f-68e2-4a04-8de0-9ca9bb3fa504" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2629.972254] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Acquired lock "refresh_cache-befc985f-68e2-4a04-8de0-9ca9bb3fa504" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2629.972392] env[63279]: DEBUG nova.network.neutron [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] [instance: befc985f-68e2-4a04-8de0-9ca9bb3fa504] Forcefully refreshing network info cache for instance {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}} [ 2629.972541] env[63279]: DEBUG nova.objects.instance [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Lazy-loading 'info_cache' on Instance uuid befc985f-68e2-4a04-8de0-9ca9bb3fa504 {{(pid=63279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2630.673367] env[63279]: DEBUG nova.network.neutron [None req-13f38629-fd42-46a6-8f9a-9cb01db9c5a8 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: 033a1d6d-db52-4902-8994-4d1537ab8658] Updating instance_info_cache with network_info: [{"id": "75bab2c4-7019-4de5-a02b-1b81e9419409", "address": "fa:16:3e:33:b2:0d", "network": {"id": "1e35690a-f100-41b7-a461-7334966ef784", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1516696428-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.189", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f9427c264e8e41998f579af352cb48cf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "15922696-dc08-44ef-97be-0b09a9dfeae8", "external-id": "nsx-vlan-transportzone-791", "segmentation_id": 791, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap75bab2c4-70", "ovs_interfaceid": "75bab2c4-7019-4de5-a02b-1b81e9419409", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2631.175834] env[63279]: DEBUG oslo_concurrency.lockutils [None req-13f38629-fd42-46a6-8f9a-9cb01db9c5a8 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Releasing lock "refresh_cache-033a1d6d-db52-4902-8994-4d1537ab8658" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 
2631.704815] env[63279]: DEBUG nova.network.neutron [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] [instance: befc985f-68e2-4a04-8de0-9ca9bb3fa504] Updating instance_info_cache with network_info: [{"id": "48a709b0-3a67-4115-a178-0872536d2417", "address": "fa:16:3e:75:52:17", "network": {"id": "1e35690a-f100-41b7-a461-7334966ef784", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1516696428-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f9427c264e8e41998f579af352cb48cf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "15922696-dc08-44ef-97be-0b09a9dfeae8", "external-id": "nsx-vlan-transportzone-791", "segmentation_id": 791, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap48a709b0-3a", "ovs_interfaceid": "48a709b0-3a67-4115-a178-0872536d2417", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2632.207561] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Releasing lock "refresh_cache-befc985f-68e2-4a04-8de0-9ca9bb3fa504" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2632.207917] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] [instance: befc985f-68e2-4a04-8de0-9ca9bb3fa504] Updated the network info_cache for instance {{(pid=63279) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10518}} [ 2632.207954] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2632.208122] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2632.208291] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager.update_available_resource {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2632.690930] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3e48a7e-9fb9-468b-bcda-85503bdc1ae2 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2632.711478] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=63279) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2632.711723] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2632.711903] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2632.712077] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=63279) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2632.712457] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-13f38629-fd42-46a6-8f9a-9cb01db9c5a8 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: 033a1d6d-db52-4902-8994-4d1537ab8658] Updating instance '033a1d6d-db52-4902-8994-4d1537ab8658' progress to 0 {{(pid=63279) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2632.716312] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76ee44d2-dbb6-4f5b-a5f5-588877d046a3 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2632.726122] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82c1451f-a4eb-4739-b7b0-57c8eda25df5 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2632.740874] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a98516f-4712-40b6-bb55-bc87087f7d43 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2632.748397] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a30b37b3-c226-475b-aec4-0308d39a94be {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2632.787813] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180758MB free_disk=170GB free_vcpus=48 pci_devices=None {{(pid=63279) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2632.787979] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2632.788193] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Lock "compute_resources" acquired by 
"nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2633.220926] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-13f38629-fd42-46a6-8f9a-9cb01db9c5a8 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: 033a1d6d-db52-4902-8994-4d1537ab8658] Powering off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2633.221281] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a4bb3ee8-999e-4217-8ea8-96e6dff7aace {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2633.229281] env[63279]: DEBUG oslo_vmware.api [None req-13f38629-fd42-46a6-8f9a-9cb01db9c5a8 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Waiting for the task: (returnval){ [ 2633.229281] env[63279]: value = "task-2088402" [ 2633.229281] env[63279]: _type = "Task" [ 2633.229281] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2633.237639] env[63279]: DEBUG oslo_vmware.api [None req-13f38629-fd42-46a6-8f9a-9cb01db9c5a8 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Task: {'id': task-2088402, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2633.739624] env[63279]: DEBUG oslo_vmware.api [None req-13f38629-fd42-46a6-8f9a-9cb01db9c5a8 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Task: {'id': task-2088402, 'name': PowerOffVM_Task, 'duration_secs': 0.157935} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2633.739624] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-13f38629-fd42-46a6-8f9a-9cb01db9c5a8 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: 033a1d6d-db52-4902-8994-4d1537ab8658] Powered off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2633.739624] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-13f38629-fd42-46a6-8f9a-9cb01db9c5a8 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: 033a1d6d-db52-4902-8994-4d1537ab8658] Updating instance '033a1d6d-db52-4902-8994-4d1537ab8658' progress to 17 {{(pid=63279) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2633.797524] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Applying migration context for instance 033a1d6d-db52-4902-8994-4d1537ab8658 as it has an incoming, in-progress migration ac3aa168-1428-4675-89a9-7eb051bc8509. 
Migration status is migrating {{(pid=63279) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1016}} [ 2633.798133] env[63279]: INFO nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] [instance: 033a1d6d-db52-4902-8994-4d1537ab8658] Updating resource usage from migration ac3aa168-1428-4675-89a9-7eb051bc8509 [ 2633.815198] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Instance befc985f-68e2-4a04-8de0-9ca9bb3fa504 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2633.815337] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Instance 30ac4320-5ee0-424b-9bbb-e2d53277be80 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2633.815459] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Instance cc0d3870-41fa-4cd4-a16d-e52e705f29a3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2633.815578] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Instance 70f2ea82-1b68-478e-8195-f9754eb051ae actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2633.815717] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Migration ac3aa168-1428-4675-89a9-7eb051bc8509 is active on this compute host and has allocations in placement: {'resources': {'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1712}} [ 2633.815853] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Instance 033a1d6d-db52-4902-8994-4d1537ab8658 actively managed on this compute host and has allocations in placement: {'resources': {'MEMORY_MB': 256, 'VCPU': 1}}. 
{{(pid=63279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2633.816050] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Total usable vcpus: 48, total allocated vcpus: 6 {{(pid=63279) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2633.816192] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1728MB phys_disk=200GB used_disk=4GB total_vcpus=48 used_vcpus=6 pci_stats=[] {{(pid=63279) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2633.889493] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa20ef61-297a-4363-a272-4bb9aab49896 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2633.897502] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0369c9d8-c8bb-4d3a-8f23-ca1c58124bcc {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2633.927419] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3fa2419b-4397-4eff-a248-8ef18ee54f1e {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2633.934512] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e27428f4-2289-433f-8fac-cb539a3160e2 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2633.948218] env[63279]: DEBUG nova.compute.provider_tree [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Inventory has not changed in ProviderTree for provider: 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2634.245973] env[63279]: DEBUG nova.virt.hardware [None req-13f38629-fd42-46a6-8f9a-9cb01db9c5a8 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-13T17:30:21Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2634.246333] env[63279]: DEBUG nova.virt.hardware [None req-13f38629-fd42-46a6-8f9a-9cb01db9c5a8 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Flavor limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2634.246333] env[63279]: DEBUG nova.virt.hardware [None req-13f38629-fd42-46a6-8f9a-9cb01db9c5a8 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Image limits 0:0:0 
{{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2634.246509] env[63279]: DEBUG nova.virt.hardware [None req-13f38629-fd42-46a6-8f9a-9cb01db9c5a8 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Flavor pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2634.246662] env[63279]: DEBUG nova.virt.hardware [None req-13f38629-fd42-46a6-8f9a-9cb01db9c5a8 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Image pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2634.246815] env[63279]: DEBUG nova.virt.hardware [None req-13f38629-fd42-46a6-8f9a-9cb01db9c5a8 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2634.247035] env[63279]: DEBUG nova.virt.hardware [None req-13f38629-fd42-46a6-8f9a-9cb01db9c5a8 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 2634.247205] env[63279]: DEBUG nova.virt.hardware [None req-13f38629-fd42-46a6-8f9a-9cb01db9c5a8 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 2634.247379] env[63279]: DEBUG nova.virt.hardware [None req-13f38629-fd42-46a6-8f9a-9cb01db9c5a8 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Got 1 possible topologies {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2634.247548] env[63279]: DEBUG nova.virt.hardware [None req-13f38629-fd42-46a6-8f9a-9cb01db9c5a8 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2634.247724] env[63279]: DEBUG nova.virt.hardware [None req-13f38629-fd42-46a6-8f9a-9cb01db9c5a8 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2634.252896] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ec21cfaa-9069-42f8-a773-70a274dc6502 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2634.270155] env[63279]: DEBUG oslo_vmware.api [None req-13f38629-fd42-46a6-8f9a-9cb01db9c5a8 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Waiting for the task: (returnval){ [ 2634.270155] env[63279]: value = "task-2088403" [ 2634.270155] env[63279]: _type = "Task" [ 2634.270155] env[63279]: } to complete. 
{{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2634.278354] env[63279]: DEBUG oslo_vmware.api [None req-13f38629-fd42-46a6-8f9a-9cb01db9c5a8 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Task: {'id': task-2088403, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2634.451726] env[63279]: DEBUG nova.scheduler.client.report [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Inventory has not changed for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2634.781281] env[63279]: DEBUG oslo_vmware.api [None req-13f38629-fd42-46a6-8f9a-9cb01db9c5a8 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Task: {'id': task-2088403, 'name': ReconfigVM_Task, 'duration_secs': 0.154971} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2634.781615] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-13f38629-fd42-46a6-8f9a-9cb01db9c5a8 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: 033a1d6d-db52-4902-8994-4d1537ab8658] Updating instance '033a1d6d-db52-4902-8994-4d1537ab8658' progress to 33 {{(pid=63279) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2634.956995] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=63279) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2634.957162] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.169s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2635.287653] env[63279]: DEBUG nova.virt.hardware [None req-13f38629-fd42-46a6-8f9a-9cb01db9c5a8 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-13T17:30:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2635.288082] env[63279]: DEBUG nova.virt.hardware [None 
req-13f38629-fd42-46a6-8f9a-9cb01db9c5a8 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Flavor limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2635.288082] env[63279]: DEBUG nova.virt.hardware [None req-13f38629-fd42-46a6-8f9a-9cb01db9c5a8 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Image limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2635.288254] env[63279]: DEBUG nova.virt.hardware [None req-13f38629-fd42-46a6-8f9a-9cb01db9c5a8 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Flavor pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2635.288404] env[63279]: DEBUG nova.virt.hardware [None req-13f38629-fd42-46a6-8f9a-9cb01db9c5a8 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Image pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2635.288641] env[63279]: DEBUG nova.virt.hardware [None req-13f38629-fd42-46a6-8f9a-9cb01db9c5a8 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2635.288949] env[63279]: DEBUG nova.virt.hardware [None req-13f38629-fd42-46a6-8f9a-9cb01db9c5a8 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 2635.289144] env[63279]: DEBUG nova.virt.hardware [None req-13f38629-fd42-46a6-8f9a-9cb01db9c5a8 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 2635.289331] env[63279]: DEBUG nova.virt.hardware [None req-13f38629-fd42-46a6-8f9a-9cb01db9c5a8 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Got 1 possible topologies {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2635.289497] env[63279]: DEBUG nova.virt.hardware [None req-13f38629-fd42-46a6-8f9a-9cb01db9c5a8 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2635.289731] env[63279]: DEBUG nova.virt.hardware [None req-13f38629-fd42-46a6-8f9a-9cb01db9c5a8 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2635.295115] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-13f38629-fd42-46a6-8f9a-9cb01db9c5a8 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: 
033a1d6d-db52-4902-8994-4d1537ab8658] Reconfiguring VM instance instance-0000007b to detach disk 2000 {{(pid=63279) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 2635.295413] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d8219891-981b-46ac-b627-aa8a90bdcd59 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2635.314999] env[63279]: DEBUG oslo_vmware.api [None req-13f38629-fd42-46a6-8f9a-9cb01db9c5a8 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Waiting for the task: (returnval){ [ 2635.314999] env[63279]: value = "task-2088404" [ 2635.314999] env[63279]: _type = "Task" [ 2635.314999] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2635.323111] env[63279]: DEBUG oslo_vmware.api [None req-13f38629-fd42-46a6-8f9a-9cb01db9c5a8 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Task: {'id': task-2088404, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2635.825958] env[63279]: DEBUG oslo_vmware.api [None req-13f38629-fd42-46a6-8f9a-9cb01db9c5a8 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Task: {'id': task-2088404, 'name': ReconfigVM_Task, 'duration_secs': 0.150321} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2635.826251] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-13f38629-fd42-46a6-8f9a-9cb01db9c5a8 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: 033a1d6d-db52-4902-8994-4d1537ab8658] Reconfigured VM instance instance-0000007b to detach disk 2000 {{(pid=63279) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 2635.827020] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1886e2b5-4f30-483b-890f-592fd3fd1310 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2635.848529] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-13f38629-fd42-46a6-8f9a-9cb01db9c5a8 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: 033a1d6d-db52-4902-8994-4d1537ab8658] Reconfiguring VM instance instance-0000007b to attach disk [datastore1] volume-b193588b-4d25-4ebf-8400-07b3cc22c215/volume-b193588b-4d25-4ebf-8400-07b3cc22c215.vmdk or device None with type thin {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2635.848759] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-51e78557-947b-41a6-bf44-2dde28708509 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2635.866894] env[63279]: DEBUG oslo_vmware.api [None req-13f38629-fd42-46a6-8f9a-9cb01db9c5a8 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Waiting for the task: (returnval){ [ 2635.866894] env[63279]: value = "task-2088405" [ 2635.866894] env[63279]: _type = "Task" [ 2635.866894] env[63279]: } to complete. 
{{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2635.874635] env[63279]: DEBUG oslo_vmware.api [None req-13f38629-fd42-46a6-8f9a-9cb01db9c5a8 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Task: {'id': task-2088405, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2636.376531] env[63279]: DEBUG oslo_vmware.api [None req-13f38629-fd42-46a6-8f9a-9cb01db9c5a8 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Task: {'id': task-2088405, 'name': ReconfigVM_Task, 'duration_secs': 0.246719} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2636.376856] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-13f38629-fd42-46a6-8f9a-9cb01db9c5a8 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: 033a1d6d-db52-4902-8994-4d1537ab8658] Reconfigured VM instance instance-0000007b to attach disk [datastore1] volume-b193588b-4d25-4ebf-8400-07b3cc22c215/volume-b193588b-4d25-4ebf-8400-07b3cc22c215.vmdk or device None with type thin {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2636.377094] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-13f38629-fd42-46a6-8f9a-9cb01db9c5a8 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: 033a1d6d-db52-4902-8994-4d1537ab8658] Updating instance '033a1d6d-db52-4902-8994-4d1537ab8658' progress to 50 {{(pid=63279) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2636.883204] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9238c314-0bda-4571-a096-8e371aeec901 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2636.903722] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a28340b-fd62-472a-8678-2dc22373131e {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2636.921170] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-13f38629-fd42-46a6-8f9a-9cb01db9c5a8 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: 033a1d6d-db52-4902-8994-4d1537ab8658] Updating instance '033a1d6d-db52-4902-8994-4d1537ab8658' progress to 67 {{(pid=63279) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2637.189581] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2637.189835] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2638.564926] env[63279]: DEBUG nova.network.neutron [None req-13f38629-fd42-46a6-8f9a-9cb01db9c5a8 tempest-ServerActionsTestOtherA-605125166 
tempest-ServerActionsTestOtherA-605125166-project-member] [instance: 033a1d6d-db52-4902-8994-4d1537ab8658] Port 75bab2c4-7019-4de5-a02b-1b81e9419409 binding to destination host cpu-1 is already ACTIVE {{(pid=63279) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 2639.585607] env[63279]: DEBUG oslo_concurrency.lockutils [None req-13f38629-fd42-46a6-8f9a-9cb01db9c5a8 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Acquiring lock "033a1d6d-db52-4902-8994-4d1537ab8658-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2639.585943] env[63279]: DEBUG oslo_concurrency.lockutils [None req-13f38629-fd42-46a6-8f9a-9cb01db9c5a8 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Lock "033a1d6d-db52-4902-8994-4d1537ab8658-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2639.586013] env[63279]: DEBUG oslo_concurrency.lockutils [None req-13f38629-fd42-46a6-8f9a-9cb01db9c5a8 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Lock "033a1d6d-db52-4902-8994-4d1537ab8658-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2640.623897] env[63279]: DEBUG oslo_concurrency.lockutils [None req-13f38629-fd42-46a6-8f9a-9cb01db9c5a8 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Acquiring lock "refresh_cache-033a1d6d-db52-4902-8994-4d1537ab8658" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2640.624167] env[63279]: DEBUG oslo_concurrency.lockutils [None req-13f38629-fd42-46a6-8f9a-9cb01db9c5a8 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Acquired lock "refresh_cache-033a1d6d-db52-4902-8994-4d1537ab8658" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2640.624281] env[63279]: DEBUG nova.network.neutron [None req-13f38629-fd42-46a6-8f9a-9cb01db9c5a8 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: 033a1d6d-db52-4902-8994-4d1537ab8658] Building network info cache for instance {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2641.332186] env[63279]: DEBUG nova.network.neutron [None req-13f38629-fd42-46a6-8f9a-9cb01db9c5a8 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: 033a1d6d-db52-4902-8994-4d1537ab8658] Updating instance_info_cache with network_info: [{"id": "75bab2c4-7019-4de5-a02b-1b81e9419409", "address": "fa:16:3e:33:b2:0d", "network": {"id": "1e35690a-f100-41b7-a461-7334966ef784", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1516696428-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, 
"floating_ips": [{"address": "10.180.180.189", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f9427c264e8e41998f579af352cb48cf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "15922696-dc08-44ef-97be-0b09a9dfeae8", "external-id": "nsx-vlan-transportzone-791", "segmentation_id": 791, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap75bab2c4-70", "ovs_interfaceid": "75bab2c4-7019-4de5-a02b-1b81e9419409", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2641.835703] env[63279]: DEBUG oslo_concurrency.lockutils [None req-13f38629-fd42-46a6-8f9a-9cb01db9c5a8 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Releasing lock "refresh_cache-033a1d6d-db52-4902-8994-4d1537ab8658" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2642.346214] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74bf0bee-0c29-4d9f-b24d-eeca60b74817 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2642.353357] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b8251fc-8627-459e-a360-9bb86392ce95 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2642.436471] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2643.450224] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6644c651-bdf0-4035-81cf-3f5c82415eeb {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2643.469235] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a177a09a-23c0-4e38-88a4-b659ee15c659 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2643.475939] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-13f38629-fd42-46a6-8f9a-9cb01db9c5a8 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: 033a1d6d-db52-4902-8994-4d1537ab8658] Updating instance '033a1d6d-db52-4902-8994-4d1537ab8658' progress to 83 {{(pid=63279) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2643.982510] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-13f38629-fd42-46a6-8f9a-9cb01db9c5a8 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: 033a1d6d-db52-4902-8994-4d1537ab8658] Powering on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2643.982882] env[63279]: DEBUG oslo_vmware.service [-] Invoking 
VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4d70c29b-6298-4a9c-b2ab-777fb832dc1d {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2643.991347] env[63279]: DEBUG oslo_vmware.api [None req-13f38629-fd42-46a6-8f9a-9cb01db9c5a8 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Waiting for the task: (returnval){ [ 2643.991347] env[63279]: value = "task-2088406" [ 2643.991347] env[63279]: _type = "Task" [ 2643.991347] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2643.999134] env[63279]: DEBUG oslo_vmware.api [None req-13f38629-fd42-46a6-8f9a-9cb01db9c5a8 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Task: {'id': task-2088406, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2644.501588] env[63279]: DEBUG oslo_vmware.api [None req-13f38629-fd42-46a6-8f9a-9cb01db9c5a8 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Task: {'id': task-2088406, 'name': PowerOnVM_Task, 'duration_secs': 0.351608} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2644.501944] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-13f38629-fd42-46a6-8f9a-9cb01db9c5a8 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: 033a1d6d-db52-4902-8994-4d1537ab8658] Powered on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2644.502083] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-13f38629-fd42-46a6-8f9a-9cb01db9c5a8 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: 033a1d6d-db52-4902-8994-4d1537ab8658] Updating instance '033a1d6d-db52-4902-8994-4d1537ab8658' progress to 100 {{(pid=63279) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2646.622096] env[63279]: DEBUG oslo_concurrency.lockutils [None req-11350076-19df-49a9-a8f4-1286ef93cb17 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Acquiring lock "033a1d6d-db52-4902-8994-4d1537ab8658" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2646.622494] env[63279]: DEBUG oslo_concurrency.lockutils [None req-11350076-19df-49a9-a8f4-1286ef93cb17 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Lock "033a1d6d-db52-4902-8994-4d1537ab8658" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.001s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2646.622620] env[63279]: DEBUG nova.compute.manager [None req-11350076-19df-49a9-a8f4-1286ef93cb17 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: 033a1d6d-db52-4902-8994-4d1537ab8658] Going to confirm migration 10 {{(pid=63279) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:5250}} [ 2647.160380] env[63279]: DEBUG oslo_concurrency.lockutils [None 
req-11350076-19df-49a9-a8f4-1286ef93cb17 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Acquiring lock "refresh_cache-033a1d6d-db52-4902-8994-4d1537ab8658" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2647.160583] env[63279]: DEBUG oslo_concurrency.lockutils [None req-11350076-19df-49a9-a8f4-1286ef93cb17 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Acquired lock "refresh_cache-033a1d6d-db52-4902-8994-4d1537ab8658" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2647.160772] env[63279]: DEBUG nova.network.neutron [None req-11350076-19df-49a9-a8f4-1286ef93cb17 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: 033a1d6d-db52-4902-8994-4d1537ab8658] Building network info cache for instance {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2647.160964] env[63279]: DEBUG nova.objects.instance [None req-11350076-19df-49a9-a8f4-1286ef93cb17 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Lazy-loading 'info_cache' on Instance uuid 033a1d6d-db52-4902-8994-4d1537ab8658 {{(pid=63279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2648.383450] env[63279]: DEBUG nova.network.neutron [None req-11350076-19df-49a9-a8f4-1286ef93cb17 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: 033a1d6d-db52-4902-8994-4d1537ab8658] Updating instance_info_cache with network_info: [{"id": "75bab2c4-7019-4de5-a02b-1b81e9419409", "address": "fa:16:3e:33:b2:0d", "network": {"id": "1e35690a-f100-41b7-a461-7334966ef784", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1516696428-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.189", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f9427c264e8e41998f579af352cb48cf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "15922696-dc08-44ef-97be-0b09a9dfeae8", "external-id": "nsx-vlan-transportzone-791", "segmentation_id": 791, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap75bab2c4-70", "ovs_interfaceid": "75bab2c4-7019-4de5-a02b-1b81e9419409", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2648.886523] env[63279]: DEBUG oslo_concurrency.lockutils [None req-11350076-19df-49a9-a8f4-1286ef93cb17 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Releasing lock "refresh_cache-033a1d6d-db52-4902-8994-4d1537ab8658" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2648.886804] env[63279]: DEBUG nova.objects.instance [None 
req-11350076-19df-49a9-a8f4-1286ef93cb17 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Lazy-loading 'migration_context' on Instance uuid 033a1d6d-db52-4902-8994-4d1537ab8658 {{(pid=63279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2649.389508] env[63279]: DEBUG nova.objects.base [None req-11350076-19df-49a9-a8f4-1286ef93cb17 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Object Instance<033a1d6d-db52-4902-8994-4d1537ab8658> lazy-loaded attributes: info_cache,migration_context {{(pid=63279) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 2649.390525] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8eecc103-fe47-4b85-b568-77b73c5dfa2b {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2649.410634] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4f6f6e3a-79df-4482-8518-45e14569a094 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2649.415776] env[63279]: DEBUG oslo_vmware.api [None req-11350076-19df-49a9-a8f4-1286ef93cb17 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Waiting for the task: (returnval){ [ 2649.415776] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52a830ee-4193-c281-5373-4e964dc2f7be" [ 2649.415776] env[63279]: _type = "Task" [ 2649.415776] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2649.423215] env[63279]: DEBUG oslo_vmware.api [None req-11350076-19df-49a9-a8f4-1286ef93cb17 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52a830ee-4193-c281-5373-4e964dc2f7be, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2649.617136] env[63279]: DEBUG oslo_concurrency.lockutils [None req-bc92c1ec-5f29-46e9-b5b4-d86ff8ffe930 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Acquiring lock "70f2ea82-1b68-478e-8195-f9754eb051ae" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2649.617420] env[63279]: DEBUG oslo_concurrency.lockutils [None req-bc92c1ec-5f29-46e9-b5b4-d86ff8ffe930 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Lock "70f2ea82-1b68-478e-8195-f9754eb051ae" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2649.617639] env[63279]: DEBUG oslo_concurrency.lockutils [None req-bc92c1ec-5f29-46e9-b5b4-d86ff8ffe930 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Acquiring lock "70f2ea82-1b68-478e-8195-f9754eb051ae-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2649.617836] env[63279]: DEBUG oslo_concurrency.lockutils [None req-bc92c1ec-5f29-46e9-b5b4-d86ff8ffe930 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Lock "70f2ea82-1b68-478e-8195-f9754eb051ae-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2649.618023] env[63279]: DEBUG oslo_concurrency.lockutils [None req-bc92c1ec-5f29-46e9-b5b4-d86ff8ffe930 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Lock "70f2ea82-1b68-478e-8195-f9754eb051ae-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2649.620156] env[63279]: INFO nova.compute.manager [None req-bc92c1ec-5f29-46e9-b5b4-d86ff8ffe930 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] [instance: 70f2ea82-1b68-478e-8195-f9754eb051ae] Terminating instance [ 2649.925579] env[63279]: DEBUG oslo_vmware.api [None req-11350076-19df-49a9-a8f4-1286ef93cb17 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52a830ee-4193-c281-5373-4e964dc2f7be, 'name': SearchDatastore_Task, 'duration_secs': 0.008513} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2649.925874] env[63279]: DEBUG oslo_concurrency.lockutils [None req-11350076-19df-49a9-a8f4-1286ef93cb17 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2649.926135] env[63279]: DEBUG oslo_concurrency.lockutils [None req-11350076-19df-49a9-a8f4-1286ef93cb17 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2650.124319] env[63279]: DEBUG nova.compute.manager [None req-bc92c1ec-5f29-46e9-b5b4-d86ff8ffe930 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] [instance: 70f2ea82-1b68-478e-8195-f9754eb051ae] Start destroying the instance on the hypervisor. {{(pid=63279) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2650.124537] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-bc92c1ec-5f29-46e9-b5b4-d86ff8ffe930 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] [instance: 70f2ea82-1b68-478e-8195-f9754eb051ae] Destroying instance {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2650.125448] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dfa0610a-4497-455f-a5df-a76c7ec5c326 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2650.133158] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-bc92c1ec-5f29-46e9-b5b4-d86ff8ffe930 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] [instance: 70f2ea82-1b68-478e-8195-f9754eb051ae] Powering off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2650.133393] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ada38eaf-f8bb-4e38-b69b-2eadee1a0c90 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2650.139449] env[63279]: DEBUG oslo_vmware.api [None req-bc92c1ec-5f29-46e9-b5b4-d86ff8ffe930 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Waiting for the task: (returnval){ [ 2650.139449] env[63279]: value = "task-2088407" [ 2650.139449] env[63279]: _type = "Task" [ 2650.139449] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2650.147463] env[63279]: DEBUG oslo_vmware.api [None req-bc92c1ec-5f29-46e9-b5b4-d86ff8ffe930 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Task: {'id': task-2088407, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2650.504784] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ccbc556-82a7-4c3f-a456-cf300bb8adf2 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2650.512185] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7893e05-84d8-4cd1-ab0a-4d687f38aba7 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2650.540840] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd54bb63-b173-4b44-9736-89353a7a2671 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2650.547378] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d72958ec-2fdc-44ee-a570-ce074b1c7914 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2650.560864] env[63279]: DEBUG nova.compute.provider_tree [None req-11350076-19df-49a9-a8f4-1286ef93cb17 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Inventory has not changed in ProviderTree for provider: 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2650.648612] env[63279]: DEBUG oslo_vmware.api [None req-bc92c1ec-5f29-46e9-b5b4-d86ff8ffe930 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Task: {'id': task-2088407, 'name': PowerOffVM_Task, 'duration_secs': 0.2009} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2650.648879] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-bc92c1ec-5f29-46e9-b5b4-d86ff8ffe930 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] [instance: 70f2ea82-1b68-478e-8195-f9754eb051ae] Powered off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2650.649064] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-bc92c1ec-5f29-46e9-b5b4-d86ff8ffe930 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] [instance: 70f2ea82-1b68-478e-8195-f9754eb051ae] Unregistering the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2650.649302] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-09658702-857c-45ad-8d21-8564d4addfc8 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2651.064385] env[63279]: DEBUG nova.scheduler.client.report [None req-11350076-19df-49a9-a8f4-1286ef93cb17 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Inventory has not changed for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2652.076097] env[63279]: DEBUG oslo_concurrency.lockutils [None req-11350076-19df-49a9-a8f4-1286ef93cb17 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.150s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2652.629195] env[63279]: INFO nova.scheduler.client.report [None req-11350076-19df-49a9-a8f4-1286ef93cb17 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Deleted allocation for migration ac3aa168-1428-4675-89a9-7eb051bc8509 [ 2652.970717] env[63279]: INFO nova.compute.manager [None req-02e1cd77-4385-4a30-8e1c-fdb627661040 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: 033a1d6d-db52-4902-8994-4d1537ab8658] Get console output [ 2652.970938] env[63279]: WARNING nova.virt.vmwareapi.driver [None req-02e1cd77-4385-4a30-8e1c-fdb627661040 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: 033a1d6d-db52-4902-8994-4d1537ab8658] The console log is missing. 
Check your VSPC configuration [ 2653.134707] env[63279]: DEBUG oslo_concurrency.lockutils [None req-11350076-19df-49a9-a8f4-1286ef93cb17 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Lock "033a1d6d-db52-4902-8994-4d1537ab8658" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 6.512s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2654.955129] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-bc92c1ec-5f29-46e9-b5b4-d86ff8ffe930 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] [instance: 70f2ea82-1b68-478e-8195-f9754eb051ae] Unregistered the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2654.955515] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-bc92c1ec-5f29-46e9-b5b4-d86ff8ffe930 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] [instance: 70f2ea82-1b68-478e-8195-f9754eb051ae] Deleting contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2654.955515] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-bc92c1ec-5f29-46e9-b5b4-d86ff8ffe930 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Deleting the datastore file [datastore1] 70f2ea82-1b68-478e-8195-f9754eb051ae {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2654.955677] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e94de87d-698d-4f88-9f6a-9c2d811b646a {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2654.961746] env[63279]: DEBUG oslo_vmware.api [None req-bc92c1ec-5f29-46e9-b5b4-d86ff8ffe930 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Waiting for the task: (returnval){ [ 2654.961746] env[63279]: value = "task-2088409" [ 2654.961746] env[63279]: _type = "Task" [ 2654.961746] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2654.969677] env[63279]: DEBUG oslo_vmware.api [None req-bc92c1ec-5f29-46e9-b5b4-d86ff8ffe930 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Task: {'id': task-2088409, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2655.471267] env[63279]: DEBUG oslo_vmware.api [None req-bc92c1ec-5f29-46e9-b5b4-d86ff8ffe930 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Task: {'id': task-2088409, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.128172} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2655.471523] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-bc92c1ec-5f29-46e9-b5b4-d86ff8ffe930 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Deleted the datastore file {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2655.471743] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-bc92c1ec-5f29-46e9-b5b4-d86ff8ffe930 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] [instance: 70f2ea82-1b68-478e-8195-f9754eb051ae] Deleted contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2655.471932] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-bc92c1ec-5f29-46e9-b5b4-d86ff8ffe930 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] [instance: 70f2ea82-1b68-478e-8195-f9754eb051ae] Instance destroyed {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2655.472130] env[63279]: INFO nova.compute.manager [None req-bc92c1ec-5f29-46e9-b5b4-d86ff8ffe930 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] [instance: 70f2ea82-1b68-478e-8195-f9754eb051ae] Took 5.35 seconds to destroy the instance on the hypervisor. [ 2655.472403] env[63279]: DEBUG oslo.service.loopingcall [None req-bc92c1ec-5f29-46e9-b5b4-d86ff8ffe930 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2655.472634] env[63279]: DEBUG nova.compute.manager [-] [instance: 70f2ea82-1b68-478e-8195-f9754eb051ae] Deallocating network for instance {{(pid=63279) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2655.472737] env[63279]: DEBUG nova.network.neutron [-] [instance: 70f2ea82-1b68-478e-8195-f9754eb051ae] deallocate_for_instance() {{(pid=63279) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2655.903232] env[63279]: DEBUG nova.compute.manager [req-9b1e331a-0b7f-4973-af24-40f5f0d222a7 req-392e1a0b-11cd-4254-b21f-e544b3962c35 service nova] [instance: 70f2ea82-1b68-478e-8195-f9754eb051ae] Received event network-vif-deleted-7d6d4bb4-a996-4572-a198-cad6b5f0105c {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2655.903519] env[63279]: INFO nova.compute.manager [req-9b1e331a-0b7f-4973-af24-40f5f0d222a7 req-392e1a0b-11cd-4254-b21f-e544b3962c35 service nova] [instance: 70f2ea82-1b68-478e-8195-f9754eb051ae] Neutron deleted interface 7d6d4bb4-a996-4572-a198-cad6b5f0105c; detaching it from the instance and deleting it from the info cache [ 2655.903694] env[63279]: DEBUG nova.network.neutron [req-9b1e331a-0b7f-4973-af24-40f5f0d222a7 req-392e1a0b-11cd-4254-b21f-e544b3962c35 service nova] [instance: 70f2ea82-1b68-478e-8195-f9754eb051ae] Updating instance_info_cache with network_info: [] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2656.377739] env[63279]: DEBUG nova.network.neutron [-] [instance: 70f2ea82-1b68-478e-8195-f9754eb051ae] Updating instance_info_cache with network_info: [] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2656.406544] env[63279]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c2ba673e-42b6-4ba1-aa86-a8a6876cee83 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2656.416310] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d867fac-d0e1-4987-a0a1-df2fbb061413 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2656.443297] env[63279]: DEBUG nova.compute.manager [req-9b1e331a-0b7f-4973-af24-40f5f0d222a7 req-392e1a0b-11cd-4254-b21f-e544b3962c35 service nova] [instance: 70f2ea82-1b68-478e-8195-f9754eb051ae] Detach interface failed, port_id=7d6d4bb4-a996-4572-a198-cad6b5f0105c, reason: Instance 70f2ea82-1b68-478e-8195-f9754eb051ae could not be found. {{(pid=63279) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11484}} [ 2656.881405] env[63279]: INFO nova.compute.manager [-] [instance: 70f2ea82-1b68-478e-8195-f9754eb051ae] Took 1.41 seconds to deallocate network for instance. 
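The teardown of instance 70f2ea82-1b68-478e-8195-f9754eb051ae above follows the vmwareapi driver's usual order: power off (PowerOffVM_Task, task-2088407), unregister (UnregisterVM), delete the instance directory on datastore1 (DeleteDatastoreFile_Task, task-2088409), then deallocate the Neutron port. The two vCenter tasks are submitted asynchronously and the API layer polls them until they report success, which is what the repeated wait_for_task / _poll_task lines at oslo_vmware/api.py:397/434/444 show. The snippet below is a minimal, self-contained sketch of that poll-until-done pattern; FakeTask, wait_for_task and the task states here are illustrative stand-ins, not the oslo.vmware implementation.

# Minimal sketch of the poll-until-done pattern behind the wait_for_task /
# _poll_task log lines. All names (FakeTask, wait_for_task) are hypothetical
# stand-ins, not the real oslo.vmware API.
import time

class FakeTask:
    """Simulates a vCenter task that finishes after a few polls."""
    def __init__(self, name, polls_to_success=3):
        self.name = name
        self._polls = 0
        self._polls_to_success = polls_to_success

    def info(self):
        self._polls += 1
        if self._polls >= self._polls_to_success:
            return {'state': 'success', 'progress': 100}
        return {'state': 'running',
                'progress': int(100 * self._polls / self._polls_to_success)}

def wait_for_task(task, interval=0.5, timeout=60):
    """Poll the task until it reports success or error (cf. api.py:397/434/444)."""
    deadline = time.monotonic() + timeout
    while True:
        info = task.info()
        if info['state'] == 'success':
            return info
        if info['state'] == 'error':
            raise RuntimeError('task %s failed' % task.name)
        if time.monotonic() > deadline:
            raise TimeoutError('task %s did not finish in %ss' % (task.name, timeout))
        print('Task %s progress is %d%%.' % (task.name, info['progress']))
        time.sleep(interval)

# The destroy path above polls two such tasks back to back:
for step in ('PowerOffVM_Task', 'DeleteDatastoreFile_Task'):
    wait_for_task(FakeTask(step))
    print('Task %s completed successfully.' % step)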
[ 2657.387913] env[63279]: DEBUG oslo_concurrency.lockutils [None req-bc92c1ec-5f29-46e9-b5b4-d86ff8ffe930 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2657.388297] env[63279]: DEBUG oslo_concurrency.lockutils [None req-bc92c1ec-5f29-46e9-b5b4-d86ff8ffe930 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2657.388435] env[63279]: DEBUG nova.objects.instance [None req-bc92c1ec-5f29-46e9-b5b4-d86ff8ffe930 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Lazy-loading 'resources' on Instance uuid 70f2ea82-1b68-478e-8195-f9754eb051ae {{(pid=63279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2657.966541] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-712623ee-5cf3-47ad-9825-eff34d7e98d4 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2657.974434] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-612778f4-190c-4ea8-ab50-410dbe152164 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2658.003377] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6ccf710-8e7d-49f2-bcb3-5025992a1075 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2658.010740] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ea4ffb1-959a-4026-92f8-a28a0c52809e {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2658.025396] env[63279]: DEBUG nova.compute.provider_tree [None req-bc92c1ec-5f29-46e9-b5b4-d86ff8ffe930 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Inventory has not changed in ProviderTree for provider: 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2658.529126] env[63279]: DEBUG nova.scheduler.client.report [None req-bc92c1ec-5f29-46e9-b5b4-d86ff8ffe930 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Inventory has not changed for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2659.034432] env[63279]: DEBUG oslo_concurrency.lockutils [None req-bc92c1ec-5f29-46e9-b5b4-d86ff8ffe930 
tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.646s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2659.056440] env[63279]: INFO nova.scheduler.client.report [None req-bc92c1ec-5f29-46e9-b5b4-d86ff8ffe930 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Deleted allocations for instance 70f2ea82-1b68-478e-8195-f9754eb051ae [ 2659.565056] env[63279]: DEBUG oslo_concurrency.lockutils [None req-bc92c1ec-5f29-46e9-b5b4-d86ff8ffe930 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Lock "70f2ea82-1b68-478e-8195-f9754eb051ae" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 9.947s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2660.981474] env[63279]: DEBUG oslo_concurrency.lockutils [None req-a7425f21-fb7f-42b8-ac3a-4cd6f50e4a12 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Acquiring lock "30ac4320-5ee0-424b-9bbb-e2d53277be80" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2660.981832] env[63279]: DEBUG oslo_concurrency.lockutils [None req-a7425f21-fb7f-42b8-ac3a-4cd6f50e4a12 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Lock "30ac4320-5ee0-424b-9bbb-e2d53277be80" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.001s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2661.485165] env[63279]: INFO nova.compute.manager [None req-a7425f21-fb7f-42b8-ac3a-4cd6f50e4a12 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] [instance: 30ac4320-5ee0-424b-9bbb-e2d53277be80] Detaching volume 2a14c4d9-b71a-4abd-ba55-429105dd6867 [ 2661.516808] env[63279]: INFO nova.virt.block_device [None req-a7425f21-fb7f-42b8-ac3a-4cd6f50e4a12 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] [instance: 30ac4320-5ee0-424b-9bbb-e2d53277be80] Attempting to driver detach volume 2a14c4d9-b71a-4abd-ba55-429105dd6867 from mountpoint /dev/sdb [ 2661.517080] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-a7425f21-fb7f-42b8-ac3a-4cd6f50e4a12 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] [instance: 30ac4320-5ee0-424b-9bbb-e2d53277be80] Volume detach. 
Driver type: vmdk {{(pid=63279) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 2661.517270] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-a7425f21-fb7f-42b8-ac3a-4cd6f50e4a12 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] [instance: 30ac4320-5ee0-424b-9bbb-e2d53277be80] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-427822', 'volume_id': '2a14c4d9-b71a-4abd-ba55-429105dd6867', 'name': 'volume-2a14c4d9-b71a-4abd-ba55-429105dd6867', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '30ac4320-5ee0-424b-9bbb-e2d53277be80', 'attached_at': '', 'detached_at': '', 'volume_id': '2a14c4d9-b71a-4abd-ba55-429105dd6867', 'serial': '2a14c4d9-b71a-4abd-ba55-429105dd6867'} {{(pid=63279) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 2661.518157] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d8683d6-4b13-4024-b883-796a5d721926 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2661.539441] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22449020-67dd-46d4-824c-9d0767ae1855 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2661.546471] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cfb10528-d18b-4b56-a669-8558e2a3eee2 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2661.567030] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7655747f-7181-43a8-84c4-0db8bd7d90ae {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2661.581054] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-a7425f21-fb7f-42b8-ac3a-4cd6f50e4a12 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] The volume has not been displaced from its original location: [datastore1] volume-2a14c4d9-b71a-4abd-ba55-429105dd6867/volume-2a14c4d9-b71a-4abd-ba55-429105dd6867.vmdk. No consolidation needed. 
{{(pid=63279) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 2661.586450] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-a7425f21-fb7f-42b8-ac3a-4cd6f50e4a12 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] [instance: 30ac4320-5ee0-424b-9bbb-e2d53277be80] Reconfiguring VM instance instance-00000078 to detach disk 2001 {{(pid=63279) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 2661.586704] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4201265f-0483-41bf-bc45-f985eb80db2c {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2661.604660] env[63279]: DEBUG oslo_vmware.api [None req-a7425f21-fb7f-42b8-ac3a-4cd6f50e4a12 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Waiting for the task: (returnval){ [ 2661.604660] env[63279]: value = "task-2088410" [ 2661.604660] env[63279]: _type = "Task" [ 2661.604660] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2661.611889] env[63279]: DEBUG oslo_vmware.api [None req-a7425f21-fb7f-42b8-ac3a-4cd6f50e4a12 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Task: {'id': task-2088410, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2662.113901] env[63279]: DEBUG oslo_vmware.api [None req-a7425f21-fb7f-42b8-ac3a-4cd6f50e4a12 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Task: {'id': task-2088410, 'name': ReconfigVM_Task, 'duration_secs': 0.217704} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2662.114272] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-a7425f21-fb7f-42b8-ac3a-4cd6f50e4a12 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] [instance: 30ac4320-5ee0-424b-9bbb-e2d53277be80] Reconfigured VM instance instance-00000078 to detach disk 2001 {{(pid=63279) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 2662.118715] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e6fe840a-1dbb-45e6-acde-7b6c94ec590c {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2662.133619] env[63279]: DEBUG oslo_vmware.api [None req-a7425f21-fb7f-42b8-ac3a-4cd6f50e4a12 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Waiting for the task: (returnval){ [ 2662.133619] env[63279]: value = "task-2088411" [ 2662.133619] env[63279]: _type = "Task" [ 2662.133619] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2662.141308] env[63279]: DEBUG oslo_vmware.api [None req-a7425f21-fb7f-42b8-ac3a-4cd6f50e4a12 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Task: {'id': task-2088411, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2662.643028] env[63279]: DEBUG oslo_vmware.api [None req-a7425f21-fb7f-42b8-ac3a-4cd6f50e4a12 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Task: {'id': task-2088411, 'name': ReconfigVM_Task, 'duration_secs': 0.139393} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2662.643358] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-a7425f21-fb7f-42b8-ac3a-4cd6f50e4a12 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] [instance: 30ac4320-5ee0-424b-9bbb-e2d53277be80] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-427822', 'volume_id': '2a14c4d9-b71a-4abd-ba55-429105dd6867', 'name': 'volume-2a14c4d9-b71a-4abd-ba55-429105dd6867', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '30ac4320-5ee0-424b-9bbb-e2d53277be80', 'attached_at': '', 'detached_at': '', 'volume_id': '2a14c4d9-b71a-4abd-ba55-429105dd6867', 'serial': '2a14c4d9-b71a-4abd-ba55-429105dd6867'} {{(pid=63279) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 2663.182829] env[63279]: DEBUG nova.objects.instance [None req-a7425f21-fb7f-42b8-ac3a-4cd6f50e4a12 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Lazy-loading 'flavor' on Instance uuid 30ac4320-5ee0-424b-9bbb-e2d53277be80 {{(pid=63279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2664.191099] env[63279]: DEBUG oslo_concurrency.lockutils [None req-a7425f21-fb7f-42b8-ac3a-4cd6f50e4a12 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Lock "30ac4320-5ee0-424b-9bbb-e2d53277be80" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.209s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2665.217252] env[63279]: DEBUG oslo_concurrency.lockutils [None req-d87491ea-25f4-483d-acfb-8b60748f1aac tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Acquiring lock "30ac4320-5ee0-424b-9bbb-e2d53277be80" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2665.217597] env[63279]: DEBUG oslo_concurrency.lockutils [None req-d87491ea-25f4-483d-acfb-8b60748f1aac tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Lock "30ac4320-5ee0-424b-9bbb-e2d53277be80" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2665.217749] env[63279]: DEBUG oslo_concurrency.lockutils [None req-d87491ea-25f4-483d-acfb-8b60748f1aac tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Acquiring lock "30ac4320-5ee0-424b-9bbb-e2d53277be80-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63279) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2665.217947] env[63279]: DEBUG oslo_concurrency.lockutils [None req-d87491ea-25f4-483d-acfb-8b60748f1aac tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Lock "30ac4320-5ee0-424b-9bbb-e2d53277be80-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2665.218146] env[63279]: DEBUG oslo_concurrency.lockutils [None req-d87491ea-25f4-483d-acfb-8b60748f1aac tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Lock "30ac4320-5ee0-424b-9bbb-e2d53277be80-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2665.220430] env[63279]: INFO nova.compute.manager [None req-d87491ea-25f4-483d-acfb-8b60748f1aac tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] [instance: 30ac4320-5ee0-424b-9bbb-e2d53277be80] Terminating instance [ 2665.724578] env[63279]: DEBUG nova.compute.manager [None req-d87491ea-25f4-483d-acfb-8b60748f1aac tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] [instance: 30ac4320-5ee0-424b-9bbb-e2d53277be80] Start destroying the instance on the hypervisor. {{(pid=63279) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2665.724815] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-d87491ea-25f4-483d-acfb-8b60748f1aac tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] [instance: 30ac4320-5ee0-424b-9bbb-e2d53277be80] Destroying instance {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2665.725916] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28687c60-ae37-4ad9-a9dc-4f5579152117 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2665.735488] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-d87491ea-25f4-483d-acfb-8b60748f1aac tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] [instance: 30ac4320-5ee0-424b-9bbb-e2d53277be80] Powering off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2665.735717] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f63049b3-2af5-4174-a9c4-2a167e960283 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2665.741654] env[63279]: DEBUG oslo_vmware.api [None req-d87491ea-25f4-483d-acfb-8b60748f1aac tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Waiting for the task: (returnval){ [ 2665.741654] env[63279]: value = "task-2088412" [ 2665.741654] env[63279]: _type = "Task" [ 2665.741654] env[63279]: } to complete. 
{{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2665.749252] env[63279]: DEBUG oslo_vmware.api [None req-d87491ea-25f4-483d-acfb-8b60748f1aac tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Task: {'id': task-2088412, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2666.251672] env[63279]: DEBUG oslo_vmware.api [None req-d87491ea-25f4-483d-acfb-8b60748f1aac tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Task: {'id': task-2088412, 'name': PowerOffVM_Task, 'duration_secs': 0.162899} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2666.252014] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-d87491ea-25f4-483d-acfb-8b60748f1aac tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] [instance: 30ac4320-5ee0-424b-9bbb-e2d53277be80] Powered off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2666.252140] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-d87491ea-25f4-483d-acfb-8b60748f1aac tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] [instance: 30ac4320-5ee0-424b-9bbb-e2d53277be80] Unregistering the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2666.252384] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-22a2bf02-c689-4d91-bdc9-bb5bab946977 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2666.352839] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-d87491ea-25f4-483d-acfb-8b60748f1aac tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] [instance: 30ac4320-5ee0-424b-9bbb-e2d53277be80] Unregistered the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2666.353071] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-d87491ea-25f4-483d-acfb-8b60748f1aac tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] [instance: 30ac4320-5ee0-424b-9bbb-e2d53277be80] Deleting contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2666.353261] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-d87491ea-25f4-483d-acfb-8b60748f1aac tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Deleting the datastore file [datastore1] 30ac4320-5ee0-424b-9bbb-e2d53277be80 {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2666.353519] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a521c0d8-2d3c-46a4-a51e-b0d54c326d9a {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2666.360582] env[63279]: DEBUG oslo_vmware.api [None req-d87491ea-25f4-483d-acfb-8b60748f1aac tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Waiting for the task: (returnval){ [ 2666.360582] env[63279]: value = "task-2088414" [ 2666.360582] 
env[63279]: _type = "Task" [ 2666.360582] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2666.367655] env[63279]: DEBUG oslo_vmware.api [None req-d87491ea-25f4-483d-acfb-8b60748f1aac tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Task: {'id': task-2088414, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2666.870538] env[63279]: DEBUG oslo_vmware.api [None req-d87491ea-25f4-483d-acfb-8b60748f1aac tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Task: {'id': task-2088414, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.12476} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2666.870795] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-d87491ea-25f4-483d-acfb-8b60748f1aac tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Deleted the datastore file {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2666.870979] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-d87491ea-25f4-483d-acfb-8b60748f1aac tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] [instance: 30ac4320-5ee0-424b-9bbb-e2d53277be80] Deleted contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2666.871176] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-d87491ea-25f4-483d-acfb-8b60748f1aac tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] [instance: 30ac4320-5ee0-424b-9bbb-e2d53277be80] Instance destroyed {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2666.871544] env[63279]: INFO nova.compute.manager [None req-d87491ea-25f4-483d-acfb-8b60748f1aac tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] [instance: 30ac4320-5ee0-424b-9bbb-e2d53277be80] Took 1.15 seconds to destroy the instance on the hypervisor. [ 2666.871629] env[63279]: DEBUG oslo.service.loopingcall [None req-d87491ea-25f4-483d-acfb-8b60748f1aac tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2666.871787] env[63279]: DEBUG nova.compute.manager [-] [instance: 30ac4320-5ee0-424b-9bbb-e2d53277be80] Deallocating network for instance {{(pid=63279) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2666.871887] env[63279]: DEBUG nova.network.neutron [-] [instance: 30ac4320-5ee0-424b-9bbb-e2d53277be80] deallocate_for_instance() {{(pid=63279) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2667.304776] env[63279]: DEBUG nova.compute.manager [req-38105d73-99d3-4199-9e76-aabc4060afb4 req-8b1f59d7-2172-4533-a525-0080fb66a34e service nova] [instance: 30ac4320-5ee0-424b-9bbb-e2d53277be80] Received event network-vif-deleted-0db48ddb-8067-45e9-a073-cbfb47873a8b {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2667.305030] env[63279]: INFO nova.compute.manager [req-38105d73-99d3-4199-9e76-aabc4060afb4 req-8b1f59d7-2172-4533-a525-0080fb66a34e service nova] [instance: 30ac4320-5ee0-424b-9bbb-e2d53277be80] Neutron deleted interface 0db48ddb-8067-45e9-a073-cbfb47873a8b; detaching it from the instance and deleting it from the info cache [ 2667.305172] env[63279]: DEBUG nova.network.neutron [req-38105d73-99d3-4199-9e76-aabc4060afb4 req-8b1f59d7-2172-4533-a525-0080fb66a34e service nova] [instance: 30ac4320-5ee0-424b-9bbb-e2d53277be80] Updating instance_info_cache with network_info: [] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2667.786562] env[63279]: DEBUG nova.network.neutron [-] [instance: 30ac4320-5ee0-424b-9bbb-e2d53277be80] Updating instance_info_cache with network_info: [] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2667.807852] env[63279]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-896985ba-475b-41e8-8a24-c674433b6e6e {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2667.817493] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7dbf0d7-b5d7-48f9-abca-9fcbc609c14d {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2667.841879] env[63279]: DEBUG nova.compute.manager [req-38105d73-99d3-4199-9e76-aabc4060afb4 req-8b1f59d7-2172-4533-a525-0080fb66a34e service nova] [instance: 30ac4320-5ee0-424b-9bbb-e2d53277be80] Detach interface failed, port_id=0db48ddb-8067-45e9-a073-cbfb47873a8b, reason: Instance 30ac4320-5ee0-424b-9bbb-e2d53277be80 could not be found. {{(pid=63279) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11484}} [ 2668.289062] env[63279]: INFO nova.compute.manager [-] [instance: 30ac4320-5ee0-424b-9bbb-e2d53277be80] Took 1.42 seconds to deallocate network for instance. 
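Just before this second teardown, the log shows the driver-level volume detach for volume 2a14c4d9-b71a-4abd-ba55-429105dd6867 on instance 30ac4320-5ee0-424b-9bbb-e2d53277be80: the connection info is of type 'vmdk', the driver confirms the backing file has not been displaced (no consolidation needed), then issues ReconfigVM_Task twice (task-2088410 to remove virtual disk 2001 from instance-00000078, task-2088411 for follow-up volume bookkeeping) before reporting "Detached VMDK". The sketch below illustrates the core idea of the first reconfigure: drop the disk device from the VM's device list while keeping the vmdk file so Cinder still owns the data. The classes and the reconfigure() helper are simplified, hypothetical stand-ins, not the vmwareapi driver or the vSphere API; only the device key 2001 and the volume vmdk path are taken from the log.

# Simplified sketch of "reconfigure the VM to detach disk 2001 without deleting
# the backing file". All types and helpers here are hypothetical stand-ins.
from dataclasses import dataclass, field

@dataclass
class VirtualDisk:
    key: int            # device key, e.g. 2001 in the log
    backing_file: str   # datastore path of the vmdk backing

@dataclass
class VirtualMachine:
    name: str
    devices: list = field(default_factory=list)

def reconfigure(vm, remove_device_keys, destroy_backing=False):
    """Apply a device-removal change set; keep the vmdk unless destroy_backing."""
    for dev in list(vm.devices):
        if dev.key in remove_device_keys:
            vm.devices.remove(dev)
            action = 'deleted' if destroy_backing else 'kept'
            print('Removed device %d from %s (backing %s %s)'
                  % (dev.key, vm.name, dev.backing_file, action))

vm = VirtualMachine('instance-00000078', [
    # hypothetical root disk path for illustration only
    VirtualDisk(2000, '[datastore1] 30ac4320-5ee0-424b-9bbb-e2d53277be80/root.vmdk'),
    # volume-backed disk path as reported in the log
    VirtualDisk(2001, '[datastore1] volume-2a14c4d9-b71a-4abd-ba55-429105dd6867/'
                      'volume-2a14c4d9-b71a-4abd-ba55-429105dd6867.vmdk'),
])

# Find the disk that backs the Cinder volume and detach it, leaving the vmdk in
# place; deleting the file would destroy the volume's data.
volume_vmdk = 'volume-2a14c4d9-b71a-4abd-ba55-429105dd6867'
keys = [d.key for d in vm.devices if volume_vmdk in d.backing_file]
reconfigure(vm, keys, destroy_backing=False)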
[ 2668.795331] env[63279]: DEBUG oslo_concurrency.lockutils [None req-d87491ea-25f4-483d-acfb-8b60748f1aac tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2668.795666] env[63279]: DEBUG oslo_concurrency.lockutils [None req-d87491ea-25f4-483d-acfb-8b60748f1aac tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2668.795815] env[63279]: DEBUG nova.objects.instance [None req-d87491ea-25f4-483d-acfb-8b60748f1aac tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Lazy-loading 'resources' on Instance uuid 30ac4320-5ee0-424b-9bbb-e2d53277be80 {{(pid=63279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2669.359507] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc3f4ca8-70b4-4717-b0e8-b3c7e4315703 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2669.368643] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-563138f1-020c-42b3-8dcf-57da7dceeef2 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2669.397093] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d0e26dd-1f0b-414e-b37d-8d5e66253dbb {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2669.403857] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f34810e-cb43-41aa-ab6b-fb7e5a69be3c {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2669.416731] env[63279]: DEBUG nova.compute.provider_tree [None req-d87491ea-25f4-483d-acfb-8b60748f1aac tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Updating inventory in ProviderTree for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2669.936219] env[63279]: ERROR nova.scheduler.client.report [None req-d87491ea-25f4-483d-acfb-8b60748f1aac tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] [req-c1bf27e9-e721-42b5-bc22-6801b464c742] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 
1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 0ba7c625-a0fc-4d3c-b804-196d00f00137. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-c1bf27e9-e721-42b5-bc22-6801b464c742"}]} [ 2669.952622] env[63279]: DEBUG nova.scheduler.client.report [None req-d87491ea-25f4-483d-acfb-8b60748f1aac tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Refreshing inventories for resource provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 2669.964875] env[63279]: DEBUG nova.scheduler.client.report [None req-d87491ea-25f4-483d-acfb-8b60748f1aac tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Updating ProviderTree inventory for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 2669.965118] env[63279]: DEBUG nova.compute.provider_tree [None req-d87491ea-25f4-483d-acfb-8b60748f1aac tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Updating inventory in ProviderTree for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2669.975748] env[63279]: DEBUG nova.scheduler.client.report [None req-d87491ea-25f4-483d-acfb-8b60748f1aac tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Refreshing aggregate associations for resource provider 0ba7c625-a0fc-4d3c-b804-196d00f00137, aggregates: None {{(pid=63279) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 2669.991687] env[63279]: DEBUG nova.scheduler.client.report [None req-d87491ea-25f4-483d-acfb-8b60748f1aac tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Refreshing trait associations for resource provider 0ba7c625-a0fc-4d3c-b804-196d00f00137, traits: COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK {{(pid=63279) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 2670.039470] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89be12b6-0547-4811-ace3-e9c85c9207ff {{(pid=63279) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2670.046812] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1595a07-42e1-4c16-bf61-c212e551ebb6 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2670.076717] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31d3e7cf-1604-4789-b827-567a565de699 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2670.083827] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30627578-4992-410a-9ca1-7f25b603ddc7 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2670.096719] env[63279]: DEBUG nova.compute.provider_tree [None req-d87491ea-25f4-483d-acfb-8b60748f1aac tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Updating inventory in ProviderTree for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2670.627068] env[63279]: DEBUG nova.scheduler.client.report [None req-d87491ea-25f4-483d-acfb-8b60748f1aac tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Updated inventory for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 with generation 192 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 2670.627349] env[63279]: DEBUG nova.compute.provider_tree [None req-d87491ea-25f4-483d-acfb-8b60748f1aac tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Updating resource provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 generation from 192 to 193 during operation: update_inventory {{(pid=63279) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 2670.627532] env[63279]: DEBUG nova.compute.provider_tree [None req-d87491ea-25f4-483d-acfb-8b60748f1aac tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Updating inventory in ProviderTree for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) update_inventory 
/opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2671.132364] env[63279]: DEBUG oslo_concurrency.lockutils [None req-d87491ea-25f4-483d-acfb-8b60748f1aac tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.337s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2671.152888] env[63279]: INFO nova.scheduler.client.report [None req-d87491ea-25f4-483d-acfb-8b60748f1aac tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Deleted allocations for instance 30ac4320-5ee0-424b-9bbb-e2d53277be80 [ 2671.661134] env[63279]: DEBUG oslo_concurrency.lockutils [None req-d87491ea-25f4-483d-acfb-8b60748f1aac tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Lock "30ac4320-5ee0-424b-9bbb-e2d53277be80" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.443s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2673.128746] env[63279]: DEBUG oslo_concurrency.lockutils [None req-207e83c2-f3de-432a-830a-f57fc1520624 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Acquiring lock "2ed7e467-a6aa-4ed7-8ad8-bfc03ff22820" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2673.129087] env[63279]: DEBUG oslo_concurrency.lockutils [None req-207e83c2-f3de-432a-830a-f57fc1520624 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Lock "2ed7e467-a6aa-4ed7-8ad8-bfc03ff22820" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2673.631827] env[63279]: DEBUG nova.compute.manager [None req-207e83c2-f3de-432a-830a-f57fc1520624 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] [instance: 2ed7e467-a6aa-4ed7-8ad8-bfc03ff22820] Starting instance... 
{{(pid=63279) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 2674.152749] env[63279]: DEBUG oslo_concurrency.lockutils [None req-207e83c2-f3de-432a-830a-f57fc1520624 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2674.153042] env[63279]: DEBUG oslo_concurrency.lockutils [None req-207e83c2-f3de-432a-830a-f57fc1520624 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2674.154648] env[63279]: INFO nova.compute.claims [None req-207e83c2-f3de-432a-830a-f57fc1520624 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] [instance: 2ed7e467-a6aa-4ed7-8ad8-bfc03ff22820] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2675.220513] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aef4f5ad-8600-41d3-a8ec-790e0bd1b661 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2675.228050] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4958ba34-0d23-4d19-93ea-47511918af0e {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2675.257371] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c01a7b22-ec5e-44c0-8901-831a0930a2f9 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2675.263913] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c43cd64-91d5-4f93-b585-49fa0163cda8 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2675.276457] env[63279]: DEBUG nova.compute.provider_tree [None req-207e83c2-f3de-432a-830a-f57fc1520624 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Inventory has not changed in ProviderTree for provider: 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2675.779859] env[63279]: DEBUG nova.scheduler.client.report [None req-207e83c2-f3de-432a-830a-f57fc1520624 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Inventory has not changed for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2676.285048] env[63279]: DEBUG oslo_concurrency.lockutils 
[None req-207e83c2-f3de-432a-830a-f57fc1520624 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.132s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2676.285561] env[63279]: DEBUG nova.compute.manager [None req-207e83c2-f3de-432a-830a-f57fc1520624 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] [instance: 2ed7e467-a6aa-4ed7-8ad8-bfc03ff22820] Start building networks asynchronously for instance. {{(pid=63279) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 2676.790554] env[63279]: DEBUG nova.compute.utils [None req-207e83c2-f3de-432a-830a-f57fc1520624 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Using /dev/sd instead of None {{(pid=63279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2676.792371] env[63279]: DEBUG nova.compute.manager [None req-207e83c2-f3de-432a-830a-f57fc1520624 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] [instance: 2ed7e467-a6aa-4ed7-8ad8-bfc03ff22820] Allocating IP information in the background. {{(pid=63279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 2676.792560] env[63279]: DEBUG nova.network.neutron [None req-207e83c2-f3de-432a-830a-f57fc1520624 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] [instance: 2ed7e467-a6aa-4ed7-8ad8-bfc03ff22820] allocate_for_instance() {{(pid=63279) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2676.837408] env[63279]: DEBUG nova.policy [None req-207e83c2-f3de-432a-830a-f57fc1520624 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '233f5d339e7b438e910eb03d33891c16', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '5fa421934ecd4054a70528644a40349e', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63279) authorize /opt/stack/nova/nova/policy.py:192}} [ 2677.092662] env[63279]: DEBUG nova.network.neutron [None req-207e83c2-f3de-432a-830a-f57fc1520624 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] [instance: 2ed7e467-a6aa-4ed7-8ad8-bfc03ff22820] Successfully created port: f18dd625-8e75-4f9c-8ac2-1e7ea380ffef {{(pid=63279) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2677.295704] env[63279]: DEBUG nova.compute.manager [None req-207e83c2-f3de-432a-830a-f57fc1520624 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] [instance: 2ed7e467-a6aa-4ed7-8ad8-bfc03ff22820] Start building block device mappings for instance. 
{{(pid=63279) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 2678.308817] env[63279]: DEBUG nova.compute.manager [None req-207e83c2-f3de-432a-830a-f57fc1520624 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] [instance: 2ed7e467-a6aa-4ed7-8ad8-bfc03ff22820] Start spawning the instance on the hypervisor. {{(pid=63279) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 2678.336217] env[63279]: DEBUG nova.virt.hardware [None req-207e83c2-f3de-432a-830a-f57fc1520624 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-13T17:30:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-13T17:30:01Z,direct_url=,disk_format='vmdk',id=30887889-e45b-4f67-8b3c-16216e594a90,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a935e86eee0a4c38adfe0367d2097a61',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-13T17:30:02Z,virtual_size=,visibility=), allow threads: False {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2678.336470] env[63279]: DEBUG nova.virt.hardware [None req-207e83c2-f3de-432a-830a-f57fc1520624 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Flavor limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2678.336631] env[63279]: DEBUG nova.virt.hardware [None req-207e83c2-f3de-432a-830a-f57fc1520624 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Image limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2678.336815] env[63279]: DEBUG nova.virt.hardware [None req-207e83c2-f3de-432a-830a-f57fc1520624 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Flavor pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2678.336963] env[63279]: DEBUG nova.virt.hardware [None req-207e83c2-f3de-432a-830a-f57fc1520624 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Image pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2678.337128] env[63279]: DEBUG nova.virt.hardware [None req-207e83c2-f3de-432a-830a-f57fc1520624 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2678.337418] env[63279]: DEBUG nova.virt.hardware [None req-207e83c2-f3de-432a-830a-f57fc1520624 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63279) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:573}} [ 2678.337502] env[63279]: DEBUG nova.virt.hardware [None req-207e83c2-f3de-432a-830a-f57fc1520624 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 2678.337661] env[63279]: DEBUG nova.virt.hardware [None req-207e83c2-f3de-432a-830a-f57fc1520624 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Got 1 possible topologies {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2678.337822] env[63279]: DEBUG nova.virt.hardware [None req-207e83c2-f3de-432a-830a-f57fc1520624 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2678.337995] env[63279]: DEBUG nova.virt.hardware [None req-207e83c2-f3de-432a-830a-f57fc1520624 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2678.338861] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3dd1a5c4-baaa-4b7b-b873-579476304bdf {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2678.346804] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c376b905-24a6-4420-87b0-d71b3b687722 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2678.459664] env[63279]: DEBUG nova.compute.manager [req-87636daa-f8e2-4738-8e37-fb1f3ba43d13 req-15547523-8050-45bf-a105-4b83e04fc1ae service nova] [instance: 2ed7e467-a6aa-4ed7-8ad8-bfc03ff22820] Received event network-vif-plugged-f18dd625-8e75-4f9c-8ac2-1e7ea380ffef {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2678.459899] env[63279]: DEBUG oslo_concurrency.lockutils [req-87636daa-f8e2-4738-8e37-fb1f3ba43d13 req-15547523-8050-45bf-a105-4b83e04fc1ae service nova] Acquiring lock "2ed7e467-a6aa-4ed7-8ad8-bfc03ff22820-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2678.460140] env[63279]: DEBUG oslo_concurrency.lockutils [req-87636daa-f8e2-4738-8e37-fb1f3ba43d13 req-15547523-8050-45bf-a105-4b83e04fc1ae service nova] Lock "2ed7e467-a6aa-4ed7-8ad8-bfc03ff22820-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2678.460310] env[63279]: DEBUG oslo_concurrency.lockutils [req-87636daa-f8e2-4738-8e37-fb1f3ba43d13 req-15547523-8050-45bf-a105-4b83e04fc1ae service nova] Lock "2ed7e467-a6aa-4ed7-8ad8-bfc03ff22820-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2678.460479] env[63279]: 
DEBUG nova.compute.manager [req-87636daa-f8e2-4738-8e37-fb1f3ba43d13 req-15547523-8050-45bf-a105-4b83e04fc1ae service nova] [instance: 2ed7e467-a6aa-4ed7-8ad8-bfc03ff22820] No waiting events found dispatching network-vif-plugged-f18dd625-8e75-4f9c-8ac2-1e7ea380ffef {{(pid=63279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 2678.460646] env[63279]: WARNING nova.compute.manager [req-87636daa-f8e2-4738-8e37-fb1f3ba43d13 req-15547523-8050-45bf-a105-4b83e04fc1ae service nova] [instance: 2ed7e467-a6aa-4ed7-8ad8-bfc03ff22820] Received unexpected event network-vif-plugged-f18dd625-8e75-4f9c-8ac2-1e7ea380ffef for instance with vm_state building and task_state spawning. [ 2678.542841] env[63279]: DEBUG nova.network.neutron [None req-207e83c2-f3de-432a-830a-f57fc1520624 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] [instance: 2ed7e467-a6aa-4ed7-8ad8-bfc03ff22820] Successfully updated port: f18dd625-8e75-4f9c-8ac2-1e7ea380ffef {{(pid=63279) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2679.046236] env[63279]: DEBUG oslo_concurrency.lockutils [None req-207e83c2-f3de-432a-830a-f57fc1520624 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Acquiring lock "refresh_cache-2ed7e467-a6aa-4ed7-8ad8-bfc03ff22820" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2679.046523] env[63279]: DEBUG oslo_concurrency.lockutils [None req-207e83c2-f3de-432a-830a-f57fc1520624 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Acquired lock "refresh_cache-2ed7e467-a6aa-4ed7-8ad8-bfc03ff22820" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2679.046769] env[63279]: DEBUG nova.network.neutron [None req-207e83c2-f3de-432a-830a-f57fc1520624 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] [instance: 2ed7e467-a6aa-4ed7-8ad8-bfc03ff22820] Building network info cache for instance {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2679.578163] env[63279]: DEBUG nova.network.neutron [None req-207e83c2-f3de-432a-830a-f57fc1520624 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] [instance: 2ed7e467-a6aa-4ed7-8ad8-bfc03ff22820] Instance cache missing network info. 
{{(pid=63279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2679.715543] env[63279]: DEBUG nova.network.neutron [None req-207e83c2-f3de-432a-830a-f57fc1520624 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] [instance: 2ed7e467-a6aa-4ed7-8ad8-bfc03ff22820] Updating instance_info_cache with network_info: [{"id": "f18dd625-8e75-4f9c-8ac2-1e7ea380ffef", "address": "fa:16:3e:ca:99:1f", "network": {"id": "08f1618e-b8a4-4fea-847f-a6d01449f46c", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1512363099-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5fa421934ecd4054a70528644a40349e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e41070eb-3ac1-4ca9-a3d0-fd65893a97de", "external-id": "nsx-vlan-transportzone-596", "segmentation_id": 596, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf18dd625-8e", "ovs_interfaceid": "f18dd625-8e75-4f9c-8ac2-1e7ea380ffef", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2679.901394] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b03718ae-42a7-4cb2-830c-54a3068207a5 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Acquiring lock "033a1d6d-db52-4902-8994-4d1537ab8658" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2679.901667] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b03718ae-42a7-4cb2-830c-54a3068207a5 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Lock "033a1d6d-db52-4902-8994-4d1537ab8658" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2679.901887] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b03718ae-42a7-4cb2-830c-54a3068207a5 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Acquiring lock "033a1d6d-db52-4902-8994-4d1537ab8658-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2679.902091] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b03718ae-42a7-4cb2-830c-54a3068207a5 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Lock "033a1d6d-db52-4902-8994-4d1537ab8658-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2679.902274] env[63279]: DEBUG 
oslo_concurrency.lockutils [None req-b03718ae-42a7-4cb2-830c-54a3068207a5 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Lock "033a1d6d-db52-4902-8994-4d1537ab8658-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2679.904439] env[63279]: INFO nova.compute.manager [None req-b03718ae-42a7-4cb2-830c-54a3068207a5 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: 033a1d6d-db52-4902-8994-4d1537ab8658] Terminating instance [ 2680.218025] env[63279]: DEBUG oslo_concurrency.lockutils [None req-207e83c2-f3de-432a-830a-f57fc1520624 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Releasing lock "refresh_cache-2ed7e467-a6aa-4ed7-8ad8-bfc03ff22820" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2680.218415] env[63279]: DEBUG nova.compute.manager [None req-207e83c2-f3de-432a-830a-f57fc1520624 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] [instance: 2ed7e467-a6aa-4ed7-8ad8-bfc03ff22820] Instance network_info: |[{"id": "f18dd625-8e75-4f9c-8ac2-1e7ea380ffef", "address": "fa:16:3e:ca:99:1f", "network": {"id": "08f1618e-b8a4-4fea-847f-a6d01449f46c", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1512363099-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5fa421934ecd4054a70528644a40349e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e41070eb-3ac1-4ca9-a3d0-fd65893a97de", "external-id": "nsx-vlan-transportzone-596", "segmentation_id": 596, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf18dd625-8e", "ovs_interfaceid": "f18dd625-8e75-4f9c-8ac2-1e7ea380ffef", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 2680.218852] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-207e83c2-f3de-432a-830a-f57fc1520624 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] [instance: 2ed7e467-a6aa-4ed7-8ad8-bfc03ff22820] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ca:99:1f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e41070eb-3ac1-4ca9-a3d0-fd65893a97de', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f18dd625-8e75-4f9c-8ac2-1e7ea380ffef', 'vif_model': 'vmxnet3'}] {{(pid=63279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2680.226295] env[63279]: DEBUG oslo.service.loopingcall [None req-207e83c2-f3de-432a-830a-f57fc1520624 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Waiting for function 
nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2680.226518] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2ed7e467-a6aa-4ed7-8ad8-bfc03ff22820] Creating VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2680.226745] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d5cf99c4-d899-42b2-9abe-fa526132ad41 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2680.247918] env[63279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2680.247918] env[63279]: value = "task-2088415" [ 2680.247918] env[63279]: _type = "Task" [ 2680.247918] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2680.256938] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2088415, 'name': CreateVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2680.408231] env[63279]: DEBUG nova.compute.manager [None req-b03718ae-42a7-4cb2-830c-54a3068207a5 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: 033a1d6d-db52-4902-8994-4d1537ab8658] Start destroying the instance on the hypervisor. {{(pid=63279) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2680.408591] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-b03718ae-42a7-4cb2-830c-54a3068207a5 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: 033a1d6d-db52-4902-8994-4d1537ab8658] Powering off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2680.408929] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b80c14e1-5715-443c-a9ff-9f8b91a71252 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2680.416950] env[63279]: DEBUG oslo_vmware.api [None req-b03718ae-42a7-4cb2-830c-54a3068207a5 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Waiting for the task: (returnval){ [ 2680.416950] env[63279]: value = "task-2088416" [ 2680.416950] env[63279]: _type = "Task" [ 2680.416950] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2680.425133] env[63279]: DEBUG oslo_vmware.api [None req-b03718ae-42a7-4cb2-830c-54a3068207a5 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Task: {'id': task-2088416, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2680.492306] env[63279]: DEBUG nova.compute.manager [req-663da875-b43b-4966-a65f-61508be90a34 req-e7546e84-02ab-4635-9562-96bbbdb5fee4 service nova] [instance: 2ed7e467-a6aa-4ed7-8ad8-bfc03ff22820] Received event network-changed-f18dd625-8e75-4f9c-8ac2-1e7ea380ffef {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2680.492524] env[63279]: DEBUG nova.compute.manager [req-663da875-b43b-4966-a65f-61508be90a34 req-e7546e84-02ab-4635-9562-96bbbdb5fee4 service nova] [instance: 2ed7e467-a6aa-4ed7-8ad8-bfc03ff22820] Refreshing instance network info cache due to event network-changed-f18dd625-8e75-4f9c-8ac2-1e7ea380ffef. {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11655}} [ 2680.492741] env[63279]: DEBUG oslo_concurrency.lockutils [req-663da875-b43b-4966-a65f-61508be90a34 req-e7546e84-02ab-4635-9562-96bbbdb5fee4 service nova] Acquiring lock "refresh_cache-2ed7e467-a6aa-4ed7-8ad8-bfc03ff22820" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2680.492892] env[63279]: DEBUG oslo_concurrency.lockutils [req-663da875-b43b-4966-a65f-61508be90a34 req-e7546e84-02ab-4635-9562-96bbbdb5fee4 service nova] Acquired lock "refresh_cache-2ed7e467-a6aa-4ed7-8ad8-bfc03ff22820" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2680.493091] env[63279]: DEBUG nova.network.neutron [req-663da875-b43b-4966-a65f-61508be90a34 req-e7546e84-02ab-4635-9562-96bbbdb5fee4 service nova] [instance: 2ed7e467-a6aa-4ed7-8ad8-bfc03ff22820] Refreshing network info cache for port f18dd625-8e75-4f9c-8ac2-1e7ea380ffef {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2680.758071] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2088415, 'name': CreateVM_Task, 'duration_secs': 0.388493} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2680.758385] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2ed7e467-a6aa-4ed7-8ad8-bfc03ff22820] Created VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2680.758912] env[63279]: DEBUG oslo_concurrency.lockutils [None req-207e83c2-f3de-432a-830a-f57fc1520624 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2680.759144] env[63279]: DEBUG oslo_concurrency.lockutils [None req-207e83c2-f3de-432a-830a-f57fc1520624 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2680.759504] env[63279]: DEBUG oslo_concurrency.lockutils [None req-207e83c2-f3de-432a-830a-f57fc1520624 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2680.759760] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-aafc4ef3-de96-4bed-acf4-95c3ce74972c {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2680.763863] env[63279]: DEBUG oslo_vmware.api [None req-207e83c2-f3de-432a-830a-f57fc1520624 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Waiting for the task: (returnval){ [ 2680.763863] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52adbcca-65bd-294a-e4d3-7f2b8119e0c0" [ 2680.763863] env[63279]: _type = "Task" [ 2680.763863] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2680.771306] env[63279]: DEBUG oslo_vmware.api [None req-207e83c2-f3de-432a-830a-f57fc1520624 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52adbcca-65bd-294a-e4d3-7f2b8119e0c0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2680.926100] env[63279]: DEBUG oslo_vmware.api [None req-b03718ae-42a7-4cb2-830c-54a3068207a5 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Task: {'id': task-2088416, 'name': PowerOffVM_Task, 'duration_secs': 0.167791} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2680.926373] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-b03718ae-42a7-4cb2-830c-54a3068207a5 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: 033a1d6d-db52-4902-8994-4d1537ab8658] Powered off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2680.926572] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-b03718ae-42a7-4cb2-830c-54a3068207a5 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: 033a1d6d-db52-4902-8994-4d1537ab8658] Volume detach. Driver type: vmdk {{(pid=63279) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 2680.926768] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-b03718ae-42a7-4cb2-830c-54a3068207a5 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: 033a1d6d-db52-4902-8994-4d1537ab8658] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-427823', 'volume_id': 'b193588b-4d25-4ebf-8400-07b3cc22c215', 'name': 'volume-b193588b-4d25-4ebf-8400-07b3cc22c215', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'attaching', 'instance': '033a1d6d-db52-4902-8994-4d1537ab8658', 'attached_at': '2025-01-13T18:05:52.000000', 'detached_at': '', 'volume_id': 'b193588b-4d25-4ebf-8400-07b3cc22c215', 'serial': 'b193588b-4d25-4ebf-8400-07b3cc22c215'} {{(pid=63279) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 2680.927508] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6035e9b5-c382-44e0-944e-b1aee473ce24 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2680.944673] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a142830-87b7-477e-bcbc-bdeabb959d8c {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2680.950566] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa3cc7c4-5945-45a2-9f6b-5ef2a2c6b08d {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2680.967455] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-480141f8-18e4-42d2-bae8-cc706e9cac9c {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2680.981236] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-b03718ae-42a7-4cb2-830c-54a3068207a5 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] The volume has not been displaced from its original location: [datastore1] volume-b193588b-4d25-4ebf-8400-07b3cc22c215/volume-b193588b-4d25-4ebf-8400-07b3cc22c215.vmdk. No consolidation needed. 
{{(pid=63279) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 2680.986323] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-b03718ae-42a7-4cb2-830c-54a3068207a5 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: 033a1d6d-db52-4902-8994-4d1537ab8658] Reconfiguring VM instance instance-0000007b to detach disk 2000 {{(pid=63279) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 2680.986574] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4d2b7c31-bb9b-41fd-9007-f06e7d42860e {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2681.006229] env[63279]: DEBUG oslo_vmware.api [None req-b03718ae-42a7-4cb2-830c-54a3068207a5 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Waiting for the task: (returnval){ [ 2681.006229] env[63279]: value = "task-2088417" [ 2681.006229] env[63279]: _type = "Task" [ 2681.006229] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2681.015022] env[63279]: DEBUG oslo_vmware.api [None req-b03718ae-42a7-4cb2-830c-54a3068207a5 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Task: {'id': task-2088417, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2681.183559] env[63279]: DEBUG nova.network.neutron [req-663da875-b43b-4966-a65f-61508be90a34 req-e7546e84-02ab-4635-9562-96bbbdb5fee4 service nova] [instance: 2ed7e467-a6aa-4ed7-8ad8-bfc03ff22820] Updated VIF entry in instance network info cache for port f18dd625-8e75-4f9c-8ac2-1e7ea380ffef. 
{{(pid=63279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2681.183954] env[63279]: DEBUG nova.network.neutron [req-663da875-b43b-4966-a65f-61508be90a34 req-e7546e84-02ab-4635-9562-96bbbdb5fee4 service nova] [instance: 2ed7e467-a6aa-4ed7-8ad8-bfc03ff22820] Updating instance_info_cache with network_info: [{"id": "f18dd625-8e75-4f9c-8ac2-1e7ea380ffef", "address": "fa:16:3e:ca:99:1f", "network": {"id": "08f1618e-b8a4-4fea-847f-a6d01449f46c", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1512363099-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5fa421934ecd4054a70528644a40349e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e41070eb-3ac1-4ca9-a3d0-fd65893a97de", "external-id": "nsx-vlan-transportzone-596", "segmentation_id": 596, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf18dd625-8e", "ovs_interfaceid": "f18dd625-8e75-4f9c-8ac2-1e7ea380ffef", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2681.274174] env[63279]: DEBUG oslo_vmware.api [None req-207e83c2-f3de-432a-830a-f57fc1520624 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52adbcca-65bd-294a-e4d3-7f2b8119e0c0, 'name': SearchDatastore_Task, 'duration_secs': 0.009979} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2681.274465] env[63279]: DEBUG oslo_concurrency.lockutils [None req-207e83c2-f3de-432a-830a-f57fc1520624 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2681.274699] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-207e83c2-f3de-432a-830a-f57fc1520624 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] [instance: 2ed7e467-a6aa-4ed7-8ad8-bfc03ff22820] Processing image 30887889-e45b-4f67-8b3c-16216e594a90 {{(pid=63279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2681.274946] env[63279]: DEBUG oslo_concurrency.lockutils [None req-207e83c2-f3de-432a-830a-f57fc1520624 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2681.275133] env[63279]: DEBUG oslo_concurrency.lockutils [None req-207e83c2-f3de-432a-830a-f57fc1520624 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2681.275323] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-207e83c2-f3de-432a-830a-f57fc1520624 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2681.275572] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-256785b3-7987-4db7-b591-54bfbba9ce70 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2681.283526] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-207e83c2-f3de-432a-830a-f57fc1520624 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2681.283717] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-207e83c2-f3de-432a-830a-f57fc1520624 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2681.284406] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3c24bfdd-d8c9-4e7d-b1e7-f50a7448b0b1 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2681.289095] env[63279]: DEBUG oslo_vmware.api [None req-207e83c2-f3de-432a-830a-f57fc1520624 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Waiting for the task: (returnval){ [ 2681.289095] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]5286f4b5-74bb-f8b8-aec5-2f9a3623c889" [ 2681.289095] env[63279]: _type = "Task" [ 2681.289095] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2681.295850] env[63279]: DEBUG oslo_vmware.api [None req-207e83c2-f3de-432a-830a-f57fc1520624 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]5286f4b5-74bb-f8b8-aec5-2f9a3623c889, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2681.515621] env[63279]: DEBUG oslo_vmware.api [None req-b03718ae-42a7-4cb2-830c-54a3068207a5 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Task: {'id': task-2088417, 'name': ReconfigVM_Task, 'duration_secs': 0.1573} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2681.515987] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-b03718ae-42a7-4cb2-830c-54a3068207a5 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: 033a1d6d-db52-4902-8994-4d1537ab8658] Reconfigured VM instance instance-0000007b to detach disk 2000 {{(pid=63279) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 2681.520504] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8ede217e-4acb-48cd-b622-5466f8e9a4d9 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2681.535821] env[63279]: DEBUG oslo_vmware.api [None req-b03718ae-42a7-4cb2-830c-54a3068207a5 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Waiting for the task: (returnval){ [ 2681.535821] env[63279]: value = "task-2088418" [ 2681.535821] env[63279]: _type = "Task" [ 2681.535821] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2681.544217] env[63279]: DEBUG oslo_vmware.api [None req-b03718ae-42a7-4cb2-830c-54a3068207a5 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Task: {'id': task-2088418, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2681.687200] env[63279]: DEBUG oslo_concurrency.lockutils [req-663da875-b43b-4966-a65f-61508be90a34 req-e7546e84-02ab-4635-9562-96bbbdb5fee4 service nova] Releasing lock "refresh_cache-2ed7e467-a6aa-4ed7-8ad8-bfc03ff22820" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2681.799754] env[63279]: DEBUG oslo_vmware.api [None req-207e83c2-f3de-432a-830a-f57fc1520624 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]5286f4b5-74bb-f8b8-aec5-2f9a3623c889, 'name': SearchDatastore_Task, 'duration_secs': 0.008147} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2681.800561] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2c914e8c-0abc-410c-a888-9de521930f70 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2681.805617] env[63279]: DEBUG oslo_vmware.api [None req-207e83c2-f3de-432a-830a-f57fc1520624 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Waiting for the task: (returnval){ [ 2681.805617] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]524ebfb5-5634-14c2-0664-d5c8e7bbfc0f" [ 2681.805617] env[63279]: _type = "Task" [ 2681.805617] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2681.813909] env[63279]: DEBUG oslo_vmware.api [None req-207e83c2-f3de-432a-830a-f57fc1520624 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]524ebfb5-5634-14c2-0664-d5c8e7bbfc0f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2682.045706] env[63279]: DEBUG oslo_vmware.api [None req-b03718ae-42a7-4cb2-830c-54a3068207a5 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Task: {'id': task-2088418, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2682.315242] env[63279]: DEBUG oslo_vmware.api [None req-207e83c2-f3de-432a-830a-f57fc1520624 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]524ebfb5-5634-14c2-0664-d5c8e7bbfc0f, 'name': SearchDatastore_Task, 'duration_secs': 0.009638} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2682.315504] env[63279]: DEBUG oslo_concurrency.lockutils [None req-207e83c2-f3de-432a-830a-f57fc1520624 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2682.315764] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-207e83c2-f3de-432a-830a-f57fc1520624 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] 2ed7e467-a6aa-4ed7-8ad8-bfc03ff22820/2ed7e467-a6aa-4ed7-8ad8-bfc03ff22820.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2682.316032] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9c0d9d42-b162-4352-8c51-9250eab16bed {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2682.321926] env[63279]: DEBUG oslo_vmware.api [None req-207e83c2-f3de-432a-830a-f57fc1520624 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Waiting for the task: (returnval){ [ 2682.321926] env[63279]: value = "task-2088419" [ 2682.321926] env[63279]: _type = "Task" [ 2682.321926] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2682.329051] env[63279]: DEBUG oslo_vmware.api [None req-207e83c2-f3de-432a-830a-f57fc1520624 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Task: {'id': task-2088419, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2682.546390] env[63279]: DEBUG oslo_vmware.api [None req-b03718ae-42a7-4cb2-830c-54a3068207a5 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Task: {'id': task-2088418, 'name': ReconfigVM_Task, 'duration_secs': 0.690886} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2682.546724] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-b03718ae-42a7-4cb2-830c-54a3068207a5 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: 033a1d6d-db52-4902-8994-4d1537ab8658] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-427823', 'volume_id': 'b193588b-4d25-4ebf-8400-07b3cc22c215', 'name': 'volume-b193588b-4d25-4ebf-8400-07b3cc22c215', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'attaching', 'instance': '033a1d6d-db52-4902-8994-4d1537ab8658', 'attached_at': '2025-01-13T18:05:52.000000', 'detached_at': '', 'volume_id': 'b193588b-4d25-4ebf-8400-07b3cc22c215', 'serial': 'b193588b-4d25-4ebf-8400-07b3cc22c215'} {{(pid=63279) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 2682.546997] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-b03718ae-42a7-4cb2-830c-54a3068207a5 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: 033a1d6d-db52-4902-8994-4d1537ab8658] Destroying instance {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2682.547767] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9a422eb-3c99-47c5-8fa9-df05b8a54546 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2682.553984] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-b03718ae-42a7-4cb2-830c-54a3068207a5 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: 033a1d6d-db52-4902-8994-4d1537ab8658] Unregistering the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2682.554203] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-01d2a5cc-f047-4445-af92-e607dd5dcc7c {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2682.641199] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-b03718ae-42a7-4cb2-830c-54a3068207a5 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: 033a1d6d-db52-4902-8994-4d1537ab8658] Unregistered the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2682.641431] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-b03718ae-42a7-4cb2-830c-54a3068207a5 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: 033a1d6d-db52-4902-8994-4d1537ab8658] Deleting contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2682.641617] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-b03718ae-42a7-4cb2-830c-54a3068207a5 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Deleting the datastore file [datastore1] 033a1d6d-db52-4902-8994-4d1537ab8658 {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2682.641884] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-69cf4a08-ad87-4081-94a6-0b271239c66e {{(pid=63279) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2682.648108] env[63279]: DEBUG oslo_vmware.api [None req-b03718ae-42a7-4cb2-830c-54a3068207a5 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Waiting for the task: (returnval){ [ 2682.648108] env[63279]: value = "task-2088421" [ 2682.648108] env[63279]: _type = "Task" [ 2682.648108] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2682.655456] env[63279]: DEBUG oslo_vmware.api [None req-b03718ae-42a7-4cb2-830c-54a3068207a5 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Task: {'id': task-2088421, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2682.831517] env[63279]: DEBUG oslo_vmware.api [None req-207e83c2-f3de-432a-830a-f57fc1520624 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Task: {'id': task-2088419, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2683.157864] env[63279]: DEBUG oslo_vmware.api [None req-b03718ae-42a7-4cb2-830c-54a3068207a5 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Task: {'id': task-2088421, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.074315} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2683.158145] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-b03718ae-42a7-4cb2-830c-54a3068207a5 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Deleted the datastore file {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2683.158342] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-b03718ae-42a7-4cb2-830c-54a3068207a5 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: 033a1d6d-db52-4902-8994-4d1537ab8658] Deleted contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2683.158524] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-b03718ae-42a7-4cb2-830c-54a3068207a5 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: 033a1d6d-db52-4902-8994-4d1537ab8658] Instance destroyed {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2683.158703] env[63279]: INFO nova.compute.manager [None req-b03718ae-42a7-4cb2-830c-54a3068207a5 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: 033a1d6d-db52-4902-8994-4d1537ab8658] Took 2.75 seconds to destroy the instance on the hypervisor. [ 2683.158942] env[63279]: DEBUG oslo.service.loopingcall [None req-b03718ae-42a7-4cb2-830c-54a3068207a5 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2683.159154] env[63279]: DEBUG nova.compute.manager [-] [instance: 033a1d6d-db52-4902-8994-4d1537ab8658] Deallocating network for instance {{(pid=63279) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2683.159250] env[63279]: DEBUG nova.network.neutron [-] [instance: 033a1d6d-db52-4902-8994-4d1537ab8658] deallocate_for_instance() {{(pid=63279) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2683.333450] env[63279]: DEBUG oslo_vmware.api [None req-207e83c2-f3de-432a-830a-f57fc1520624 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Task: {'id': task-2088419, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2683.714523] env[63279]: DEBUG nova.compute.manager [req-6337484b-32b1-4c4e-8344-a7df5bb40bbc req-9c8945df-3af0-44fb-b138-99d61637a060 service nova] [instance: 033a1d6d-db52-4902-8994-4d1537ab8658] Received event network-vif-deleted-75bab2c4-7019-4de5-a02b-1b81e9419409 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2683.714682] env[63279]: INFO nova.compute.manager [req-6337484b-32b1-4c4e-8344-a7df5bb40bbc req-9c8945df-3af0-44fb-b138-99d61637a060 service nova] [instance: 033a1d6d-db52-4902-8994-4d1537ab8658] Neutron deleted interface 75bab2c4-7019-4de5-a02b-1b81e9419409; detaching it from the instance and deleting it from the info cache [ 2683.714818] env[63279]: DEBUG nova.network.neutron [req-6337484b-32b1-4c4e-8344-a7df5bb40bbc req-9c8945df-3af0-44fb-b138-99d61637a060 service nova] [instance: 033a1d6d-db52-4902-8994-4d1537ab8658] Updating instance_info_cache with network_info: [] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2683.833093] env[63279]: DEBUG oslo_vmware.api [None req-207e83c2-f3de-432a-830a-f57fc1520624 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Task: {'id': task-2088419, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.261985} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2683.833400] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-207e83c2-f3de-432a-830a-f57fc1520624 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] 2ed7e467-a6aa-4ed7-8ad8-bfc03ff22820/2ed7e467-a6aa-4ed7-8ad8-bfc03ff22820.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2683.833613] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-207e83c2-f3de-432a-830a-f57fc1520624 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] [instance: 2ed7e467-a6aa-4ed7-8ad8-bfc03ff22820] Extending root virtual disk to 1048576 {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2683.833856] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f2e55144-4ebd-4a7b-ab5b-2e523c79695b {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2683.840884] env[63279]: DEBUG oslo_vmware.api [None req-207e83c2-f3de-432a-830a-f57fc1520624 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Waiting for the task: (returnval){ [ 2683.840884] env[63279]: value = "task-2088422" [ 2683.840884] env[63279]: _type = "Task" [ 2683.840884] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2683.848754] env[63279]: DEBUG oslo_vmware.api [None req-207e83c2-f3de-432a-830a-f57fc1520624 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Task: {'id': task-2088422, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2684.195085] env[63279]: DEBUG nova.network.neutron [-] [instance: 033a1d6d-db52-4902-8994-4d1537ab8658] Updating instance_info_cache with network_info: [] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2684.218629] env[63279]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-7bfff541-5975-44e9-86ee-697c9e305c81 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2684.228159] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aea5a313-3420-4a52-bc4d-022cc29c8892 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2684.253574] env[63279]: DEBUG nova.compute.manager [req-6337484b-32b1-4c4e-8344-a7df5bb40bbc req-9c8945df-3af0-44fb-b138-99d61637a060 service nova] [instance: 033a1d6d-db52-4902-8994-4d1537ab8658] Detach interface failed, port_id=75bab2c4-7019-4de5-a02b-1b81e9419409, reason: Instance 033a1d6d-db52-4902-8994-4d1537ab8658 could not be found. 
{{(pid=63279) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11484}} [ 2684.350466] env[63279]: DEBUG oslo_vmware.api [None req-207e83c2-f3de-432a-830a-f57fc1520624 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Task: {'id': task-2088422, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.072333} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2684.350807] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-207e83c2-f3de-432a-830a-f57fc1520624 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] [instance: 2ed7e467-a6aa-4ed7-8ad8-bfc03ff22820] Extended root virtual disk {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2684.351599] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc2d74bd-76be-4233-8c8b-34abe78a12ca {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2684.372792] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-207e83c2-f3de-432a-830a-f57fc1520624 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] [instance: 2ed7e467-a6aa-4ed7-8ad8-bfc03ff22820] Reconfiguring VM instance instance-0000007c to attach disk [datastore1] 2ed7e467-a6aa-4ed7-8ad8-bfc03ff22820/2ed7e467-a6aa-4ed7-8ad8-bfc03ff22820.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2684.373037] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-605ac8c2-3ae6-4744-826c-e9873a82b0e4 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2684.392090] env[63279]: DEBUG oslo_vmware.api [None req-207e83c2-f3de-432a-830a-f57fc1520624 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Waiting for the task: (returnval){ [ 2684.392090] env[63279]: value = "task-2088423" [ 2684.392090] env[63279]: _type = "Task" [ 2684.392090] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2684.400115] env[63279]: DEBUG oslo_vmware.api [None req-207e83c2-f3de-432a-830a-f57fc1520624 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Task: {'id': task-2088423, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2684.698205] env[63279]: INFO nova.compute.manager [-] [instance: 033a1d6d-db52-4902-8994-4d1537ab8658] Took 1.54 seconds to deallocate network for instance. [ 2684.902280] env[63279]: DEBUG oslo_vmware.api [None req-207e83c2-f3de-432a-830a-f57fc1520624 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Task: {'id': task-2088423, 'name': ReconfigVM_Task, 'duration_secs': 0.336014} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2684.902558] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-207e83c2-f3de-432a-830a-f57fc1520624 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] [instance: 2ed7e467-a6aa-4ed7-8ad8-bfc03ff22820] Reconfigured VM instance instance-0000007c to attach disk [datastore1] 2ed7e467-a6aa-4ed7-8ad8-bfc03ff22820/2ed7e467-a6aa-4ed7-8ad8-bfc03ff22820.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2684.903174] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-9adb9f2d-db9b-4378-aa8e-7fc6b59594a1 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2684.909398] env[63279]: DEBUG oslo_vmware.api [None req-207e83c2-f3de-432a-830a-f57fc1520624 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Waiting for the task: (returnval){ [ 2684.909398] env[63279]: value = "task-2088424" [ 2684.909398] env[63279]: _type = "Task" [ 2684.909398] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2684.920636] env[63279]: DEBUG oslo_vmware.api [None req-207e83c2-f3de-432a-830a-f57fc1520624 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Task: {'id': task-2088424, 'name': Rename_Task} progress is 5%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2685.241090] env[63279]: INFO nova.compute.manager [None req-b03718ae-42a7-4cb2-830c-54a3068207a5 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: 033a1d6d-db52-4902-8994-4d1537ab8658] Took 0.54 seconds to detach 1 volumes for instance. [ 2685.243282] env[63279]: DEBUG nova.compute.manager [None req-b03718ae-42a7-4cb2-830c-54a3068207a5 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: 033a1d6d-db52-4902-8994-4d1537ab8658] Deleting volume: b193588b-4d25-4ebf-8400-07b3cc22c215 {{(pid=63279) _cleanup_volumes /opt/stack/nova/nova/compute/manager.py:3282}} [ 2685.419541] env[63279]: DEBUG oslo_vmware.api [None req-207e83c2-f3de-432a-830a-f57fc1520624 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Task: {'id': task-2088424, 'name': Rename_Task, 'duration_secs': 0.135559} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2685.419541] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-207e83c2-f3de-432a-830a-f57fc1520624 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] [instance: 2ed7e467-a6aa-4ed7-8ad8-bfc03ff22820] Powering on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2685.419740] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-cb3c1846-fe6a-486c-87f6-2928a46a521b {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2685.425668] env[63279]: DEBUG oslo_vmware.api [None req-207e83c2-f3de-432a-830a-f57fc1520624 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Waiting for the task: (returnval){ [ 2685.425668] env[63279]: value = "task-2088426" [ 2685.425668] env[63279]: _type = "Task" [ 2685.425668] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2685.434472] env[63279]: DEBUG oslo_vmware.api [None req-207e83c2-f3de-432a-830a-f57fc1520624 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Task: {'id': task-2088426, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2685.440500] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2685.440500] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=63279) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11066}} [ 2685.784122] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b03718ae-42a7-4cb2-830c-54a3068207a5 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2685.784122] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b03718ae-42a7-4cb2-830c-54a3068207a5 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2685.784122] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b03718ae-42a7-4cb2-830c-54a3068207a5 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2685.809555] env[63279]: INFO nova.scheduler.client.report [None req-b03718ae-42a7-4cb2-830c-54a3068207a5 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Deleted allocations for instance 033a1d6d-db52-4902-8994-4d1537ab8658 [ 2685.935979] env[63279]: DEBUG oslo_vmware.api [None req-207e83c2-f3de-432a-830a-f57fc1520624 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Task: {'id': task-2088426, 'name': PowerOnVM_Task, 'duration_secs': 0.46145} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2685.936332] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-207e83c2-f3de-432a-830a-f57fc1520624 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] [instance: 2ed7e467-a6aa-4ed7-8ad8-bfc03ff22820] Powered on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2685.936554] env[63279]: INFO nova.compute.manager [None req-207e83c2-f3de-432a-830a-f57fc1520624 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] [instance: 2ed7e467-a6aa-4ed7-8ad8-bfc03ff22820] Took 7.63 seconds to spawn the instance on the hypervisor. 
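Editor's note on the task pattern in the entries above: each vSphere operation in this trace (ExtendVirtualDisk_Task, ReconfigVM_Task, Rename_Task, PowerOnVM_Task) is submitted through the oslo.vmware session and then waited on, which is what produces the repeated "progress is N%" lines followed by "completed successfully" with a duration_secs value. The snippet below is a minimal sketch of that submit-and-wait shape only; it assumes an already established VMwareAPISession-like object named session exposing invoke_api() and wait_for_task(), and rename_vm / vm_ref / new_name are illustrative names, not Nova's actual helpers.

def rename_vm(session, vm_ref, new_name):
    """Illustrative submit-and-wait, mirroring the Rename_Task entries above."""
    # Submit the SOAP call -- the "Invoking VirtualMachine.Rename_Task with opID=..." lines.
    task = session.invoke_api(session.vim, 'Rename_Task', vm_ref, newName=new_name)
    # Block while the session polls the task ("progress is 5%" ... "completed successfully");
    # an error reported by vCenter is raised to the caller instead of returning.
    session.wait_for_task(task)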
[ 2685.936737] env[63279]: DEBUG nova.compute.manager [None req-207e83c2-f3de-432a-830a-f57fc1520624 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] [instance: 2ed7e467-a6aa-4ed7-8ad8-bfc03ff22820] Checking state {{(pid=63279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2685.937763] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0207bf4f-49a6-4814-af68-d0b995509a84 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2686.317556] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b03718ae-42a7-4cb2-830c-54a3068207a5 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Lock "033a1d6d-db52-4902-8994-4d1537ab8658" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.416s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2686.455554] env[63279]: INFO nova.compute.manager [None req-207e83c2-f3de-432a-830a-f57fc1520624 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] [instance: 2ed7e467-a6aa-4ed7-8ad8-bfc03ff22820] Took 12.32 seconds to build instance. [ 2686.566300] env[63279]: DEBUG oslo_concurrency.lockutils [None req-9b45019b-ec69-4a7b-ae36-18c9c0606905 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Acquiring lock "cc0d3870-41fa-4cd4-a16d-e52e705f29a3" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2686.566564] env[63279]: DEBUG oslo_concurrency.lockutils [None req-9b45019b-ec69-4a7b-ae36-18c9c0606905 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Lock "cc0d3870-41fa-4cd4-a16d-e52e705f29a3" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2686.566777] env[63279]: DEBUG oslo_concurrency.lockutils [None req-9b45019b-ec69-4a7b-ae36-18c9c0606905 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Acquiring lock "cc0d3870-41fa-4cd4-a16d-e52e705f29a3-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2686.566965] env[63279]: DEBUG oslo_concurrency.lockutils [None req-9b45019b-ec69-4a7b-ae36-18c9c0606905 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Lock "cc0d3870-41fa-4cd4-a16d-e52e705f29a3-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2686.567159] env[63279]: DEBUG oslo_concurrency.lockutils [None req-9b45019b-ec69-4a7b-ae36-18c9c0606905 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Lock "cc0d3870-41fa-4cd4-a16d-e52e705f29a3-events" "released" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2686.569290] env[63279]: INFO nova.compute.manager [None req-9b45019b-ec69-4a7b-ae36-18c9c0606905 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: cc0d3870-41fa-4cd4-a16d-e52e705f29a3] Terminating instance [ 2686.957871] env[63279]: DEBUG oslo_concurrency.lockutils [None req-207e83c2-f3de-432a-830a-f57fc1520624 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Lock "2ed7e467-a6aa-4ed7-8ad8-bfc03ff22820" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 13.829s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2687.072908] env[63279]: DEBUG nova.compute.manager [None req-9b45019b-ec69-4a7b-ae36-18c9c0606905 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: cc0d3870-41fa-4cd4-a16d-e52e705f29a3] Start destroying the instance on the hypervisor. {{(pid=63279) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2687.073203] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-9b45019b-ec69-4a7b-ae36-18c9c0606905 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: cc0d3870-41fa-4cd4-a16d-e52e705f29a3] Destroying instance {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2687.074191] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-932b1c94-3a7e-4180-a532-a0d89fe261e8 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2687.082419] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-9b45019b-ec69-4a7b-ae36-18c9c0606905 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: cc0d3870-41fa-4cd4-a16d-e52e705f29a3] Powering off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2687.082655] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f1e82b93-47e7-4105-ac0e-cbac18eaa460 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2687.088517] env[63279]: DEBUG oslo_vmware.api [None req-9b45019b-ec69-4a7b-ae36-18c9c0606905 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Waiting for the task: (returnval){ [ 2687.088517] env[63279]: value = "task-2088427" [ 2687.088517] env[63279]: _type = "Task" [ 2687.088517] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2687.096334] env[63279]: DEBUG oslo_vmware.api [None req-9b45019b-ec69-4a7b-ae36-18c9c0606905 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Task: {'id': task-2088427, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2687.441653] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2687.555215] env[63279]: DEBUG nova.compute.manager [req-93ebfa34-a892-4d9d-b4bf-ea93ab7f72a4 req-7434d93d-6311-429e-b255-7079fc67a33d service nova] [instance: 2ed7e467-a6aa-4ed7-8ad8-bfc03ff22820] Received event network-changed-f18dd625-8e75-4f9c-8ac2-1e7ea380ffef {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2687.555441] env[63279]: DEBUG nova.compute.manager [req-93ebfa34-a892-4d9d-b4bf-ea93ab7f72a4 req-7434d93d-6311-429e-b255-7079fc67a33d service nova] [instance: 2ed7e467-a6aa-4ed7-8ad8-bfc03ff22820] Refreshing instance network info cache due to event network-changed-f18dd625-8e75-4f9c-8ac2-1e7ea380ffef. {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11655}} [ 2687.555666] env[63279]: DEBUG oslo_concurrency.lockutils [req-93ebfa34-a892-4d9d-b4bf-ea93ab7f72a4 req-7434d93d-6311-429e-b255-7079fc67a33d service nova] Acquiring lock "refresh_cache-2ed7e467-a6aa-4ed7-8ad8-bfc03ff22820" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2687.555819] env[63279]: DEBUG oslo_concurrency.lockutils [req-93ebfa34-a892-4d9d-b4bf-ea93ab7f72a4 req-7434d93d-6311-429e-b255-7079fc67a33d service nova] Acquired lock "refresh_cache-2ed7e467-a6aa-4ed7-8ad8-bfc03ff22820" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2687.555986] env[63279]: DEBUG nova.network.neutron [req-93ebfa34-a892-4d9d-b4bf-ea93ab7f72a4 req-7434d93d-6311-429e-b255-7079fc67a33d service nova] [instance: 2ed7e467-a6aa-4ed7-8ad8-bfc03ff22820] Refreshing network info cache for port f18dd625-8e75-4f9c-8ac2-1e7ea380ffef {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2687.600762] env[63279]: DEBUG oslo_vmware.api [None req-9b45019b-ec69-4a7b-ae36-18c9c0606905 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Task: {'id': task-2088427, 'name': PowerOffVM_Task, 'duration_secs': 0.221896} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2687.601088] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-9b45019b-ec69-4a7b-ae36-18c9c0606905 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: cc0d3870-41fa-4cd4-a16d-e52e705f29a3] Powered off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2687.601270] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-9b45019b-ec69-4a7b-ae36-18c9c0606905 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: cc0d3870-41fa-4cd4-a16d-e52e705f29a3] Unregistering the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2687.601556] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6d0414ba-8abd-46d3-8df5-1f58d85b825e {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2687.681295] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-9b45019b-ec69-4a7b-ae36-18c9c0606905 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: cc0d3870-41fa-4cd4-a16d-e52e705f29a3] Unregistered the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2687.681475] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-9b45019b-ec69-4a7b-ae36-18c9c0606905 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: cc0d3870-41fa-4cd4-a16d-e52e705f29a3] Deleting contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2687.681661] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-9b45019b-ec69-4a7b-ae36-18c9c0606905 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Deleting the datastore file [datastore1] cc0d3870-41fa-4cd4-a16d-e52e705f29a3 {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2687.681920] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d1c799ed-dbbb-45a1-907d-48592818b7c1 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2687.688612] env[63279]: DEBUG oslo_vmware.api [None req-9b45019b-ec69-4a7b-ae36-18c9c0606905 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Waiting for the task: (returnval){ [ 2687.688612] env[63279]: value = "task-2088429" [ 2687.688612] env[63279]: _type = "Task" [ 2687.688612] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2687.696637] env[63279]: DEBUG oslo_vmware.api [None req-9b45019b-ec69-4a7b-ae36-18c9c0606905 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Task: {'id': task-2088429, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2688.198934] env[63279]: DEBUG oslo_vmware.api [None req-9b45019b-ec69-4a7b-ae36-18c9c0606905 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Task: {'id': task-2088429, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.129935} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2688.199323] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-9b45019b-ec69-4a7b-ae36-18c9c0606905 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Deleted the datastore file {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2688.199433] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-9b45019b-ec69-4a7b-ae36-18c9c0606905 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: cc0d3870-41fa-4cd4-a16d-e52e705f29a3] Deleted contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2688.199616] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-9b45019b-ec69-4a7b-ae36-18c9c0606905 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: cc0d3870-41fa-4cd4-a16d-e52e705f29a3] Instance destroyed {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2688.199797] env[63279]: INFO nova.compute.manager [None req-9b45019b-ec69-4a7b-ae36-18c9c0606905 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: cc0d3870-41fa-4cd4-a16d-e52e705f29a3] Took 1.13 seconds to destroy the instance on the hypervisor. [ 2688.200066] env[63279]: DEBUG oslo.service.loopingcall [None req-9b45019b-ec69-4a7b-ae36-18c9c0606905 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2688.200264] env[63279]: DEBUG nova.compute.manager [-] [instance: cc0d3870-41fa-4cd4-a16d-e52e705f29a3] Deallocating network for instance {{(pid=63279) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2688.200357] env[63279]: DEBUG nova.network.neutron [-] [instance: cc0d3870-41fa-4cd4-a16d-e52e705f29a3] deallocate_for_instance() {{(pid=63279) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2688.588261] env[63279]: DEBUG nova.network.neutron [req-93ebfa34-a892-4d9d-b4bf-ea93ab7f72a4 req-7434d93d-6311-429e-b255-7079fc67a33d service nova] [instance: 2ed7e467-a6aa-4ed7-8ad8-bfc03ff22820] Updated VIF entry in instance network info cache for port f18dd625-8e75-4f9c-8ac2-1e7ea380ffef. 
{{(pid=63279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2688.588651] env[63279]: DEBUG nova.network.neutron [req-93ebfa34-a892-4d9d-b4bf-ea93ab7f72a4 req-7434d93d-6311-429e-b255-7079fc67a33d service nova] [instance: 2ed7e467-a6aa-4ed7-8ad8-bfc03ff22820] Updating instance_info_cache with network_info: [{"id": "f18dd625-8e75-4f9c-8ac2-1e7ea380ffef", "address": "fa:16:3e:ca:99:1f", "network": {"id": "08f1618e-b8a4-4fea-847f-a6d01449f46c", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1512363099-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.229", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5fa421934ecd4054a70528644a40349e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e41070eb-3ac1-4ca9-a3d0-fd65893a97de", "external-id": "nsx-vlan-transportzone-596", "segmentation_id": 596, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf18dd625-8e", "ovs_interfaceid": "f18dd625-8e75-4f9c-8ac2-1e7ea380ffef", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2689.091254] env[63279]: DEBUG oslo_concurrency.lockutils [req-93ebfa34-a892-4d9d-b4bf-ea93ab7f72a4 req-7434d93d-6311-429e-b255-7079fc67a33d service nova] Releasing lock "refresh_cache-2ed7e467-a6aa-4ed7-8ad8-bfc03ff22820" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2689.160981] env[63279]: DEBUG nova.network.neutron [-] [instance: cc0d3870-41fa-4cd4-a16d-e52e705f29a3] Updating instance_info_cache with network_info: [] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2689.441114] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2689.583133] env[63279]: DEBUG nova.compute.manager [req-9efcaabc-0bfc-4693-8233-92bc32244c5a req-1a60ff6f-70a2-41a1-9f34-cae493b84df9 service nova] [instance: cc0d3870-41fa-4cd4-a16d-e52e705f29a3] Received event network-vif-deleted-71e3fcb5-bcb8-4ad9-b60f-3cfb2943cff0 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2689.664076] env[63279]: INFO nova.compute.manager [-] [instance: cc0d3870-41fa-4cd4-a16d-e52e705f29a3] Took 1.46 seconds to deallocate network for instance. 
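Editor's note on the lock lines: the "Acquiring lock ... / Lock ... acquired ... waited 0.000s / ... "released" ... held N.NNNs" entries are emitted by oslo.concurrency's lockutils wrappers around Nova's critical sections, e.g. the resource tracker's "compute_resources" lock (the waited/held lines from the decorator wrapper) and the per-instance "refresh_cache-<uuid>" locks taken while the network info cache is rebuilt (the Acquiring/Acquired/Releasing lines from the context manager). The sketch below shows the two usages and assumes only the public lockutils API; update_usage, refresh_network_cache and instance_uuid are illustrative names, not Nova's code.

from oslo_concurrency import lockutils

# Decorator form: serializes callers on a named lock and logs how long each
# one waited for and held it (the "waited 0.000s" / "held 1.630s" lines).
@lockutils.synchronized('compute_resources')
def update_usage():
    pass  # mutate shared resource-tracker state while holding the lock

# Context-manager form: the per-instance "refresh_cache-<uuid>" acquire/release
# pairs in the log follow this shape.
def refresh_network_cache(instance_uuid):
    with lockutils.lock('refresh_cache-%s' % instance_uuid):
        pass  # rebuild the instance_info_cache under the lock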
[ 2690.171074] env[63279]: DEBUG oslo_concurrency.lockutils [None req-9b45019b-ec69-4a7b-ae36-18c9c0606905 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2690.171585] env[63279]: DEBUG oslo_concurrency.lockutils [None req-9b45019b-ec69-4a7b-ae36-18c9c0606905 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2690.171971] env[63279]: DEBUG nova.objects.instance [None req-9b45019b-ec69-4a7b-ae36-18c9c0606905 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Lazy-loading 'resources' on Instance uuid cc0d3870-41fa-4cd4-a16d-e52e705f29a3 {{(pid=63279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2690.441320] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2690.441572] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Starting heal instance info cache {{(pid=63279) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10447}} [ 2690.735274] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57f58673-5ded-4ff0-bdfb-c823f51663d5 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2690.743158] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91c3114d-e6dd-4a4f-9ae1-6e269d1d126e {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2690.773087] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-016fc1dc-77cd-432c-882a-d84bb7b0b289 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2690.779932] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14ce776d-50e0-47f8-bfef-bc2c2c93ca65 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2690.792595] env[63279]: DEBUG nova.compute.provider_tree [None req-9b45019b-ec69-4a7b-ae36-18c9c0606905 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Inventory has not changed in ProviderTree for provider: 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2691.296354] env[63279]: DEBUG nova.scheduler.client.report [None req-9b45019b-ec69-4a7b-ae36-18c9c0606905 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Inventory has not changed for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 
'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2691.462663] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Acquiring lock "refresh_cache-cc0d3870-41fa-4cd4-a16d-e52e705f29a3" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2691.462924] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Acquired lock "refresh_cache-cc0d3870-41fa-4cd4-a16d-e52e705f29a3" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2691.462989] env[63279]: DEBUG nova.network.neutron [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] [instance: cc0d3870-41fa-4cd4-a16d-e52e705f29a3] Forcefully refreshing network info cache for instance {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}} [ 2691.801291] env[63279]: DEBUG oslo_concurrency.lockutils [None req-9b45019b-ec69-4a7b-ae36-18c9c0606905 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.630s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2691.818333] env[63279]: INFO nova.scheduler.client.report [None req-9b45019b-ec69-4a7b-ae36-18c9c0606905 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Deleted allocations for instance cc0d3870-41fa-4cd4-a16d-e52e705f29a3 [ 2691.979969] env[63279]: DEBUG nova.network.neutron [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] [instance: cc0d3870-41fa-4cd4-a16d-e52e705f29a3] Instance cache missing network info. 
{{(pid=63279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2692.325637] env[63279]: DEBUG oslo_concurrency.lockutils [None req-9b45019b-ec69-4a7b-ae36-18c9c0606905 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Lock "cc0d3870-41fa-4cd4-a16d-e52e705f29a3" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.759s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2692.570503] env[63279]: DEBUG nova.network.neutron [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] [instance: cc0d3870-41fa-4cd4-a16d-e52e705f29a3] Updating instance_info_cache with network_info: [] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2693.073110] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Releasing lock "refresh_cache-cc0d3870-41fa-4cd4-a16d-e52e705f29a3" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2693.073339] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] [instance: cc0d3870-41fa-4cd4-a16d-e52e705f29a3] Updated the network info_cache for instance {{(pid=63279) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10518}} [ 2693.073564] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2693.441291] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2693.441515] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager.update_available_resource {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2693.944492] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2693.944862] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2693.944899] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2693.945089] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Auditing locally available compute resources for 
cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=63279) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2693.946052] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da9ca91d-225a-402e-9cfa-a2fab9ca2560 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2693.954732] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c66397b1-fc40-4540-ae80-eaf69a9302c1 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2693.969491] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96ca12aa-2d4e-483e-9477-6d775e26dd51 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2693.975714] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38e87b18-407e-41cf-8e81-a3c2a76e1399 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2694.003596] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180386MB free_disk=171GB free_vcpus=48 pci_devices=None {{(pid=63279) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2694.003769] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2694.003962] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2695.028843] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Instance befc985f-68e2-4a04-8de0-9ca9bb3fa504 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2695.029107] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Instance 2ed7e467-a6aa-4ed7-8ad8-bfc03ff22820 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2695.029257] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Total usable vcpus: 48, total allocated vcpus: 2 {{(pid=63279) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2695.029417] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=896MB phys_disk=200GB used_disk=2GB total_vcpus=48 used_vcpus=2 pci_stats=[] {{(pid=63279) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2695.067904] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e96a625b-f451-4ac4-adeb-8ca816913852 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2695.075305] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4cdf16f-5976-476d-a61a-5f42f90cfc6d {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2695.104721] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1a00fff-7f08-4d98-bb6c-07eb14de39db {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2695.111366] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd484b23-74d0-4238-9a17-01c2a7b566b0 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2695.123909] env[63279]: DEBUG nova.compute.provider_tree [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Inventory has not changed in ProviderTree for provider: 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2695.627051] env[63279]: DEBUG nova.scheduler.client.report [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Inventory has not changed for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2696.132151] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=63279) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2696.132491] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.128s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2696.132534] env[63279]: DEBUG oslo_service.periodic_task [None 
req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2696.132665] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Cleaning up deleted instances {{(pid=63279) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11747}} [ 2696.266560] env[63279]: DEBUG oslo_concurrency.lockutils [None req-8a368b63-92ca-4cb2-888e-8860bf64b575 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Acquiring lock "befc985f-68e2-4a04-8de0-9ca9bb3fa504" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2696.266910] env[63279]: DEBUG oslo_concurrency.lockutils [None req-8a368b63-92ca-4cb2-888e-8860bf64b575 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Lock "befc985f-68e2-4a04-8de0-9ca9bb3fa504" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2696.267231] env[63279]: DEBUG oslo_concurrency.lockutils [None req-8a368b63-92ca-4cb2-888e-8860bf64b575 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Acquiring lock "befc985f-68e2-4a04-8de0-9ca9bb3fa504-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2696.267506] env[63279]: DEBUG oslo_concurrency.lockutils [None req-8a368b63-92ca-4cb2-888e-8860bf64b575 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Lock "befc985f-68e2-4a04-8de0-9ca9bb3fa504-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2696.267746] env[63279]: DEBUG oslo_concurrency.lockutils [None req-8a368b63-92ca-4cb2-888e-8860bf64b575 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Lock "befc985f-68e2-4a04-8de0-9ca9bb3fa504-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2696.270278] env[63279]: INFO nova.compute.manager [None req-8a368b63-92ca-4cb2-888e-8860bf64b575 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: befc985f-68e2-4a04-8de0-9ca9bb3fa504] Terminating instance [ 2696.639421] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] There are 19 instances to clean {{(pid=63279) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11756}} [ 2696.639655] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] [instance: 033a1d6d-db52-4902-8994-4d1537ab8658] Instance has had 0 of 5 cleanup attempts {{(pid=63279) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11760}} [ 
2696.774124] env[63279]: DEBUG nova.compute.manager [None req-8a368b63-92ca-4cb2-888e-8860bf64b575 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: befc985f-68e2-4a04-8de0-9ca9bb3fa504] Start destroying the instance on the hypervisor. {{(pid=63279) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2696.774352] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-8a368b63-92ca-4cb2-888e-8860bf64b575 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: befc985f-68e2-4a04-8de0-9ca9bb3fa504] Destroying instance {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2696.775220] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23b243b9-4480-44d9-b40f-ef17a9f85410 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2696.782650] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-8a368b63-92ca-4cb2-888e-8860bf64b575 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: befc985f-68e2-4a04-8de0-9ca9bb3fa504] Powering off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2696.782871] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e4f26cff-df24-4606-8d9b-b82236ae428e {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2696.788538] env[63279]: DEBUG oslo_vmware.api [None req-8a368b63-92ca-4cb2-888e-8860bf64b575 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Waiting for the task: (returnval){ [ 2696.788538] env[63279]: value = "task-2088431" [ 2696.788538] env[63279]: _type = "Task" [ 2696.788538] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2696.796366] env[63279]: DEBUG oslo_vmware.api [None req-8a368b63-92ca-4cb2-888e-8860bf64b575 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Task: {'id': task-2088431, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2697.142985] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] [instance: 70f2ea82-1b68-478e-8195-f9754eb051ae] Instance has had 0 of 5 cleanup attempts {{(pid=63279) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11760}} [ 2697.299606] env[63279]: DEBUG oslo_vmware.api [None req-8a368b63-92ca-4cb2-888e-8860bf64b575 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Task: {'id': task-2088431, 'name': PowerOffVM_Task, 'duration_secs': 0.195658} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2697.299833] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-8a368b63-92ca-4cb2-888e-8860bf64b575 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: befc985f-68e2-4a04-8de0-9ca9bb3fa504] Powered off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2697.300010] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-8a368b63-92ca-4cb2-888e-8860bf64b575 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: befc985f-68e2-4a04-8de0-9ca9bb3fa504] Unregistering the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2697.300271] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c186835b-b759-463a-8878-fc649b9489bb {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2697.646176] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] [instance: cc0d3870-41fa-4cd4-a16d-e52e705f29a3] Instance has had 0 of 5 cleanup attempts {{(pid=63279) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11760}} [ 2698.150022] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] [instance: 30ac4320-5ee0-424b-9bbb-e2d53277be80] Instance has had 0 of 5 cleanup attempts {{(pid=63279) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11760}} [ 2698.653158] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] [instance: c5251417-7bf6-4ed9-9c98-2fa851e4ac27] Instance has had 0 of 5 cleanup attempts {{(pid=63279) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11760}} [ 2698.977566] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-8a368b63-92ca-4cb2-888e-8860bf64b575 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: befc985f-68e2-4a04-8de0-9ca9bb3fa504] Unregistered the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2698.977778] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-8a368b63-92ca-4cb2-888e-8860bf64b575 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: befc985f-68e2-4a04-8de0-9ca9bb3fa504] Deleting contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2698.977958] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-8a368b63-92ca-4cb2-888e-8860bf64b575 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Deleting the datastore file [datastore1] befc985f-68e2-4a04-8de0-9ca9bb3fa504 {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2698.978262] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5fee2eae-3c52-4eea-89c3-3a4f876652cb {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2698.984993] env[63279]: DEBUG oslo_vmware.api [None req-8a368b63-92ca-4cb2-888e-8860bf64b575 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Waiting for the task: 
(returnval){ [ 2698.984993] env[63279]: value = "task-2088433" [ 2698.984993] env[63279]: _type = "Task" [ 2698.984993] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2698.992463] env[63279]: DEBUG oslo_vmware.api [None req-8a368b63-92ca-4cb2-888e-8860bf64b575 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Task: {'id': task-2088433, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2699.156406] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] [instance: 011af8fa-0f76-44a8-8b24-b3d65f5e841e] Instance has had 0 of 5 cleanup attempts {{(pid=63279) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11760}} [ 2699.494367] env[63279]: DEBUG oslo_vmware.api [None req-8a368b63-92ca-4cb2-888e-8860bf64b575 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Task: {'id': task-2088433, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.120692} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2699.494621] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-8a368b63-92ca-4cb2-888e-8860bf64b575 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Deleted the datastore file {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2699.494845] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-8a368b63-92ca-4cb2-888e-8860bf64b575 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: befc985f-68e2-4a04-8de0-9ca9bb3fa504] Deleted contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2699.495045] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-8a368b63-92ca-4cb2-888e-8860bf64b575 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: befc985f-68e2-4a04-8de0-9ca9bb3fa504] Instance destroyed {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2699.495300] env[63279]: INFO nova.compute.manager [None req-8a368b63-92ca-4cb2-888e-8860bf64b575 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] [instance: befc985f-68e2-4a04-8de0-9ca9bb3fa504] Took 2.72 seconds to destroy the instance on the hypervisor. [ 2699.495558] env[63279]: DEBUG oslo.service.loopingcall [None req-8a368b63-92ca-4cb2-888e-8860bf64b575 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2699.495748] env[63279]: DEBUG nova.compute.manager [-] [instance: befc985f-68e2-4a04-8de0-9ca9bb3fa504] Deallocating network for instance {{(pid=63279) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2699.495843] env[63279]: DEBUG nova.network.neutron [-] [instance: befc985f-68e2-4a04-8de0-9ca9bb3fa504] deallocate_for_instance() {{(pid=63279) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2699.660129] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] [instance: c24509d7-0ef2-4a5e-843d-c843888118cc] Instance has had 0 of 5 cleanup attempts {{(pid=63279) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11760}} [ 2699.751195] env[63279]: DEBUG nova.compute.manager [req-f0908566-38be-4e57-a23c-d49969e69c1a req-6766a4c4-6f36-4444-9df4-52fe97f46bc6 service nova] [instance: befc985f-68e2-4a04-8de0-9ca9bb3fa504] Received event network-vif-deleted-48a709b0-3a67-4115-a178-0872536d2417 {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2699.751381] env[63279]: INFO nova.compute.manager [req-f0908566-38be-4e57-a23c-d49969e69c1a req-6766a4c4-6f36-4444-9df4-52fe97f46bc6 service nova] [instance: befc985f-68e2-4a04-8de0-9ca9bb3fa504] Neutron deleted interface 48a709b0-3a67-4115-a178-0872536d2417; detaching it from the instance and deleting it from the info cache [ 2699.751551] env[63279]: DEBUG nova.network.neutron [req-f0908566-38be-4e57-a23c-d49969e69c1a req-6766a4c4-6f36-4444-9df4-52fe97f46bc6 service nova] [instance: befc985f-68e2-4a04-8de0-9ca9bb3fa504] Updating instance_info_cache with network_info: [] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2700.163605] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] [instance: 1520999b-e3e9-41b3-82e4-91bb556e96c4] Instance has had 0 of 5 cleanup attempts {{(pid=63279) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11760}} [ 2700.229920] env[63279]: DEBUG nova.network.neutron [-] [instance: befc985f-68e2-4a04-8de0-9ca9bb3fa504] Updating instance_info_cache with network_info: [] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2700.254094] env[63279]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-2bb81628-5f4d-4869-b93b-698cae4a430e {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2700.264294] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a7d674e-58bf-4234-907a-e0635952e7c8 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2700.288650] env[63279]: DEBUG nova.compute.manager [req-f0908566-38be-4e57-a23c-d49969e69c1a req-6766a4c4-6f36-4444-9df4-52fe97f46bc6 service nova] [instance: befc985f-68e2-4a04-8de0-9ca9bb3fa504] Detach interface failed, port_id=48a709b0-3a67-4115-a178-0872536d2417, reason: Instance befc985f-68e2-4a04-8de0-9ca9bb3fa504 could not be found. 
{{(pid=63279) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11484}} [ 2700.667777] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] [instance: 3832508d-5d12-42a2-93d8-61775907b2d2] Instance has had 0 of 5 cleanup attempts {{(pid=63279) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11760}} [ 2700.732788] env[63279]: INFO nova.compute.manager [-] [instance: befc985f-68e2-4a04-8de0-9ca9bb3fa504] Took 1.24 seconds to deallocate network for instance. [ 2701.171558] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] [instance: 6c4d6006-656b-4d30-a595-8985d7f6b1e7] Instance has had 0 of 5 cleanup attempts {{(pid=63279) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11760}} [ 2701.239325] env[63279]: DEBUG oslo_concurrency.lockutils [None req-8a368b63-92ca-4cb2-888e-8860bf64b575 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2701.239604] env[63279]: DEBUG oslo_concurrency.lockutils [None req-8a368b63-92ca-4cb2-888e-8860bf64b575 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2701.239831] env[63279]: DEBUG nova.objects.instance [None req-8a368b63-92ca-4cb2-888e-8860bf64b575 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Lazy-loading 'resources' on Instance uuid befc985f-68e2-4a04-8de0-9ca9bb3fa504 {{(pid=63279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2701.674756] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] [instance: 6e8d9959-7c9d-48ee-81a5-bbdc6234248f] Instance has had 0 of 5 cleanup attempts {{(pid=63279) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11760}} [ 2701.781637] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09f841f2-f8ef-4cd5-b8de-7216d4d947fe {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2701.789406] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-454c8429-e594-497f-a07b-f8af1ab592ba {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2701.818531] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9a538e2-3f39-4dcd-8122-e9ed9e5d43d7 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2701.825546] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79dd4d98-283f-404e-bba7-39c20b4d59fb {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2701.838847] env[63279]: DEBUG nova.compute.provider_tree [None req-8a368b63-92ca-4cb2-888e-8860bf64b575 tempest-ServerActionsTestOtherA-605125166 
tempest-ServerActionsTestOtherA-605125166-project-member] Inventory has not changed in ProviderTree for provider: 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2702.178300] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] [instance: 5961728f-c65a-425d-8f6d-5763c79bbe1a] Instance has had 0 of 5 cleanup attempts {{(pid=63279) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11760}} [ 2702.341523] env[63279]: DEBUG nova.scheduler.client.report [None req-8a368b63-92ca-4cb2-888e-8860bf64b575 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Inventory has not changed for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2702.681677] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] [instance: 246b5346-93b1-4a84-921c-d028f3554d3d] Instance has had 0 of 5 cleanup attempts {{(pid=63279) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11760}} [ 2702.845869] env[63279]: DEBUG oslo_concurrency.lockutils [None req-8a368b63-92ca-4cb2-888e-8860bf64b575 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.606s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2702.863640] env[63279]: INFO nova.scheduler.client.report [None req-8a368b63-92ca-4cb2-888e-8860bf64b575 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Deleted allocations for instance befc985f-68e2-4a04-8de0-9ca9bb3fa504 [ 2703.185499] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] [instance: 36354325-dee0-406e-8eb6-bc3cf347a403] Instance has had 0 of 5 cleanup attempts {{(pid=63279) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11760}} [ 2703.371377] env[63279]: DEBUG oslo_concurrency.lockutils [None req-8a368b63-92ca-4cb2-888e-8860bf64b575 tempest-ServerActionsTestOtherA-605125166 tempest-ServerActionsTestOtherA-605125166-project-member] Lock "befc985f-68e2-4a04-8de0-9ca9bb3fa504" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.104s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2703.688449] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] [instance: 6f839780-be92-4d99-a96d-1fc14c819599] Instance has had 0 of 5 cleanup attempts {{(pid=63279) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11760}} [ 2704.191917] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] [instance: efddec10-b496-446e-a05a-72c9f2d86ed9] Instance has had 0 of 5 cleanup attempts {{(pid=63279) _run_pending_deletes 
/opt/stack/nova/nova/compute/manager.py:11760}} [ 2704.695154] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] [instance: 19e10ee4-99d1-44b9-9354-4c162d541a1f] Instance has had 0 of 5 cleanup attempts {{(pid=63279) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11760}} [ 2705.198302] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] [instance: 69b3269a-2ba3-4f5f-a29c-62518c93da3d] Instance has had 0 of 5 cleanup attempts {{(pid=63279) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11760}} [ 2705.702474] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] [instance: fcdd3076-2b53-4850-9730-2f877e2cabfd] Instance has had 0 of 5 cleanup attempts {{(pid=63279) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11760}} [ 2706.207506] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2706.207980] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Cleaning up deleted instances with incomplete migration {{(pid=63279) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11785}} [ 2708.709485] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2708.709940] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2721.441467] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2726.304662] env[63279]: DEBUG oslo_concurrency.lockutils [None req-5dc36567-ad3b-4928-8a62-ac3ed4ae85c3 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Acquiring lock "2ed7e467-a6aa-4ed7-8ad8-bfc03ff22820" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2726.304933] env[63279]: DEBUG oslo_concurrency.lockutils [None req-5dc36567-ad3b-4928-8a62-ac3ed4ae85c3 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Lock "2ed7e467-a6aa-4ed7-8ad8-bfc03ff22820" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2726.808443] env[63279]: DEBUG nova.compute.utils [None req-5dc36567-ad3b-4928-8a62-ac3ed4ae85c3 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Using /dev/sd instead of None {{(pid=63279) get_next_device_name 
/opt/stack/nova/nova/compute/utils.py:238}} [ 2727.311124] env[63279]: DEBUG oslo_concurrency.lockutils [None req-5dc36567-ad3b-4928-8a62-ac3ed4ae85c3 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Lock "2ed7e467-a6aa-4ed7-8ad8-bfc03ff22820" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.006s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2728.369364] env[63279]: DEBUG oslo_concurrency.lockutils [None req-5dc36567-ad3b-4928-8a62-ac3ed4ae85c3 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Acquiring lock "2ed7e467-a6aa-4ed7-8ad8-bfc03ff22820" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2728.369792] env[63279]: DEBUG oslo_concurrency.lockutils [None req-5dc36567-ad3b-4928-8a62-ac3ed4ae85c3 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Lock "2ed7e467-a6aa-4ed7-8ad8-bfc03ff22820" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.001s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2728.370209] env[63279]: INFO nova.compute.manager [None req-5dc36567-ad3b-4928-8a62-ac3ed4ae85c3 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] [instance: 2ed7e467-a6aa-4ed7-8ad8-bfc03ff22820] Attaching volume b586b390-8633-4d55-9233-038aecdc55a5 to /dev/sdb [ 2728.403849] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c5b43ff-93f1-4996-a171-4bcdf4b85803 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2728.413553] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b55a3280-3aab-4c49-b337-639c025bd238 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2728.429414] env[63279]: DEBUG nova.virt.block_device [None req-5dc36567-ad3b-4928-8a62-ac3ed4ae85c3 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] [instance: 2ed7e467-a6aa-4ed7-8ad8-bfc03ff22820] Updating existing volume attachment record: 637c1881-4614-4458-b3a2-c15e7fd5c66f {{(pid=63279) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 2732.972575] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-5dc36567-ad3b-4928-8a62-ac3ed4ae85c3 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] [instance: 2ed7e467-a6aa-4ed7-8ad8-bfc03ff22820] Volume attach. 
Driver type: vmdk {{(pid=63279) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 2732.972836] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-5dc36567-ad3b-4928-8a62-ac3ed4ae85c3 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] [instance: 2ed7e467-a6aa-4ed7-8ad8-bfc03ff22820] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-427827', 'volume_id': 'b586b390-8633-4d55-9233-038aecdc55a5', 'name': 'volume-b586b390-8633-4d55-9233-038aecdc55a5', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '2ed7e467-a6aa-4ed7-8ad8-bfc03ff22820', 'attached_at': '', 'detached_at': '', 'volume_id': 'b586b390-8633-4d55-9233-038aecdc55a5', 'serial': 'b586b390-8633-4d55-9233-038aecdc55a5'} {{(pid=63279) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 2732.973740] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ab47bb1-e085-4b14-9bd7-c238b3827999 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2732.989825] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06f27260-c36e-4176-81c6-439a68872256 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2733.013210] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-5dc36567-ad3b-4928-8a62-ac3ed4ae85c3 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] [instance: 2ed7e467-a6aa-4ed7-8ad8-bfc03ff22820] Reconfiguring VM instance instance-0000007c to attach disk [datastore1] volume-b586b390-8633-4d55-9233-038aecdc55a5/volume-b586b390-8633-4d55-9233-038aecdc55a5.vmdk or device None with type thin {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2733.013528] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-aa46a186-9c2b-4080-97e8-302339cb396d {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2733.030944] env[63279]: DEBUG oslo_vmware.api [None req-5dc36567-ad3b-4928-8a62-ac3ed4ae85c3 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Waiting for the task: (returnval){ [ 2733.030944] env[63279]: value = "task-2088436" [ 2733.030944] env[63279]: _type = "Task" [ 2733.030944] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2733.039904] env[63279]: DEBUG oslo_vmware.api [None req-5dc36567-ad3b-4928-8a62-ac3ed4ae85c3 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Task: {'id': task-2088436, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2733.540280] env[63279]: DEBUG oslo_vmware.api [None req-5dc36567-ad3b-4928-8a62-ac3ed4ae85c3 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Task: {'id': task-2088436, 'name': ReconfigVM_Task, 'duration_secs': 0.315605} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2733.541063] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-5dc36567-ad3b-4928-8a62-ac3ed4ae85c3 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] [instance: 2ed7e467-a6aa-4ed7-8ad8-bfc03ff22820] Reconfigured VM instance instance-0000007c to attach disk [datastore1] volume-b586b390-8633-4d55-9233-038aecdc55a5/volume-b586b390-8633-4d55-9233-038aecdc55a5.vmdk or device None with type thin {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2733.545354] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2725346d-2dfd-469c-aed2-f052a39a8b61 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2733.559345] env[63279]: DEBUG oslo_vmware.api [None req-5dc36567-ad3b-4928-8a62-ac3ed4ae85c3 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Waiting for the task: (returnval){ [ 2733.559345] env[63279]: value = "task-2088437" [ 2733.559345] env[63279]: _type = "Task" [ 2733.559345] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2733.569587] env[63279]: DEBUG oslo_vmware.api [None req-5dc36567-ad3b-4928-8a62-ac3ed4ae85c3 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Task: {'id': task-2088437, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2734.072803] env[63279]: DEBUG oslo_vmware.api [None req-5dc36567-ad3b-4928-8a62-ac3ed4ae85c3 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Task: {'id': task-2088437, 'name': ReconfigVM_Task, 'duration_secs': 0.140905} completed successfully. 
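The two ReconfigVM_Task round trips above follow the usual oslo.vmware submit-and-poll pattern: the session issues the SOAP call, gets a task reference back, and polls it until vCenter reports completion (the "progress is N%" lines). A minimal standalone sketch of that pattern; the vCenter endpoint, credentials, vm_ref and config_spec below are placeholders, not values from this run:

# Sketch of the submit-and-poll pattern behind the ReconfigVM_Task calls
# above. Endpoint, credentials, vm_ref and config_spec are placeholders;
# error handling is omitted.
from oslo_vmware import api as vmware_api


def reconfigure_vm(vm_ref, config_spec):
    # Positional arguments: host, username, password, api_retry_count,
    # task_poll_interval (seconds between task polls).
    session = vmware_api.VMwareAPISession(
        'vcenter.example.org', 'user', 'secret', 10, 0.5)
    try:
        # invoke_api() performs the SOAP invocation ("Invoking
        # VirtualMachine.ReconfigVM_Task" in the log) and returns a task
        # managed-object reference.
        task = session.invoke_api(session.vim, 'ReconfigVM_Task',
                                  vm_ref, spec=config_spec)
        # wait_for_task() polls the task every task_poll_interval seconds
        # and raises if vCenter reports the task as failed.
        return session.wait_for_task(task)
    finally:
        session.logout()
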
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2734.073262] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-5dc36567-ad3b-4928-8a62-ac3ed4ae85c3 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] [instance: 2ed7e467-a6aa-4ed7-8ad8-bfc03ff22820] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-427827', 'volume_id': 'b586b390-8633-4d55-9233-038aecdc55a5', 'name': 'volume-b586b390-8633-4d55-9233-038aecdc55a5', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '2ed7e467-a6aa-4ed7-8ad8-bfc03ff22820', 'attached_at': '', 'detached_at': '', 'volume_id': 'b586b390-8633-4d55-9233-038aecdc55a5', 'serial': 'b586b390-8633-4d55-9233-038aecdc55a5'} {{(pid=63279) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 2735.109631] env[63279]: DEBUG nova.objects.instance [None req-5dc36567-ad3b-4928-8a62-ac3ed4ae85c3 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Lazy-loading 'flavor' on Instance uuid 2ed7e467-a6aa-4ed7-8ad8-bfc03ff22820 {{(pid=63279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2735.615911] env[63279]: DEBUG oslo_concurrency.lockutils [None req-5dc36567-ad3b-4928-8a62-ac3ed4ae85c3 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Lock "2ed7e467-a6aa-4ed7-8ad8-bfc03ff22820" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.246s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2735.806701] env[63279]: DEBUG oslo_concurrency.lockutils [None req-e781cb27-ca8d-4b74-bc3b-c985cf494e37 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Acquiring lock "2ed7e467-a6aa-4ed7-8ad8-bfc03ff22820" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2735.806981] env[63279]: DEBUG oslo_concurrency.lockutils [None req-e781cb27-ca8d-4b74-bc3b-c985cf494e37 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Lock "2ed7e467-a6aa-4ed7-8ad8-bfc03ff22820" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2736.310068] env[63279]: INFO nova.compute.manager [None req-e781cb27-ca8d-4b74-bc3b-c985cf494e37 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] [instance: 2ed7e467-a6aa-4ed7-8ad8-bfc03ff22820] Detaching volume b586b390-8633-4d55-9233-038aecdc55a5 [ 2736.338933] env[63279]: INFO nova.virt.block_device [None req-e781cb27-ca8d-4b74-bc3b-c985cf494e37 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] [instance: 2ed7e467-a6aa-4ed7-8ad8-bfc03ff22820] Attempting to driver detach volume b586b390-8633-4d55-9233-038aecdc55a5 from mountpoint /dev/sdb [ 2736.339182] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-e781cb27-ca8d-4b74-bc3b-c985cf494e37 tempest-AttachVolumeNegativeTest-406969380 
tempest-AttachVolumeNegativeTest-406969380-project-member] [instance: 2ed7e467-a6aa-4ed7-8ad8-bfc03ff22820] Volume detach. Driver type: vmdk {{(pid=63279) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 2736.339377] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-e781cb27-ca8d-4b74-bc3b-c985cf494e37 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] [instance: 2ed7e467-a6aa-4ed7-8ad8-bfc03ff22820] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-427827', 'volume_id': 'b586b390-8633-4d55-9233-038aecdc55a5', 'name': 'volume-b586b390-8633-4d55-9233-038aecdc55a5', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '2ed7e467-a6aa-4ed7-8ad8-bfc03ff22820', 'attached_at': '', 'detached_at': '', 'volume_id': 'b586b390-8633-4d55-9233-038aecdc55a5', 'serial': 'b586b390-8633-4d55-9233-038aecdc55a5'} {{(pid=63279) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 2736.340233] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20628e87-cb3f-47ce-b172-3c4841099fe7 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2736.361172] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7014dc2-b921-402b-9d7d-990352abdbfa {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2736.367865] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d60234db-aa1a-4c27-a145-42c9154981a5 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2736.388920] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48287775-0a02-4081-8360-b3430401fc38 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2736.403080] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-e781cb27-ca8d-4b74-bc3b-c985cf494e37 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] The volume has not been displaced from its original location: [datastore1] volume-b586b390-8633-4d55-9233-038aecdc55a5/volume-b586b390-8633-4d55-9233-038aecdc55a5.vmdk. No consolidation needed. 
{{(pid=63279) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 2736.408234] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-e781cb27-ca8d-4b74-bc3b-c985cf494e37 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] [instance: 2ed7e467-a6aa-4ed7-8ad8-bfc03ff22820] Reconfiguring VM instance instance-0000007c to detach disk 2001 {{(pid=63279) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 2736.408503] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-839ddd82-7644-4b6e-87f9-071f511604cc {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2736.426462] env[63279]: DEBUG oslo_vmware.api [None req-e781cb27-ca8d-4b74-bc3b-c985cf494e37 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Waiting for the task: (returnval){ [ 2736.426462] env[63279]: value = "task-2088438" [ 2736.426462] env[63279]: _type = "Task" [ 2736.426462] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2736.434044] env[63279]: DEBUG oslo_vmware.api [None req-e781cb27-ca8d-4b74-bc3b-c985cf494e37 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Task: {'id': task-2088438, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2736.935655] env[63279]: DEBUG oslo_vmware.api [None req-e781cb27-ca8d-4b74-bc3b-c985cf494e37 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Task: {'id': task-2088438, 'name': ReconfigVM_Task, 'duration_secs': 0.216432} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2736.936052] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-e781cb27-ca8d-4b74-bc3b-c985cf494e37 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] [instance: 2ed7e467-a6aa-4ed7-8ad8-bfc03ff22820] Reconfigured VM instance instance-0000007c to detach disk 2001 {{(pid=63279) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 2736.940742] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1c5251a5-c48f-4e6a-b4ad-bf0c1d2546cf {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2736.955782] env[63279]: DEBUG oslo_vmware.api [None req-e781cb27-ca8d-4b74-bc3b-c985cf494e37 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Waiting for the task: (returnval){ [ 2736.955782] env[63279]: value = "task-2088439" [ 2736.955782] env[63279]: _type = "Task" [ 2736.955782] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2736.963505] env[63279]: DEBUG oslo_vmware.api [None req-e781cb27-ca8d-4b74-bc3b-c985cf494e37 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Task: {'id': task-2088439, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2737.465614] env[63279]: DEBUG oslo_vmware.api [None req-e781cb27-ca8d-4b74-bc3b-c985cf494e37 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Task: {'id': task-2088439, 'name': ReconfigVM_Task, 'duration_secs': 0.125181} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2737.465933] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-e781cb27-ca8d-4b74-bc3b-c985cf494e37 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] [instance: 2ed7e467-a6aa-4ed7-8ad8-bfc03ff22820] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-427827', 'volume_id': 'b586b390-8633-4d55-9233-038aecdc55a5', 'name': 'volume-b586b390-8633-4d55-9233-038aecdc55a5', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '2ed7e467-a6aa-4ed7-8ad8-bfc03ff22820', 'attached_at': '', 'detached_at': '', 'volume_id': 'b586b390-8633-4d55-9233-038aecdc55a5', 'serial': 'b586b390-8633-4d55-9233-038aecdc55a5'} {{(pid=63279) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 2738.006698] env[63279]: DEBUG nova.objects.instance [None req-e781cb27-ca8d-4b74-bc3b-c985cf494e37 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Lazy-loading 'flavor' on Instance uuid 2ed7e467-a6aa-4ed7-8ad8-bfc03ff22820 {{(pid=63279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2739.013062] env[63279]: DEBUG oslo_concurrency.lockutils [None req-e781cb27-ca8d-4b74-bc3b-c985cf494e37 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Lock "2ed7e467-a6aa-4ed7-8ad8-bfc03ff22820" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.205s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2740.034860] env[63279]: DEBUG oslo_concurrency.lockutils [None req-499c4cb7-cf1d-4519-bee0-696216781e68 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Acquiring lock "2ed7e467-a6aa-4ed7-8ad8-bfc03ff22820" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2740.035211] env[63279]: DEBUG oslo_concurrency.lockutils [None req-499c4cb7-cf1d-4519-bee0-696216781e68 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Lock "2ed7e467-a6aa-4ed7-8ad8-bfc03ff22820" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2740.035325] env[63279]: DEBUG oslo_concurrency.lockutils [None req-499c4cb7-cf1d-4519-bee0-696216781e68 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Acquiring lock "2ed7e467-a6aa-4ed7-8ad8-bfc03ff22820-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63279) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2740.035516] env[63279]: DEBUG oslo_concurrency.lockutils [None req-499c4cb7-cf1d-4519-bee0-696216781e68 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Lock "2ed7e467-a6aa-4ed7-8ad8-bfc03ff22820-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2740.035823] env[63279]: DEBUG oslo_concurrency.lockutils [None req-499c4cb7-cf1d-4519-bee0-696216781e68 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Lock "2ed7e467-a6aa-4ed7-8ad8-bfc03ff22820-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2740.037863] env[63279]: INFO nova.compute.manager [None req-499c4cb7-cf1d-4519-bee0-696216781e68 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] [instance: 2ed7e467-a6aa-4ed7-8ad8-bfc03ff22820] Terminating instance [ 2740.542015] env[63279]: DEBUG nova.compute.manager [None req-499c4cb7-cf1d-4519-bee0-696216781e68 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] [instance: 2ed7e467-a6aa-4ed7-8ad8-bfc03ff22820] Start destroying the instance on the hypervisor. {{(pid=63279) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2740.542289] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-499c4cb7-cf1d-4519-bee0-696216781e68 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] [instance: 2ed7e467-a6aa-4ed7-8ad8-bfc03ff22820] Destroying instance {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2740.543216] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ddb1fc65-c8f7-4ae8-96fd-58f04083dabc {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2740.552032] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-499c4cb7-cf1d-4519-bee0-696216781e68 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] [instance: 2ed7e467-a6aa-4ed7-8ad8-bfc03ff22820] Powering off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2740.552032] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1763f6d7-8ffc-4f81-9fff-9079c083010c {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2740.557298] env[63279]: DEBUG oslo_vmware.api [None req-499c4cb7-cf1d-4519-bee0-696216781e68 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Waiting for the task: (returnval){ [ 2740.557298] env[63279]: value = "task-2088440" [ 2740.557298] env[63279]: _type = "Task" [ 2740.557298] env[63279]: } to complete. 
{{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2740.570080] env[63279]: DEBUG oslo_vmware.api [None req-499c4cb7-cf1d-4519-bee0-696216781e68 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Task: {'id': task-2088440, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2741.068171] env[63279]: DEBUG oslo_vmware.api [None req-499c4cb7-cf1d-4519-bee0-696216781e68 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Task: {'id': task-2088440, 'name': PowerOffVM_Task, 'duration_secs': 0.160126} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2741.068539] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-499c4cb7-cf1d-4519-bee0-696216781e68 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] [instance: 2ed7e467-a6aa-4ed7-8ad8-bfc03ff22820] Powered off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2741.068605] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-499c4cb7-cf1d-4519-bee0-696216781e68 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] [instance: 2ed7e467-a6aa-4ed7-8ad8-bfc03ff22820] Unregistering the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2741.068806] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4ef8bead-be57-45a3-a2a4-9c17f0bbf2df {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2741.169639] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-499c4cb7-cf1d-4519-bee0-696216781e68 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] [instance: 2ed7e467-a6aa-4ed7-8ad8-bfc03ff22820] Unregistered the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2741.169870] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-499c4cb7-cf1d-4519-bee0-696216781e68 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] [instance: 2ed7e467-a6aa-4ed7-8ad8-bfc03ff22820] Deleting contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2741.170072] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-499c4cb7-cf1d-4519-bee0-696216781e68 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Deleting the datastore file [datastore1] 2ed7e467-a6aa-4ed7-8ad8-bfc03ff22820 {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2741.170342] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1360f457-d2f2-40be-989c-a3f8fb5b1ab2 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2741.176815] env[63279]: DEBUG oslo_vmware.api [None req-499c4cb7-cf1d-4519-bee0-696216781e68 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Waiting for the task: (returnval){ [ 2741.176815] env[63279]: value = "task-2088442" [ 2741.176815] 
env[63279]: _type = "Task" [ 2741.176815] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2741.184313] env[63279]: DEBUG oslo_vmware.api [None req-499c4cb7-cf1d-4519-bee0-696216781e68 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Task: {'id': task-2088442, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2741.686743] env[63279]: DEBUG oslo_vmware.api [None req-499c4cb7-cf1d-4519-bee0-696216781e68 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Task: {'id': task-2088442, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.136199} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2741.687014] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-499c4cb7-cf1d-4519-bee0-696216781e68 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Deleted the datastore file {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2741.687225] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-499c4cb7-cf1d-4519-bee0-696216781e68 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] [instance: 2ed7e467-a6aa-4ed7-8ad8-bfc03ff22820] Deleted contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2741.687409] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-499c4cb7-cf1d-4519-bee0-696216781e68 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] [instance: 2ed7e467-a6aa-4ed7-8ad8-bfc03ff22820] Instance destroyed {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2741.687597] env[63279]: INFO nova.compute.manager [None req-499c4cb7-cf1d-4519-bee0-696216781e68 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] [instance: 2ed7e467-a6aa-4ed7-8ad8-bfc03ff22820] Took 1.15 seconds to destroy the instance on the hypervisor. [ 2741.687843] env[63279]: DEBUG oslo.service.loopingcall [None req-499c4cb7-cf1d-4519-bee0-696216781e68 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2741.688057] env[63279]: DEBUG nova.compute.manager [-] [instance: 2ed7e467-a6aa-4ed7-8ad8-bfc03ff22820] Deallocating network for instance {{(pid=63279) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2741.688171] env[63279]: DEBUG nova.network.neutron [-] [instance: 2ed7e467-a6aa-4ed7-8ad8-bfc03ff22820] deallocate_for_instance() {{(pid=63279) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2742.223308] env[63279]: DEBUG nova.compute.manager [req-b23492f2-6f68-4c56-8692-4fae07f7fd20 req-7a61905a-22ac-4fe1-90f9-036f9b3df29c service nova] [instance: 2ed7e467-a6aa-4ed7-8ad8-bfc03ff22820] Received event network-vif-deleted-f18dd625-8e75-4f9c-8ac2-1e7ea380ffef {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2742.223572] env[63279]: INFO nova.compute.manager [req-b23492f2-6f68-4c56-8692-4fae07f7fd20 req-7a61905a-22ac-4fe1-90f9-036f9b3df29c service nova] [instance: 2ed7e467-a6aa-4ed7-8ad8-bfc03ff22820] Neutron deleted interface f18dd625-8e75-4f9c-8ac2-1e7ea380ffef; detaching it from the instance and deleting it from the info cache [ 2742.223707] env[63279]: DEBUG nova.network.neutron [req-b23492f2-6f68-4c56-8692-4fae07f7fd20 req-7a61905a-22ac-4fe1-90f9-036f9b3df29c service nova] [instance: 2ed7e467-a6aa-4ed7-8ad8-bfc03ff22820] Updating instance_info_cache with network_info: [] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2742.583107] env[63279]: DEBUG nova.network.neutron [-] [instance: 2ed7e467-a6aa-4ed7-8ad8-bfc03ff22820] Updating instance_info_cache with network_info: [] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2742.726580] env[63279]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-1d6d0d24-0366-4f7e-ba1d-7378bf8231d1 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2742.737071] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d22fd224-8385-4e5c-93ee-ae4859fadd55 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2742.761073] env[63279]: DEBUG nova.compute.manager [req-b23492f2-6f68-4c56-8692-4fae07f7fd20 req-7a61905a-22ac-4fe1-90f9-036f9b3df29c service nova] [instance: 2ed7e467-a6aa-4ed7-8ad8-bfc03ff22820] Detach interface failed, port_id=f18dd625-8e75-4f9c-8ac2-1e7ea380ffef, reason: Instance 2ed7e467-a6aa-4ed7-8ad8-bfc03ff22820 could not be found. {{(pid=63279) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11484}} [ 2743.085843] env[63279]: INFO nova.compute.manager [-] [instance: 2ed7e467-a6aa-4ed7-8ad8-bfc03ff22820] Took 1.40 seconds to deallocate network for instance. 
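The "Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return" entries show oslo.service's looping-call machinery being used to retry the network teardown and block until it finishes. A minimal sketch of that retry-and-wait pattern using FixedIntervalLoopingCall (not necessarily the exact looping-call variant Nova uses here); deallocate(), the attempt limit and the 1-second interval are placeholders:

# Sketch of an oslo.service looping-call retry: keep calling a function on
# an interval until it succeeds or the attempt budget is exhausted.
from oslo_service import loopingcall


def deallocate():
    # Placeholder for the operation being retried (e.g. a Neutron call).
    pass


def deallocate_with_retries(max_attempts=3):
    attempts = {'count': 0}

    def _try_once():
        attempts['count'] += 1
        try:
            deallocate()
        except Exception:
            if attempts['count'] >= max_attempts:
                raise          # stop the loop and propagate the failure
            return             # try again on the next tick
        # Success: stop the loop and hand a return value to the waiter.
        raise loopingcall.LoopingCallDone(retvalue=True)

    timer = loopingcall.FixedIntervalLoopingCall(_try_once)
    # start() schedules _try_once every `interval` seconds and returns an
    # event; wait() blocks until LoopingCallDone or an error ends the loop.
    return timer.start(interval=1).wait()
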
[ 2743.591938] env[63279]: DEBUG oslo_concurrency.lockutils [None req-499c4cb7-cf1d-4519-bee0-696216781e68 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2743.592224] env[63279]: DEBUG oslo_concurrency.lockutils [None req-499c4cb7-cf1d-4519-bee0-696216781e68 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2743.592452] env[63279]: DEBUG nova.objects.instance [None req-499c4cb7-cf1d-4519-bee0-696216781e68 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Lazy-loading 'resources' on Instance uuid 2ed7e467-a6aa-4ed7-8ad8-bfc03ff22820 {{(pid=63279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2744.195409] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-613171f1-7b67-472f-9e1f-0066a0700963 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2744.202610] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b63fc961-fb5f-42f4-aeb9-e58e29172bf9 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2744.232382] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72b87a7c-ac34-44f5-8e67-0a0331b642ce {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2744.239645] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da40e533-b309-418c-88a6-158e103a5838 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2744.252756] env[63279]: DEBUG nova.compute.provider_tree [None req-499c4cb7-cf1d-4519-bee0-696216781e68 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Inventory has not changed in ProviderTree for provider: 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2744.756696] env[63279]: DEBUG nova.scheduler.client.report [None req-499c4cb7-cf1d-4519-bee0-696216781e68 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Inventory has not changed for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2745.261755] env[63279]: DEBUG oslo_concurrency.lockutils [None req-499c4cb7-cf1d-4519-bee0-696216781e68 
tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.669s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2745.285095] env[63279]: INFO nova.scheduler.client.report [None req-499c4cb7-cf1d-4519-bee0-696216781e68 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Deleted allocations for instance 2ed7e467-a6aa-4ed7-8ad8-bfc03ff22820 [ 2745.793518] env[63279]: DEBUG oslo_concurrency.lockutils [None req-499c4cb7-cf1d-4519-bee0-696216781e68 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Lock "2ed7e467-a6aa-4ed7-8ad8-bfc03ff22820" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.758s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2747.944088] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2747.944368] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2747.944495] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=63279) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11066}} [ 2747.981039] env[63279]: DEBUG oslo_concurrency.lockutils [None req-ab183c63-0042-4547-9186-444cec95509a tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Acquiring lock "1afa917a-237b-43b2-9863-47f227c94be4" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2747.981279] env[63279]: DEBUG oslo_concurrency.lockutils [None req-ab183c63-0042-4547-9186-444cec95509a tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Lock "1afa917a-237b-43b2-9863-47f227c94be4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2748.483088] env[63279]: DEBUG nova.compute.manager [None req-ab183c63-0042-4547-9186-444cec95509a tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] [instance: 1afa917a-237b-43b2-9863-47f227c94be4] Starting instance... 
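The Acquiring lock / acquired / released triplets that bracket most operations above ("compute_resources", the instance UUID, and the "<uuid>-events" name) are emitted by oslo.concurrency's lockutils wrappers: the synchronized decorator logs the acquire and release around the wrapped call, and the same locks can be taken with the lock() context manager. A minimal sketch with illustrative function names and in-process (non-external) locks:

# Sketch of the oslo.concurrency locking that produces the
# "Acquiring lock ... / acquired ... / released ..." lines above.
# Function names and bodies are illustrative placeholders.
import logging

from oslo_concurrency import lockutils

logging.basicConfig(level=logging.DEBUG)


@lockutils.synchronized('compute_resources')
def update_usage():
    # Runs with the in-process "compute_resources" lock held; the
    # decorator's wrapper logs the acquire before and the release after.
    pass


def terminate_instance(instance_uuid):
    # Context-manager form, one lock per instance plus an "-events" lock.
    with lockutils.lock(instance_uuid):
        with lockutils.lock(instance_uuid + '-events'):
            pass  # clear pending events, then tear the instance down


if __name__ == '__main__':
    update_usage()
    terminate_instance('2ed7e467-a6aa-4ed7-8ad8-bfc03ff22820')
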
{{(pid=63279) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 2749.005191] env[63279]: DEBUG oslo_concurrency.lockutils [None req-ab183c63-0042-4547-9186-444cec95509a tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2749.005475] env[63279]: DEBUG oslo_concurrency.lockutils [None req-ab183c63-0042-4547-9186-444cec95509a tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2749.006977] env[63279]: INFO nova.compute.claims [None req-ab183c63-0042-4547-9186-444cec95509a tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] [instance: 1afa917a-237b-43b2-9863-47f227c94be4] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2750.043058] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-285bf53e-3939-4085-bf43-c4c6a205e8a1 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2750.050707] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e090aa89-0dc5-4fbd-9cf1-cf331d7ae3c6 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2750.079708] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b661870-e0a2-40cf-806b-2deafe6ba485 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2750.086192] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27e12ba1-8639-4ae5-ad81-96e668f1bb6d {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2750.098771] env[63279]: DEBUG nova.compute.provider_tree [None req-ab183c63-0042-4547-9186-444cec95509a tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Inventory has not changed in ProviderTree for provider: 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2750.441784] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2750.602455] env[63279]: DEBUG nova.scheduler.client.report [None req-ab183c63-0042-4547-9186-444cec95509a tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Inventory has not changed for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 
'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2751.109035] env[63279]: DEBUG oslo_concurrency.lockutils [None req-ab183c63-0042-4547-9186-444cec95509a tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.103s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2751.109597] env[63279]: DEBUG nova.compute.manager [None req-ab183c63-0042-4547-9186-444cec95509a tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] [instance: 1afa917a-237b-43b2-9863-47f227c94be4] Start building networks asynchronously for instance. {{(pid=63279) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 2751.614847] env[63279]: DEBUG nova.compute.utils [None req-ab183c63-0042-4547-9186-444cec95509a tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Using /dev/sd instead of None {{(pid=63279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2751.616196] env[63279]: DEBUG nova.compute.manager [None req-ab183c63-0042-4547-9186-444cec95509a tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] [instance: 1afa917a-237b-43b2-9863-47f227c94be4] Allocating IP information in the background. {{(pid=63279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 2751.616369] env[63279]: DEBUG nova.network.neutron [None req-ab183c63-0042-4547-9186-444cec95509a tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] [instance: 1afa917a-237b-43b2-9863-47f227c94be4] allocate_for_instance() {{(pid=63279) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2751.667431] env[63279]: DEBUG nova.policy [None req-ab183c63-0042-4547-9186-444cec95509a tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '233f5d339e7b438e910eb03d33891c16', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '5fa421934ecd4054a70528644a40349e', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63279) authorize /opt/stack/nova/nova/policy.py:192}} [ 2751.934590] env[63279]: DEBUG nova.network.neutron [None req-ab183c63-0042-4547-9186-444cec95509a tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] [instance: 1afa917a-237b-43b2-9863-47f227c94be4] Successfully created port: a738ed39-3cfa-4def-a402-aff0ef6390ab {{(pid=63279) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2752.123071] env[63279]: DEBUG nova.compute.manager [None req-ab183c63-0042-4547-9186-444cec95509a tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] [instance: 1afa917a-237b-43b2-9863-47f227c94be4] Start building block device mappings for instance. 
{{(pid=63279) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 2752.441051] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2752.441051] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Starting heal instance info cache {{(pid=63279) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10447}} [ 2753.131896] env[63279]: DEBUG nova.compute.manager [None req-ab183c63-0042-4547-9186-444cec95509a tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] [instance: 1afa917a-237b-43b2-9863-47f227c94be4] Start spawning the instance on the hypervisor. {{(pid=63279) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 2753.156753] env[63279]: DEBUG nova.virt.hardware [None req-ab183c63-0042-4547-9186-444cec95509a tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-13T17:30:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-13T17:30:01Z,direct_url=,disk_format='vmdk',id=30887889-e45b-4f67-8b3c-16216e594a90,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a935e86eee0a4c38adfe0367d2097a61',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-13T17:30:02Z,virtual_size=,visibility=), allow threads: False {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2753.156993] env[63279]: DEBUG nova.virt.hardware [None req-ab183c63-0042-4547-9186-444cec95509a tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Flavor limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2753.157181] env[63279]: DEBUG nova.virt.hardware [None req-ab183c63-0042-4547-9186-444cec95509a tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Image limits 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2753.157374] env[63279]: DEBUG nova.virt.hardware [None req-ab183c63-0042-4547-9186-444cec95509a tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Flavor pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2753.157526] env[63279]: DEBUG nova.virt.hardware [None req-ab183c63-0042-4547-9186-444cec95509a tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Image pref 0:0:0 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2753.157676] env[63279]: DEBUG nova.virt.hardware [None req-ab183c63-0042-4547-9186-444cec95509a tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Chose sockets=0, cores=0, threads=0; limits 
were sockets=65536, cores=65536, threads=65536 {{(pid=63279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2753.157886] env[63279]: DEBUG nova.virt.hardware [None req-ab183c63-0042-4547-9186-444cec95509a tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 2753.158149] env[63279]: DEBUG nova.virt.hardware [None req-ab183c63-0042-4547-9186-444cec95509a tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 2753.158355] env[63279]: DEBUG nova.virt.hardware [None req-ab183c63-0042-4547-9186-444cec95509a tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Got 1 possible topologies {{(pid=63279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2753.158527] env[63279]: DEBUG nova.virt.hardware [None req-ab183c63-0042-4547-9186-444cec95509a tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2753.158706] env[63279]: DEBUG nova.virt.hardware [None req-ab183c63-0042-4547-9186-444cec95509a tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2753.159573] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ca8c506-242a-4346-8546-4bedc9e994ea {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2753.167617] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9af97f0-bead-434e-99aa-b48b842d5bb3 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2753.446545] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Didn't find any instances for network info cache update. 
{{(pid=63279) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10533}} [ 2753.446740] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2754.441103] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2754.441440] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager.update_available_resource {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2754.944809] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2754.945126] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2754.945306] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2754.945466] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=63279) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2754.946346] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-298a6c22-c70a-4232-93e9-2195abda6efe {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2754.954928] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d63907d6-f9e3-47ee-9018-0f64aca507b9 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2754.970340] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4586d22c-0877-43f5-8186-b170f17b28db {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2754.976955] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e6370d5-67a6-4905-b650-b662d5c08b8e {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2755.005292] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 
None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181101MB free_disk=171GB free_vcpus=48 pci_devices=None {{(pid=63279) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2755.005446] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2755.005642] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2756.029842] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Instance 1afa917a-237b-43b2-9863-47f227c94be4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2756.029842] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Total usable vcpus: 48, total allocated vcpus: 1 {{(pid=63279) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2756.029842] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=704MB phys_disk=200GB used_disk=1GB total_vcpus=48 used_vcpus=1 pci_stats=[] {{(pid=63279) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2756.056252] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d96da60-c960-49cc-bcb7-31fb98fa4b8b {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2756.064477] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33faa74e-c8e2-422f-b4b8-4b0b0fbde274 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2756.094718] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-049cbe6e-c5ab-42bb-aa20-874134939e01 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2756.102190] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2efb3075-eea8-4bcc-a8d2-c6448c2e2804 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2756.115594] env[63279]: DEBUG nova.compute.provider_tree [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Inventory has not changed in ProviderTree for provider: 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2756.619016] env[63279]: DEBUG nova.scheduler.client.report [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Inventory has not changed 
for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2756.890940] env[63279]: DEBUG nova.compute.manager [req-a1308c75-685f-40f6-8ec7-c52751afe92e req-15a5292f-9a3c-4871-a9fd-446fdcd82be8 service nova] [instance: 1afa917a-237b-43b2-9863-47f227c94be4] Received event network-vif-plugged-a738ed39-3cfa-4def-a402-aff0ef6390ab {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2756.891350] env[63279]: DEBUG oslo_concurrency.lockutils [req-a1308c75-685f-40f6-8ec7-c52751afe92e req-15a5292f-9a3c-4871-a9fd-446fdcd82be8 service nova] Acquiring lock "1afa917a-237b-43b2-9863-47f227c94be4-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2756.891706] env[63279]: DEBUG oslo_concurrency.lockutils [req-a1308c75-685f-40f6-8ec7-c52751afe92e req-15a5292f-9a3c-4871-a9fd-446fdcd82be8 service nova] Lock "1afa917a-237b-43b2-9863-47f227c94be4-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2756.892025] env[63279]: DEBUG oslo_concurrency.lockutils [req-a1308c75-685f-40f6-8ec7-c52751afe92e req-15a5292f-9a3c-4871-a9fd-446fdcd82be8 service nova] Lock "1afa917a-237b-43b2-9863-47f227c94be4-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2756.892335] env[63279]: DEBUG nova.compute.manager [req-a1308c75-685f-40f6-8ec7-c52751afe92e req-15a5292f-9a3c-4871-a9fd-446fdcd82be8 service nova] [instance: 1afa917a-237b-43b2-9863-47f227c94be4] No waiting events found dispatching network-vif-plugged-a738ed39-3cfa-4def-a402-aff0ef6390ab {{(pid=63279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 2756.892639] env[63279]: WARNING nova.compute.manager [req-a1308c75-685f-40f6-8ec7-c52751afe92e req-15a5292f-9a3c-4871-a9fd-446fdcd82be8 service nova] [instance: 1afa917a-237b-43b2-9863-47f227c94be4] Received unexpected event network-vif-plugged-a738ed39-3cfa-4def-a402-aff0ef6390ab for instance with vm_state building and task_state spawning. 
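The resource-tracker audit and the "Inventory has not changed" report above are two views of the same data: the hypervisor view (free_ram/free_disk/free_vcpus) and the Placement inventory that the scheduler actually consumes. A minimal sketch, assuming the usual Placement convention that schedulable capacity is (total - reserved) * allocation_ratio; the numbers are copied from the inventory entry above, and min_unit/max_unit/step_size are omitted for brevity:

    # Illustration only, not Nova code: capacity implied by the reported inventory.
    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
    }

    def schedulable_capacity(inv):
        # capacity the scheduler can place against: (total - reserved) * allocation_ratio
        return {rc: (v['total'] - v['reserved']) * v['allocation_ratio']
                for rc, v in inv.items()}

    print(schedulable_capacity(inventory))
    # -> {'VCPU': 192.0, 'MEMORY_MB': 196078.0, 'DISK_GB': 400.0}

Against that capacity the one actively managed instance (1afa917a-237b-43b2-9863-47f227c94be4) holds the allocation {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1} listed a few entries earlier, which is consistent with the final resource view of used_ram=704MB (512 MB reserved plus 192 MB for the instance) and used_vcpus=1.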
[ 2756.977226] env[63279]: DEBUG nova.network.neutron [None req-ab183c63-0042-4547-9186-444cec95509a tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] [instance: 1afa917a-237b-43b2-9863-47f227c94be4] Successfully updated port: a738ed39-3cfa-4def-a402-aff0ef6390ab {{(pid=63279) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2757.124030] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=63279) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2757.124030] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.118s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2757.479686] env[63279]: DEBUG oslo_concurrency.lockutils [None req-ab183c63-0042-4547-9186-444cec95509a tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Acquiring lock "refresh_cache-1afa917a-237b-43b2-9863-47f227c94be4" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2757.479855] env[63279]: DEBUG oslo_concurrency.lockutils [None req-ab183c63-0042-4547-9186-444cec95509a tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Acquired lock "refresh_cache-1afa917a-237b-43b2-9863-47f227c94be4" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2757.480032] env[63279]: DEBUG nova.network.neutron [None req-ab183c63-0042-4547-9186-444cec95509a tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] [instance: 1afa917a-237b-43b2-9863-47f227c94be4] Building network info cache for instance {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2758.019108] env[63279]: DEBUG nova.network.neutron [None req-ab183c63-0042-4547-9186-444cec95509a tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] [instance: 1afa917a-237b-43b2-9863-47f227c94be4] Instance cache missing network info. 
{{(pid=63279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2758.123532] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2758.145793] env[63279]: DEBUG nova.network.neutron [None req-ab183c63-0042-4547-9186-444cec95509a tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] [instance: 1afa917a-237b-43b2-9863-47f227c94be4] Updating instance_info_cache with network_info: [{"id": "a738ed39-3cfa-4def-a402-aff0ef6390ab", "address": "fa:16:3e:bd:74:e6", "network": {"id": "08f1618e-b8a4-4fea-847f-a6d01449f46c", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1512363099-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5fa421934ecd4054a70528644a40349e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e41070eb-3ac1-4ca9-a3d0-fd65893a97de", "external-id": "nsx-vlan-transportzone-596", "segmentation_id": 596, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa738ed39-3c", "ovs_interfaceid": "a738ed39-3cfa-4def-a402-aff0ef6390ab", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2758.441425] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2758.648109] env[63279]: DEBUG oslo_concurrency.lockutils [None req-ab183c63-0042-4547-9186-444cec95509a tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Releasing lock "refresh_cache-1afa917a-237b-43b2-9863-47f227c94be4" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2758.648459] env[63279]: DEBUG nova.compute.manager [None req-ab183c63-0042-4547-9186-444cec95509a tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] [instance: 1afa917a-237b-43b2-9863-47f227c94be4] Instance network_info: |[{"id": "a738ed39-3cfa-4def-a402-aff0ef6390ab", "address": "fa:16:3e:bd:74:e6", "network": {"id": "08f1618e-b8a4-4fea-847f-a6d01449f46c", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1512363099-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5fa421934ecd4054a70528644a40349e", 
"mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e41070eb-3ac1-4ca9-a3d0-fd65893a97de", "external-id": "nsx-vlan-transportzone-596", "segmentation_id": 596, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa738ed39-3c", "ovs_interfaceid": "a738ed39-3cfa-4def-a402-aff0ef6390ab", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 2758.648936] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-ab183c63-0042-4547-9186-444cec95509a tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] [instance: 1afa917a-237b-43b2-9863-47f227c94be4] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:bd:74:e6', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e41070eb-3ac1-4ca9-a3d0-fd65893a97de', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a738ed39-3cfa-4def-a402-aff0ef6390ab', 'vif_model': 'vmxnet3'}] {{(pid=63279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2758.656401] env[63279]: DEBUG oslo.service.loopingcall [None req-ab183c63-0042-4547-9186-444cec95509a tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2758.656617] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1afa917a-237b-43b2-9863-47f227c94be4] Creating VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2758.656844] env[63279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-721ae768-f20c-4656-99c5-54bf7b46818b {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2758.676959] env[63279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2758.676959] env[63279]: value = "task-2088443" [ 2758.676959] env[63279]: _type = "Task" [ 2758.676959] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2758.684401] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2088443, 'name': CreateVM_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2758.921575] env[63279]: DEBUG nova.compute.manager [req-ed2c1b38-0f94-41da-ae01-19f7b6afa34d req-f40b35aa-9e5b-48af-bed7-6891463930ff service nova] [instance: 1afa917a-237b-43b2-9863-47f227c94be4] Received event network-changed-a738ed39-3cfa-4def-a402-aff0ef6390ab {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2758.921870] env[63279]: DEBUG nova.compute.manager [req-ed2c1b38-0f94-41da-ae01-19f7b6afa34d req-f40b35aa-9e5b-48af-bed7-6891463930ff service nova] [instance: 1afa917a-237b-43b2-9863-47f227c94be4] Refreshing instance network info cache due to event network-changed-a738ed39-3cfa-4def-a402-aff0ef6390ab. 
{{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11655}} [ 2758.922179] env[63279]: DEBUG oslo_concurrency.lockutils [req-ed2c1b38-0f94-41da-ae01-19f7b6afa34d req-f40b35aa-9e5b-48af-bed7-6891463930ff service nova] Acquiring lock "refresh_cache-1afa917a-237b-43b2-9863-47f227c94be4" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2758.922327] env[63279]: DEBUG oslo_concurrency.lockutils [req-ed2c1b38-0f94-41da-ae01-19f7b6afa34d req-f40b35aa-9e5b-48af-bed7-6891463930ff service nova] Acquired lock "refresh_cache-1afa917a-237b-43b2-9863-47f227c94be4" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2758.922500] env[63279]: DEBUG nova.network.neutron [req-ed2c1b38-0f94-41da-ae01-19f7b6afa34d req-f40b35aa-9e5b-48af-bed7-6891463930ff service nova] [instance: 1afa917a-237b-43b2-9863-47f227c94be4] Refreshing network info cache for port a738ed39-3cfa-4def-a402-aff0ef6390ab {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2759.186694] env[63279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2088443, 'name': CreateVM_Task, 'duration_secs': 0.307763} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2759.187040] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1afa917a-237b-43b2-9863-47f227c94be4] Created VM on the ESX host {{(pid=63279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2759.187527] env[63279]: DEBUG oslo_concurrency.lockutils [None req-ab183c63-0042-4547-9186-444cec95509a tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2759.187697] env[63279]: DEBUG oslo_concurrency.lockutils [None req-ab183c63-0042-4547-9186-444cec95509a tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2759.188059] env[63279]: DEBUG oslo_concurrency.lockutils [None req-ab183c63-0042-4547-9186-444cec95509a tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2759.188326] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4105da8a-5190-4fee-a415-197f35a95ca7 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2759.192907] env[63279]: DEBUG oslo_vmware.api [None req-ab183c63-0042-4547-9186-444cec95509a tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Waiting for the task: (returnval){ [ 2759.192907] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52a64a1d-49c3-f2ec-c9fb-fba72163865e" [ 2759.192907] env[63279]: _type = "Task" [ 2759.192907] env[63279]: } to complete. 
{{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2759.201745] env[63279]: DEBUG oslo_vmware.api [None req-ab183c63-0042-4547-9186-444cec95509a tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52a64a1d-49c3-f2ec-c9fb-fba72163865e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2759.614722] env[63279]: DEBUG nova.network.neutron [req-ed2c1b38-0f94-41da-ae01-19f7b6afa34d req-f40b35aa-9e5b-48af-bed7-6891463930ff service nova] [instance: 1afa917a-237b-43b2-9863-47f227c94be4] Updated VIF entry in instance network info cache for port a738ed39-3cfa-4def-a402-aff0ef6390ab. {{(pid=63279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2759.615129] env[63279]: DEBUG nova.network.neutron [req-ed2c1b38-0f94-41da-ae01-19f7b6afa34d req-f40b35aa-9e5b-48af-bed7-6891463930ff service nova] [instance: 1afa917a-237b-43b2-9863-47f227c94be4] Updating instance_info_cache with network_info: [{"id": "a738ed39-3cfa-4def-a402-aff0ef6390ab", "address": "fa:16:3e:bd:74:e6", "network": {"id": "08f1618e-b8a4-4fea-847f-a6d01449f46c", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1512363099-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5fa421934ecd4054a70528644a40349e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e41070eb-3ac1-4ca9-a3d0-fd65893a97de", "external-id": "nsx-vlan-transportzone-596", "segmentation_id": 596, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa738ed39-3c", "ovs_interfaceid": "a738ed39-3cfa-4def-a402-aff0ef6390ab", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2759.703315] env[63279]: DEBUG oslo_vmware.api [None req-ab183c63-0042-4547-9186-444cec95509a tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52a64a1d-49c3-f2ec-c9fb-fba72163865e, 'name': SearchDatastore_Task, 'duration_secs': 0.011432} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2759.703619] env[63279]: DEBUG oslo_concurrency.lockutils [None req-ab183c63-0042-4547-9186-444cec95509a tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2759.703854] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-ab183c63-0042-4547-9186-444cec95509a tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] [instance: 1afa917a-237b-43b2-9863-47f227c94be4] Processing image 30887889-e45b-4f67-8b3c-16216e594a90 {{(pid=63279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2759.704154] env[63279]: DEBUG oslo_concurrency.lockutils [None req-ab183c63-0042-4547-9186-444cec95509a tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2759.704284] env[63279]: DEBUG oslo_concurrency.lockutils [None req-ab183c63-0042-4547-9186-444cec95509a tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Acquired lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2759.704471] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-ab183c63-0042-4547-9186-444cec95509a tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2759.704728] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-945d7e8c-c38b-4376-80ce-fc659dd467e0 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2759.712504] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-ab183c63-0042-4547-9186-444cec95509a tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2759.712669] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-ab183c63-0042-4547-9186-444cec95509a tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2759.713389] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6d897490-fc8c-4782-a549-074567fd7491 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2759.718190] env[63279]: DEBUG oslo_vmware.api [None req-ab183c63-0042-4547-9186-444cec95509a tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Waiting for the task: (returnval){ [ 2759.718190] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52547542-9a14-a1d6-3f59-1fa21ad37e62" [ 2759.718190] env[63279]: _type = "Task" [ 2759.718190] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2759.725206] env[63279]: DEBUG oslo_vmware.api [None req-ab183c63-0042-4547-9186-444cec95509a tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52547542-9a14-a1d6-3f59-1fa21ad37e62, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2760.117948] env[63279]: DEBUG oslo_concurrency.lockutils [req-ed2c1b38-0f94-41da-ae01-19f7b6afa34d req-f40b35aa-9e5b-48af-bed7-6891463930ff service nova] Releasing lock "refresh_cache-1afa917a-237b-43b2-9863-47f227c94be4" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2760.228545] env[63279]: DEBUG oslo_vmware.api [None req-ab183c63-0042-4547-9186-444cec95509a tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52547542-9a14-a1d6-3f59-1fa21ad37e62, 'name': SearchDatastore_Task, 'duration_secs': 0.007635} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2760.229380] env[63279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-625bc7b4-53da-4086-b055-153acb4280d8 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2760.234300] env[63279]: DEBUG oslo_vmware.api [None req-ab183c63-0042-4547-9186-444cec95509a tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Waiting for the task: (returnval){ [ 2760.234300] env[63279]: value = "session[527edb51-86b9-2668-51b3-676f2d4ed098]52b0dba8-404a-b316-383e-6d6d765adb8e" [ 2760.234300] env[63279]: _type = "Task" [ 2760.234300] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2760.241401] env[63279]: DEBUG oslo_vmware.api [None req-ab183c63-0042-4547-9186-444cec95509a tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52b0dba8-404a-b316-383e-6d6d765adb8e, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2760.745067] env[63279]: DEBUG oslo_vmware.api [None req-ab183c63-0042-4547-9186-444cec95509a tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Task: {'id': session[527edb51-86b9-2668-51b3-676f2d4ed098]52b0dba8-404a-b316-383e-6d6d765adb8e, 'name': SearchDatastore_Task, 'duration_secs': 0.008852} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2760.745067] env[63279]: DEBUG oslo_concurrency.lockutils [None req-ab183c63-0042-4547-9186-444cec95509a tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Releasing lock "[datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2760.745067] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-ab183c63-0042-4547-9186-444cec95509a tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] 1afa917a-237b-43b2-9863-47f227c94be4/1afa917a-237b-43b2-9863-47f227c94be4.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2760.745067] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-556f62be-d4d6-4455-b6e9-ee96299a8331 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2760.750996] env[63279]: DEBUG oslo_vmware.api [None req-ab183c63-0042-4547-9186-444cec95509a tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Waiting for the task: (returnval){ [ 2760.750996] env[63279]: value = "task-2088444" [ 2760.750996] env[63279]: _type = "Task" [ 2760.750996] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2760.758815] env[63279]: DEBUG oslo_vmware.api [None req-ab183c63-0042-4547-9186-444cec95509a tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Task: {'id': task-2088444, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2761.261065] env[63279]: DEBUG oslo_vmware.api [None req-ab183c63-0042-4547-9186-444cec95509a tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Task: {'id': task-2088444, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.42268} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2761.261065] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-ab183c63-0042-4547-9186-444cec95509a tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/30887889-e45b-4f67-8b3c-16216e594a90/30887889-e45b-4f67-8b3c-16216e594a90.vmdk to [datastore1] 1afa917a-237b-43b2-9863-47f227c94be4/1afa917a-237b-43b2-9863-47f227c94be4.vmdk {{(pid=63279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2761.261065] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-ab183c63-0042-4547-9186-444cec95509a tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] [instance: 1afa917a-237b-43b2-9863-47f227c94be4] Extending root virtual disk to 1048576 {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2761.261594] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-afdf3ec0-7967-4093-b7af-f30ea6a77dd1 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2761.268099] env[63279]: DEBUG oslo_vmware.api [None req-ab183c63-0042-4547-9186-444cec95509a tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Waiting for the task: (returnval){ [ 2761.268099] env[63279]: value = "task-2088445" [ 2761.268099] env[63279]: _type = "Task" [ 2761.268099] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2761.275109] env[63279]: DEBUG oslo_vmware.api [None req-ab183c63-0042-4547-9186-444cec95509a tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Task: {'id': task-2088445, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2761.778157] env[63279]: DEBUG oslo_vmware.api [None req-ab183c63-0042-4547-9186-444cec95509a tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Task: {'id': task-2088445, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.315576} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2761.778436] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-ab183c63-0042-4547-9186-444cec95509a tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] [instance: 1afa917a-237b-43b2-9863-47f227c94be4] Extended root virtual disk {{(pid=63279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2761.779181] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ed81eb7-091c-4f8e-8e79-1c3a49fe2f23 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2761.800411] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-ab183c63-0042-4547-9186-444cec95509a tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] [instance: 1afa917a-237b-43b2-9863-47f227c94be4] Reconfiguring VM instance instance-0000007d to attach disk [datastore1] 1afa917a-237b-43b2-9863-47f227c94be4/1afa917a-237b-43b2-9863-47f227c94be4.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2761.800639] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-dff03026-b67e-4619-94ad-048c0e48e774 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2761.818955] env[63279]: DEBUG oslo_vmware.api [None req-ab183c63-0042-4547-9186-444cec95509a tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Waiting for the task: (returnval){ [ 2761.818955] env[63279]: value = "task-2088446" [ 2761.818955] env[63279]: _type = "Task" [ 2761.818955] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2761.825904] env[63279]: DEBUG oslo_vmware.api [None req-ab183c63-0042-4547-9186-444cec95509a tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Task: {'id': task-2088446, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2762.328204] env[63279]: DEBUG oslo_vmware.api [None req-ab183c63-0042-4547-9186-444cec95509a tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Task: {'id': task-2088446, 'name': ReconfigVM_Task, 'duration_secs': 0.263189} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2762.328574] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-ab183c63-0042-4547-9186-444cec95509a tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] [instance: 1afa917a-237b-43b2-9863-47f227c94be4] Reconfigured VM instance instance-0000007d to attach disk [datastore1] 1afa917a-237b-43b2-9863-47f227c94be4/1afa917a-237b-43b2-9863-47f227c94be4.vmdk or device None with type sparse {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2762.329111] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-811e8c46-5fd6-449c-b40f-6dd3ec138f3a {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2762.334843] env[63279]: DEBUG oslo_vmware.api [None req-ab183c63-0042-4547-9186-444cec95509a tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Waiting for the task: (returnval){ [ 2762.334843] env[63279]: value = "task-2088447" [ 2762.334843] env[63279]: _type = "Task" [ 2762.334843] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2762.343548] env[63279]: DEBUG oslo_vmware.api [None req-ab183c63-0042-4547-9186-444cec95509a tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Task: {'id': task-2088447, 'name': Rename_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2762.844352] env[63279]: DEBUG oslo_vmware.api [None req-ab183c63-0042-4547-9186-444cec95509a tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Task: {'id': task-2088447, 'name': Rename_Task, 'duration_secs': 0.139191} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2762.844634] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-ab183c63-0042-4547-9186-444cec95509a tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] [instance: 1afa917a-237b-43b2-9863-47f227c94be4] Powering on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2762.844873] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-db5076bf-2a52-4de3-ac3b-e9ff1dde8482 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2762.850445] env[63279]: DEBUG oslo_vmware.api [None req-ab183c63-0042-4547-9186-444cec95509a tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Waiting for the task: (returnval){ [ 2762.850445] env[63279]: value = "task-2088448" [ 2762.850445] env[63279]: _type = "Task" [ 2762.850445] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2762.857444] env[63279]: DEBUG oslo_vmware.api [None req-ab183c63-0042-4547-9186-444cec95509a tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Task: {'id': task-2088448, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2763.360416] env[63279]: DEBUG oslo_vmware.api [None req-ab183c63-0042-4547-9186-444cec95509a tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Task: {'id': task-2088448, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2763.860494] env[63279]: DEBUG oslo_vmware.api [None req-ab183c63-0042-4547-9186-444cec95509a tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Task: {'id': task-2088448, 'name': PowerOnVM_Task, 'duration_secs': 0.640377} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2763.860770] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-ab183c63-0042-4547-9186-444cec95509a tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] [instance: 1afa917a-237b-43b2-9863-47f227c94be4] Powered on the VM {{(pid=63279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2763.860978] env[63279]: INFO nova.compute.manager [None req-ab183c63-0042-4547-9186-444cec95509a tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] [instance: 1afa917a-237b-43b2-9863-47f227c94be4] Took 10.73 seconds to spawn the instance on the hypervisor. [ 2763.861212] env[63279]: DEBUG nova.compute.manager [None req-ab183c63-0042-4547-9186-444cec95509a tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] [instance: 1afa917a-237b-43b2-9863-47f227c94be4] Checking state {{(pid=63279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2763.861933] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd8caa33-0e73-42ad-ac62-b36956e45ad4 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2764.379623] env[63279]: INFO nova.compute.manager [None req-ab183c63-0042-4547-9186-444cec95509a tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] [instance: 1afa917a-237b-43b2-9863-47f227c94be4] Took 15.39 seconds to build instance. 
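The spawn above is a chain of vCenter tasks (CreateVM_Task, SearchDatastore_Task, CopyVirtualDisk_Task, ExtendVirtualDisk_Task, ReconfigVM_Task, Rename_Task, PowerOnVM_Task), each driven by the same poll loop that produces the "Waiting for the task ... progress is N% ... completed successfully" lines. Below is a rough sketch of that loop, not the oslo.vmware implementation; get_task_info is a hypothetical stand-in for the PropertyCollector read of the vCenter Task object:

    import time

    def wait_for_task(task, get_task_info, poll_interval=0.5):
        # get_task_info (hypothetical) returns a dict with 'state'
        # ('queued'|'running'|'success'|'error'), optional 'progress',
        # optional 'result', and 'error' on failure.
        while True:
            info = get_task_info(task)
            if info['state'] == 'success':
                return info.get('result')
            if info['state'] == 'error':
                raise RuntimeError(info.get('error', 'task failed'))
            # still queued/running: report progress and poll again
            print(f"Task {task}: progress is {info.get('progress', 0)}%")
            time.sleep(poll_interval)

    # e.g. with a stub that mirrors the PowerOnVM_Task polls above (0%, 66%, done):
    states = iter([{'state': 'running', 'progress': 0},
                   {'state': 'running', 'progress': 66},
                   {'state': 'success'}])
    wait_for_task('task-2088448', lambda _t: next(states), poll_interval=0)

Note that duration_secs is each task's own run time; the "Took 10.73 seconds to spawn" figure also covers the gaps between polls (roughly 0.5 s apart in the timestamps above) and the network-info-cache build that ran before CreateVM_Task was issued.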
[ 2764.881518] env[63279]: DEBUG oslo_concurrency.lockutils [None req-ab183c63-0042-4547-9186-444cec95509a tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Lock "1afa917a-237b-43b2-9863-47f227c94be4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 16.900s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2765.619138] env[63279]: DEBUG nova.compute.manager [req-693562bb-5185-42bd-baca-07e787003722 req-da4cea5d-a811-457e-bb81-fe188f33ef5c service nova] [instance: 1afa917a-237b-43b2-9863-47f227c94be4] Received event network-changed-a738ed39-3cfa-4def-a402-aff0ef6390ab {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2765.619442] env[63279]: DEBUG nova.compute.manager [req-693562bb-5185-42bd-baca-07e787003722 req-da4cea5d-a811-457e-bb81-fe188f33ef5c service nova] [instance: 1afa917a-237b-43b2-9863-47f227c94be4] Refreshing instance network info cache due to event network-changed-a738ed39-3cfa-4def-a402-aff0ef6390ab. {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11655}} [ 2765.619643] env[63279]: DEBUG oslo_concurrency.lockutils [req-693562bb-5185-42bd-baca-07e787003722 req-da4cea5d-a811-457e-bb81-fe188f33ef5c service nova] Acquiring lock "refresh_cache-1afa917a-237b-43b2-9863-47f227c94be4" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2765.619843] env[63279]: DEBUG oslo_concurrency.lockutils [req-693562bb-5185-42bd-baca-07e787003722 req-da4cea5d-a811-457e-bb81-fe188f33ef5c service nova] Acquired lock "refresh_cache-1afa917a-237b-43b2-9863-47f227c94be4" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2765.620071] env[63279]: DEBUG nova.network.neutron [req-693562bb-5185-42bd-baca-07e787003722 req-da4cea5d-a811-457e-bb81-fe188f33ef5c service nova] [instance: 1afa917a-237b-43b2-9863-47f227c94be4] Refreshing network info cache for port a738ed39-3cfa-4def-a402-aff0ef6390ab {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2766.343627] env[63279]: DEBUG nova.network.neutron [req-693562bb-5185-42bd-baca-07e787003722 req-da4cea5d-a811-457e-bb81-fe188f33ef5c service nova] [instance: 1afa917a-237b-43b2-9863-47f227c94be4] Updated VIF entry in instance network info cache for port a738ed39-3cfa-4def-a402-aff0ef6390ab. 
{{(pid=63279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2766.344032] env[63279]: DEBUG nova.network.neutron [req-693562bb-5185-42bd-baca-07e787003722 req-da4cea5d-a811-457e-bb81-fe188f33ef5c service nova] [instance: 1afa917a-237b-43b2-9863-47f227c94be4] Updating instance_info_cache with network_info: [{"id": "a738ed39-3cfa-4def-a402-aff0ef6390ab", "address": "fa:16:3e:bd:74:e6", "network": {"id": "08f1618e-b8a4-4fea-847f-a6d01449f46c", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1512363099-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.190", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5fa421934ecd4054a70528644a40349e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e41070eb-3ac1-4ca9-a3d0-fd65893a97de", "external-id": "nsx-vlan-transportzone-596", "segmentation_id": 596, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa738ed39-3c", "ovs_interfaceid": "a738ed39-3cfa-4def-a402-aff0ef6390ab", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2766.437078] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2766.847418] env[63279]: DEBUG oslo_concurrency.lockutils [req-693562bb-5185-42bd-baca-07e787003722 req-da4cea5d-a811-457e-bb81-fe188f33ef5c service nova] Releasing lock "refresh_cache-1afa917a-237b-43b2-9863-47f227c94be4" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2802.578494] env[63279]: DEBUG oslo_concurrency.lockutils [None req-c5f5de82-9a38-40a5-9614-4ea4cb67d7d3 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Acquiring lock "1afa917a-237b-43b2-9863-47f227c94be4" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2802.578773] env[63279]: DEBUG oslo_concurrency.lockutils [None req-c5f5de82-9a38-40a5-9614-4ea4cb67d7d3 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Lock "1afa917a-237b-43b2-9863-47f227c94be4" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2803.082116] env[63279]: DEBUG nova.compute.utils [None req-c5f5de82-9a38-40a5-9614-4ea4cb67d7d3 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Using /dev/sd instead of None {{(pid=63279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 
2803.585882] env[63279]: DEBUG oslo_concurrency.lockutils [None req-c5f5de82-9a38-40a5-9614-4ea4cb67d7d3 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Lock "1afa917a-237b-43b2-9863-47f227c94be4" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.007s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2804.644796] env[63279]: DEBUG oslo_concurrency.lockutils [None req-c5f5de82-9a38-40a5-9614-4ea4cb67d7d3 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Acquiring lock "1afa917a-237b-43b2-9863-47f227c94be4" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2804.645189] env[63279]: DEBUG oslo_concurrency.lockutils [None req-c5f5de82-9a38-40a5-9614-4ea4cb67d7d3 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Lock "1afa917a-237b-43b2-9863-47f227c94be4" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2804.645343] env[63279]: INFO nova.compute.manager [None req-c5f5de82-9a38-40a5-9614-4ea4cb67d7d3 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] [instance: 1afa917a-237b-43b2-9863-47f227c94be4] Attaching volume ad3012b1-9572-4090-a85d-070f605adaea to /dev/sdb [ 2804.675088] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-771a973c-f8a9-4193-a9dc-7f982c68b670 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2804.682354] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7cf874d9-4dc4-47a9-9313-103b8c0cb467 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2804.694912] env[63279]: DEBUG nova.virt.block_device [None req-c5f5de82-9a38-40a5-9614-4ea4cb67d7d3 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] [instance: 1afa917a-237b-43b2-9863-47f227c94be4] Updating existing volume attachment record: 788ca48f-6528-49b6-ae12-775aa8dc8f7a {{(pid=63279) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 2807.441521] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2807.441892] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=63279) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11066}} [ 2809.236511] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-c5f5de82-9a38-40a5-9614-4ea4cb67d7d3 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] [instance: 1afa917a-237b-43b2-9863-47f227c94be4] Volume attach. 
Driver type: vmdk {{(pid=63279) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 2809.236813] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-c5f5de82-9a38-40a5-9614-4ea4cb67d7d3 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] [instance: 1afa917a-237b-43b2-9863-47f227c94be4] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-427829', 'volume_id': 'ad3012b1-9572-4090-a85d-070f605adaea', 'name': 'volume-ad3012b1-9572-4090-a85d-070f605adaea', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '1afa917a-237b-43b2-9863-47f227c94be4', 'attached_at': '', 'detached_at': '', 'volume_id': 'ad3012b1-9572-4090-a85d-070f605adaea', 'serial': 'ad3012b1-9572-4090-a85d-070f605adaea'} {{(pid=63279) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 2809.237712] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1473c89d-74ba-42c5-bad6-1c315053d407 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2809.253989] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1244ba52-091b-41c6-b120-16bf8940558b {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2809.278234] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-c5f5de82-9a38-40a5-9614-4ea4cb67d7d3 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] [instance: 1afa917a-237b-43b2-9863-47f227c94be4] Reconfiguring VM instance instance-0000007d to attach disk [datastore1] volume-ad3012b1-9572-4090-a85d-070f605adaea/volume-ad3012b1-9572-4090-a85d-070f605adaea.vmdk or device None with type thin {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2809.278458] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-722d6183-1d96-479c-be7d-2380357a7ce0 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2809.295972] env[63279]: DEBUG oslo_vmware.api [None req-c5f5de82-9a38-40a5-9614-4ea4cb67d7d3 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Waiting for the task: (returnval){ [ 2809.295972] env[63279]: value = "task-2088451" [ 2809.295972] env[63279]: _type = "Task" [ 2809.295972] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2809.303468] env[63279]: DEBUG oslo_vmware.api [None req-c5f5de82-9a38-40a5-9614-4ea4cb67d7d3 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Task: {'id': task-2088451, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2809.441328] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2809.805999] env[63279]: DEBUG oslo_vmware.api [None req-c5f5de82-9a38-40a5-9614-4ea4cb67d7d3 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Task: {'id': task-2088451, 'name': ReconfigVM_Task, 'duration_secs': 0.295677} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2809.806316] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-c5f5de82-9a38-40a5-9614-4ea4cb67d7d3 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] [instance: 1afa917a-237b-43b2-9863-47f227c94be4] Reconfigured VM instance instance-0000007d to attach disk [datastore1] volume-ad3012b1-9572-4090-a85d-070f605adaea/volume-ad3012b1-9572-4090-a85d-070f605adaea.vmdk or device None with type thin {{(pid=63279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2809.810897] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-be2aa70a-5566-44e5-8e1e-bb132e2f076a {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2809.825332] env[63279]: DEBUG oslo_vmware.api [None req-c5f5de82-9a38-40a5-9614-4ea4cb67d7d3 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Waiting for the task: (returnval){ [ 2809.825332] env[63279]: value = "task-2088452" [ 2809.825332] env[63279]: _type = "Task" [ 2809.825332] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2809.834295] env[63279]: DEBUG oslo_vmware.api [None req-c5f5de82-9a38-40a5-9614-4ea4cb67d7d3 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Task: {'id': task-2088452, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2810.335571] env[63279]: DEBUG oslo_vmware.api [None req-c5f5de82-9a38-40a5-9614-4ea4cb67d7d3 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Task: {'id': task-2088452, 'name': ReconfigVM_Task, 'duration_secs': 0.145958} completed successfully. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2810.335896] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-c5f5de82-9a38-40a5-9614-4ea4cb67d7d3 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] [instance: 1afa917a-237b-43b2-9863-47f227c94be4] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-427829', 'volume_id': 'ad3012b1-9572-4090-a85d-070f605adaea', 'name': 'volume-ad3012b1-9572-4090-a85d-070f605adaea', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '1afa917a-237b-43b2-9863-47f227c94be4', 'attached_at': '', 'detached_at': '', 'volume_id': 'ad3012b1-9572-4090-a85d-070f605adaea', 'serial': 'ad3012b1-9572-4090-a85d-070f605adaea'} {{(pid=63279) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 2811.371305] env[63279]: DEBUG nova.objects.instance [None req-c5f5de82-9a38-40a5-9614-4ea4cb67d7d3 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Lazy-loading 'flavor' on Instance uuid 1afa917a-237b-43b2-9863-47f227c94be4 {{(pid=63279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2811.878034] env[63279]: DEBUG oslo_concurrency.lockutils [None req-c5f5de82-9a38-40a5-9614-4ea4cb67d7d3 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Lock "1afa917a-237b-43b2-9863-47f227c94be4" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.233s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2812.075960] env[63279]: DEBUG oslo_concurrency.lockutils [None req-820f1f0b-766b-48c2-ab66-ae63ab8529dd tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Acquiring lock "1afa917a-237b-43b2-9863-47f227c94be4" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2812.076250] env[63279]: DEBUG oslo_concurrency.lockutils [None req-820f1f0b-766b-48c2-ab66-ae63ab8529dd tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Lock "1afa917a-237b-43b2-9863-47f227c94be4" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.001s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2812.441311] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2812.579624] env[63279]: INFO nova.compute.manager [None req-820f1f0b-766b-48c2-ab66-ae63ab8529dd tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] [instance: 1afa917a-237b-43b2-9863-47f227c94be4] Detaching volume ad3012b1-9572-4090-a85d-070f605adaea [ 2812.736028] env[63279]: INFO nova.virt.block_device [None req-820f1f0b-766b-48c2-ab66-ae63ab8529dd tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] [instance: 1afa917a-237b-43b2-9863-47f227c94be4] 
Attempting to driver detach volume ad3012b1-9572-4090-a85d-070f605adaea from mountpoint /dev/sdb [ 2812.736304] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-820f1f0b-766b-48c2-ab66-ae63ab8529dd tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] [instance: 1afa917a-237b-43b2-9863-47f227c94be4] Volume detach. Driver type: vmdk {{(pid=63279) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 2812.736506] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-820f1f0b-766b-48c2-ab66-ae63ab8529dd tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] [instance: 1afa917a-237b-43b2-9863-47f227c94be4] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-427829', 'volume_id': 'ad3012b1-9572-4090-a85d-070f605adaea', 'name': 'volume-ad3012b1-9572-4090-a85d-070f605adaea', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '1afa917a-237b-43b2-9863-47f227c94be4', 'attached_at': '', 'detached_at': '', 'volume_id': 'ad3012b1-9572-4090-a85d-070f605adaea', 'serial': 'ad3012b1-9572-4090-a85d-070f605adaea'} {{(pid=63279) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 2812.737419] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d8996f0-dc78-43b3-a18d-f0f02c3b28c9 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2812.758500] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7fa5575-9a25-46b6-95bd-393bdc2c9271 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2812.764877] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59a61a18-5b2a-4cf8-ba6e-7fbec4f4ee0a {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2812.784155] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e79d16b8-4d3c-49ad-beba-da6f447c479e {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2812.799151] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-820f1f0b-766b-48c2-ab66-ae63ab8529dd tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] The volume has not been displaced from its original location: [datastore1] volume-ad3012b1-9572-4090-a85d-070f605adaea/volume-ad3012b1-9572-4090-a85d-070f605adaea.vmdk. No consolidation needed. 
{{(pid=63279) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 2812.804213] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-820f1f0b-766b-48c2-ab66-ae63ab8529dd tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] [instance: 1afa917a-237b-43b2-9863-47f227c94be4] Reconfiguring VM instance instance-0000007d to detach disk 2001 {{(pid=63279) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 2812.804456] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f2919f09-72be-4d91-aa45-8e83fad0f5e8 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2812.821051] env[63279]: DEBUG oslo_vmware.api [None req-820f1f0b-766b-48c2-ab66-ae63ab8529dd tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Waiting for the task: (returnval){ [ 2812.821051] env[63279]: value = "task-2088453" [ 2812.821051] env[63279]: _type = "Task" [ 2812.821051] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2812.828169] env[63279]: DEBUG oslo_vmware.api [None req-820f1f0b-766b-48c2-ab66-ae63ab8529dd tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Task: {'id': task-2088453, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2813.331312] env[63279]: DEBUG oslo_vmware.api [None req-820f1f0b-766b-48c2-ab66-ae63ab8529dd tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Task: {'id': task-2088453, 'name': ReconfigVM_Task, 'duration_secs': 0.200626} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2813.331534] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-820f1f0b-766b-48c2-ab66-ae63ab8529dd tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] [instance: 1afa917a-237b-43b2-9863-47f227c94be4] Reconfigured VM instance instance-0000007d to detach disk 2001 {{(pid=63279) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 2813.336228] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f72faf66-406a-457a-bbfb-1a87d3a95167 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2813.350450] env[63279]: DEBUG oslo_vmware.api [None req-820f1f0b-766b-48c2-ab66-ae63ab8529dd tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Waiting for the task: (returnval){ [ 2813.350450] env[63279]: value = "task-2088454" [ 2813.350450] env[63279]: _type = "Task" [ 2813.350450] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2813.357894] env[63279]: DEBUG oslo_vmware.api [None req-820f1f0b-766b-48c2-ab66-ae63ab8529dd tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Task: {'id': task-2088454, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2813.436821] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2813.440585] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2813.860277] env[63279]: DEBUG oslo_vmware.api [None req-820f1f0b-766b-48c2-ab66-ae63ab8529dd tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Task: {'id': task-2088454, 'name': ReconfigVM_Task, 'duration_secs': 0.132084} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2813.860609] env[63279]: DEBUG nova.virt.vmwareapi.volumeops [None req-820f1f0b-766b-48c2-ab66-ae63ab8529dd tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] [instance: 1afa917a-237b-43b2-9863-47f227c94be4] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-427829', 'volume_id': 'ad3012b1-9572-4090-a85d-070f605adaea', 'name': 'volume-ad3012b1-9572-4090-a85d-070f605adaea', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '1afa917a-237b-43b2-9863-47f227c94be4', 'attached_at': '', 'detached_at': '', 'volume_id': 'ad3012b1-9572-4090-a85d-070f605adaea', 'serial': 'ad3012b1-9572-4090-a85d-070f605adaea'} {{(pid=63279) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 2814.403270] env[63279]: DEBUG nova.objects.instance [None req-820f1f0b-766b-48c2-ab66-ae63ab8529dd tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Lazy-loading 'flavor' on Instance uuid 1afa917a-237b-43b2-9863-47f227c94be4 {{(pid=63279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2814.440591] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2814.440768] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Starting heal instance info cache {{(pid=63279) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10447}} [ 2814.440915] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Rebuilding the list of instances to heal {{(pid=63279) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10451}} [ 2814.981168] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Acquiring lock "refresh_cache-1afa917a-237b-43b2-9863-47f227c94be4" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2814.981388] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Acquired lock 
"refresh_cache-1afa917a-237b-43b2-9863-47f227c94be4" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2814.981482] env[63279]: DEBUG nova.network.neutron [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] [instance: 1afa917a-237b-43b2-9863-47f227c94be4] Forcefully refreshing network info cache for instance {{(pid=63279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}} [ 2814.981646] env[63279]: DEBUG nova.objects.instance [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Lazy-loading 'info_cache' on Instance uuid 1afa917a-237b-43b2-9863-47f227c94be4 {{(pid=63279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2815.411617] env[63279]: DEBUG oslo_concurrency.lockutils [None req-820f1f0b-766b-48c2-ab66-ae63ab8529dd tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Lock "1afa917a-237b-43b2-9863-47f227c94be4" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.335s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2816.354109] env[63279]: DEBUG oslo_concurrency.lockutils [None req-189b5fdc-ed62-4053-8063-235c1f1c2a28 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Acquiring lock "1afa917a-237b-43b2-9863-47f227c94be4" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2816.354501] env[63279]: DEBUG oslo_concurrency.lockutils [None req-189b5fdc-ed62-4053-8063-235c1f1c2a28 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Lock "1afa917a-237b-43b2-9863-47f227c94be4" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2816.354595] env[63279]: DEBUG oslo_concurrency.lockutils [None req-189b5fdc-ed62-4053-8063-235c1f1c2a28 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Acquiring lock "1afa917a-237b-43b2-9863-47f227c94be4-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2816.354787] env[63279]: DEBUG oslo_concurrency.lockutils [None req-189b5fdc-ed62-4053-8063-235c1f1c2a28 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Lock "1afa917a-237b-43b2-9863-47f227c94be4-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2816.354963] env[63279]: DEBUG oslo_concurrency.lockutils [None req-189b5fdc-ed62-4053-8063-235c1f1c2a28 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Lock "1afa917a-237b-43b2-9863-47f227c94be4-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2816.357079] env[63279]: INFO 
nova.compute.manager [None req-189b5fdc-ed62-4053-8063-235c1f1c2a28 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] [instance: 1afa917a-237b-43b2-9863-47f227c94be4] Terminating instance [ 2816.691792] env[63279]: DEBUG nova.network.neutron [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] [instance: 1afa917a-237b-43b2-9863-47f227c94be4] Updating instance_info_cache with network_info: [{"id": "a738ed39-3cfa-4def-a402-aff0ef6390ab", "address": "fa:16:3e:bd:74:e6", "network": {"id": "08f1618e-b8a4-4fea-847f-a6d01449f46c", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1512363099-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.190", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5fa421934ecd4054a70528644a40349e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e41070eb-3ac1-4ca9-a3d0-fd65893a97de", "external-id": "nsx-vlan-transportzone-596", "segmentation_id": 596, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa738ed39-3c", "ovs_interfaceid": "a738ed39-3cfa-4def-a402-aff0ef6390ab", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2816.860730] env[63279]: DEBUG nova.compute.manager [None req-189b5fdc-ed62-4053-8063-235c1f1c2a28 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] [instance: 1afa917a-237b-43b2-9863-47f227c94be4] Start destroying the instance on the hypervisor. 
{{(pid=63279) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2816.860947] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-189b5fdc-ed62-4053-8063-235c1f1c2a28 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] [instance: 1afa917a-237b-43b2-9863-47f227c94be4] Destroying instance {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2816.861834] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4d0b5a5-b6a3-42e6-841c-cea593bb2761 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2816.869613] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-189b5fdc-ed62-4053-8063-235c1f1c2a28 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] [instance: 1afa917a-237b-43b2-9863-47f227c94be4] Powering off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2816.869833] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e9823569-89f4-47cf-a21f-d96e0c581e6e {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2816.875262] env[63279]: DEBUG oslo_vmware.api [None req-189b5fdc-ed62-4053-8063-235c1f1c2a28 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Waiting for the task: (returnval){ [ 2816.875262] env[63279]: value = "task-2088455" [ 2816.875262] env[63279]: _type = "Task" [ 2816.875262] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2816.882864] env[63279]: DEBUG oslo_vmware.api [None req-189b5fdc-ed62-4053-8063-235c1f1c2a28 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Task: {'id': task-2088455, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2817.194751] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Releasing lock "refresh_cache-1afa917a-237b-43b2-9863-47f227c94be4" {{(pid=63279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2817.194950] env[63279]: DEBUG nova.compute.manager [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] [instance: 1afa917a-237b-43b2-9863-47f227c94be4] Updated the network info_cache for instance {{(pid=63279) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10518}} [ 2817.195217] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2817.195387] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager.update_available_resource {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2817.385269] env[63279]: DEBUG oslo_vmware.api [None req-189b5fdc-ed62-4053-8063-235c1f1c2a28 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Task: {'id': task-2088455, 'name': PowerOffVM_Task, 'duration_secs': 0.177617} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2817.385669] env[63279]: DEBUG nova.virt.vmwareapi.vm_util [None req-189b5fdc-ed62-4053-8063-235c1f1c2a28 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] [instance: 1afa917a-237b-43b2-9863-47f227c94be4] Powered off the VM {{(pid=63279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2817.385765] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-189b5fdc-ed62-4053-8063-235c1f1c2a28 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] [instance: 1afa917a-237b-43b2-9863-47f227c94be4] Unregistering the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2817.385993] env[63279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6b36059e-014f-4d39-aef4-8733971d1a83 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2817.480558] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-189b5fdc-ed62-4053-8063-235c1f1c2a28 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] [instance: 1afa917a-237b-43b2-9863-47f227c94be4] Unregistered the VM {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2817.480782] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-189b5fdc-ed62-4053-8063-235c1f1c2a28 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] [instance: 1afa917a-237b-43b2-9863-47f227c94be4] Deleting contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2817.480939] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-189b5fdc-ed62-4053-8063-235c1f1c2a28 
tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Deleting the datastore file [datastore1] 1afa917a-237b-43b2-9863-47f227c94be4 {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2817.481252] env[63279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a01a5c6f-c64c-44b6-9013-88b1c7f14d94 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2817.488313] env[63279]: DEBUG oslo_vmware.api [None req-189b5fdc-ed62-4053-8063-235c1f1c2a28 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Waiting for the task: (returnval){ [ 2817.488313] env[63279]: value = "task-2088457" [ 2817.488313] env[63279]: _type = "Task" [ 2817.488313] env[63279]: } to complete. {{(pid=63279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2817.495677] env[63279]: DEBUG oslo_vmware.api [None req-189b5fdc-ed62-4053-8063-235c1f1c2a28 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Task: {'id': task-2088457, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2817.700152] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2817.700408] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2817.700586] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2817.700750] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=63279) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2817.701664] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf24d1b7-3d2a-4231-a7c8-ed1449829c8f {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2817.709880] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0454935-adca-4e14-b20c-d16cdd03dce3 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2817.723508] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2140038-8a8f-4c93-8e91-e9dd90ee824e {{(pid=63279) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2817.729619] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-104b31b8-74d5-4f94-8201-7d0ee4e31e35 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2817.758488] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181107MB free_disk=171GB free_vcpus=48 pci_devices=None {{(pid=63279) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2817.758580] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2817.758781] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2817.998087] env[63279]: DEBUG oslo_vmware.api [None req-189b5fdc-ed62-4053-8063-235c1f1c2a28 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Task: {'id': task-2088457, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.141803} completed successfully. {{(pid=63279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2817.998315] env[63279]: DEBUG nova.virt.vmwareapi.ds_util [None req-189b5fdc-ed62-4053-8063-235c1f1c2a28 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Deleted the datastore file {{(pid=63279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2817.998514] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-189b5fdc-ed62-4053-8063-235c1f1c2a28 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] [instance: 1afa917a-237b-43b2-9863-47f227c94be4] Deleted contents of the VM from datastore datastore1 {{(pid=63279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2817.998697] env[63279]: DEBUG nova.virt.vmwareapi.vmops [None req-189b5fdc-ed62-4053-8063-235c1f1c2a28 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] [instance: 1afa917a-237b-43b2-9863-47f227c94be4] Instance destroyed {{(pid=63279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2817.998882] env[63279]: INFO nova.compute.manager [None req-189b5fdc-ed62-4053-8063-235c1f1c2a28 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] [instance: 1afa917a-237b-43b2-9863-47f227c94be4] Took 1.14 seconds to destroy the instance on the hypervisor. [ 2817.999143] env[63279]: DEBUG oslo.service.loopingcall [None req-189b5fdc-ed62-4053-8063-235c1f1c2a28 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2817.999336] env[63279]: DEBUG nova.compute.manager [-] [instance: 1afa917a-237b-43b2-9863-47f227c94be4] Deallocating network for instance {{(pid=63279) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2817.999430] env[63279]: DEBUG nova.network.neutron [-] [instance: 1afa917a-237b-43b2-9863-47f227c94be4] deallocate_for_instance() {{(pid=63279) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2818.470325] env[63279]: DEBUG nova.compute.manager [req-b0a775a3-0f73-4ff8-b79e-0d4a76b68de2 req-bcd7918b-5c13-4b2f-811b-31be2935c17c service nova] [instance: 1afa917a-237b-43b2-9863-47f227c94be4] Received event network-vif-deleted-a738ed39-3cfa-4def-a402-aff0ef6390ab {{(pid=63279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11650}} [ 2818.470325] env[63279]: INFO nova.compute.manager [req-b0a775a3-0f73-4ff8-b79e-0d4a76b68de2 req-bcd7918b-5c13-4b2f-811b-31be2935c17c service nova] [instance: 1afa917a-237b-43b2-9863-47f227c94be4] Neutron deleted interface a738ed39-3cfa-4def-a402-aff0ef6390ab; detaching it from the instance and deleting it from the info cache [ 2818.470325] env[63279]: DEBUG nova.network.neutron [req-b0a775a3-0f73-4ff8-b79e-0d4a76b68de2 req-bcd7918b-5c13-4b2f-811b-31be2935c17c service nova] [instance: 1afa917a-237b-43b2-9863-47f227c94be4] Updating instance_info_cache with network_info: [] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2818.782978] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Instance 1afa917a-237b-43b2-9863-47f227c94be4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2818.783209] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Total usable vcpus: 48, total allocated vcpus: 1 {{(pid=63279) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2818.783356] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=704MB phys_disk=200GB used_disk=1GB total_vcpus=48 used_vcpus=1 pci_stats=[] {{(pid=63279) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2818.808209] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6aa0a647-478e-4ae1-b50e-a21e730105fb {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2818.816064] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c92db44-c6da-43ab-997c-b764e44587dc {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2818.845354] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a14216fd-4532-40a3-b87c-73bedf0435b1 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2818.852986] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d08be30b-b9b5-46fe-8631-2e881e1dc2cd {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2818.866408] env[63279]: DEBUG nova.compute.provider_tree [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Inventory has not changed in ProviderTree for provider: 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2818.949971] env[63279]: DEBUG nova.network.neutron [-] [instance: 1afa917a-237b-43b2-9863-47f227c94be4] Updating instance_info_cache with network_info: [] {{(pid=63279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2818.973416] env[63279]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f76dc7c2-4315-437e-aa41-9f57dde4cdf7 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2818.983824] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e048bc0-875a-4d9c-98a4-1f30a7dfe9d1 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2819.008192] env[63279]: DEBUG nova.compute.manager [req-b0a775a3-0f73-4ff8-b79e-0d4a76b68de2 req-bcd7918b-5c13-4b2f-811b-31be2935c17c service nova] [instance: 1afa917a-237b-43b2-9863-47f227c94be4] Detach interface failed, port_id=a738ed39-3cfa-4def-a402-aff0ef6390ab, reason: Instance 1afa917a-237b-43b2-9863-47f227c94be4 could not be found. 
{{(pid=63279) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11484}} [ 2819.369622] env[63279]: DEBUG nova.scheduler.client.report [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Inventory has not changed for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2819.452354] env[63279]: INFO nova.compute.manager [-] [instance: 1afa917a-237b-43b2-9863-47f227c94be4] Took 1.45 seconds to deallocate network for instance. [ 2819.874119] env[63279]: DEBUG nova.compute.resource_tracker [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=63279) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2819.874472] env[63279]: DEBUG oslo_concurrency.lockutils [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.115s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2819.958751] env[63279]: DEBUG oslo_concurrency.lockutils [None req-189b5fdc-ed62-4053-8063-235c1f1c2a28 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2819.959038] env[63279]: DEBUG oslo_concurrency.lockutils [None req-189b5fdc-ed62-4053-8063-235c1f1c2a28 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2819.959275] env[63279]: DEBUG nova.objects.instance [None req-189b5fdc-ed62-4053-8063-235c1f1c2a28 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Lazy-loading 'resources' on Instance uuid 1afa917a-237b-43b2-9863-47f227c94be4 {{(pid=63279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2820.491483] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e742d8c9-9283-4616-aaa0-5b3fca05f7db {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2820.499043] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c91340cf-404b-4c70-a198-9c46872adb82 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2820.527949] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63533167-734b-45c4-870c-831465c2615e {{(pid=63279) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2820.534705] env[63279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e65fde5-cdd6-4c57-9dc1-53538f5df184 {{(pid=63279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2820.548800] env[63279]: DEBUG nova.compute.provider_tree [None req-189b5fdc-ed62-4053-8063-235c1f1c2a28 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Inventory has not changed in ProviderTree for provider: 0ba7c625-a0fc-4d3c-b804-196d00f00137 {{(pid=63279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2821.052455] env[63279]: DEBUG nova.scheduler.client.report [None req-189b5fdc-ed62-4053-8063-235c1f1c2a28 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Inventory has not changed for provider 0ba7c625-a0fc-4d3c-b804-196d00f00137 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2821.119782] env[63279]: DEBUG oslo_service.periodic_task [None req-b3939871-9966-4d54-ae3e-7f1abb68d881 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=63279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2821.557457] env[63279]: DEBUG oslo_concurrency.lockutils [None req-189b5fdc-ed62-4053-8063-235c1f1c2a28 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.598s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2821.582309] env[63279]: INFO nova.scheduler.client.report [None req-189b5fdc-ed62-4053-8063-235c1f1c2a28 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Deleted allocations for instance 1afa917a-237b-43b2-9863-47f227c94be4 [ 2822.089808] env[63279]: DEBUG oslo_concurrency.lockutils [None req-189b5fdc-ed62-4053-8063-235c1f1c2a28 tempest-AttachVolumeNegativeTest-406969380 tempest-AttachVolumeNegativeTest-406969380-project-member] Lock "1afa917a-237b-43b2-9863-47f227c94be4" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.735s {{(pid=63279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
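
The per-instance serialization visible throughout this trace (the "Acquiring lock ... / acquired ... / released ..." lines around do_reserve, do_attach_volume, do_detach_volume and do_terminate_instance) is produced by oslo.concurrency's synchronized decorator. Below is a minimal, self-contained sketch of that pattern, not Nova source code: the instance UUID and function names are reused from the log purely as illustrative lock names, and the only assumed dependency is the oslo.concurrency library itself.

```python
# Minimal sketch (assumption: illustrative only, not Nova's implementation) of
# the oslo.concurrency locking pattern that emits the "Acquiring lock ... /
# acquired ... / released ..." DEBUG lines seen in the log above.
import logging

from oslo_concurrency import lockutils

# Surface lockutils' DEBUG messages (waited/held timings) on stderr.
logging.basicConfig(level=logging.DEBUG)

# Lock name taken from the log; any functions decorated with the same name
# are serialized against each other within this process.
INSTANCE_UUID = "1afa917a-237b-43b2-9863-47f227c94be4"


@lockutils.synchronized(INSTANCE_UUID)
def do_reserve():
    # Runs while holding the per-instance lock; lockutils logs how long the
    # caller waited for the lock and how long it was held.
    return "/dev/sdb"


@lockutils.synchronized(INSTANCE_UUID)
def do_attach_volume():
    # Serialized against do_reserve() and any other function using the same
    # lock name, which is why the log shows these steps acquiring and
    # releasing the instance lock one after another.
    pass


if __name__ == "__main__":
    do_reserve()
    do_attach_volume()
```

By default these are in-process fair-share locks; lockutils also supports external (file-based) locks across processes, but nothing in this log segment depends on that.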